Detailed changes
@@ -13,7 +13,7 @@ dependencies = [
"gpui2",
"language",
"project",
- "settings2",
+ "settings",
"smallvec",
"theme2",
"ui2",
@@ -317,7 +317,7 @@ dependencies = [
"isahc",
"language",
"log",
- "menu2",
+ "menu",
"multi_buffer",
"ordered-float 2.10.0",
"parking_lot 0.11.2",
@@ -329,7 +329,7 @@ dependencies = [
"semantic_index",
"serde",
"serde_json",
- "settings2",
+ "settings",
"smol",
"theme2",
"tiktoken-rs",
@@ -658,19 +658,6 @@ dependencies = [
[[package]]
name = "audio"
version = "0.1.0"
-dependencies = [
- "anyhow",
- "collections",
- "gpui",
- "log",
- "parking_lot 0.11.2",
- "rodio",
- "util",
-]
-
-[[package]]
-name = "audio2"
-version = "0.1.0"
dependencies = [
"anyhow",
"collections",
@@ -693,12 +680,12 @@ dependencies = [
"isahc",
"lazy_static",
"log",
- "menu2",
+ "menu",
"project",
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smol",
"tempdir",
"theme2",
@@ -1029,7 +1016,7 @@ dependencies = [
"outline",
"project",
"search",
- "settings2",
+ "settings",
"theme2",
"ui2",
"workspace",
@@ -1119,7 +1106,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
- "audio2",
+ "audio",
"client",
"collections",
"fs",
@@ -1136,7 +1123,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smallvec",
"util",
]
@@ -1225,7 +1212,7 @@ dependencies = [
"schemars",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"smallvec",
"smol",
"sum_tree",
@@ -1398,7 +1385,7 @@ dependencies = [
"schemars",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"smol",
"sum_tree",
"sysinfo",
@@ -1459,79 +1446,6 @@ dependencies = [
[[package]]
name = "collab"
-version = "0.32.0"
-dependencies = [
- "anyhow",
- "async-trait",
- "async-tungstenite",
- "audio",
- "axum",
- "axum-extra",
- "base64 0.13.1",
- "call",
- "channel",
- "clap 3.2.25",
- "client",
- "clock",
- "collab_ui",
- "collections",
- "ctor",
- "dashmap",
- "editor",
- "env_logger",
- "envy",
- "fs",
- "futures 0.3.28",
- "git",
- "gpui",
- "hyper",
- "indoc",
- "language",
- "lazy_static",
- "lipsum",
- "live_kit_client",
- "live_kit_server",
- "log",
- "lsp",
- "nanoid",
- "node_runtime",
- "notifications",
- "parking_lot 0.11.2",
- "pretty_assertions",
- "project",
- "prometheus",
- "prost 0.8.0",
- "rand 0.8.5",
- "reqwest",
- "rpc",
- "scrypt",
- "sea-orm",
- "serde",
- "serde_derive",
- "serde_json",
- "settings",
- "sha-1 0.9.8",
- "smallvec",
- "sqlx",
- "text",
- "theme",
- "time",
- "tokio",
- "tokio-tungstenite",
- "toml 0.5.11",
- "tonic",
- "tower",
- "tracing",
- "tracing-log",
- "tracing-subscriber",
- "unindent",
- "util",
- "uuid 1.4.1",
- "workspace",
-]
-
-[[package]]
-name = "collab2"
version = "0.28.0"
dependencies = [
"anyhow",
@@ -1582,7 +1496,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"sha-1 0.9.8",
"smallvec",
"sqlx",
@@ -1624,7 +1538,7 @@ dependencies = [
"language",
"lazy_static",
"log",
- "menu2",
+ "menu",
"notifications",
"picker",
"postage",
@@ -1636,7 +1550,7 @@ dependencies = [
"schemars",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"smallvec",
"theme2",
"theme_selector",
@@ -1691,12 +1605,12 @@ dependencies = [
"go_to_line",
"gpui2",
"language",
- "menu2",
+ "menu",
"picker",
"project",
"serde",
"serde_json",
- "settings2",
+ "settings",
"theme2",
"ui2",
"util",
@@ -1797,7 +1711,7 @@ dependencies = [
"rpc",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"smol",
"theme2",
"ui2",
@@ -1815,7 +1729,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"language",
- "settings2",
+ "settings",
"smol",
"theme2",
"util",
@@ -2325,7 +2239,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smallvec",
"theme2",
"ui2",
@@ -2502,7 +2416,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smallvec",
"smol",
"snippet",
@@ -2711,14 +2625,14 @@ dependencies = [
"language",
"lazy_static",
"log",
- "menu2",
+ "menu",
"postage",
"project",
"regex",
"search",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"smallvec",
"smol",
"sysinfo",
@@ -2741,13 +2655,13 @@ dependencies = [
"fuzzy",
"gpui2",
"language",
- "menu2",
+ "menu",
"picker",
"postage",
"project",
"serde",
"serde_json",
- "settings2",
+ "settings",
"text2",
"theme2",
"ui2",
@@ -2914,7 +2828,7 @@ dependencies = [
"log",
"parking_lot 0.11.2",
"regex",
- "rope2",
+ "rope",
"serde",
"serde_derive",
"serde_json",
@@ -3236,10 +3150,10 @@ version = "0.1.0"
dependencies = [
"editor",
"gpui2",
- "menu2",
+ "menu",
"postage",
"serde",
- "settings2",
+ "settings",
"text2",
"theme2",
"ui2",
@@ -3937,7 +3851,7 @@ dependencies = [
"log",
"schemars",
"serde",
- "settings2",
+ "settings",
"shellexpand",
"util",
"workspace",
@@ -4041,7 +3955,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"similar",
"smallvec",
"smol",
@@ -4075,7 +3989,7 @@ dependencies = [
"language",
"picker",
"project",
- "settings2",
+ "settings",
"theme2",
"ui2",
"util",
@@ -4097,7 +4011,7 @@ dependencies = [
"lsp",
"project",
"serde",
- "settings2",
+ "settings",
"theme2",
"tree-sitter",
"ui2",
@@ -4517,13 +4431,6 @@ dependencies = [
[[package]]
name = "menu"
version = "0.1.0"
-dependencies = [
- "gpui",
-]
-
-[[package]]
-name = "menu2"
-version = "0.1.0"
dependencies = [
"gpui2",
"serde",
@@ -4712,7 +4619,7 @@ dependencies = [
"schemars",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"smallvec",
"smol",
"snippet",
@@ -4889,7 +4796,7 @@ dependencies = [
"feature_flags",
"gpui2",
"rpc",
- "settings2",
+ "settings",
"sum_tree",
"text2",
"time",
@@ -5293,7 +5200,7 @@ dependencies = [
"ordered-float 2.10.0",
"picker",
"postage",
- "settings2",
+ "settings",
"smol",
"text2",
"theme2",
@@ -5513,10 +5420,10 @@ dependencies = [
"editor",
"env_logger",
"gpui2",
- "menu2",
+ "menu",
"parking_lot 0.11.2",
"serde_json",
- "settings2",
+ "settings",
"theme2",
"ui2",
"util",
@@ -5832,13 +5739,13 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"sha2 0.10.7",
"similar",
"smol",
"sum_tree",
"tempdir",
- "terminal2",
+ "terminal",
"text2",
"thiserror",
"toml 0.5.11",
@@ -5858,7 +5765,7 @@ dependencies = [
"futures 0.3.28",
"gpui2",
"language",
- "menu2",
+ "menu",
"postage",
"pretty_assertions",
"project",
@@ -5867,7 +5774,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smallvec",
"theme2",
"ui2",
@@ -5891,7 +5798,7 @@ dependencies = [
"picker",
"postage",
"project",
- "settings2",
+ "settings",
"smol",
"text2",
"theme2",
@@ -6240,7 +6147,7 @@ dependencies = [
"ordered-float 2.10.0",
"picker",
"postage",
- "settings2",
+ "settings",
"smol",
"text2",
"theme2",
@@ -6520,20 +6427,6 @@ dependencies = [
[[package]]
name = "rope"
version = "0.1.0"
-dependencies = [
- "arrayvec 0.7.4",
- "bromberg_sl2",
- "gpui",
- "log",
- "rand 0.8.5",
- "smallvec",
- "sum_tree",
- "util",
-]
-
-[[package]]
-name = "rope2"
-version = "0.1.0"
dependencies = [
"arrayvec 0.7.4",
"bromberg_sl2",
@@ -7030,14 +6923,14 @@ dependencies = [
"gpui2",
"language",
"log",
- "menu2",
+ "menu",
"postage",
"project",
"semantic_index",
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smallvec",
"smol",
"theme2",
@@ -7101,7 +6994,7 @@ dependencies = [
"schemars",
"serde",
"serde_json",
- "settings2",
+ "settings",
"sha1",
"smol",
"tempdir",
@@ -7244,35 +7137,6 @@ dependencies = [
[[package]]
name = "settings"
version = "0.1.0"
-dependencies = [
- "anyhow",
- "collections",
- "feature_flags",
- "fs",
- "futures 0.3.28",
- "gpui",
- "indoc",
- "lazy_static",
- "postage",
- "pretty_assertions",
- "rust-embed",
- "schemars",
- "serde",
- "serde_derive",
- "serde_json",
- "serde_json_lenient",
- "smallvec",
- "sqlez",
- "toml 0.5.11",
- "tree-sitter",
- "tree-sitter-json 0.19.0",
- "unindent",
- "util",
-]
-
-[[package]]
-name = "settings2"
-version = "0.1.0"
dependencies = [
"anyhow",
"collections",
@@ -7904,11 +7768,11 @@ dependencies = [
"itertools 0.11.0",
"language",
"log",
- "menu2",
+ "menu",
"picker",
"rust-embed",
"serde",
- "settings2",
+ "settings",
"simplelog",
"smallvec",
"story",
@@ -8196,35 +8060,6 @@ dependencies = [
[[package]]
name = "terminal"
version = "0.1.0"
-dependencies = [
- "alacritty_terminal",
- "anyhow",
- "db",
- "dirs 4.0.0",
- "futures 0.3.28",
- "gpui",
- "itertools 0.10.5",
- "lazy_static",
- "libc",
- "mio-extras",
- "ordered-float 2.10.0",
- "procinfo",
- "rand 0.8.5",
- "schemars",
- "serde",
- "serde_derive",
- "settings",
- "shellexpand",
- "smallvec",
- "smol",
- "theme",
- "thiserror",
- "util",
-]
-
-[[package]]
-name = "terminal2"
-version = "0.1.0"
dependencies = [
"alacritty_terminal",
"anyhow",
@@ -8242,7 +8077,7 @@ dependencies = [
"schemars",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"shellexpand",
"smallvec",
"smol",
@@ -8273,11 +8108,11 @@ dependencies = [
"rand 0.8.5",
"serde",
"serde_derive",
- "settings2",
+ "settings",
"shellexpand",
"smallvec",
"smol",
- "terminal2",
+ "terminal",
"theme2",
"thiserror",
"ui2",
@@ -8325,7 +8160,7 @@ dependencies = [
"postage",
"rand 0.8.5",
"regex",
- "rope2",
+ "rope",
"smallvec",
"sum_tree",
"util",
@@ -8370,7 +8205,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"story",
"toml 0.5.11",
"util",
@@ -8413,7 +8248,7 @@ dependencies = [
"parking_lot 0.11.2",
"picker",
"postage",
- "settings2",
+ "settings",
"smol",
"theme2",
"ui2",
@@ -9227,10 +9062,10 @@ dependencies = [
"chrono",
"gpui2",
"itertools 0.11.0",
- "menu2",
+ "menu",
"rand 0.8.5",
"serde",
- "settings2",
+ "settings",
"smallvec",
"story",
"strum",
@@ -9516,7 +9351,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"theme2",
"tokio",
"ui2",
@@ -9929,7 +9764,7 @@ dependencies = [
"project",
"schemars",
"serde",
- "settings2",
+ "settings",
"theme2",
"theme_selector",
"ui2",
@@ -10206,9 +10041,9 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"smallvec",
- "terminal2",
+ "terminal",
"theme2",
"ui2",
"util",
@@ -10305,7 +10140,7 @@ dependencies = [
"async-recursion 0.3.2",
"async-tar",
"async-trait",
- "audio2",
+ "audio",
"auto_update",
"backtrace",
"breadcrumbs",
@@ -10345,7 +10180,7 @@ dependencies = [
"libc",
"log",
"lsp",
- "menu2",
+ "menu",
"node_runtime",
"notifications",
"num_cpus",
@@ -10359,7 +10194,7 @@ dependencies = [
"rand 0.8.5",
"recent_projects",
"regex",
- "rope2",
+ "rope",
"rpc",
"rsa 0.4.0",
"rust-embed",
@@ -10369,7 +10204,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
- "settings2",
+ "settings",
"shellexpand",
"simplelog",
"smallvec",
@@ -4,7 +4,6 @@ members = [
"crates/ai",
"crates/assistant",
"crates/audio",
- "crates/audio2",
"crates/auto_update",
"crates/breadcrumbs",
"crates/call",
@@ -13,7 +12,6 @@ members = [
"crates/client",
"crates/clock",
"crates/collab",
- "crates/collab2",
"crates/collab_ui",
"crates/collections",
"crates/command_palette",
@@ -50,7 +48,6 @@ members = [
"crates/lsp",
"crates/media",
"crates/menu",
- "crates/menu2",
"crates/multi_buffer",
"crates/node_runtime",
"crates/notifications",
@@ -69,7 +66,6 @@ members = [
"crates/search",
"crates/semantic_index",
"crates/settings",
- "crates/settings2",
"crates/snippet",
"crates/sqlez",
"crates/sqlez_macros",
@@ -77,7 +73,6 @@ members = [
"crates/storybook2",
"crates/sum_tree",
"crates/terminal",
- "crates/terminal2",
"crates/terminal_view",
"crates/text",
"crates/theme",
@@ -14,7 +14,7 @@ editor = { path = "../editor" }
language = { path = "../language" }
gpui = { path = "../gpui2", package = "gpui2" }
project = { path = "../project" }
-settings = { path = "../settings2", package = "settings2" }
+settings = { path = "../settings" }
ui = { path = "../ui2", package = "ui2" }
util = { path = "../util" }
theme = { path = "../theme2", package = "theme2" }
@@ -16,12 +16,12 @@ editor = { path = "../editor" }
fs = { path = "../fs" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
multi_buffer = { path = "../multi_buffer" }
project = { path = "../project" }
search = { path = "../search" }
semantic_index = { path = "../semantic_index" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
@@ -9,15 +9,14 @@ path = "src/audio.rs"
doctest = false
[dependencies]
-gpui = { path = "../gpui" }
+gpui = { package = "gpui2", path = "../gpui2" }
collections = { path = "../collections" }
util = { path = "../util" }
+
rodio ={version = "0.17.1", default-features=false, features = ["wav"]}
log.workspace = true
-
+futures.workspace = true
anyhow.workspace = true
parking_lot.workspace = true
-
-[dev-dependencies]
@@ -60,7 +60,7 @@ impl Audio {
return;
}
- cx.update_global::<Self, _, _>(|this, cx| {
+ cx.update_global::<Self, _>(|this, cx| {
let output_handle = this.ensure_output_exists()?;
let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
output_handle.play_raw(source).log_err()?;
@@ -73,7 +73,7 @@ impl Audio {
return;
}
- cx.update_global::<Self, _, _>(|this, _| {
+ cx.update_global::<Self, _>(|this, _| {
this._output_stream.take();
this.output_handle.take();
});
@@ -1,24 +0,0 @@
-[package]
-name = "audio2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/audio2.rs"
-doctest = false
-
-[dependencies]
-gpui = { package = "gpui2", path = "../gpui2" }
-collections = { path = "../collections" }
-util = { path = "../util" }
-
-
-rodio ={version = "0.17.1", default-features=false, features = ["wav"]}
-
-log.workspace = true
-futures.workspace = true
-anyhow.workspace = true
-parking_lot.workspace = true
-
-[dev-dependencies]
@@ -1,23 +0,0 @@
-[package]
-name = "audio"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/audio.rs"
-doctest = false
-
-[dependencies]
-gpui = { path = "../gpui" }
-collections = { path = "../collections" }
-util = { path = "../util" }
-
-rodio ={version = "0.17.1", default-features=false, features = ["wav"]}
-
-log.workspace = true
-
-anyhow.workspace = true
-parking_lot.workspace = true
-
-[dev-dependencies]
@@ -1,44 +0,0 @@
-use std::{io::Cursor, sync::Arc};
-
-use anyhow::Result;
-use collections::HashMap;
-use gpui::{AppContext, AssetSource};
-use rodio::{
- source::{Buffered, SamplesConverter},
- Decoder, Source,
-};
-
-type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
-
-pub struct SoundRegistry {
- cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
- assets: Box<dyn AssetSource>,
-}
-
-impl SoundRegistry {
- pub fn new(source: impl AssetSource) -> Arc<Self> {
- Arc::new(Self {
- cache: Default::default(),
- assets: Box::new(source),
- })
- }
-
- pub fn global(cx: &AppContext) -> Arc<Self> {
- cx.global::<Arc<Self>>().clone()
- }
-
- pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
- if let Some(wav) = self.cache.lock().get(name) {
- return Ok(wav.clone());
- }
-
- let path = format!("sounds/{}.wav", name);
- let bytes = self.assets.load(&path)?.into_owned();
- let cursor = Cursor::new(bytes);
- let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
-
- self.cache.lock().insert(name.to_string(), source.clone());
-
- Ok(source)
- }
-}
@@ -1,81 +0,0 @@
-use assets::SoundRegistry;
-use gpui::{AppContext, AssetSource};
-use rodio::{OutputStream, OutputStreamHandle};
-use util::ResultExt;
-
-mod assets;
-
-pub fn init(source: impl AssetSource, cx: &mut AppContext) {
- cx.set_global(SoundRegistry::new(source));
- cx.set_global(Audio::new());
-}
-
-pub enum Sound {
- Joined,
- Leave,
- Mute,
- Unmute,
- StartScreenshare,
- StopScreenshare,
-}
-
-impl Sound {
- fn file(&self) -> &'static str {
- match self {
- Self::Joined => "joined_call",
- Self::Leave => "leave_call",
- Self::Mute => "mute",
- Self::Unmute => "unmute",
- Self::StartScreenshare => "start_screenshare",
- Self::StopScreenshare => "stop_screenshare",
- }
- }
-}
-
-pub struct Audio {
- _output_stream: Option<OutputStream>,
- output_handle: Option<OutputStreamHandle>,
-}
-
-impl Audio {
- pub fn new() -> Self {
- Self {
- _output_stream: None,
- output_handle: None,
- }
- }
-
- fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
- if self.output_handle.is_none() {
- let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
- self.output_handle = output_handle;
- self._output_stream = _output_stream;
- }
-
- self.output_handle.as_ref()
- }
-
- pub fn play_sound(sound: Sound, cx: &mut AppContext) {
- if !cx.has_global::<Self>() {
- return;
- }
-
- cx.update_global::<Self, _, _>(|this, cx| {
- let output_handle = this.ensure_output_exists()?;
- let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
- output_handle.play_raw(source).log_err()?;
- Some(())
- });
- }
-
- pub fn end_call(cx: &mut AppContext) {
- if !cx.has_global::<Self>() {
- return;
- }
-
- cx.update_global::<Self, _, _>(|this, _| {
- this._output_stream.take();
- this.output_handle.take();
- });
- }
-}
@@ -1,44 +0,0 @@
-use std::{io::Cursor, sync::Arc};
-
-use anyhow::Result;
-use collections::HashMap;
-use gpui::{AppContext, AssetSource};
-use rodio::{
- source::{Buffered, SamplesConverter},
- Decoder, Source,
-};
-
-type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
-
-pub struct SoundRegistry {
- cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
- assets: Box<dyn AssetSource>,
-}
-
-impl SoundRegistry {
- pub fn new(source: impl AssetSource) -> Arc<Self> {
- Arc::new(Self {
- cache: Default::default(),
- assets: Box::new(source),
- })
- }
-
- pub fn global(cx: &AppContext) -> Arc<Self> {
- cx.global::<Arc<Self>>().clone()
- }
-
- pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
- if let Some(wav) = self.cache.lock().get(name) {
- return Ok(wav.clone());
- }
-
- let path = format!("sounds/{}.wav", name);
- let bytes = self.assets.load(&path)?.into_owned();
- let cursor = Cursor::new(bytes);
- let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
-
- self.cache.lock().insert(name.to_string(), source.clone());
-
- Ok(source)
- }
-}
@@ -1,81 +0,0 @@
-use assets::SoundRegistry;
-use gpui::{AppContext, AssetSource};
-use rodio::{OutputStream, OutputStreamHandle};
-use util::ResultExt;
-
-mod assets;
-
-pub fn init(source: impl AssetSource, cx: &mut AppContext) {
- cx.set_global(SoundRegistry::new(source));
- cx.set_global(Audio::new());
-}
-
-pub enum Sound {
- Joined,
- Leave,
- Mute,
- Unmute,
- StartScreenshare,
- StopScreenshare,
-}
-
-impl Sound {
- fn file(&self) -> &'static str {
- match self {
- Self::Joined => "joined_call",
- Self::Leave => "leave_call",
- Self::Mute => "mute",
- Self::Unmute => "unmute",
- Self::StartScreenshare => "start_screenshare",
- Self::StopScreenshare => "stop_screenshare",
- }
- }
-}
-
-pub struct Audio {
- _output_stream: Option<OutputStream>,
- output_handle: Option<OutputStreamHandle>,
-}
-
-impl Audio {
- pub fn new() -> Self {
- Self {
- _output_stream: None,
- output_handle: None,
- }
- }
-
- fn ensure_output_exists(&mut self) -> Option<&OutputStreamHandle> {
- if self.output_handle.is_none() {
- let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
- self.output_handle = output_handle;
- self._output_stream = _output_stream;
- }
-
- self.output_handle.as_ref()
- }
-
- pub fn play_sound(sound: Sound, cx: &mut AppContext) {
- if !cx.has_global::<Self>() {
- return;
- }
-
- cx.update_global::<Self, _>(|this, cx| {
- let output_handle = this.ensure_output_exists()?;
- let source = SoundRegistry::global(cx).get(sound.file()).log_err()?;
- output_handle.play_raw(source).log_err()?;
- Some(())
- });
- }
-
- pub fn end_call(cx: &mut AppContext) {
- if !cx.has_global::<Self>() {
- return;
- }
-
- cx.update_global::<Self, _>(|this, _| {
- this._output_stream.take();
- this.output_handle.take();
- });
- }
-}
@@ -12,9 +12,9 @@ doctest = false
db = { path = "../db" }
client = { path = "../client" }
gpui = { package = "gpui2", path = "../gpui2" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
workspace = { path = "../workspace" }
util = { path = "../util" }
@@ -16,7 +16,7 @@ ui = { package = "ui2", path = "../ui2" }
language = { path = "../language" }
project = { path = "../project" }
search = { path = "../search" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
workspace = { path = "../workspace" }
outline = { path = "../outline" }
@@ -19,7 +19,7 @@ test-support = [
]
[dependencies]
-audio = { package = "audio2", path = "../audio2" }
+audio = { path = "../audio" }
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { package = "gpui2", path = "../gpui2" }
@@ -29,7 +29,7 @@ fs = { path = "../fs" }
language = { path = "../language" }
media = { path = "../media" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
util = { path = "../util" }
anyhow.workspace = true
@@ -20,7 +20,7 @@ util = { path = "../util" }
rpc = { path = "../rpc" }
text = { package = "text2", path = "../text2" }
language = { path = "../language" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
clock = { path = "../clock" }
@@ -50,5 +50,5 @@ collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -19,7 +19,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
rpc = { path = "../rpc" }
text = { package = "text2", path = "../text2" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
@@ -49,5 +49,5 @@ url = "2.2"
collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
-version = "0.32.0"
+version = "0.28.0"
publish = false
[[bin]]
@@ -17,7 +17,7 @@ required-features = ["seed-support"]
clock = { path = "../clock" }
collections = { path = "../collections" }
live_kit_server = { path = "../live_kit_server" }
-text = { path = "../text" }
+text = { package = "text2", path = "../text2" }
rpc = { path = "../rpc" }
util = { path = "../util" }
@@ -62,7 +62,7 @@ uuid.workspace = true
[dev-dependencies]
audio = { path = "../audio" }
collections = { path = "../collections", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
call = { path = "../call", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
channel = { path = "../channel" }
@@ -70,15 +70,17 @@ editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
git = { path = "../git", features = ["test-support"] }
-live_kit_client = { path = "../live_kit_client", features = ["test-support"] }
+live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
node_runtime = { path = "../node_runtime" }
notifications = { path = "../notifications", features = ["test-support"] }
+
project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
-theme = { path = "../theme" }
+theme = { package = "theme2", path = "../theme2" }
workspace = { path = "../workspace", features = ["test-support"] }
+
collab_ui = { path = "../collab_ui", features = ["test-support"] }
async-trait.workspace = true
@@ -1 +0,0 @@
-../collab2/k8s
@@ -5,7 +5,7 @@ mod feature_flag_tests;
mod message_tests;
use super::*;
-use gpui::executor::Background;
+use gpui::BackgroundExecutor;
use parking_lot::Mutex;
use sea_orm::ConnectionTrait;
use sqlx::migrate::MigrateDatabase;
@@ -22,7 +22,7 @@ pub struct TestDb {
}
impl TestDb {
- pub fn sqlite(background: Arc<Background>) -> Self {
+ pub fn sqlite(background: BackgroundExecutor) -> Self {
let url = format!("sqlite::memory:");
let runtime = tokio::runtime::Builder::new_current_thread()
.enable_io()
@@ -59,7 +59,7 @@ impl TestDb {
}
}
- pub fn postgres(background: Arc<Background>) -> Self {
+ pub fn postgres(background: BackgroundExecutor) -> Self {
static LOCK: Mutex<()> = Mutex::new(());
let _guard = LOCK.lock();
@@ -108,17 +108,14 @@ impl TestDb {
macro_rules! test_both_dbs {
($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
#[gpui::test]
- async fn $postgres_test_name() {
- let test_db = crate::db::TestDb::postgres(
- gpui::executor::Deterministic::new(0).build_background(),
- );
+ async fn $postgres_test_name(cx: &mut gpui::TestAppContext) {
+ let test_db = crate::db::TestDb::postgres(cx.executor().clone());
$test_name(test_db.db()).await;
}
#[gpui::test]
- async fn $sqlite_test_name() {
- let test_db =
- crate::db::TestDb::sqlite(gpui::executor::Deterministic::new(0).build_background());
+ async fn $sqlite_test_name(cx: &mut gpui::TestAppContext) {
+ let test_db = crate::db::TestDb::sqlite(cx.executor().clone());
$test_name(test_db.db()).await;
}
};
@@ -420,8 +420,6 @@ async fn test_db_channel_moving_bugs(db: &Arc<Database>) {
.await
.unwrap();
- // Dag is: zed - projects - livestreaming
-
// Move to same parent should be a no-op
assert!(db
.move_channel(projects_id, Some(zed_id), user_id)
@@ -1,6 +1,6 @@
use super::*;
use crate::test_both_dbs;
-use gpui::executor::{Background, Deterministic};
+use gpui::TestAppContext;
use pretty_assertions::{assert_eq, assert_ne};
use std::sync::Arc;
use tests::TestDb;
@@ -509,8 +509,8 @@ fn test_fuzzy_like_string() {
}
#[gpui::test]
-async fn test_fuzzy_search_users() {
- let test_db = TestDb::postgres(build_background_executor());
+async fn test_fuzzy_search_users(cx: &mut TestAppContext) {
+ let test_db = TestDb::postgres(cx.executor());
let db = test_db.db();
for (i, github_login) in [
"California",
@@ -631,7 +631,3 @@ async fn test_non_matching_release_channels(db: &Arc<Database>) {
assert!(result.is_ok())
}
-
-fn build_background_executor() -> Arc<Background> {
- Deterministic::new(0).build_background()
-}
@@ -1,10 +1,13 @@
use std::{future::Future, time::Duration};
+#[cfg(test)]
+use gpui::BackgroundExecutor;
+
#[derive(Clone)]
pub enum Executor {
Production,
#[cfg(test)]
- Deterministic(std::sync::Arc<gpui::executor::Background>),
+ Deterministic(BackgroundExecutor),
}
impl Executor {
@@ -33,12 +36,4 @@ impl Executor {
}
}
}
-
- pub fn record_backtrace(&self) {
- match self {
- Executor::Production => {}
- #[cfg(test)]
- Executor::Deterministic(background) => background.record_backtrace(),
- }
- }
}
@@ -110,7 +110,7 @@ struct Session {
peer: Arc<Peer>,
connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
- executor: Executor,
+ _executor: Executor,
}
impl Session {
@@ -612,7 +612,7 @@ impl Server {
peer: this.peer.clone(),
connection_pool: this.connection_pool.clone(),
live_kit_client: this.app_state.live_kit_client.clone(),
- executor: executor.clone(),
+ _executor: executor.clone()
};
update_user_contacts(user_id, &session).await?;
@@ -1723,7 +1723,6 @@ async fn update_language_server(
request: proto::UpdateLanguageServer,
session: Session,
) -> Result<()> {
- session.executor.record_backtrace();
let project_id = ProjectId::from_proto(request.project_id);
let project_connection_ids = session
.db()
@@ -1750,7 +1749,6 @@ async fn forward_project_request<T>(
where
T: EntityMessage + RequestMessage,
{
- session.executor.record_backtrace();
let project_id = ProjectId::from_proto(request.remote_entity_id());
let host_connection_id = {
let collaborators = session
@@ -1778,7 +1776,6 @@ async fn create_buffer_for_peer(
request: proto::CreateBufferForPeer,
session: Session,
) -> Result<()> {
- session.executor.record_backtrace();
let peer_id = request.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?;
session
.peer
@@ -1791,7 +1788,6 @@ async fn update_buffer(
response: Response<proto::UpdateBuffer>,
session: Session,
) -> Result<()> {
- session.executor.record_backtrace();
let project_id = ProjectId::from_proto(request.project_id);
let mut guest_connection_ids;
let mut host_connection_id = None;
@@ -1812,7 +1808,6 @@ async fn update_buffer(
}
let host_connection_id = host_connection_id.ok_or_else(|| anyhow!("host not found"))?;
- session.executor.record_backtrace();
broadcast(
Some(session.connection_id),
guest_connection_ids,
@@ -1,9 +1,10 @@
use call::Room;
-use gpui::{ModelHandle, TestAppContext};
+use gpui::{Model, TestAppContext};
mod channel_buffer_tests;
mod channel_message_tests;
mod channel_tests;
+mod editor_tests;
mod following_tests;
mod integration_tests;
mod notification_tests;
@@ -23,7 +24,7 @@ struct RoomParticipants {
pending: Vec<String>,
}
-fn room_participants(room: &ModelHandle<Room>, cx: &mut TestAppContext) -> RoomParticipants {
+fn room_participants(room: &Model<Room>, cx: &mut TestAppContext) -> RoomParticipants {
room.read_with(cx, |room, _| {
let mut remote = room
.remote_participants()
@@ -41,6 +42,6 @@ fn room_participants(room: &ModelHandle<Room>, cx: &mut TestAppContext) -> RoomP
})
}
-fn channel_id(room: &ModelHandle<Room>, cx: &mut TestAppContext) -> Option<u64> {
+fn channel_id(room: &Model<Room>, cx: &mut TestAppContext) -> Option<u64> {
cx.read(|cx| room.read(cx).channel_id())
}
@@ -4,25 +4,23 @@ use crate::{
};
use call::ActiveCall;
use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL;
-use client::ParticipantIndex;
-use client::{Collaborator, UserId};
+use client::{Collaborator, ParticipantIndex, UserId};
use collab_ui::channel_view::ChannelView;
use collections::HashMap;
use editor::{Anchor, Editor, ToOffset};
use futures::future;
-use gpui::{executor::Deterministic, ModelHandle, TestAppContext, ViewContext};
+use gpui::{BackgroundExecutor, Model, TestAppContext, ViewContext};
use rpc::{proto::PeerId, RECEIVE_TIMEOUT};
use serde_json::json;
-use std::{ops::Range, sync::Arc};
+use std::ops::Range;
#[gpui::test]
async fn test_core_channel_buffers(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -50,7 +48,7 @@ async fn test_core_channel_buffers(
});
buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Client B joins the channel buffer
let channel_buffer_b = client_b
@@ -77,13 +75,13 @@ async fn test_core_channel_buffers(
});
// Both A and B see the new edit
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world");
assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world");
// Client A closes the channel buffer.
cx_a.update(|_| drop(channel_buffer_a));
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Client B sees that client A is gone from the channel buffer.
channel_buffer_b.read_with(cx_b, |buffer, _| {
@@ -96,7 +94,7 @@ async fn test_core_channel_buffers(
.update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Sanity test, make sure we saw A rejoining
channel_buffer_b.read_with(cx_b, |buffer, _| {
@@ -109,7 +107,7 @@ async fn test_core_channel_buffers(
// Client A loses connection.
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
// Client B observes A disconnect
channel_buffer_b.read_with(cx_b, |buffer, _| {
@@ -123,13 +121,12 @@ async fn test_core_channel_buffers(
#[gpui::test]
async fn test_channel_notes_participant_indices(
- deterministic: Arc<Deterministic>,
- mut cx_a: &mut TestAppContext,
- mut cx_b: &mut TestAppContext,
+ executor: BackgroundExecutor,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -157,9 +154,10 @@ async fn test_channel_notes_participant_indices(
let (project_a, worktree_id_a) = client_a.build_local_project("/root", cx_a).await;
let project_b = client_b.build_empty_local_project(cx_b);
let project_c = client_c.build_empty_local_project(cx_c);
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
- let workspace_c = client_c.build_workspace(&project_c, cx_c).root(cx_c);
+
+ let (workspace_a, mut cx_a) = client_a.build_workspace(&project_a, cx_a);
+ let (workspace_b, mut cx_b) = client_b.build_workspace(&project_b, cx_b);
+ let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c);
// Clients A, B, and C open the channel notes
let channel_view_a = cx_a
@@ -184,7 +182,7 @@ async fn test_channel_notes_participant_indices(
});
});
});
- deterministic.run_until_parked();
+ executor.run_until_parked();
channel_view_b.update(cx_b, |notes, cx| {
notes.editor.update(cx, |editor, cx| {
editor.move_down(&Default::default(), cx);
@@ -194,7 +192,7 @@ async fn test_channel_notes_participant_indices(
});
});
});
- deterministic.run_until_parked();
+ executor.run_until_parked();
channel_view_c.update(cx_c, |notes, cx| {
notes.editor.update(cx, |editor, cx| {
editor.move_down(&Default::default(), cx);
@@ -207,7 +205,7 @@ async fn test_channel_notes_participant_indices(
// Client A sees clients B and C without assigned colors, because they aren't
// in a call together.
- deterministic.run_until_parked();
+ executor.run_until_parked();
channel_view_a.update(cx_a, |notes, cx| {
notes.editor.update(cx, |editor, cx| {
assert_remote_selections(editor, &[(None, 1..2), (None, 2..3)], cx);
@@ -223,7 +221,7 @@ async fn test_channel_notes_participant_indices(
// Clients A and B see each other with two different assigned colors. Client C
// still doesn't have a color.
- deterministic.run_until_parked();
+ executor.run_until_parked();
channel_view_a.update(cx_a, |notes, cx| {
notes.editor.update(cx, |editor, cx| {
assert_remote_selections(
@@ -249,7 +247,7 @@ async fn test_channel_notes_participant_indices(
.await
.unwrap();
let project_b = client_b.build_remote_project(project_id, cx_b).await;
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
+ let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
// Clients A and B open the same file.
let editor_a = workspace_a
@@ -279,7 +277,7 @@ async fn test_channel_notes_participant_indices(
selections.select_ranges(vec![2..3]);
});
});
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Clients A and B see each other with the same colors as in the channel notes.
editor_a.update(cx_a, |editor, cx| {
@@ -314,11 +312,10 @@ fn assert_remote_selections(
#[gpui::test]
async fn test_multiple_handles_to_channel_buffer(
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
cx_a: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(deterministic.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let channel_id = server
@@ -340,7 +337,7 @@ async fn test_multiple_handles_to_channel_buffer(
future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
.await
.unwrap();
- let channel_buffer_model_id = channel_buffer.id();
+ let channel_buffer_model_id = channel_buffer.entity_id();
assert_eq!(channel_buffer, channel_buffer_2);
assert_eq!(channel_buffer, channel_buffer_3);
@@ -364,7 +361,7 @@ async fn test_multiple_handles_to_channel_buffer(
.update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
.await
.unwrap();
- assert_ne!(channel_buffer.id(), channel_buffer_model_id);
+ assert_ne!(channel_buffer.entity_id(), channel_buffer_model_id);
channel_buffer.update(cx_a, |buffer, cx| {
buffer.buffer().update(cx, |buffer, _| {
assert_eq!(buffer.text(), "hello");
@@ -374,12 +371,11 @@ async fn test_multiple_handles_to_channel_buffer(
#[gpui::test]
async fn test_channel_buffer_disconnect(
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(deterministic.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -397,6 +393,7 @@ async fn test_channel_buffer_disconnect(
.update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
.await
.unwrap();
+
let channel_buffer_b = client_b
.channel_store()
.update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
@@ -437,12 +434,11 @@ async fn test_channel_buffer_disconnect(
#[gpui::test]
async fn test_rejoin_channel_buffer(
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(deterministic.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -518,13 +514,12 @@ async fn test_rejoin_channel_buffer(
#[gpui::test]
async fn test_channel_buffers_and_server_restarts(
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(deterministic.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -606,13 +601,12 @@ async fn test_channel_buffers_and_server_restarts(
#[gpui::test(iterations = 10)]
async fn test_following_to_channel_notes_without_a_shared_project(
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
mut cx_a: &mut TestAppContext,
mut cx_b: &mut TestAppContext,
mut cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(deterministic.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -664,9 +658,9 @@ async fn test_following_to_channel_notes_without_a_shared_project(
let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
let (project_b, _) = client_b.build_local_project("/b", cx_b).await;
let (project_c, _) = client_b.build_local_project("/c", cx_c).await;
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
- let _workspace_c = client_c.build_workspace(&project_c, cx_c).root(cx_c);
+ let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
+ let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
+ let (_workspace_c, _cx_c) = client_c.build_workspace(&project_c, cx_c);
active_call_a
.update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
@@ -691,7 +685,9 @@ async fn test_following_to_channel_notes_without_a_shared_project(
// Client B follows client A.
workspace_b
.update(cx_b, |workspace, cx| {
- workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
+ workspace
+ .start_following(client_a.peer_id().unwrap(), cx)
+ .unwrap()
})
.await
.unwrap();
@@ -699,7 +695,7 @@ async fn test_following_to_channel_notes_without_a_shared_project(
// Client B is taken to the notes for channel 1, with the same
// text selected as client A.
deterministic.run_until_parked();
- let channel_view_1_b = workspace_b.read_with(cx_b, |workspace, cx| {
+ let channel_view_1_b = workspace_b.update(cx_b, |workspace, cx| {
assert_eq!(
workspace.leader_for_pane(workspace.active_pane()),
Some(client_a.peer_id().unwrap())
@@ -710,7 +706,7 @@ async fn test_following_to_channel_notes_without_a_shared_project(
.downcast::<ChannelView>()
.expect("active item is not a channel view")
});
- channel_view_1_b.read_with(cx_b, |notes, cx| {
+ channel_view_1_b.update(cx_b, |notes, cx| {
assert_eq!(notes.channel(cx).unwrap().name, "channel-1");
let editor = notes.editor.read(cx);
assert_eq!(editor.text(cx), "Hello from A.");
@@ -718,17 +714,22 @@ async fn test_following_to_channel_notes_without_a_shared_project(
});
// Client A opens the notes for channel 2.
+ eprintln!("opening -------------------->");
+
let channel_view_2_a = cx_a
.update(|cx| ChannelView::open(channel_2_id, workspace_a.clone(), cx))
.await
.unwrap();
- channel_view_2_a.read_with(cx_a, |notes, cx| {
+ channel_view_2_a.update(cx_a, |notes, cx| {
assert_eq!(notes.channel(cx).unwrap().name, "channel-2");
});
// Client B is taken to the notes for channel 2.
deterministic.run_until_parked();
- let channel_view_2_b = workspace_b.read_with(cx_b, |workspace, cx| {
+
+ eprintln!("opening <--------------------");
+
+ let channel_view_2_b = workspace_b.update(cx_b, |workspace, cx| {
assert_eq!(
workspace.leader_for_pane(workspace.active_pane()),
Some(client_a.peer_id().unwrap())
@@ -739,19 +740,18 @@ async fn test_following_to_channel_notes_without_a_shared_project(
.downcast::<ChannelView>()
.expect("active item is not a channel view")
});
- channel_view_2_b.read_with(cx_b, |notes, cx| {
+ channel_view_2_b.update(cx_b, |notes, cx| {
assert_eq!(notes.channel(cx).unwrap().name, "channel-2");
});
}
#[gpui::test]
async fn test_channel_buffer_changes(
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(deterministic.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -778,7 +778,7 @@ async fn test_channel_buffer_changes(
});
deterministic.run_until_parked();
- let has_buffer_changed = cx_b.read(|cx| {
+ let has_buffer_changed = cx_b.update(|cx| {
client_b
.channel_store()
.read(cx)
@@ -789,14 +789,14 @@ async fn test_channel_buffer_changes(
// Opening the buffer should clear the changed flag.
let project_b = client_b.build_empty_local_project(cx_b);
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
+ let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
let channel_view_b = cx_b
.update(|cx| ChannelView::open(channel_id, workspace_b.clone(), cx))
.await
.unwrap();
deterministic.run_until_parked();
- let has_buffer_changed = cx_b.read(|cx| {
+ let has_buffer_changed = cx_b.update(|cx| {
client_b
.channel_store()
.read(cx)
@@ -826,7 +826,8 @@ async fn test_channel_buffer_changes(
// Test that the server is tracking things correctly, and we retain our 'not changed'
// state across a disconnect
- server.simulate_long_connection_interruption(client_b.peer_id().unwrap(), &deterministic);
+ server
+ .simulate_long_connection_interruption(client_b.peer_id().unwrap(), deterministic.clone());
let has_buffer_changed = cx_b.read(|cx| {
client_b
.channel_store()
@@ -877,6 +878,6 @@ fn assert_collaborators(collaborators: &HashMap<PeerId, Collaborator>, ids: &[Op
);
}
-fn buffer_text(channel_buffer: &ModelHandle<language::Buffer>, cx: &mut TestAppContext) -> String {
+fn buffer_text(channel_buffer: &Model<language::Buffer>, cx: &mut TestAppContext) -> String {
channel_buffer.read_with(cx, |buffer, _| buffer.text())
}
@@ -1,20 +1,16 @@
use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
use channel::{ChannelChat, ChannelMessageId, MessageParams};
-use collab_ui::chat_panel::ChatPanel;
-use gpui::{executor::Deterministic, BorrowAppContext, ModelHandle, TestAppContext};
+use gpui::{BackgroundExecutor, Model, TestAppContext};
use rpc::Notification;
-use std::sync::Arc;
-use workspace::dock::Panel;
#[gpui::test]
async fn test_basic_channel_messages(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
mut cx_a: &mut TestAppContext,
mut cx_b: &mut TestAppContext,
mut cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -57,13 +53,13 @@ async fn test_basic_channel_messages(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
channel_chat_b
.update(cx_b, |c, cx| c.send_message("three".into(), cx).unwrap())
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
let channel_chat_c = client_c
.channel_store()
@@ -117,12 +113,11 @@ async fn test_basic_channel_messages(
#[gpui::test]
async fn test_rejoin_channel_chat(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -178,7 +173,7 @@ async fn test_rejoin_channel_chat(
// Client A reconnects.
server.allow_connections();
- deterministic.advance_clock(RECONNECT_TIMEOUT);
+ executor.advance_clock(RECONNECT_TIMEOUT);
// Client A fetches the messages that were sent while they were disconnected
// and resends their own messages which failed to send.
@@ -189,13 +184,12 @@ async fn test_rejoin_channel_chat(
#[gpui::test]
async fn test_remove_channel_message(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -235,7 +229,7 @@ async fn test_remove_channel_message(
.unwrap();
// Clients A and B see all of the messages.
- deterministic.run_until_parked();
+ executor.run_until_parked();
let expected_messages = &["one", "two", "three"];
assert_messages(&channel_chat_a, expected_messages, cx_a);
assert_messages(&channel_chat_b, expected_messages, cx_b);
@@ -252,7 +246,7 @@ async fn test_remove_channel_message(
.unwrap();
// Client B sees that the message is gone.
- deterministic.run_until_parked();
+ executor.run_until_parked();
let expected_messages = &["one", "three"];
assert_messages(&channel_chat_a, expected_messages, cx_a);
assert_messages(&channel_chat_b, expected_messages, cx_b);
@@ -267,146 +261,148 @@ async fn test_remove_channel_message(
}
#[track_caller]
-fn assert_messages(chat: &ModelHandle<ChannelChat>, messages: &[&str], cx: &mut TestAppContext) {
+fn assert_messages(chat: &Model<ChannelChat>, messages: &[&str], cx: &mut TestAppContext) {
+ // todo!(don't directly borrow here)
assert_eq!(
- chat.read_with(cx, |chat, _| chat
- .messages()
- .iter()
- .map(|m| m.body.clone())
- .collect::<Vec<_>>(),),
+ chat.read_with(cx, |chat, _| {
+ chat.messages()
+ .iter()
+ .map(|m| m.body.clone())
+ .collect::<Vec<_>>()
+ }),
messages
);
}
-#[gpui::test]
-async fn test_channel_message_changes(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b)],
- )
- .await;
-
- // Client A sends a message, client B should see that there is a new message.
- let channel_chat_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
-
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
- .await
- .unwrap();
-
- deterministic.run_until_parked();
-
- let b_has_messages = cx_b.read_with(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_new_messages(channel_id)
- .unwrap()
- });
-
- assert!(b_has_messages);
-
- // Opening the chat should clear the changed flag.
- cx_b.update(|cx| {
- collab_ui::init(&client_b.app_state, cx);
- });
- let project_b = client_b.build_empty_local_project(cx_b);
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
- let chat_panel_b = workspace_b.update(cx_b, |workspace, cx| ChatPanel::new(workspace, cx));
- chat_panel_b
- .update(cx_b, |chat_panel, cx| {
- chat_panel.set_active(true, cx);
- chat_panel.select_channel(channel_id, None, cx)
- })
- .await
- .unwrap();
-
- deterministic.run_until_parked();
-
- let b_has_messages = cx_b.read_with(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_new_messages(channel_id)
- .unwrap()
- });
-
- assert!(!b_has_messages);
-
- // Sending a message while the chat is open should not change the flag.
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
- .await
- .unwrap();
-
- deterministic.run_until_parked();
-
- let b_has_messages = cx_b.read_with(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_new_messages(channel_id)
- .unwrap()
- });
-
- assert!(!b_has_messages);
-
- // Sending a message while the chat is closed should change the flag.
- chat_panel_b.update(cx_b, |chat_panel, cx| {
- chat_panel.set_active(false, cx);
- });
-
- // Sending a message while the chat is open should not change the flag.
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap())
- .await
- .unwrap();
-
- deterministic.run_until_parked();
-
- let b_has_messages = cx_b.read_with(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_new_messages(channel_id)
- .unwrap()
- });
-
- assert!(b_has_messages);
-
- // Closing the chat should re-enable change tracking
- cx_b.update(|_| drop(chat_panel_b));
-
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("four".into(), cx).unwrap())
- .await
- .unwrap();
-
- deterministic.run_until_parked();
-
- let b_has_messages = cx_b.read_with(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_new_messages(channel_id)
- .unwrap()
- });
-
- assert!(b_has_messages);
-}
+//todo!(collab_ui)
+// #[gpui::test]
+// async fn test_channel_message_changes(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(&executor).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+
+// let channel_id = server
+// .make_channel(
+// "the-channel",
+// None,
+// (&client_a, cx_a),
+// &mut [(&client_b, cx_b)],
+// )
+// .await;
+
+// // Client A sends a message, client B should see that there is a new message.
+// let channel_chat_a = client_a
+// .channel_store()
+// .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx))
+// .await
+// .unwrap();
+
+// channel_chat_a
+// .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+
+// let b_has_messages = cx_b.read_with(|cx| {
+// client_b
+// .channel_store()
+// .read(cx)
+// .has_new_messages(channel_id)
+// .unwrap()
+// });
+
+// assert!(b_has_messages);
+
+// // Opening the chat should clear the changed flag.
+// cx_b.update(|cx| {
+// collab_ui::init(&client_b.app_state, cx);
+// });
+// let project_b = client_b.build_empty_local_project(cx_b);
+// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
+// let chat_panel_b = workspace_b.update(cx_b, |workspace, cx| ChatPanel::new(workspace, cx));
+// chat_panel_b
+// .update(cx_b, |chat_panel, cx| {
+// chat_panel.set_active(true, cx);
+// chat_panel.select_channel(channel_id, None, cx)
+// })
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+
+// let b_has_messages = cx_b.read_with(|cx| {
+// client_b
+// .channel_store()
+// .read(cx)
+// .has_new_messages(channel_id)
+// .unwrap()
+// });
+
+// assert!(!b_has_messages);
+
+// // Sending a message while the chat is open should not change the flag.
+// channel_chat_a
+// .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+
+// let b_has_messages = cx_b.read_with(|cx| {
+// client_b
+// .channel_store()
+// .read(cx)
+// .has_new_messages(channel_id)
+// .unwrap()
+// });
+
+// assert!(!b_has_messages);
+
+// // Sending a message while the chat is closed should change the flag.
+// chat_panel_b.update(cx_b, |chat_panel, cx| {
+// chat_panel.set_active(false, cx);
+// });
+
+// // Sending a message while the chat is open should not change the flag.
+// channel_chat_a
+// .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap())
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+
+// let b_has_messages = cx_b.read_with(|cx| {
+// client_b
+// .channel_store()
+// .read(cx)
+// .has_new_messages(channel_id)
+// .unwrap()
+// });
+
+// assert!(b_has_messages);
+
+// // Closing the chat should re-enable change tracking
+// cx_b.update(|_| drop(chat_panel_b));
+
+// channel_chat_a
+// .update(cx_a, |c, cx| c.send_message("four".into(), cx).unwrap())
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+
+// let b_has_messages = cx_b.read_with(|cx| {
+// client_b
+// .channel_store()
+// .read(cx)
+// .has_new_messages(channel_id)
+// .unwrap()
+// });
+
+// assert!(b_has_messages);
+// }
@@ -7,7 +7,7 @@ use call::ActiveCall;
use channel::{ChannelId, ChannelMembership, ChannelStore};
use client::User;
use futures::future::try_join_all;
-use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
+use gpui::{BackgroundExecutor, Model, SharedString, TestAppContext};
use rpc::{
proto::{self, ChannelRole},
RECEIVE_TIMEOUT,
@@ -16,12 +16,11 @@ use std::sync::Arc;
#[gpui::test]
async fn test_core_channels(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -40,28 +39,30 @@ async fn test_core_channels(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channels(
client_a.channel_store(),
cx_a,
&[
ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
depth: 0,
role: ChannelRole::Admin,
},
ExpectedChannel {
id: channel_b_id,
- name: "channel-b".to_string(),
+ name: "channel-b".into(),
depth: 1,
role: ChannelRole::Admin,
},
],
);
- client_b.channel_store().read_with(cx_b, |channels, _| {
- assert!(channels.ordered_channels().collect::<Vec<_>>().is_empty())
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channels, _| {
+ assert!(channels.ordered_channels().collect::<Vec<_>>().is_empty())
+ })
});
// Invite client B to channel A as client A.
@@ -85,13 +86,13 @@ async fn test_core_channels(
.unwrap();
// Client A sees that B has been invited.
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channel_invitations(
client_b.channel_store(),
cx_b,
&[ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
depth: 0,
role: ChannelRole::Member,
}],
@@ -129,7 +130,7 @@ async fn test_core_channels(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Client B now sees that they are a member of channel A and its existing subchannels.
assert_channel_invitations(client_b.channel_store(), cx_b, &[]);
@@ -139,13 +140,13 @@ async fn test_core_channels(
&[
ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
role: ChannelRole::Member,
depth: 0,
},
ExpectedChannel {
id: channel_b_id,
- name: "channel-b".to_string(),
+ name: "channel-b".into(),
role: ChannelRole::Member,
depth: 1,
},
@@ -160,26 +161,26 @@ async fn test_core_channels(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channels(
client_b.channel_store(),
cx_b,
&[
ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
role: ChannelRole::Member,
depth: 0,
},
ExpectedChannel {
id: channel_b_id,
- name: "channel-b".to_string(),
+ name: "channel-b".into(),
role: ChannelRole::Member,
depth: 1,
},
ExpectedChannel {
id: channel_c_id,
- name: "channel-c".to_string(),
+ name: "channel-c".into(),
role: ChannelRole::Member,
depth: 2,
},
@@ -199,7 +200,7 @@ async fn test_core_channels(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Observe that client B is now an admin of channel A, and that
// their admin priveleges extend to subchannels of channel A.
@@ -210,19 +211,19 @@ async fn test_core_channels(
&[
ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
depth: 0,
role: ChannelRole::Admin,
},
ExpectedChannel {
id: channel_b_id,
- name: "channel-b".to_string(),
+ name: "channel-b".into(),
depth: 1,
role: ChannelRole::Admin,
},
ExpectedChannel {
id: channel_c_id,
- name: "channel-c".to_string(),
+ name: "channel-c".into(),
depth: 2,
role: ChannelRole::Admin,
},
@@ -238,13 +239,13 @@ async fn test_core_channels(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channels(
client_a.channel_store(),
cx_a,
&[ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
depth: 0,
role: ChannelRole::Admin,
}],
@@ -254,7 +255,7 @@ async fn test_core_channels(
cx_b,
&[ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
depth: 0,
role: ChannelRole::Admin,
}],
@@ -269,7 +270,7 @@ async fn test_core_channels(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Client A still has their channel
assert_channels(
@@ -277,7 +278,7 @@ async fn test_core_channels(
cx_a,
&[ExpectedChannel {
id: channel_a_id,
- name: "channel-a".to_string(),
+ name: "channel-a".into(),
depth: 0,
role: ChannelRole::Admin,
}],
@@ -288,7 +289,7 @@ async fn test_core_channels(
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
server
.app_state
@@ -302,13 +303,13 @@ async fn test_core_channels(
.unwrap();
server.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
assert_channels(
client_a.channel_store(),
cx_a,
&[ExpectedChannel {
id: channel_a_id,
- name: "channel-a-renamed".to_string(),
+ name: "channel-a-renamed".into(),
depth: 0,
role: ChannelRole::Admin,
}],
@@ -339,12 +340,11 @@ fn assert_members_eq(
#[gpui::test]
async fn test_joining_channel_ancestor_member(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -371,13 +371,12 @@ async fn test_joining_channel_ancestor_member(
#[gpui::test]
async fn test_channel_room(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -400,15 +399,18 @@ async fn test_channel_room(
.unwrap();
// Give everyone a chance to observe user A joining
- deterministic.run_until_parked();
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- room_a.read_with(cx_a, |room, _| assert!(room.is_connected()));
-
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
+ executor.run_until_parked();
+ let room_a =
+ cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone()));
+ cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected())));
+
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap()],
+ );
+ })
});
assert_channels(
@@ -416,23 +418,27 @@ async fn test_channel_room(
cx_b,
&[ExpectedChannel {
id: zed_id,
- name: "zed".to_string(),
+ name: "zed".into(),
depth: 0,
role: ChannelRole::Member,
}],
);
- client_b.channel_store().read_with(cx_b, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap()],
+ );
+ })
});
- client_c.channel_store().read_with(cx_c, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
+ cx_c.read(|cx| {
+ client_c.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap()],
+ );
+ })
});
active_call_b
@@ -440,31 +446,38 @@ async fn test_channel_room(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
+ );
+ })
});
- client_b.channel_store().read_with(cx_b, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
+ );
+ })
});
- client_c.channel_store().read_with(cx_c, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
+ cx_c.read(|cx| {
+ client_c.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
+ );
+ })
});
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- room_a.read_with(cx_a, |room, _| assert!(room.is_connected()));
+ let room_a =
+ cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone()));
+ cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected())));
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -473,8 +486,9 @@ async fn test_channel_room(
}
);
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- room_b.read_with(cx_b, |room, _| assert!(room.is_connected()));
+ let room_b =
+ cx_b.read(|cx| active_call_b.read_with(cx, |call, _| call.room().unwrap().clone()));
+ cx_b.read(|cx| room_b.read_with(cx, |room, _| assert!(room.is_connected())));
assert_eq!(
room_participants(&room_b, cx_b),
RoomParticipants {
@@ -490,27 +504,33 @@ async fn test_channel_room(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_b.user_id().unwrap()],
- );
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_b.user_id().unwrap()],
+ );
+ })
});
- client_b.channel_store().read_with(cx_b, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_b.user_id().unwrap()],
- );
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_b.user_id().unwrap()],
+ );
+ })
});
- client_c.channel_store().read_with(cx_c, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_b.user_id().unwrap()],
- );
+ cx_c.read(|cx| {
+ client_c.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_b.user_id().unwrap()],
+ );
+ })
});
active_call_b
@@ -518,18 +538,24 @@ async fn test_channel_room(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ })
});
- client_b.channel_store().read_with(cx_b, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ })
});
- client_c.channel_store().read_with(cx_c, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ cx_c.read(|cx| {
+ client_c.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ })
});
active_call_a
@@ -542,10 +568,11 @@ async fn test_channel_room(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- room_a.read_with(cx_a, |room, _| assert!(room.is_connected()));
+ let room_a =
+ cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone()));
+ cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected())));
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -554,8 +581,9 @@ async fn test_channel_room(
}
);
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- room_b.read_with(cx_b, |room, _| assert!(room.is_connected()));
+ let room_b =
+ cx_b.read(|cx| active_call_b.read_with(cx, |call, _| call.room().unwrap().clone()));
+ cx_b.read(|cx| room_b.read_with(cx, |room, _| assert!(room.is_connected())));
assert_eq!(
room_participants(&room_b, cx_b),
RoomParticipants {
@@ -566,9 +594,8 @@ async fn test_channel_room(
}
#[gpui::test]
-async fn test_channel_jumping(deterministic: Arc<Deterministic>, cx_a: &mut TestAppContext) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppContext) {
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let zed_id = server
@@ -586,14 +613,16 @@ async fn test_channel_jumping(deterministic: Arc<Deterministic>, cx_a: &mut Test
.unwrap();
// Give everything a chance to observe user A joining
- deterministic.run_until_parked();
-
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
- assert_participants_eq(channels.channel_participants(rust_id), &[]);
+ executor.run_until_parked();
+
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(zed_id),
+ &[client_a.user_id().unwrap()],
+ );
+ assert_participants_eq(channels.channel_participants(rust_id), &[]);
+ })
});
active_call_a
@@ -603,25 +632,26 @@ async fn test_channel_jumping(deterministic: Arc<Deterministic>, cx_a: &mut Test
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
- assert_participants_eq(
- channels.channel_participants(rust_id),
- &[client_a.user_id().unwrap()],
- );
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(channels.channel_participants(zed_id), &[]);
+ assert_participants_eq(
+ channels.channel_participants(rust_id),
+ &[client_a.user_id().unwrap()],
+ );
+ })
});
}
#[gpui::test]
async fn test_permissions_update_while_invited(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -642,7 +672,7 @@ async fn test_permissions_update_while_invited(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channel_invitations(
client_b.channel_store(),
@@ -650,7 +680,7 @@ async fn test_permissions_update_while_invited(
&[ExpectedChannel {
depth: 0,
id: rust_id,
- name: "rust".to_string(),
+ name: "rust".into(),
role: ChannelRole::Member,
}],
);
@@ -670,7 +700,7 @@ async fn test_permissions_update_while_invited(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channel_invitations(
client_b.channel_store(),
@@ -678,7 +708,7 @@ async fn test_permissions_update_while_invited(
&[ExpectedChannel {
depth: 0,
id: rust_id,
- name: "rust".to_string(),
+ name: "rust".into(),
role: ChannelRole::Member,
}],
);
@@ -687,12 +717,11 @@ async fn test_permissions_update_while_invited(
#[gpui::test]
async fn test_channel_rename(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -709,7 +738,7 @@ async fn test_channel_rename(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Client A sees the channel with its new name.
assert_channels(
@@ -718,7 +747,7 @@ async fn test_channel_rename(
&[ExpectedChannel {
depth: 0,
id: rust_id,
- name: "rust-archive".to_string(),
+ name: "rust-archive".into(),
role: ChannelRole::Admin,
}],
);
@@ -730,7 +759,7 @@ async fn test_channel_rename(
&[ExpectedChannel {
depth: 0,
id: rust_id,
- name: "rust-archive".to_string(),
+ name: "rust-archive".into(),
role: ChannelRole::Member,
}],
);
@@ -738,13 +767,12 @@ async fn test_channel_rename(
#[gpui::test]
async fn test_call_from_channel(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -778,47 +806,54 @@ async fn test_call_from_channel(
.unwrap();
// Client B accepts the call.
- deterministic.run_until_parked();
+ executor.run_until_parked();
active_call_b
.update(cx_b, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
// Client B sees that they are now in the channel
- deterministic.run_until_parked();
- active_call_b.read_with(cx_b, |call, cx| {
- assert_eq!(call.channel_id(cx), Some(channel_id));
+ executor.run_until_parked();
+ cx_b.read(|cx| {
+ active_call_b.read_with(cx, |call, cx| {
+ assert_eq!(call.channel_id(cx), Some(channel_id));
+ })
});
- client_b.channel_store().read_with(cx_b, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(channel_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(channel_id),
+ &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
+ );
+ })
});
// Clients A and C also see that client B is in the channel.
- client_a.channel_store().read_with(cx_a, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(channel_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
+ cx_a.read(|cx| {
+ client_a.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(channel_id),
+ &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
+ );
+ })
});
- client_c.channel_store().read_with(cx_c, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(channel_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
+ cx_c.read(|cx| {
+ client_c.channel_store().read_with(cx, |channels, _| {
+ assert_participants_eq(
+ channels.channel_participants(channel_id),
+ &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
+ );
+ })
});
}
#[gpui::test]
async fn test_lost_channel_creation(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -844,7 +879,7 @@ async fn test_lost_channel_creation(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Sanity check, B has the invitation
assert_channel_invitations(
@@ -853,7 +888,7 @@ async fn test_lost_channel_creation(
&[ExpectedChannel {
depth: 0,
id: channel_id,
- name: "x".to_string(),
+ name: "x".into(),
role: ChannelRole::Member,
}],
);
@@ -867,7 +902,7 @@ async fn test_lost_channel_creation(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Make sure A sees their new channel
assert_channels(
@@ -877,13 +912,13 @@ async fn test_lost_channel_creation(
ExpectedChannel {
depth: 0,
id: channel_id,
- name: "x".to_string(),
+ name: "x".into(),
role: ChannelRole::Admin,
},
ExpectedChannel {
depth: 1,
id: subchannel_id,
- name: "subchannel".to_string(),
+ name: "subchannel".into(),
role: ChannelRole::Admin,
},
],
@@ -898,7 +933,7 @@ async fn test_lost_channel_creation(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// Client B should now see the channel
assert_channels(
@@ -908,13 +943,13 @@ async fn test_lost_channel_creation(
ExpectedChannel {
depth: 0,
id: channel_id,
- name: "x".to_string(),
+ name: "x".into(),
role: ChannelRole::Member,
},
ExpectedChannel {
depth: 1,
id: subchannel_id,
- name: "subchannel".to_string(),
+ name: "subchannel".into(),
role: ChannelRole::Member,
},
],
@@ -923,14 +958,12 @@ async fn test_lost_channel_creation(
#[gpui::test]
async fn test_channel_link_notifications(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
-
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -953,7 +986,7 @@ async fn test_channel_link_notifications(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
client_b
.channel_store()
@@ -971,7 +1004,7 @@ async fn test_channel_link_notifications(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// we have an admin (a), member (b) and guest (c) all part of the zed channel.
@@ -984,6 +1017,8 @@ async fn test_channel_link_notifications(
.await
.unwrap();
+ executor.run_until_parked();
+
// the new channel shows for b and not c
assert_channels_list_shape(
client_a.channel_store(),
@@ -1021,7 +1056,7 @@ async fn test_channel_link_notifications(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// the new channel shows for b and c
assert_channels_list_shape(
@@ -1093,6 +1128,8 @@ async fn test_channel_link_notifications(
.await
.unwrap();
+ executor.run_until_parked();
+
// the members-only channel is still shown for c, but hidden for b
assert_channels_list_shape(
client_b.channel_store(),
@@ -1104,9 +1141,8 @@ async fn test_channel_link_notifications(
(helix_channel, 3),
],
);
- client_b
- .channel_store()
- .read_with(cx_b, |channel_store, _| {
+ cx_b.read(|cx| {
+ client_b.channel_store().read_with(cx, |channel_store, _| {
assert_eq!(
channel_store
.channel_for_id(vim_channel)
@@ -1114,22 +1150,19 @@ async fn test_channel_link_notifications(
.visibility,
proto::ChannelVisibility::Members
)
- });
+ })
+ });
assert_channels_list_shape(client_c.channel_store(), cx_c, &[(zed_channel, 0)]);
}
#[gpui::test]
async fn test_channel_membership_notifications(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
-
- deterministic.forbid_parking();
-
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_c").await;
@@ -1160,7 +1193,7 @@ async fn test_channel_membership_notifications(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
client_b
.channel_store()
@@ -1178,7 +1211,7 @@ async fn test_channel_membership_notifications(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// we have an admin (a), and a guest (b) with access to all of zed, and membership in vim.
assert_channels(
@@ -1188,13 +1221,13 @@ async fn test_channel_membership_notifications(
ExpectedChannel {
depth: 0,
id: zed_channel,
- name: "zed".to_string(),
+ name: "zed".into(),
role: ChannelRole::Guest,
},
ExpectedChannel {
depth: 1,
id: vim_channel,
- name: "vim".to_string(),
+ name: "vim".into(),
role: ChannelRole::Member,
},
],
@@ -1208,7 +1241,7 @@ async fn test_channel_membership_notifications(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channels(
client_b.channel_store(),
@@ -1217,13 +1250,13 @@ async fn test_channel_membership_notifications(
ExpectedChannel {
depth: 0,
id: zed_channel,
- name: "zed".to_string(),
+ name: "zed".into(),
role: ChannelRole::Guest,
},
ExpectedChannel {
depth: 1,
id: vim_channel,
- name: "vim".to_string(),
+ name: "vim".into(),
role: ChannelRole::Guest,
},
],
@@ -1232,13 +1265,11 @@ async fn test_channel_membership_notifications(
#[gpui::test]
async fn test_guest_access(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
-
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -1281,7 +1312,7 @@ async fn test_guest_access(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_channels_list_shape(
client_a.channel_store(),
cx_a,
@@ -1,1820 +1,1890 @@
-use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
-use call::ActiveCall;
-use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
-use editor::{Editor, ExcerptRange, MultiBuffer};
-use gpui::{executor::Deterministic, geometry::vector::vec2f, TestAppContext, ViewHandle};
-use live_kit_client::MacOSDisplay;
-use project::project_settings::ProjectSettings;
-use rpc::proto::PeerId;
-use serde_json::json;
-use settings::SettingsStore;
-use std::{borrow::Cow, sync::Arc};
-use workspace::{
- dock::{test::TestPanel, DockPosition},
- item::{test::TestItem, ItemHandle as _},
- shared_screen::SharedScreen,
- SplitDirection, Workspace,
-};
-
-#[gpui::test(iterations = 10)]
-async fn test_basic_following(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
- cx_d: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
-
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- let client_d = server.create_client(cx_d, "user_d").await;
- server
- .create_room(&mut [
- (&client_a, cx_a),
- (&client_b, cx_b),
- (&client_c, cx_c),
- (&client_d, cx_d),
- ])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "1.txt": "one\none\none",
- "2.txt": "two\ntwo\ntwo",
- "3.txt": "three\nthree\nthree",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
-
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
-
- let window_a = client_a.build_workspace(&project_a, cx_a);
- let workspace_a = window_a.root(cx_a);
- let window_b = client_b.build_workspace(&project_b, cx_b);
- let workspace_b = window_b.root(cx_b);
-
- // Client A opens some editors.
- let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
- let editor_a1 = workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
- let editor_a2 = workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "2.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- // Client B opens an editor.
- let editor_b1 = workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- let peer_id_a = client_a.peer_id().unwrap();
- let peer_id_b = client_b.peer_id().unwrap();
- let peer_id_c = client_c.peer_id().unwrap();
- let peer_id_d = client_d.peer_id().unwrap();
-
- // Client A updates their selections in those editors
- editor_a1.update(cx_a, |editor, cx| {
- editor.handle_input("a", cx);
- editor.handle_input("b", cx);
- editor.handle_input("c", cx);
- editor.select_left(&Default::default(), cx);
- assert_eq!(editor.selections.ranges(cx), vec![3..2]);
- });
- editor_a2.update(cx_a, |editor, cx| {
- editor.handle_input("d", cx);
- editor.handle_input("e", cx);
- editor.select_left(&Default::default(), cx);
- assert_eq!(editor.selections.ranges(cx), vec![2..1]);
- });
-
- // When client B starts following client A, all visible view states are replicated to client B.
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(peer_id_a, cx).unwrap()
- })
- .await
- .unwrap();
-
- cx_c.foreground().run_until_parked();
- let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
- workspace
- .active_item(cx)
- .unwrap()
- .downcast::<Editor>()
- .unwrap()
- });
- assert_eq!(
- cx_b.read(|cx| editor_b2.project_path(cx)),
- Some((worktree_id, "2.txt").into())
- );
- assert_eq!(
- editor_b2.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
- vec![2..1]
- );
- assert_eq!(
- editor_b1.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
- vec![3..2]
- );
-
- cx_c.foreground().run_until_parked();
- let active_call_c = cx_c.read(ActiveCall::global);
- let project_c = client_c.build_remote_project(project_id, cx_c).await;
- let window_c = client_c.build_workspace(&project_c, cx_c);
- let workspace_c = window_c.root(cx_c);
- active_call_c
- .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
- .await
- .unwrap();
- drop(project_c);
-
- // Client C also follows client A.
- workspace_c
- .update(cx_c, |workspace, cx| {
- workspace.follow(peer_id_a, cx).unwrap()
- })
- .await
- .unwrap();
-
- cx_d.foreground().run_until_parked();
- let active_call_d = cx_d.read(ActiveCall::global);
- let project_d = client_d.build_remote_project(project_id, cx_d).await;
- let workspace_d = client_d.build_workspace(&project_d, cx_d).root(cx_d);
- active_call_d
- .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
- .await
- .unwrap();
- drop(project_d);
-
- // All clients see that clients B and C are following client A.
- cx_c.foreground().run_until_parked();
- for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
- assert_eq!(
- followers_by_leader(project_id, cx),
- &[(peer_id_a, vec![peer_id_b, peer_id_c])],
- "followers seen by {name}"
- );
- }
-
- // Client C unfollows client A.
- workspace_c.update(cx_c, |workspace, cx| {
- workspace.unfollow(&workspace.active_pane().clone(), cx);
- });
-
- // All clients see that clients B is following client A.
- cx_c.foreground().run_until_parked();
- for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
- assert_eq!(
- followers_by_leader(project_id, cx),
- &[(peer_id_a, vec![peer_id_b])],
- "followers seen by {name}"
- );
- }
-
- // Client C re-follows client A.
- workspace_c
- .update(cx_c, |workspace, cx| {
- workspace.follow(peer_id_a, cx).unwrap()
- })
- .await
- .unwrap();
-
- // All clients see that clients B and C are following client A.
- cx_c.foreground().run_until_parked();
- for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
- assert_eq!(
- followers_by_leader(project_id, cx),
- &[(peer_id_a, vec![peer_id_b, peer_id_c])],
- "followers seen by {name}"
- );
- }
-
- // Client D follows client B, then switches to following client C.
- workspace_d
- .update(cx_d, |workspace, cx| {
- workspace.follow(peer_id_b, cx).unwrap()
- })
- .await
- .unwrap();
- workspace_d
- .update(cx_d, |workspace, cx| {
- workspace.follow(peer_id_c, cx).unwrap()
- })
- .await
- .unwrap();
-
- // All clients see that D is following C
- cx_d.foreground().run_until_parked();
- for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
- assert_eq!(
- followers_by_leader(project_id, cx),
- &[
- (peer_id_a, vec![peer_id_b, peer_id_c]),
- (peer_id_c, vec![peer_id_d])
- ],
- "followers seen by {name}"
- );
- }
-
- // Client C closes the project.
- window_c.remove(cx_c);
- cx_c.drop_last(workspace_c);
-
- // Clients A and B see that client B is following A, and client C is not present in the followers.
- cx_c.foreground().run_until_parked();
- for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
- assert_eq!(
- followers_by_leader(project_id, cx),
- &[(peer_id_a, vec![peer_id_b]),],
- "followers seen by {name}"
- );
- }
-
- // When client A activates a different editor, client B does so as well.
- workspace_a.update(cx_a, |workspace, cx| {
- workspace.activate_item(&editor_a1, cx)
- });
- deterministic.run_until_parked();
- workspace_b.read_with(cx_b, |workspace, cx| {
- assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
- });
-
- // When client A opens a multibuffer, client B does so as well.
- let multibuffer_a = cx_a.add_model(|cx| {
- let buffer_a1 = project_a.update(cx, |project, cx| {
- project
- .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
- .unwrap()
- });
- let buffer_a2 = project_a.update(cx, |project, cx| {
- project
- .get_open_buffer(&(worktree_id, "2.txt").into(), cx)
- .unwrap()
- });
- let mut result = MultiBuffer::new(0);
- result.push_excerpts(
- buffer_a1,
- [ExcerptRange {
- context: 0..3,
- primary: None,
- }],
- cx,
- );
- result.push_excerpts(
- buffer_a2,
- [ExcerptRange {
- context: 4..7,
- primary: None,
- }],
- cx,
- );
- result
- });
- let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
- let editor =
- cx.add_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
- workspace.add_item(Box::new(editor.clone()), cx);
- editor
- });
- deterministic.run_until_parked();
- let multibuffer_editor_b = workspace_b.read_with(cx_b, |workspace, cx| {
- workspace
- .active_item(cx)
- .unwrap()
- .downcast::<Editor>()
- .unwrap()
- });
- assert_eq!(
- multibuffer_editor_a.read_with(cx_a, |editor, cx| editor.text(cx)),
- multibuffer_editor_b.read_with(cx_b, |editor, cx| editor.text(cx)),
- );
-
- // When client A navigates back and forth, client B does so as well.
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.go_back(workspace.active_pane().downgrade(), cx)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
- workspace_b.read_with(cx_b, |workspace, cx| {
- assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
- });
-
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.go_back(workspace.active_pane().downgrade(), cx)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
- workspace_b.read_with(cx_b, |workspace, cx| {
- assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b2.id());
- });
-
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.go_forward(workspace.active_pane().downgrade(), cx)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
- workspace_b.read_with(cx_b, |workspace, cx| {
- assert_eq!(workspace.active_item(cx).unwrap().id(), editor_b1.id());
- });
-
- // Changes to client A's editor are reflected on client B.
- editor_a1.update(cx_a, |editor, cx| {
- editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
- });
- deterministic.run_until_parked();
- editor_b1.read_with(cx_b, |editor, cx| {
- assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
- });
-
- editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
- deterministic.run_until_parked();
- editor_b1.read_with(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
-
- editor_a1.update(cx_a, |editor, cx| {
- editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
- editor.set_scroll_position(vec2f(0., 100.), cx);
- });
- deterministic.run_until_parked();
- editor_b1.read_with(cx_b, |editor, cx| {
- assert_eq!(editor.selections.ranges(cx), &[3..3]);
- });
-
- // After unfollowing, client B stops receiving updates from client A.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.unfollow(&workspace.active_pane().clone(), cx)
- });
- workspace_a.update(cx_a, |workspace, cx| {
- workspace.activate_item(&editor_a2, cx)
- });
- deterministic.run_until_parked();
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, cx| workspace
- .active_item(cx)
- .unwrap()
- .id()),
- editor_b1.id()
- );
-
- // Client A starts following client B.
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.follow(peer_id_b, cx).unwrap()
- })
- .await
- .unwrap();
- assert_eq!(
- workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
- Some(peer_id_b)
- );
- assert_eq!(
- workspace_a.read_with(cx_a, |workspace, cx| workspace
- .active_item(cx)
- .unwrap()
- .id()),
- editor_a1.id()
- );
-
- // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
- let display = MacOSDisplay::new();
- active_call_b
- .update(cx_b, |call, cx| call.set_location(None, cx))
- .await
- .unwrap();
- active_call_b
- .update(cx_b, |call, cx| {
- call.room().unwrap().update(cx, |room, cx| {
- room.set_display_sources(vec![display.clone()]);
- room.share_screen(cx)
- })
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
- let shared_screen = workspace_a.read_with(cx_a, |workspace, cx| {
- workspace
- .active_item(cx)
- .expect("no active item")
- .downcast::<SharedScreen>()
- .expect("active item isn't a shared screen")
- });
-
- // Client B activates Zed again, which causes the previous editor to become focused again.
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
- deterministic.run_until_parked();
- workspace_a.read_with(cx_a, |workspace, cx| {
- assert_eq!(workspace.active_item(cx).unwrap().id(), editor_a1.id())
- });
-
- // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.activate_item(&multibuffer_editor_b, cx)
- });
- deterministic.run_until_parked();
- workspace_a.read_with(cx_a, |workspace, cx| {
- assert_eq!(
- workspace.active_item(cx).unwrap().id(),
- multibuffer_editor_a.id()
- )
- });
-
- // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
- let panel = window_b.add_view(cx_b, |_| TestPanel::new(DockPosition::Left));
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.add_panel(panel, cx);
- workspace.toggle_panel_focus::<TestPanel>(cx);
- });
- deterministic.run_until_parked();
- assert_eq!(
- workspace_a.read_with(cx_a, |workspace, cx| workspace
- .active_item(cx)
- .unwrap()
- .id()),
- shared_screen.id()
- );
-
- // Toggling the focus back to the pane causes client A to return to the multibuffer.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.toggle_panel_focus::<TestPanel>(cx);
- });
- deterministic.run_until_parked();
- workspace_a.read_with(cx_a, |workspace, cx| {
- assert_eq!(
- workspace.active_item(cx).unwrap().id(),
- multibuffer_editor_a.id()
- )
- });
-
- // Client B activates an item that doesn't implement following,
- // so the previously-opened screen-sharing item gets activated.
- let unfollowable_item = window_b.add_view(cx_b, |_| TestItem::new());
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
- })
- });
- deterministic.run_until_parked();
- assert_eq!(
- workspace_a.read_with(cx_a, |workspace, cx| workspace
- .active_item(cx)
- .unwrap()
- .id()),
- shared_screen.id()
- );
-
- // Following interrupts when client B disconnects.
- client_b.disconnect(&cx_b.to_async());
- deterministic.advance_clock(RECONNECT_TIMEOUT);
- assert_eq!(
- workspace_a.read_with(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
- None
- );
-}
-
-#[gpui::test]
-async fn test_following_tab_order(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "1.txt": "one",
- "2.txt": "two",
- "3.txt": "three",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
-
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
-
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- let pane_a = workspace_a.read_with(cx_a, |workspace, _| workspace.active_pane().clone());
-
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
- let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
-
- let client_b_id = project_a.read_with(cx_a, |project, _| {
- project.collaborators().values().next().unwrap().peer_id
- });
-
- //Open 1, 3 in that order on client A
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap();
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "3.txt"), None, true, cx)
- })
- .await
- .unwrap();
-
- let pane_paths = |pane: &ViewHandle<workspace::Pane>, cx: &mut TestAppContext| {
- pane.update(cx, |pane, cx| {
- pane.items()
- .map(|item| {
- item.project_path(cx)
- .unwrap()
- .path
- .to_str()
- .unwrap()
- .to_owned()
- })
- .collect::<Vec<_>>()
- })
- };
-
- //Verify that the tabs opened in the order we expect
- assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
-
- //Follow client B as client A
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.follow(client_b_id, cx).unwrap()
- })
- .await
- .unwrap();
-
- //Open just 2 on client B
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "2.txt"), None, true, cx)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
-
- // Verify that newly opened followed file is at the end
- assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
-
- //Open just 1 on client B
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap();
- assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
- deterministic.run_until_parked();
-
- // Verify that following into 1 did not reorder
- assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_peers_following_each_other(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- // Client A shares a project.
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "1.txt": "one",
- "2.txt": "two",
- "3.txt": "three",
- "4.txt": "four",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- // Client B joins the project.
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
-
- // Client A opens a file.
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- // Client B opens a different file.
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "2.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- // Clients A and B follow each other in split panes
- workspace_a.update(cx_a, |workspace, cx| {
- workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
- });
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.follow(client_b.peer_id().unwrap(), cx).unwrap()
- })
- .await
- .unwrap();
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
- });
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
- })
- .await
- .unwrap();
-
- // Clients A and B return focus to the original files they had open
- workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx));
- workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
- deterministic.run_until_parked();
-
- // Both clients see the other client's focused file in their right pane.
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: true,
- leader: None,
- items: vec![(true, "1.txt".into())]
- },
- PaneSummary {
- active: false,
- leader: client_b.peer_id(),
- items: vec![(false, "1.txt".into()), (true, "2.txt".into())]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: true,
- leader: None,
- items: vec![(true, "2.txt".into())]
- },
- PaneSummary {
- active: false,
- leader: client_a.peer_id(),
- items: vec![(false, "2.txt".into()), (true, "1.txt".into())]
- },
- ]
- );
-
- // Clients A and B each open a new file.
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "3.txt"), None, true, cx)
- })
- .await
- .unwrap();
-
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "4.txt"), None, true, cx)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
-
- // Both client's see the other client open the new file, but keep their
- // focus on their own active pane.
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: true,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: false,
- leader: client_b.peer_id(),
- items: vec![
- (false, "1.txt".into()),
- (false, "2.txt".into()),
- (true, "4.txt".into())
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: true,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: false,
- leader: client_a.peer_id(),
- items: vec![
- (false, "2.txt".into()),
- (false, "1.txt".into()),
- (true, "3.txt".into())
- ]
- },
- ]
- );
-
- // Client A focuses their right pane, in which they're following client B.
- workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx));
- deterministic.run_until_parked();
-
- // Client B sees that client A is now looking at the same file as them.
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_b.peer_id(),
- items: vec![
- (false, "1.txt".into()),
- (false, "2.txt".into()),
- (true, "4.txt".into())
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: true,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: false,
- leader: client_a.peer_id(),
- items: vec![
- (false, "2.txt".into()),
- (false, "1.txt".into()),
- (false, "3.txt".into()),
- (true, "4.txt".into())
- ]
- },
- ]
- );
-
- // Client B focuses their right pane, in which they're following client A,
- // who is following them.
- workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
- deterministic.run_until_parked();
-
- // Client A sees that client B is now looking at the same file as them.
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_a.peer_id(),
- items: vec![
- (false, "2.txt".into()),
- (false, "1.txt".into()),
- (false, "3.txt".into()),
- (true, "4.txt".into())
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_b.peer_id(),
- items: vec![
- (false, "1.txt".into()),
- (false, "2.txt".into()),
- (true, "4.txt".into())
- ]
- },
- ]
- );
-
- // Client B focuses a file that they previously followed A to, breaking
- // the follow.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, cx);
- });
- });
- deterministic.run_until_parked();
-
- // Both clients see that client B is looking at that previous file.
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: None,
- items: vec![
- (false, "2.txt".into()),
- (false, "1.txt".into()),
- (true, "3.txt".into()),
- (false, "4.txt".into())
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_b.peer_id(),
- items: vec![
- (false, "1.txt".into()),
- (false, "2.txt".into()),
- (false, "4.txt".into()),
- (true, "3.txt".into()),
- ]
- },
- ]
- );
-
- // Client B closes tabs, some of which were originally opened by client A,
- // and some of which were originally opened by client B.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.close_inactive_items(&Default::default(), cx)
- .unwrap()
- .detach();
- });
- });
-
- deterministic.run_until_parked();
-
- // Both clients see that Client B is looking at the previous tab.
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: None,
- items: vec![(true, "3.txt".into()),]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_b.peer_id(),
- items: vec![
- (false, "1.txt".into()),
- (false, "2.txt".into()),
- (false, "4.txt".into()),
- (true, "3.txt".into()),
- ]
- },
- ]
- );
-
- // Client B follows client A again.
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
- })
- .await
- .unwrap();
-
- // Client A cycles through some tabs.
- workspace_a.update(cx_a, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, cx);
- });
- });
- deterministic.run_until_parked();
-
- // Client B follows client A into those tabs.
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: None,
- items: vec![
- (false, "1.txt".into()),
- (false, "2.txt".into()),
- (true, "4.txt".into()),
- (false, "3.txt".into()),
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_a.peer_id(),
- items: vec![(false, "3.txt".into()), (true, "4.txt".into())]
- },
- ]
- );
-
- workspace_a.update(cx_a, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, cx);
- });
- });
- deterministic.run_until_parked();
-
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: None,
- items: vec![
- (false, "1.txt".into()),
- (true, "2.txt".into()),
- (false, "4.txt".into()),
- (false, "3.txt".into()),
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_a.peer_id(),
- items: vec![
- (false, "3.txt".into()),
- (false, "4.txt".into()),
- (true, "2.txt".into())
- ]
- },
- ]
- );
-
- workspace_a.update(cx_a, |workspace, cx| {
- workspace.active_pane().update(cx, |pane, cx| {
- pane.activate_prev_item(true, cx);
- });
- });
- deterministic.run_until_parked();
-
- assert_eq!(
- pane_summaries(&workspace_a, cx_a),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: None,
- items: vec![
- (true, "1.txt".into()),
- (false, "2.txt".into()),
- (false, "4.txt".into()),
- (false, "3.txt".into()),
- ]
- },
- ]
- );
- assert_eq!(
- pane_summaries(&workspace_b, cx_b),
- &[
- PaneSummary {
- active: false,
- leader: None,
- items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
- },
- PaneSummary {
- active: true,
- leader: client_a.peer_id(),
- items: vec![
- (false, "3.txt".into()),
- (false, "4.txt".into()),
- (false, "2.txt".into()),
- (true, "1.txt".into()),
- ]
- },
- ]
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_auto_unfollowing(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
-
- // 2 clients connect to a server.
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- // Client A shares a project.
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "1.txt": "one",
- "2.txt": "two",
- "3.txt": "three",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
-
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
-
- // Client A opens some editors.
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- let _editor_a1 = workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- // Client B starts following client A.
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
- let pane_b = workspace_b.read_with(cx_b, |workspace, _| workspace.active_pane().clone());
- let leader_id = project_b.read_with(cx_b, |project, _| {
- project.collaborators().values().next().unwrap().peer_id
- });
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(leader_id, cx).unwrap()
- })
- .await
- .unwrap();
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- Some(leader_id)
- );
- let editor_b2 = workspace_b.read_with(cx_b, |workspace, cx| {
- workspace
- .active_item(cx)
- .unwrap()
- .downcast::<Editor>()
- .unwrap()
- });
-
- // When client B moves, it automatically stops following client A.
- editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- None
- );
-
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(leader_id, cx).unwrap()
- })
- .await
- .unwrap();
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- Some(leader_id)
- );
-
- // When client B edits, it automatically stops following client A.
- editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- None
- );
-
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(leader_id, cx).unwrap()
- })
- .await
- .unwrap();
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- Some(leader_id)
- );
-
- // When client B scrolls, it automatically stops following client A.
- editor_b2.update(cx_b, |editor, cx| {
- editor.set_scroll_position(vec2f(0., 3.), cx)
- });
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- None
- );
-
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(leader_id, cx).unwrap()
- })
- .await
- .unwrap();
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- Some(leader_id)
- );
-
- // When client B activates a different pane, it continues following client A in the original pane.
- workspace_b.update(cx_b, |workspace, cx| {
- workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
- });
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- Some(leader_id)
- );
-
- workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- Some(leader_id)
- );
-
- // When client B activates a different item in the original pane, it automatically stops following client A.
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "2.txt"), None, true, cx)
- })
- .await
- .unwrap();
- assert_eq!(
- workspace_b.read_with(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
- None
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_peers_simultaneously_following_each_other(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
-
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- client_a.fs().insert_tree("/a", json!({})).await;
- let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
-
- deterministic.run_until_parked();
- let client_a_id = project_b.read_with(cx_b, |project, _| {
- project.collaborators().values().next().unwrap().peer_id
- });
- let client_b_id = project_a.read_with(cx_a, |project, _| {
- project.collaborators().values().next().unwrap().peer_id
- });
-
- let a_follow_b = workspace_a.update(cx_a, |workspace, cx| {
- workspace.follow(client_b_id, cx).unwrap()
- });
- let b_follow_a = workspace_b.update(cx_b, |workspace, cx| {
- workspace.follow(client_a_id, cx).unwrap()
- });
-
- futures::try_join!(a_follow_b, b_follow_a).unwrap();
- workspace_a.read_with(cx_a, |workspace, _| {
- assert_eq!(
- workspace.leader_for_pane(workspace.active_pane()),
- Some(client_b_id)
- );
- });
- workspace_b.read_with(cx_b, |workspace, _| {
- assert_eq!(
- workspace.leader_for_pane(workspace.active_pane()),
- Some(client_a_id)
- );
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_following_across_workspaces(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- // a and b join a channel/call
- // a shares project 1
- // b shares project 2
- //
- // b follows a: causes project 2 to be joined, and b to follow a.
- // b opens a different file in project 2, a follows b
- // b opens a different file in project 1, a cannot follow b
- // b shares the project, a joins the project and follows b
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "w.rs": "",
- "x.rs": "",
- }),
- )
- .await;
-
- client_b
- .fs()
- .insert_tree(
- "/b",
- json!({
- "y.rs": "",
- "z.rs": "",
- }),
- )
- .await;
-
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
- let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
-
- let workspace_a = client_a.build_workspace(&project_a, cx_a).root(cx_a);
- let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
-
- cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
- cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
-
- active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
-
- workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
- })
- .await
- .unwrap();
-
- deterministic.run_until_parked();
- assert_eq!(visible_push_notifications(cx_b).len(), 1);
-
- workspace_b.update(cx_b, |workspace, cx| {
- workspace
- .follow(client_a.peer_id().unwrap(), cx)
- .unwrap()
- .detach()
- });
-
- deterministic.run_until_parked();
- let workspace_b_project_a = cx_b
- .windows()
- .iter()
- .max_by_key(|window| window.id())
- .unwrap()
- .downcast::<Workspace>()
- .unwrap()
- .root(cx_b);
-
- // assert that b is following a in project a in w.rs
- workspace_b_project_a.update(cx_b, |workspace, cx| {
- assert!(workspace.is_being_followed(client_a.peer_id().unwrap()));
- assert_eq!(
- client_a.peer_id(),
- workspace.leader_for_pane(workspace.active_pane())
- );
- let item = workspace.active_item(cx).unwrap();
- assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("w.rs"));
- });
-
- // TODO: in app code, this would be done by the collab_ui.
- active_call_b
- .update(cx_b, |call, cx| {
- let project = workspace_b_project_a.read(cx).project().clone();
- call.set_location(Some(&project), cx)
- })
- .await
- .unwrap();
-
- // assert that there are no share notifications open
- assert_eq!(visible_push_notifications(cx_b).len(), 0);
-
- // b moves to x.rs in a's project, and a follows
- workspace_b_project_a
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id_a, "x.rs"), None, true, cx)
- })
- .await
- .unwrap();
-
- deterministic.run_until_parked();
- workspace_b_project_a.update(cx_b, |workspace, cx| {
- let item = workspace.active_item(cx).unwrap();
- assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
- });
-
- workspace_a.update(cx_a, |workspace, cx| {
- workspace
- .follow(client_b.peer_id().unwrap(), cx)
- .unwrap()
- .detach()
- });
-
- deterministic.run_until_parked();
- workspace_a.update(cx_a, |workspace, cx| {
- assert!(workspace.is_being_followed(client_b.peer_id().unwrap()));
- assert_eq!(
- client_b.peer_id(),
- workspace.leader_for_pane(workspace.active_pane())
- );
- let item = workspace.active_pane().read(cx).active_item().unwrap();
- assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
- });
-
- // b moves to y.rs in b's project, a is still following but can't yet see
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id_b, "y.rs"), None, true, cx)
- })
- .await
- .unwrap();
-
- // TODO: in app code, this would be done by the collab_ui.
- active_call_b
- .update(cx_b, |call, cx| {
- let project = workspace_b.read(cx).project().clone();
- call.set_location(Some(&project), cx)
- })
- .await
- .unwrap();
-
- let project_b_id = active_call_b
- .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
- .await
- .unwrap();
-
- deterministic.run_until_parked();
- assert_eq!(visible_push_notifications(cx_a).len(), 1);
- cx_a.update(|cx| {
- workspace::join_remote_project(
- project_b_id,
- client_b.user_id().unwrap(),
- client_a.app_state.clone(),
- cx,
- )
- })
- .await
- .unwrap();
-
- deterministic.run_until_parked();
-
- assert_eq!(visible_push_notifications(cx_a).len(), 0);
- let workspace_a_project_b = cx_a
- .windows()
- .iter()
- .max_by_key(|window| window.id())
- .unwrap()
- .downcast::<Workspace>()
- .unwrap()
- .root(cx_a);
-
- workspace_a_project_b.update(cx_a, |workspace, cx| {
- assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
- assert!(workspace.is_being_followed(client_b.peer_id().unwrap()));
- assert_eq!(
- client_b.peer_id(),
- workspace.leader_for_pane(workspace.active_pane())
- );
- let item = workspace.active_item(cx).unwrap();
- assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("y.rs"));
- });
-}
-
-#[gpui::test]
-async fn test_following_into_excluded_file(
- deterministic: Arc<Deterministic>,
- mut cx_a: &mut TestAppContext,
- mut cx_b: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
-
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- for cx in [&mut cx_a, &mut cx_b] {
- cx.update(|cx| {
- cx.update_global::<SettingsStore, _, _>(|store, cx| {
- store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
- project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
- });
- });
- });
- }
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- ".git": {
- "COMMIT_EDITMSG": "write your commit message here",
- },
- "1.txt": "one\none\none",
- "2.txt": "two\ntwo\ntwo",
- "3.txt": "three\nthree\nthree",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
-
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
-
- let window_a = client_a.build_workspace(&project_a, cx_a);
- let workspace_a = window_a.root(cx_a);
- let peer_id_a = client_a.peer_id().unwrap();
- let window_b = client_b.build_workspace(&project_b, cx_b);
- let workspace_b = window_b.root(cx_b);
-
- // Client A opens editors for a regular file and an excluded file.
- let editor_for_regular = workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, "1.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
- let editor_for_excluded_a = workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- // Client A updates their selections in those editors
- editor_for_regular.update(cx_a, |editor, cx| {
- editor.handle_input("a", cx);
- editor.handle_input("b", cx);
- editor.handle_input("c", cx);
- editor.select_left(&Default::default(), cx);
- assert_eq!(editor.selections.ranges(cx), vec![3..2]);
- });
- editor_for_excluded_a.update(cx_a, |editor, cx| {
- editor.select_all(&Default::default(), cx);
- editor.handle_input("new commit message", cx);
- editor.select_left(&Default::default(), cx);
- assert_eq!(editor.selections.ranges(cx), vec![18..17]);
- });
-
- // When client B starts following client A, currently visible file is replicated
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.follow(peer_id_a, cx).unwrap()
- })
- .await
- .unwrap();
-
- let editor_for_excluded_b = workspace_b.read_with(cx_b, |workspace, cx| {
- workspace
- .active_item(cx)
- .unwrap()
- .downcast::<Editor>()
- .unwrap()
- });
- assert_eq!(
- cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
- Some((worktree_id, ".git/COMMIT_EDITMSG").into())
- );
- assert_eq!(
- editor_for_excluded_b.read_with(cx_b, |editor, cx| editor.selections.ranges(cx)),
- vec![18..17]
- );
-
- // Changes from B to the excluded file are replicated in A's editor
- editor_for_excluded_b.update(cx_b, |editor, cx| {
- editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
- });
- deterministic.run_until_parked();
- editor_for_excluded_a.update(cx_a, |editor, cx| {
- assert_eq!(
- editor.text(cx),
- "new commit messag\nCo-Authored-By: B <b@b.b>"
- );
- });
-}
-
-fn visible_push_notifications(
- cx: &mut TestAppContext,
-) -> Vec<gpui::ViewHandle<ProjectSharedNotification>> {
- let mut ret = Vec::new();
- for window in cx.windows() {
- window.read_with(cx, |window| {
- if let Some(handle) = window
- .root_view()
- .clone()
- .downcast::<ProjectSharedNotification>()
- {
- ret.push(handle)
- }
- });
- }
- ret
-}
-
-#[derive(Debug, PartialEq, Eq)]
-struct PaneSummary {
- active: bool,
- leader: Option<PeerId>,
- items: Vec<(bool, String)>,
-}
-
-fn followers_by_leader(project_id: u64, cx: &TestAppContext) -> Vec<(PeerId, Vec<PeerId>)> {
- cx.read(|cx| {
- let active_call = ActiveCall::global(cx).read(cx);
- let peer_id = active_call.client().peer_id();
- let room = active_call.room().unwrap().read(cx);
- let mut result = room
- .remote_participants()
- .values()
- .map(|participant| participant.peer_id)
- .chain(peer_id)
- .filter_map(|peer_id| {
- let followers = room.followers_for(peer_id, project_id);
- if followers.is_empty() {
- None
- } else {
- Some((peer_id, followers.to_vec()))
- }
- })
- .collect::<Vec<_>>();
- result.sort_by_key(|e| e.0);
- result
- })
-}
-
-fn pane_summaries(workspace: &ViewHandle<Workspace>, cx: &mut TestAppContext) -> Vec<PaneSummary> {
- workspace.read_with(cx, |workspace, cx| {
- let active_pane = workspace.active_pane();
- workspace
- .panes()
- .iter()
- .map(|pane| {
- let leader = workspace.leader_for_pane(pane);
- let active = pane == active_pane;
- let pane = pane.read(cx);
- let active_ix = pane.active_item_index();
- PaneSummary {
- active,
- leader,
- items: pane
- .items()
- .enumerate()
- .map(|(ix, item)| {
- (
- ix == active_ix,
- item.tab_description(0, cx)
- .map_or(String::new(), |s| s.to_string()),
- )
- })
- .collect(),
- }
- })
- .collect()
- })
-}
+// todo!(workspace): re-enable the commented-out following tests below
+
+// use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
+// use call::ActiveCall;
+// use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
+// use editor::{Editor, ExcerptRange, MultiBuffer};
+// use gpui::{point, BackgroundExecutor, TestAppContext, View, VisualTestContext, WindowContext};
+// use live_kit_client::MacOSDisplay;
+// use project::project_settings::ProjectSettings;
+// use rpc::proto::PeerId;
+// use serde_json::json;
+// use settings::SettingsStore;
+// use std::borrow::Cow;
+// use workspace::{
+// dock::{test::TestPanel, DockPosition},
+// item::{test::TestItem, ItemHandle as _},
+// shared_screen::SharedScreen,
+// SplitDirection, Workspace,
+// };
+
+// #[gpui::test(iterations = 10)]
+// async fn test_basic_following(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// cx_c: &mut TestAppContext,
+// cx_d: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// let client_c = server.create_client(cx_c, "user_c").await;
+// let client_d = server.create_client(cx_d, "user_d").await;
+// server
+// .create_room(&mut [
+// (&client_a, cx_a),
+// (&client_b, cx_b),
+// (&client_c, cx_c),
+// (&client_d, cx_d),
+// ])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// "1.txt": "one\none\none",
+// "2.txt": "two\ntwo\ntwo",
+// "3.txt": "three\nthree\nthree",
+// }),
+// )
+// .await;
+// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// let window_a = client_a.build_workspace(&project_a, cx_a);
+// let workspace_a = window_a.root(cx_a).unwrap();
+// let window_b = client_b.build_workspace(&project_b, cx_b);
+// let workspace_b = window_b.root(cx_b).unwrap();
+
+// todo!("could be wrong") — NOTE(review): window_c/window_d are referenced just below but are not created until later (window_c at the client_c join; window_d never) — fix before un-commenting
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+// let mut cx_c = VisualTestContext::from_window(*window_c, cx_c);
+// let cx_c = &mut cx_c;
+// let mut cx_d = VisualTestContext::from_window(*window_d, cx_d);
+// let cx_d = &mut cx_d;
+
+// // Client A opens some editors.
+// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
+// let editor_a1 = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+// let editor_a2 = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// // Client B opens an editor.
+// let editor_b1 = workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// let peer_id_a = client_a.peer_id().unwrap();
+// let peer_id_b = client_b.peer_id().unwrap();
+// let peer_id_c = client_c.peer_id().unwrap();
+// let peer_id_d = client_d.peer_id().unwrap();
+
+// // Client A updates their selections in those editors
+// editor_a1.update(cx_a, |editor, cx| {
+// editor.handle_input("a", cx);
+// editor.handle_input("b", cx);
+// editor.handle_input("c", cx);
+// editor.select_left(&Default::default(), cx);
+// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
+// });
+// editor_a2.update(cx_a, |editor, cx| {
+// editor.handle_input("d", cx);
+// editor.handle_input("e", cx);
+// editor.select_left(&Default::default(), cx);
+// assert_eq!(editor.selections.ranges(cx), vec![2..1]);
+// });
+
+// // When client B starts following client A, all visible view states are replicated to client B.
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(peer_id_a, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// cx_c.executor().run_until_parked();
+// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// assert_eq!(
+// cx_b.read(|cx| editor_b2.project_path(cx)),
+// Some((worktree_id, "2.txt").into())
+// );
+// assert_eq!(
+// editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
+// vec![2..1]
+// );
+// assert_eq!(
+// editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
+// vec![3..2]
+// );
+
+// cx_c.executor().run_until_parked();
+// let active_call_c = cx_c.read(ActiveCall::global);
+// let project_c = client_c.build_remote_project(project_id, cx_c).await;
+// let window_c = client_c.build_workspace(&project_c, cx_c);
+// let workspace_c = window_c.root(cx_c).unwrap();
+// active_call_c
+// .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
+// .await
+// .unwrap();
+// drop(project_c);
+
+// // Client C also follows client A.
+// workspace_c
+// .update(cx_c, |workspace, cx| {
+// workspace.follow(peer_id_a, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// cx_d.executor().run_until_parked();
+// let active_call_d = cx_d.read(ActiveCall::global);
+// let project_d = client_d.build_remote_project(project_id, cx_d).await;
+// let workspace_d = client_d
+// .build_workspace(&project_d, cx_d)
+// .root(cx_d)
+// .unwrap();
+// active_call_d
+// .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
+// .await
+// .unwrap();
+// drop(project_d);
+
+// // All clients see that clients B and C are following client A.
+// cx_c.executor().run_until_parked();
+// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
+// assert_eq!(
+// followers_by_leader(project_id, cx),
+// &[(peer_id_a, vec![peer_id_b, peer_id_c])],
+// "followers seen by {name}"
+// );
+// }
+
+// // Client C unfollows client A.
+// workspace_c.update(cx_c, |workspace, cx| {
+// workspace.unfollow(&workspace.active_pane().clone(), cx);
+// });
+
+// // All clients see that clients B is following client A.
+// cx_c.executor().run_until_parked();
+// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
+// assert_eq!(
+// followers_by_leader(project_id, cx),
+// &[(peer_id_a, vec![peer_id_b])],
+// "followers seen by {name}"
+// );
+// }
+
+// // Client C re-follows client A.
+// workspace_c
+// .update(cx_c, |workspace, cx| {
+// workspace.follow(peer_id_a, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// // All clients see that clients B and C are following client A.
+// cx_c.executor().run_until_parked();
+// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
+// assert_eq!(
+// followers_by_leader(project_id, cx),
+// &[(peer_id_a, vec![peer_id_b, peer_id_c])],
+// "followers seen by {name}"
+// );
+// }
+
+// // Client D follows client B, then switches to following client C.
+// workspace_d
+// .update(cx_d, |workspace, cx| {
+// workspace.follow(peer_id_b, cx).unwrap()
+// })
+// .await
+// .unwrap();
+// workspace_d
+// .update(cx_d, |workspace, cx| {
+// workspace.follow(peer_id_c, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// // All clients see that D is following C
+// cx_d.executor().run_until_parked();
+// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
+// assert_eq!(
+// followers_by_leader(project_id, cx),
+// &[
+// (peer_id_a, vec![peer_id_b, peer_id_c]),
+// (peer_id_c, vec![peer_id_d])
+// ],
+// "followers seen by {name}"
+// );
+// }
+
+// // Client C closes the project.
+// window_c.remove(cx_c);
+// cx_c.drop_last(workspace_c);
+
+// // Clients A and B see that client B is following A, and client C is not present in the followers.
+// cx_c.executor().run_until_parked();
+// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
+// assert_eq!(
+// followers_by_leader(project_id, cx),
+// &[(peer_id_a, vec![peer_id_b]),],
+// "followers seen by {name}"
+// );
+// }
+
+// // When client A activates a different editor, client B does so as well.
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace.activate_item(&editor_a1, cx)
+// });
+// executor.run_until_parked();
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b1.item_id()
+// );
+// });
+
+// // When client A opens a multibuffer, client B does so as well.
+// let multibuffer_a = cx_a.build_model(|cx| {
+// let buffer_a1 = project_a.update(cx, |project, cx| {
+// project
+// .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
+// .unwrap()
+// });
+// let buffer_a2 = project_a.update(cx, |project, cx| {
+// project
+// .get_open_buffer(&(worktree_id, "2.txt").into(), cx)
+// .unwrap()
+// });
+// let mut result = MultiBuffer::new(0);
+// result.push_excerpts(
+// buffer_a1,
+// [ExcerptRange {
+// context: 0..3,
+// primary: None,
+// }],
+// cx,
+// );
+// result.push_excerpts(
+// buffer_a2,
+// [ExcerptRange {
+// context: 4..7,
+// primary: None,
+// }],
+// cx,
+// );
+// result
+// });
+// let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
+// let editor =
+// cx.build_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
+// workspace.add_item(Box::new(editor.clone()), cx);
+// editor
+// });
+// executor.run_until_parked();
+// let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// assert_eq!(
+// multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)),
+// multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)),
+// );
+
+// // When client A navigates back and forth, client B does so as well.
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// executor.run_until_parked();
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b1.item_id()
+// );
+// });
+
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.go_back(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// executor.run_until_parked();
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b2.item_id()
+// );
+// });
+
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.go_forward(workspace.active_pane().downgrade(), cx)
+// })
+// .await
+// .unwrap();
+// executor.run_until_parked();
+// workspace_b.update(cx_b, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_b1.item_id()
+// );
+// });
+
+// // Changes to client A's editor are reflected on client B.
+// editor_a1.update(cx_a, |editor, cx| {
+// editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
+// });
+// executor.run_until_parked();
+// editor_b1.update(cx_b, |editor, cx| {
+// assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
+// });
+
+// editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
+// executor.run_until_parked();
+// editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
+
+// editor_a1.update(cx_a, |editor, cx| {
+// editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
+// editor.set_scroll_position(point(0., 100.), cx);
+// });
+// executor.run_until_parked();
+// editor_b1.update(cx_b, |editor, cx| {
+// assert_eq!(editor.selections.ranges(cx), &[3..3]);
+// });
+
+// // After unfollowing, client B stops receiving updates from client A.
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.unfollow(&workspace.active_pane().clone(), cx)
+// });
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace.activate_item(&editor_a2, cx)
+// });
+// executor.run_until_parked();
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, cx| workspace
+// .active_item(cx)
+// .unwrap()
+// .item_id()),
+// editor_b1.item_id()
+// );
+
+// // Client A starts following client B.
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.follow(peer_id_b, cx).unwrap()
+// })
+// .await
+// .unwrap();
+// assert_eq!(
+// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
+// Some(peer_id_b)
+// );
+// assert_eq!(
+// workspace_a.update(cx_a, |workspace, cx| workspace
+// .active_item(cx)
+// .unwrap()
+// .item_id()),
+// editor_a1.item_id()
+// );
+
+// // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
+// let display = MacOSDisplay::new();
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(None, cx))
+// .await
+// .unwrap();
+// active_call_b
+// .update(cx_b, |call, cx| {
+// call.room().unwrap().update(cx, |room, cx| {
+// room.set_display_sources(vec![display.clone()]);
+// room.share_screen(cx)
+// })
+// })
+// .await
+// .unwrap();
+// executor.run_until_parked();
+// let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .expect("no active item")
+// .downcast::<SharedScreen>()
+// .expect("active item isn't a shared screen")
+// });
+
+// // Client B activates Zed again, which causes the previous editor to become focused again.
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+// executor.run_until_parked();
+// workspace_a.update(cx_a, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// editor_a1.item_id()
+// )
+// });
+
+// // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.activate_item(&multibuffer_editor_b, cx)
+// });
+// executor.run_until_parked();
+// workspace_a.update(cx_a, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// multibuffer_editor_a.item_id()
+// )
+// });
+
+// // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
+// let panel = window_b.build_view(cx_b, |_| TestPanel::new(DockPosition::Left));
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.add_panel(panel, cx);
+// workspace.toggle_panel_focus::<TestPanel>(cx);
+// });
+// executor.run_until_parked();
+// assert_eq!(
+// workspace_a.update(cx_a, |workspace, cx| workspace
+// .active_item(cx)
+// .unwrap()
+// .item_id()),
+// shared_screen.item_id()
+// );
+
+// // Toggling the focus back to the pane causes client A to return to the multibuffer.
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.toggle_panel_focus::<TestPanel>(cx);
+// });
+// executor.run_until_parked();
+// workspace_a.update(cx_a, |workspace, cx| {
+// assert_eq!(
+// workspace.active_item(cx).unwrap().item_id(),
+// multibuffer_editor_a.item_id()
+// )
+// });
+
+// // Client B activates an item that doesn't implement following,
+// // so the previously-opened screen-sharing item gets activated.
+// let unfollowable_item = window_b.build_view(cx_b, |_| TestItem::new());
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.active_pane().update(cx, |pane, cx| {
+// pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
+// })
+// });
+// executor.run_until_parked();
+// assert_eq!(
+// workspace_a.update(cx_a, |workspace, cx| workspace
+// .active_item(cx)
+// .unwrap()
+// .item_id()),
+// shared_screen.item_id()
+// );
+
+// // Following interrupts when client B disconnects.
+// client_b.disconnect(&cx_b.to_async());
+// executor.advance_clock(RECONNECT_TIMEOUT);
+// assert_eq!(
+// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
+// None
+// );
+// }
+
+// #[gpui::test]
+// async fn test_following_tab_order(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// "1.txt": "one",
+// "2.txt": "two",
+// "3.txt": "three",
+// }),
+// )
+// .await;
+// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
+
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
+
+// let client_b_id = project_a.update(cx_a, |project, _| {
+// project.collaborators().values().next().unwrap().peer_id
+// });
+
+// //Open 1, 3 in that order on client A
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "3.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+
+// let pane_paths = |pane: &View<workspace::Pane>, cx: &mut TestAppContext| {
+// pane.update(cx, |pane, cx| {
+// pane.items()
+// .map(|item| {
+// item.project_path(cx)
+// .unwrap()
+// .path
+// .to_str()
+// .unwrap()
+// .to_owned()
+// })
+// .collect::<Vec<_>>()
+// })
+// };
+
+// //Verify that the tabs opened in the order we expect
+// assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
+
+// //Follow client B as client A
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.follow(client_b_id, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// //Open just 2 on client B
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+// executor.run_until_parked();
+
+// // Verify that newly opened followed file is at the end
+// assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
+
+// //Open just 1 on client B
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
+// executor.run_until_parked();
+
+// // Verify that following into 1 did not reorder
+// assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_peers_following_each_other(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// // Client A shares a project.
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// "1.txt": "one",
+// "2.txt": "two",
+// "3.txt": "three",
+// "4.txt": "four",
+// }),
+// )
+// .await;
+// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+
+// // Client B joins the project.
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// // Client A opens a file.
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// // Client B opens a different file.
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// // Clients A and B follow each other in split panes
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
+// });
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.follow(client_b.peer_id().unwrap(), cx).unwrap()
+// })
+// .await
+// .unwrap();
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
+// });
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// // Clients A and B return focus to the original files they had open
+// workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx));
+// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
+// executor.run_until_parked();
+
+// // Both clients see the other client's focused file in their right pane.
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![(true, "1.txt".into())]
+// },
+// PaneSummary {
+// active: false,
+// leader: client_b.peer_id(),
+// items: vec![(false, "1.txt".into()), (true, "2.txt".into())]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![(true, "2.txt".into())]
+// },
+// PaneSummary {
+// active: false,
+// leader: client_a.peer_id(),
+// items: vec![(false, "2.txt".into()), (true, "1.txt".into())]
+// },
+// ]
+// );
+
+// // Clients A and B each open a new file.
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "3.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id, "4.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+// executor.run_until_parked();
+
+// // Both client's see the other client open the new file, but keep their
+// // focus on their own active pane.
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: false,
+// leader: client_b.peer_id(),
+// items: vec![
+// (false, "1.txt".into()),
+// (false, "2.txt".into()),
+// (true, "4.txt".into())
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: false,
+// leader: client_a.peer_id(),
+// items: vec![
+// (false, "2.txt".into()),
+// (false, "1.txt".into()),
+// (true, "3.txt".into())
+// ]
+// },
+// ]
+// );
+
+// // Client A focuses their right pane, in which they're following client B.
+// workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx));
+// executor.run_until_parked();
+
+// // Client B sees that client A is now looking at the same file as them.
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_b.peer_id(),
+// items: vec![
+// (false, "1.txt".into()),
+// (false, "2.txt".into()),
+// (true, "4.txt".into())
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: false,
+// leader: client_a.peer_id(),
+// items: vec![
+// (false, "2.txt".into()),
+// (false, "1.txt".into()),
+// (false, "3.txt".into()),
+// (true, "4.txt".into())
+// ]
+// },
+// ]
+// );
+
+// // Client B focuses their right pane, in which they're following client A,
+// // who is following them.
+// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
+// executor.run_until_parked();
+
+// // Client A sees that client B is now looking at the same file as them.
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_a.peer_id(),
+// items: vec![
+// (false, "2.txt".into()),
+// (false, "1.txt".into()),
+// (false, "3.txt".into()),
+// (true, "4.txt".into())
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_b.peer_id(),
+// items: vec![
+// (false, "1.txt".into()),
+// (false, "2.txt".into()),
+// (true, "4.txt".into())
+// ]
+// },
+// ]
+// );
+
+// // Client B focuses a file that they previously followed A to, breaking
+// // the follow.
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.active_pane().update(cx, |pane, cx| {
+// pane.activate_prev_item(true, cx);
+// });
+// });
+// executor.run_until_parked();
+
+// // Both clients see that client B is looking at that previous file.
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![
+// (false, "2.txt".into()),
+// (false, "1.txt".into()),
+// (true, "3.txt".into()),
+// (false, "4.txt".into())
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_b.peer_id(),
+// items: vec![
+// (false, "1.txt".into()),
+// (false, "2.txt".into()),
+// (false, "4.txt".into()),
+// (true, "3.txt".into()),
+// ]
+// },
+// ]
+// );
+
+// // Client B closes tabs, some of which were originally opened by client A,
+// // and some of which were originally opened by client B.
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.active_pane().update(cx, |pane, cx| {
+// pane.close_inactive_items(&Default::default(), cx)
+// .unwrap()
+// .detach();
+// });
+// });
+
+// executor.run_until_parked();
+
+// // Both clients see that Client B is looking at the previous tab.
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![(true, "3.txt".into()),]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_b.peer_id(),
+// items: vec![
+// (false, "1.txt".into()),
+// (false, "2.txt".into()),
+// (false, "4.txt".into()),
+// (true, "3.txt".into()),
+// ]
+// },
+// ]
+// );
+
+// // Client B follows client A again.
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// // Client A cycles through some tabs.
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace.active_pane().update(cx, |pane, cx| {
+// pane.activate_prev_item(true, cx);
+// });
+// });
+// executor.run_until_parked();
+
+// // Client B follows client A into those tabs.
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![
+// (false, "1.txt".into()),
+// (false, "2.txt".into()),
+// (true, "4.txt".into()),
+// (false, "3.txt".into()),
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_a.peer_id(),
+// items: vec![(false, "3.txt".into()), (true, "4.txt".into())]
+// },
+// ]
+// );
+
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace.active_pane().update(cx, |pane, cx| {
+// pane.activate_prev_item(true, cx);
+// });
+// });
+// executor.run_until_parked();
+
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![
+// (false, "1.txt".into()),
+// (true, "2.txt".into()),
+// (false, "4.txt".into()),
+// (false, "3.txt".into()),
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_a.peer_id(),
+// items: vec![
+// (false, "3.txt".into()),
+// (false, "4.txt".into()),
+// (true, "2.txt".into())
+// ]
+// },
+// ]
+// );
+
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace.active_pane().update(cx, |pane, cx| {
+// pane.activate_prev_item(true, cx);
+// });
+// });
+// executor.run_until_parked();
+
+// assert_eq!(
+// pane_summaries(&workspace_a, cx_a),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: None,
+// items: vec![
+// (true, "1.txt".into()),
+// (false, "2.txt".into()),
+// (false, "4.txt".into()),
+// (false, "3.txt".into()),
+// ]
+// },
+// ]
+// );
+// assert_eq!(
+// pane_summaries(&workspace_b, cx_b),
+// &[
+// PaneSummary {
+// active: false,
+// leader: None,
+// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
+// },
+// PaneSummary {
+// active: true,
+// leader: client_a.peer_id(),
+// items: vec![
+// (false, "3.txt".into()),
+// (false, "4.txt".into()),
+// (false, "2.txt".into()),
+// (true, "1.txt".into()),
+// ]
+// },
+// ]
+// );
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_auto_unfollowing(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// ) {
+// // 2 clients connect to a server.
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// // Client A shares a project.
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// "1.txt": "one",
+// "2.txt": "two",
+// "3.txt": "three",
+// }),
+// )
+// .await;
+// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+
+// // Client A opens some editors.
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// let _editor_a1 = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// // Client B starts following client A.
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
+// let leader_id = project_b.update(cx_b, |project, _| {
+// project.collaborators().values().next().unwrap().peer_id
+// });
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(leader_id, cx).unwrap()
+// })
+// .await
+// .unwrap();
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// Some(leader_id)
+// );
+// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+
+// // When client B moves, it automatically stops following client A.
+// editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// None
+// );
+
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(leader_id, cx).unwrap()
+// })
+// .await
+// .unwrap();
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// Some(leader_id)
+// );
+
+// // When client B edits, it automatically stops following client A.
+// editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// None
+// );
+
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(leader_id, cx).unwrap()
+// })
+// .await
+// .unwrap();
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// Some(leader_id)
+// );
+
+// // When client B scrolls, it automatically stops following client A.
+// editor_b2.update(cx_b, |editor, cx| {
+// editor.set_scroll_position(point(0., 3.), cx)
+// });
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// None
+// );
+
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(leader_id, cx).unwrap()
+// })
+// .await
+// .unwrap();
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// Some(leader_id)
+// );
+
+// // When client B activates a different pane, it continues following client A in the original pane.
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
+// });
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// Some(leader_id)
+// );
+
+// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// Some(leader_id)
+// );
+
+// // When client B activates a different item in the original pane, it automatically stops following client A.
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
+// })
+// .await
+// .unwrap();
+// assert_eq!(
+// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
+// None
+// );
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_peers_simultaneously_following_each_other(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// client_a.fs().insert_tree("/a", json!({})).await;
+// let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+
+// executor.run_until_parked();
+// let client_a_id = project_b.update(cx_b, |project, _| {
+// project.collaborators().values().next().unwrap().peer_id
+// });
+// let client_b_id = project_a.update(cx_a, |project, _| {
+// project.collaborators().values().next().unwrap().peer_id
+// });
+
+// let a_follow_b = workspace_a.update(cx_a, |workspace, cx| {
+// workspace.follow(client_b_id, cx).unwrap()
+// });
+// let b_follow_a = workspace_b.update(cx_b, |workspace, cx| {
+// workspace.follow(client_a_id, cx).unwrap()
+// });
+
+// futures::try_join!(a_follow_b, b_follow_a).unwrap();
+// workspace_a.update(cx_a, |workspace, _| {
+// assert_eq!(
+// workspace.leader_for_pane(workspace.active_pane()),
+// Some(client_b_id)
+// );
+// });
+// workspace_b.update(cx_b, |workspace, _| {
+// assert_eq!(
+// workspace.leader_for_pane(workspace.active_pane()),
+// Some(client_a_id)
+// );
+// });
+// }
+
+// #[gpui::test(iterations = 10)]
+// async fn test_following_across_workspaces(
+// executor: BackgroundExecutor,
+// cx_a: &mut TestAppContext,
+// cx_b: &mut TestAppContext,
+// ) {
+// // a and b join a channel/call
+// // a shares project 1
+// // b shares project 2
+// //
+// // b follows a: causes project 2 to be joined, and b to follow a.
+// // b opens a different file in project 2, a follows b
+// // b opens a different file in project 1, a cannot follow b
+// // b shares the project, a joins the project and follows b
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// "w.rs": "",
+// "x.rs": "",
+// }),
+// )
+// .await;
+
+// client_b
+// .fs()
+// .insert_tree(
+// "/b",
+// json!({
+// "y.rs": "",
+// "z.rs": "",
+// }),
+// )
+// .await;
+
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
+// let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
+
+// let workspace_a = client_a
+// .build_workspace(&project_a, cx_a)
+// .root(cx_a)
+// .unwrap();
+// let workspace_b = client_b
+// .build_workspace(&project_b, cx_b)
+// .root(cx_b)
+// .unwrap();
+
+// cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
+// cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
+
+// active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+
+// workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
+// })
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+// assert_eq!(visible_push_notifications(cx_b).len(), 1);
+
+// workspace_b.update(cx_b, |workspace, cx| {
+// workspace
+// .follow(client_a.peer_id().unwrap(), cx)
+// .unwrap()
+// .detach()
+// });
+
+// executor.run_until_parked();
+// let workspace_b_project_a = cx_b
+// .windows()
+// .iter()
+// .max_by_key(|window| window.item_id())
+// .unwrap()
+// .downcast::<Workspace>()
+// .unwrap()
+// .root(cx_b)
+// .unwrap();
+
+// // assert that b is following a in project a in w.rs
+// workspace_b_project_a.update(cx_b, |workspace, cx| {
+// assert!(workspace.is_being_followed(client_a.peer_id().unwrap()));
+// assert_eq!(
+// client_a.peer_id(),
+// workspace.leader_for_pane(workspace.active_pane())
+// );
+// let item = workspace.active_item(cx).unwrap();
+// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("w.rs"));
+// });
+
+// // TODO: in app code, this would be done by the collab_ui.
+// active_call_b
+// .update(cx_b, |call, cx| {
+// let project = workspace_b_project_a.read(cx).project().clone();
+// call.set_location(Some(&project), cx)
+// })
+// .await
+// .unwrap();
+
+// // assert that there are no share notifications open
+// assert_eq!(visible_push_notifications(cx_b).len(), 0);
+
+// // b moves to x.rs in a's project, and a follows
+// workspace_b_project_a
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id_a, "x.rs"), None, true, cx)
+// })
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+// workspace_b_project_a.update(cx_b, |workspace, cx| {
+// let item = workspace.active_item(cx).unwrap();
+// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
+// });
+
+// workspace_a.update(cx_a, |workspace, cx| {
+// workspace
+// .follow(client_b.peer_id().unwrap(), cx)
+// .unwrap()
+// .detach()
+// });
+
+// executor.run_until_parked();
+// workspace_a.update(cx_a, |workspace, cx| {
+// assert!(workspace.is_being_followed(client_b.peer_id().unwrap()));
+// assert_eq!(
+// client_b.peer_id(),
+// workspace.leader_for_pane(workspace.active_pane())
+// );
+// let item = workspace.active_pane().read(cx).active_item().unwrap();
+// assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs".into());
+// });
+
+// // b moves to y.rs in b's project, a is still following but can't yet see
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.open_path((worktree_id_b, "y.rs"), None, true, cx)
+// })
+// .await
+// .unwrap();
+
+// // TODO: in app code, this would be done by the collab_ui.
+// active_call_b
+// .update(cx_b, |call, cx| {
+// let project = workspace_b.read(cx).project().clone();
+// call.set_location(Some(&project), cx)
+// })
+// .await
+// .unwrap();
+
+// let project_b_id = active_call_b
+// .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+// assert_eq!(visible_push_notifications(cx_a).len(), 1);
+// cx_a.update(|cx| {
+// workspace::join_remote_project(
+// project_b_id,
+// client_b.user_id().unwrap(),
+// client_a.app_state.clone(),
+// cx,
+// )
+// })
+// .await
+// .unwrap();
+
+// executor.run_until_parked();
+
+// assert_eq!(visible_push_notifications(cx_a).len(), 0);
+// let workspace_a_project_b = cx_a
+// .windows()
+// .iter()
+// .max_by_key(|window| window.item_id())
+// .unwrap()
+// .downcast::<Workspace>()
+// .unwrap()
+// .root(cx_a)
+// .unwrap();
+
+// workspace_a_project_b.update(cx_a, |workspace, cx| {
+// assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
+// assert!(workspace.is_being_followed(client_b.peer_id().unwrap()));
+// assert_eq!(
+// client_b.peer_id(),
+// workspace.leader_for_pane(workspace.active_pane())
+// );
+// let item = workspace.active_item(cx).unwrap();
+// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("y.rs"));
+// });
+// }
+
+// #[gpui::test]
+// async fn test_following_into_excluded_file(
+// executor: BackgroundExecutor,
+// mut cx_a: &mut TestAppContext,
+// mut cx_b: &mut TestAppContext,
+// ) {
+// let mut server = TestServer::start(executor.clone()).await;
+// let client_a = server.create_client(cx_a, "user_a").await;
+// let client_b = server.create_client(cx_b, "user_b").await;
+// for cx in [&mut cx_a, &mut cx_b] {
+// cx.update(|cx| {
+// cx.update_global::<SettingsStore, _>(|store, cx| {
+// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
+// project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
+// });
+// });
+// });
+// }
+// server
+// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+// .await;
+// let active_call_a = cx_a.read(ActiveCall::global);
+// let active_call_b = cx_b.read(ActiveCall::global);
+
+// cx_a.update(editor::init);
+// cx_b.update(editor::init);
+
+// client_a
+// .fs()
+// .insert_tree(
+// "/a",
+// json!({
+// ".git": {
+// "COMMIT_EDITMSG": "write your commit message here",
+// },
+// "1.txt": "one\none\none",
+// "2.txt": "two\ntwo\ntwo",
+// "3.txt": "three\nthree\nthree",
+// }),
+// )
+// .await;
+// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
+// active_call_a
+// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
+// .await
+// .unwrap();
+
+// let project_id = active_call_a
+// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
+// .await
+// .unwrap();
+// let project_b = client_b.build_remote_project(project_id, cx_b).await;
+// active_call_b
+// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
+// .await
+// .unwrap();
+
+// let window_a = client_a.build_workspace(&project_a, cx_a);
+// let workspace_a = window_a.root(cx_a).unwrap();
+// let peer_id_a = client_a.peer_id().unwrap();
+// let window_b = client_b.build_workspace(&project_b, cx_b);
+// let workspace_b = window_b.root(cx_b).unwrap();
+
+// todo!("could be wrong")
+// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
+// let cx_a = &mut cx_a;
+// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
+// let cx_b = &mut cx_b;
+
+// // Client A opens editors for a regular file and an excluded file.
+// let editor_for_regular = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+// let editor_for_excluded_a = workspace_a
+// .update(cx_a, |workspace, cx| {
+// workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
+// })
+// .await
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap();
+
+// // Client A updates their selections in those editors
+// editor_for_regular.update(cx_a, |editor, cx| {
+// editor.handle_input("a", cx);
+// editor.handle_input("b", cx);
+// editor.handle_input("c", cx);
+// editor.select_left(&Default::default(), cx);
+// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
+// });
+// editor_for_excluded_a.update(cx_a, |editor, cx| {
+// editor.select_all(&Default::default(), cx);
+// editor.handle_input("new commit message", cx);
+// editor.select_left(&Default::default(), cx);
+// assert_eq!(editor.selections.ranges(cx), vec![18..17]);
+// });
+
+// // When client B starts following client A, currently visible file is replicated
+// workspace_b
+// .update(cx_b, |workspace, cx| {
+// workspace.follow(peer_id_a, cx).unwrap()
+// })
+// .await
+// .unwrap();
+
+// let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| {
+// workspace
+// .active_item(cx)
+// .unwrap()
+// .downcast::<Editor>()
+// .unwrap()
+// });
+// assert_eq!(
+// cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
+// Some((worktree_id, ".git/COMMIT_EDITMSG").into())
+// );
+// assert_eq!(
+// editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
+// vec![18..17]
+// );
+
+// // Changes from B to the excluded file are replicated in A's editor
+// editor_for_excluded_b.update(cx_b, |editor, cx| {
+// editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
+// });
+// executor.run_until_parked();
+// editor_for_excluded_a.update(cx_a, |editor, cx| {
+// assert_eq!(
+// editor.text(cx),
+// "new commit messag\nCo-Authored-By: B <b@b.b>"
+// );
+// });
+// }
+
+// fn visible_push_notifications(
+// cx: &mut TestAppContext,
+// ) -> Vec<gpui::View<ProjectSharedNotification>> {
+// let mut ret = Vec::new();
+// for window in cx.windows() {
+// window.update(cx, |window| {
+// if let Some(handle) = window
+// .root_view()
+// .clone()
+// .downcast::<ProjectSharedNotification>()
+// {
+// ret.push(handle)
+// }
+// });
+// }
+// ret
+// }
+
+// #[derive(Debug, PartialEq, Eq)]
+// struct PaneSummary {
+// active: bool,
+// leader: Option<PeerId>,
+// items: Vec<(bool, String)>,
+// }
+
+// fn followers_by_leader(project_id: u64, cx: &TestAppContext) -> Vec<(PeerId, Vec<PeerId>)> {
+// cx.read(|cx| {
+// let active_call = ActiveCall::global(cx).read(cx);
+// let peer_id = active_call.client().peer_id();
+// let room = active_call.room().unwrap().read(cx);
+// let mut result = room
+// .remote_participants()
+// .values()
+// .map(|participant| participant.peer_id)
+// .chain(peer_id)
+// .filter_map(|peer_id| {
+// let followers = room.followers_for(peer_id, project_id);
+// if followers.is_empty() {
+// None
+// } else {
+// Some((peer_id, followers.to_vec()))
+// }
+// })
+// .collect::<Vec<_>>();
+// result.sort_by_key(|e| e.0);
+// result
+// })
+// }
+
+// fn pane_summaries(workspace: &View<Workspace>, cx: &mut WindowContext<'_>) -> Vec<PaneSummary> {
+// workspace.update(cx, |workspace, cx| {
+// let active_pane = workspace.active_pane();
+// workspace
+// .panes()
+// .iter()
+// .map(|pane| {
+// let leader = workspace.leader_for_pane(pane);
+// let active = pane == active_pane;
+// let pane = pane.read(cx);
+// let active_ix = pane.active_item_index();
+// PaneSummary {
+// active,
+// leader,
+// items: pane
+// .items()
+// .enumerate()
+// .map(|(ix, item)| {
+// (
+// ix == active_ix,
+// item.tab_description(0, cx)
+// .map_or(String::new(), |s| s.to_string()),
+// )
+// })
+// .collect(),
+// }
+// })
+// .collect()
+// })
+// }
@@ -5,18 +5,13 @@ use crate::{
use call::{room, ActiveCall, ParticipantLocation, Room};
use client::{User, RECEIVE_TIMEOUT};
use collections::{HashMap, HashSet};
-use editor::{
- test::editor_test_context::EditorTestContext, ConfirmCodeAction, ConfirmCompletion,
- ConfirmRename, Editor, Redo, Rename, ToggleCodeActions, Undo,
-};
use fs::{repository::GitFileStatus, FakeFs, Fs as _, RemoveOptions};
use futures::StreamExt as _;
-use gpui::{executor::Deterministic, test::EmptyView, AppContext, ModelHandle, TestAppContext};
-use indoc::indoc;
+use gpui::{AppContext, BackgroundExecutor, Model, TestAppContext};
use language::{
- language_settings::{AllLanguageSettings, Formatter, InlayHintSettings},
- tree_sitter_rust, Anchor, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language,
- LanguageConfig, LineEnding, OffsetRangeExt, Point, Rope,
+ language_settings::{AllLanguageSettings, Formatter},
+ tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig,
+ LineEnding, OffsetRangeExt, Point, Rope,
};
use live_kit_client::MacOSDisplay;
use lsp::LanguageServerId;
@@ -32,12 +27,11 @@ use std::{
path::{Path, PathBuf},
rc::Rc,
sync::{
- atomic::{self, AtomicBool, AtomicUsize, Ordering::SeqCst},
+ atomic::{AtomicBool, Ordering::SeqCst},
Arc,
},
};
use unindent::Unindent as _;
-use workspace::Workspace;
#[ctor::ctor]
fn init_logger() {
@@ -48,14 +42,13 @@ fn init_logger() {
#[gpui::test(iterations = 10)]
async fn test_basic_calls(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_b2: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -76,7 +69,7 @@ async fn test_basic_calls(
.await
.unwrap();
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -86,6 +79,7 @@ async fn test_basic_calls(
);
// User B receives the call.
+
let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
let call_b = incoming_call_b.next().await.unwrap().unwrap();
assert_eq!(call_b.calling_user.github_login, "user_a");
@@ -93,8 +87,9 @@ async fn test_basic_calls(
// User B connects via another client and also receives a ring on the newly-connected client.
let _client_b2 = server.create_client(cx_b2, "user_b").await;
let active_call_b2 = cx_b2.read(ActiveCall::global);
+
let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
- deterministic.run_until_parked();
+ executor.run_until_parked();
let call_b2 = incoming_call_b2.next().await.unwrap().unwrap();
assert_eq!(call_b2.calling_user.github_login, "user_a");
@@ -103,10 +98,11 @@ async fn test_basic_calls(
.update(cx_b, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
+
let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
assert!(incoming_call_b.next().await.unwrap().is_none());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -123,6 +119,7 @@ async fn test_basic_calls(
);
// Call user C from client B.
+
let mut incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
active_call_b
.update(cx_b, |call, cx| {
@@ -131,7 +128,7 @@ async fn test_basic_calls(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -153,7 +150,7 @@ async fn test_basic_calls(
active_call_c.update(cx_c, |call, cx| call.decline_incoming(cx).unwrap());
assert!(incoming_call_c.next().await.unwrap().is_none());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -177,7 +174,7 @@ async fn test_basic_calls(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -201,9 +198,10 @@ async fn test_basic_calls(
.await
.unwrap();
assert!(incoming_call_c.next().await.unwrap().is_none());
+
let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -240,13 +238,14 @@ async fn test_basic_calls(
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
// User B observes the remote screen sharing track.
assert_eq!(events_b.borrow().len(), 1);
let event_b = events_b.borrow().first().unwrap().clone();
if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_b {
assert_eq!(participant_id, client_a.peer_id().unwrap());
+
room_b.read_with(cx_b, |room, _| {
assert_eq!(
room.remote_participants()[&client_a.user_id().unwrap()]
@@ -264,6 +263,7 @@ async fn test_basic_calls(
let event_c = events_c.borrow().first().unwrap().clone();
if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_c {
assert_eq!(participant_id, client_a.peer_id().unwrap());
+
room_c.read_with(cx_c, |room, _| {
assert_eq!(
room.remote_participants()[&client_a.user_id().unwrap()]
@@ -285,7 +285,7 @@ async fn test_basic_calls(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -315,8 +315,10 @@ async fn test_basic_calls(
.test_live_kit_server
.disconnect_client(client_b.user_id().unwrap().to_string())
.await;
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
+
active_call_c.read_with(cx_c, |call, _| assert!(call.room().is_none()));
assert_eq!(
room_participants(&room_a, cx_a),
@@ -343,14 +345,13 @@ async fn test_basic_calls(
#[gpui::test(iterations = 10)]
async fn test_calling_multiple_users_simultaneously(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
cx_d: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -381,7 +382,7 @@ async fn test_calling_multiple_users_simultaneously(
c_invite.await.unwrap();
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -397,7 +398,7 @@ async fn test_calling_multiple_users_simultaneously(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -418,10 +419,12 @@ async fn test_calling_multiple_users_simultaneously(
accept_c.await.unwrap();
accept_d.await.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
+
let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
+
let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone());
assert_eq!(
room_participants(&room_a, cx_a),
@@ -471,13 +474,12 @@ async fn test_calling_multiple_users_simultaneously(
#[gpui::test(iterations = 10)]
async fn test_joining_channels_and_calling_multiple_users_simultaneously(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -515,7 +517,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
join_channel_2.await.unwrap();
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(channel_id(&room_a, cx_a), Some(channel_2));
@@ -543,7 +545,7 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
join_channel.await.unwrap();
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
@@ -579,20 +581,19 @@ async fn test_joining_channels_and_calling_multiple_users_simultaneously(
c_invite.await.unwrap();
active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
}
#[gpui::test(iterations = 10)]
async fn test_room_uniqueness(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_a2: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_b2: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let _client_a2 = server.create_client(cx_a2, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -623,9 +624,11 @@ async fn test_room_uniqueness(
})
.await
.unwrap_err();
+
active_call_a2.read_with(cx_a2, |call, _| assert!(call.room().is_none()));
// User B receives the call from user A.
+
let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
let call_b1 = incoming_call_b.next().await.unwrap().unwrap();
assert_eq!(call_b1.calling_user.github_login, "user_a");
@@ -651,6 +654,7 @@ async fn test_room_uniqueness(
})
.await
.unwrap_err();
+
active_call_b2.read_with(cx_b2, |call, _| assert!(call.room().is_none()));
// User B joins the room and calling them after they've joined still fails.
@@ -672,6 +676,7 @@ async fn test_room_uniqueness(
})
.await
.unwrap_err();
+
active_call_b2.read_with(cx_b2, |call, _| assert!(call.room().is_none()));
// Client C can successfully call client B after client B leaves the room.
@@ -679,26 +684,25 @@ async fn test_room_uniqueness(
.update(cx_b, |call, cx| call.hang_up(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
active_call_c
.update(cx_c, |call, cx| {
call.invite(client_b.user_id().unwrap(), None, cx)
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
let call_b2 = incoming_call_b.next().await.unwrap().unwrap();
assert_eq!(call_b2.calling_user.github_login, "user_c");
}
#[gpui::test(iterations = 10)]
async fn test_client_disconnecting_from_room(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
@@ -715,17 +719,20 @@ async fn test_client_disconnecting_from_room(
})
.await
.unwrap();
+
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
// User B receives the call and joins the room.
+
let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
incoming_call_b.next().await.unwrap().unwrap();
active_call_b
.update(cx_b, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
+
let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -743,8 +750,8 @@ async fn test_client_disconnecting_from_room(
// User A automatically reconnects to the room upon disconnection.
server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT);
- deterministic.run_until_parked();
+ executor.advance_clock(RECEIVE_TIMEOUT);
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -763,8 +770,10 @@ async fn test_client_disconnecting_from_room(
// When user A disconnects, both client A and B clear their room on the active call.
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+
active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
+
active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
assert_eq!(
room_participants(&room_a, cx_a),
@@ -783,7 +792,7 @@ async fn test_client_disconnecting_from_room(
// Allow user A to reconnect to the server.
server.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT);
// Call user B again from client A.
active_call_a
@@ -792,17 +801,20 @@ async fn test_client_disconnecting_from_room(
})
.await
.unwrap();
+
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
// User B receives the call and joins the room.
+
let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
incoming_call_b.next().await.unwrap().unwrap();
active_call_b
.update(cx_b, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
+
let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -824,7 +836,7 @@ async fn test_client_disconnecting_from_room(
.test_live_kit_server
.disconnect_client(client_b.user_id().unwrap().to_string())
.await;
- deterministic.run_until_parked();
+ executor.run_until_parked();
active_call_a.update(cx_a, |call, _| assert!(call.room().is_none()));
active_call_b.update(cx_b, |call, _| assert!(call.room().is_none()));
assert_eq!(
@@ -845,14 +857,13 @@ async fn test_client_disconnecting_from_room(
#[gpui::test(iterations = 10)]
async fn test_server_restarts(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
cx_d: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
client_a
.fs()
@@ -898,31 +909,37 @@ async fn test_server_restarts(
})
.await
.unwrap();
+
let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
// User B receives the call and joins the room.
+
let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
assert!(incoming_call_b.next().await.unwrap().is_some());
active_call_b
.update(cx_b, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
+
let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
// User C receives the call and joins the room.
+
let mut incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
assert!(incoming_call_c.next().await.unwrap().is_some());
active_call_c
.update(cx_c, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
+
let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
// User D receives the call but doesn't join the room yet.
+
let mut incoming_call_d = active_call_d.read_with(cx_d, |call, _| call.incoming());
assert!(incoming_call_d.next().await.unwrap().is_some());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -950,7 +967,7 @@ async fn test_server_restarts(
// Users A and B reconnect to the call. User C has troubles reconnecting, so it leaves the room.
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
- deterministic.advance_clock(RECONNECT_TIMEOUT);
+ executor.advance_clock(RECONNECT_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -979,7 +996,8 @@ async fn test_server_restarts(
.update(cx_d, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone());
assert_eq!(
room_participants(&room_a, cx_a),
@@ -1024,7 +1042,7 @@ async fn test_server_restarts(
// The server finishes restarting, cleaning up stale connections.
server.start().await.unwrap();
- deterministic.advance_clock(CLEANUP_TIMEOUT);
+ executor.advance_clock(CLEANUP_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -1059,7 +1077,7 @@ async fn test_server_restarts(
.update(cx_d, |call, cx| call.hang_up(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -1098,9 +1116,10 @@ async fn test_server_restarts(
.unwrap();
// User D receives the call but doesn't join the room yet.
+
let mut incoming_call_d = active_call_d.read_with(cx_d, |call, _| call.incoming());
assert!(incoming_call_d.next().await.unwrap().is_some());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -1123,7 +1142,7 @@ async fn test_server_restarts(
client_a.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_b.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
- deterministic.advance_clock(RECONNECT_TIMEOUT);
+ executor.advance_clock(RECONNECT_TIMEOUT);
assert_eq!(
room_participants(&room_a, cx_a),
RoomParticipants {
@@ -1145,19 +1164,18 @@ async fn test_server_restarts(
// The server finishes restarting, cleaning up stale connections and canceling the
// call to user D because the room has become empty.
server.start().await.unwrap();
- deterministic.advance_clock(CLEANUP_TIMEOUT);
+ executor.advance_clock(CLEANUP_TIMEOUT);
assert!(incoming_call_d.next().await.unwrap().is_none());
}
#[gpui::test(iterations = 10)]
async fn test_calls_on_multiple_connections(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b1: &mut TestAppContext,
cx_b2: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b1 = server.create_client(cx_b1, "user_b").await;
let client_b2 = server.create_client(cx_b2, "user_b").await;
@@ -1168,7 +1186,9 @@ async fn test_calls_on_multiple_connections(
let active_call_a = cx_a.read(ActiveCall::global);
let active_call_b1 = cx_b1.read(ActiveCall::global);
let active_call_b2 = cx_b2.read(ActiveCall::global);
+
let mut incoming_call_b1 = active_call_b1.read_with(cx_b1, |call, _| call.incoming());
+
let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
assert!(incoming_call_b1.next().await.unwrap().is_none());
assert!(incoming_call_b2.next().await.unwrap().is_none());
@@ -1180,14 +1200,14 @@ async fn test_calls_on_multiple_connections(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_some());
assert!(incoming_call_b2.next().await.unwrap().is_some());
// User B declines the call on one of the two connections, causing both connections
// to stop ringing.
active_call_b2.update(cx_b2, |call, cx| call.decline_incoming(cx).unwrap());
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_none());
assert!(incoming_call_b2.next().await.unwrap().is_none());
@@ -1198,7 +1218,7 @@ async fn test_calls_on_multiple_connections(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_some());
assert!(incoming_call_b2.next().await.unwrap().is_some());
@@ -1208,13 +1228,13 @@ async fn test_calls_on_multiple_connections(
.update(cx_b2, |call, cx| call.accept_incoming(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_none());
assert!(incoming_call_b2.next().await.unwrap().is_none());
// User B disconnects the client that is not on the call. Everything should be fine.
client_b1.disconnect(&cx_b1.to_async());
- deterministic.advance_clock(RECEIVE_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT);
client_b1
.authenticate_and_connect(false, &cx_b1.to_async())
.await
@@ -1225,14 +1245,14 @@ async fn test_calls_on_multiple_connections(
.update(cx_b2, |call, cx| call.hang_up(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
active_call_a
.update(cx_a, |call, cx| {
call.invite(client_b1.user_id().unwrap(), None, cx)
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_some());
assert!(incoming_call_b2.next().await.unwrap().is_some());
@@ -1243,7 +1263,7 @@ async fn test_calls_on_multiple_connections(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_none());
assert!(incoming_call_b2.next().await.unwrap().is_none());
@@ -1254,7 +1274,7 @@ async fn test_calls_on_multiple_connections(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_some());
assert!(incoming_call_b2.next().await.unwrap().is_some());
@@ -1263,7 +1283,7 @@ async fn test_calls_on_multiple_connections(
.update(cx_a, |call, cx| call.hang_up(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_none());
assert!(incoming_call_b2.next().await.unwrap().is_none());
@@ -1274,27 +1294,27 @@ async fn test_calls_on_multiple_connections(
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_some());
assert!(incoming_call_b2.next().await.unwrap().is_some());
// User A disconnects, causing both connections to stop ringing.
server.forbid_connections();
server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
assert!(incoming_call_b1.next().await.unwrap().is_none());
assert!(incoming_call_b2.next().await.unwrap().is_none());
// User A reconnects automatically, then calls user B again.
server.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT);
+ executor.advance_clock(RECEIVE_TIMEOUT);
active_call_a
.update(cx_a, |call, cx| {
call.invite(client_b1.user_id().unwrap(), None, cx)
})
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
assert!(incoming_call_b1.next().await.unwrap().is_some());
assert!(incoming_call_b2.next().await.unwrap().is_some());
@@ -1302,187 +1322,19 @@ async fn test_calls_on_multiple_connections(
server.forbid_connections();
server.disconnect_client(client_b1.peer_id().unwrap());
server.disconnect_client(client_b2.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_share_project(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
- let window_b = cx_b.add_window(|_| EmptyView);
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- ".gitignore": "ignored-dir",
- "a.txt": "a-contents",
- "b.txt": "b-contents",
- "ignored-dir": {
- "c.txt": "",
- "d.txt": "",
- }
- }),
- )
- .await;
-
- // Invite client B to collaborate on a project
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), Some(project_a.clone()), cx)
- })
- .await
- .unwrap();
-
- // Join that project as client B
- let incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- deterministic.run_until_parked();
- let call = incoming_call_b.borrow().clone().unwrap();
- assert_eq!(call.calling_user.github_login, "user_a");
- let initial_project = call.initial_project.unwrap();
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- let client_b_peer_id = client_b.peer_id().unwrap();
- let project_b = client_b
- .build_remote_project(initial_project.id, cx_b)
- .await;
- let replica_id_b = project_b.read_with(cx_b, |project, _| project.replica_id());
-
- deterministic.run_until_parked();
- project_a.read_with(cx_a, |project, _| {
- let client_b_collaborator = project.collaborators().get(&client_b_peer_id).unwrap();
- assert_eq!(client_b_collaborator.replica_id, replica_id_b);
- });
- project_b.read_with(cx_b, |project, cx| {
- let worktree = project.worktrees(cx).next().unwrap().read(cx);
- assert_eq!(
- worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
- [
- Path::new(".gitignore"),
- Path::new("a.txt"),
- Path::new("b.txt"),
- Path::new("ignored-dir"),
- ]
- );
- });
-
- project_b
- .update(cx_b, |project, cx| {
- let worktree = project.worktrees(cx).next().unwrap();
- let entry = worktree.read(cx).entry_for_path("ignored-dir").unwrap();
- project.expand_entry(worktree_id, entry.id, cx).unwrap()
- })
- .await
- .unwrap();
- project_b.read_with(cx_b, |project, cx| {
- let worktree = project.worktrees(cx).next().unwrap().read(cx);
- assert_eq!(
- worktree.paths().map(AsRef::as_ref).collect::<Vec<_>>(),
- [
- Path::new(".gitignore"),
- Path::new("a.txt"),
- Path::new("b.txt"),
- Path::new("ignored-dir"),
- Path::new("ignored-dir/c.txt"),
- Path::new("ignored-dir/d.txt"),
- ]
- );
- });
-
- // Open the same file as client B and client A.
- let buffer_b = project_b
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx))
- .await
- .unwrap();
- buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), "b-contents"));
- project_a.read_with(cx_a, |project, cx| {
- assert!(project.has_open_buffer((worktree_id, "b.txt"), cx))
- });
- let buffer_a = project_a
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "b.txt"), cx))
- .await
- .unwrap();
-
- let editor_b = window_b.add_view(cx_b, |cx| Editor::for_buffer(buffer_b, None, cx));
-
- // Client A sees client B's selection
- deterministic.run_until_parked();
- buffer_a.read_with(cx_a, |buffer, _| {
- buffer
- .snapshot()
- .remote_selections_in_range(Anchor::MIN..Anchor::MAX)
- .count()
- == 1
- });
+ executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- // Edit the buffer as client B and see that edit as client A.
- editor_b.update(cx_b, |editor, cx| editor.handle_input("ok, ", cx));
- deterministic.run_until_parked();
- buffer_a.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.text(), "ok, b-contents")
- });
-
- // Client B can invite client C on a project shared by client A.
- active_call_b
- .update(cx_b, |call, cx| {
- call.invite(client_c.user_id().unwrap(), Some(project_b.clone()), cx)
- })
- .await
- .unwrap();
-
- let incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
- deterministic.run_until_parked();
- let call = incoming_call_c.borrow().clone().unwrap();
- assert_eq!(call.calling_user.github_login, "user_b");
- let initial_project = call.initial_project.unwrap();
- active_call_c
- .update(cx_c, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- let _project_c = client_c
- .build_remote_project(initial_project.id, cx_c)
- .await;
-
- // Client B closes the editor, and client A sees client B's selections removed.
- cx_b.update(move |_| drop(editor_b));
- deterministic.run_until_parked();
- buffer_a.read_with(cx_a, |buffer, _| {
- buffer
- .snapshot()
- .remote_selections_in_range(Anchor::MIN..Anchor::MAX)
- .count()
- == 0
- });
+ active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
}
#[gpui::test(iterations = 10)]
async fn test_unshare_project(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
cx_c: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
let client_c = server.create_client(cx_c, "user_c").await;
@@ -1509,9 +1361,11 @@ async fn test_unshare_project(
.update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
.await
.unwrap();
- let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
+
+ let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
let project_b = client_b.build_remote_project(project_id, cx_b).await;
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
project_b
@@ -1524,7 +1378,8 @@ async fn test_unshare_project(
.update(cx_b, |call, cx| call.hang_up(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
assert!(project_b.read_with(cx_b, |project, _| project.is_read_only()));
// Client C opens the project.
@@ -1534,8 +1389,10 @@ async fn test_unshare_project(
project_a
.update(cx_a, |project, cx| project.unshare(cx))
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
+
assert!(project_c.read_with(cx_c, |project, _| project.is_read_only()));
// Client C can open the project again after client A re-shares.
@@ -1544,7 +1401,8 @@ async fn test_unshare_project(
.await
.unwrap();
let project_c2 = client_c.build_remote_project(project_id, cx_c).await;
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
project_c2
.update(cx_c, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
@@ -1556,130 +1414,23 @@ async fn test_unshare_project(
.update(cx_a, |call, cx| call.hang_up(cx))
.await
.unwrap();
- deterministic.run_until_parked();
+ executor.run_until_parked();
+
project_a.read_with(cx_a, |project, _| assert!(!project.is_shared()));
+
project_c2.read_with(cx_c, |project, _| {
assert!(project.is_read_only());
assert!(project.collaborators().is_empty());
});
}
-#[gpui::test(iterations = 10)]
-async fn test_host_disconnect(
- deterministic: Arc<Deterministic>,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
-
- cx_b.update(editor::init);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "a.txt": "a-contents",
- "b.txt": "b-contents",
- }),
- )
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- let worktree_a = project_a.read_with(cx_a, |project, cx| project.worktrees(cx).next().unwrap());
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- deterministic.run_until_parked();
- assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
-
- let window_b =
- cx_b.add_window(|cx| Workspace::new(0, project_b.clone(), client_b.app_state.clone(), cx));
- let workspace_b = window_b.root(cx_b);
- let editor_b = workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id, "b.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
- assert!(window_b.read_with(cx_b, |cx| editor_b.is_focused(cx)));
- editor_b.update(cx_b, |editor, cx| editor.insert("X", cx));
- assert!(window_b.is_edited(cx_b));
-
- // Drop client A's connection. Collaborators should disappear and the project should not be shown as shared.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- project_a.read_with(cx_a, |project, _| project.collaborators().is_empty());
- project_a.read_with(cx_a, |project, _| assert!(!project.is_shared()));
- project_b.read_with(cx_b, |project, _| project.is_read_only());
- assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
-
- // Ensure client B's edited state is reset and that the whole window is blurred.
- window_b.read_with(cx_b, |cx| {
- assert_eq!(cx.focused_view_id(), None);
- });
- assert!(!window_b.is_edited(cx_b));
-
- // Ensure client B is not prompted to save edits when closing window after disconnecting.
- let can_close = workspace_b
- .update(cx_b, |workspace, cx| workspace.prepare_to_close(true, cx))
- .await
- .unwrap();
- assert!(can_close);
-
- // Allow client A to reconnect to the server.
- server.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT);
-
- // Client B calls client A again after they reconnected.
- let active_call_b = cx_b.read(ActiveCall::global);
- active_call_b
- .update(cx_b, |call, cx| {
- call.invite(client_a.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
- active_call_a
- .update(cx_a, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- // Drop client A's connection again. We should still unshare it successfully.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- project_a.read_with(cx_a, |project, _| assert!(!project.is_shared()));
-}
-
#[gpui::test(iterations = 10)]
async fn test_project_reconnect(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
server
@@ -1,18 +1,19 @@
-use crate::tests::TestServer;
-use gpui::{executor::Deterministic, TestAppContext};
+use std::sync::Arc;
+
+use gpui::{BackgroundExecutor, TestAppContext};
use notifications::NotificationEvent;
use parking_lot::Mutex;
use rpc::{proto, Notification};
-use std::sync::Arc;
+
+use crate::tests::TestServer;
#[gpui::test]
async fn test_notifications(
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
cx_a: &mut TestAppContext,
cx_b: &mut TestAppContext,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let client_a = server.create_client(cx_a, "user_a").await;
let client_b = server.create_client(cx_b, "user_b").await;
@@ -42,7 +43,7 @@ async fn test_notifications(
// Client B receives a contact request notification and responds to the
// request, accepting it.
- deterministic.run_until_parked();
+ executor.run_until_parked();
client_b.notification_store().update(cx_b, |store, cx| {
assert_eq!(store.notification_count(), 1);
assert_eq!(store.unread_notification_count(), 1);
@@ -72,7 +73,7 @@ async fn test_notifications(
});
// Client B sees the notification is now read, and that they responded.
- deterministic.run_until_parked();
+ executor.run_until_parked();
client_b.notification_store().read_with(cx_b, |store, _| {
assert_eq!(store.notification_count(), 1);
assert_eq!(store.unread_notification_count(), 0);
@@ -127,7 +128,7 @@ async fn test_notifications(
// Client B receives a channel invitation notification and responds to the
// invitation, accepting it.
- deterministic.run_until_parked();
+ executor.run_until_parked();
client_b.notification_store().update(cx_b, |store, cx| {
assert_eq!(store.notification_count(), 2);
assert_eq!(store.unread_notification_count(), 1);
@@ -147,7 +148,7 @@ async fn test_notifications(
});
// Client B sees the notification is now read, and that they responded.
- deterministic.run_until_parked();
+ executor.run_until_parked();
client_b.notification_store().read_with(cx_b, |store, _| {
assert_eq!(store.notification_count(), 2);
assert_eq!(store.unread_notification_count(), 0);
@@ -3,10 +3,14 @@ use crate::db::ChannelRole;
use super::{run_randomized_test, RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
use anyhow::Result;
use async_trait::async_trait;
-use gpui::{executor::Deterministic, TestAppContext};
+use gpui::{BackgroundExecutor, SharedString, TestAppContext};
use rand::prelude::*;
use serde_derive::{Deserialize, Serialize};
-use std::{ops::Range, rc::Rc, sync::Arc};
+use std::{
+ ops::{Deref, DerefMut, Range},
+ rc::Rc,
+ sync::Arc,
+};
use text::Bias;
#[gpui::test(
@@ -15,10 +19,10 @@ use text::Bias;
)]
async fn test_random_channel_buffers(
cx: &mut TestAppContext,
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
rng: StdRng,
) {
- run_randomized_test::<RandomChannelBufferTest>(cx, deterministic, rng).await;
+ run_randomized_test::<RandomChannelBufferTest>(cx, executor, rng).await;
}
struct RandomChannelBufferTest;
@@ -26,13 +30,13 @@ struct RandomChannelBufferTest;
#[derive(Clone, Serialize, Deserialize)]
enum ChannelBufferOperation {
JoinChannelNotes {
- channel_name: String,
+ channel_name: SharedString,
},
LeaveChannelNotes {
- channel_name: String,
+ channel_name: SharedString,
},
EditChannelNotes {
- channel_name: String,
+ channel_name: SharedString,
edits: Vec<(Range<usize>, Arc<str>)>,
},
Noop,
@@ -69,11 +73,11 @@ impl RandomizedTest for RandomChannelBufferTest {
cx: &TestAppContext,
) -> ChannelBufferOperation {
let channel_store = client.channel_store().clone();
- let channel_buffers = client.channel_buffers();
+ let mut channel_buffers = client.channel_buffers();
// When signed out, we can't do anything unless a channel buffer is
// already open.
- if channel_buffers.is_empty()
+ if channel_buffers.deref_mut().is_empty()
&& channel_store.read_with(cx, |store, _| store.channel_count() == 0)
{
return ChannelBufferOperation::Noop;
@@ -97,7 +101,7 @@ impl RandomizedTest for RandomChannelBufferTest {
}
30..=40 => {
- if let Some(buffer) = channel_buffers.iter().choose(rng) {
+ if let Some(buffer) = channel_buffers.deref().iter().choose(rng) {
let channel_name =
buffer.read_with(cx, |b, cx| b.channel(cx).unwrap().name.clone());
break ChannelBufferOperation::LeaveChannelNotes { channel_name };
@@ -105,7 +109,7 @@ impl RandomizedTest for RandomChannelBufferTest {
}
_ => {
- if let Some(buffer) = channel_buffers.iter().choose(rng) {
+ if let Some(buffer) = channel_buffers.deref().iter().choose(rng) {
break buffer.read_with(cx, |b, cx| {
let channel_name = b.channel(cx).unwrap().name.clone();
let edits = b
@@ -147,13 +151,13 @@ impl RandomizedTest for RandomChannelBufferTest {
"{}: opening notes for channel {channel_name}",
client.username
);
- client.channel_buffers().insert(buffer.await?);
+ client.channel_buffers().deref_mut().insert(buffer.await?);
}
ChannelBufferOperation::LeaveChannelNotes { channel_name } => {
let buffer = cx.update(|cx| {
let mut left_buffer = Err(TestError::Inapplicable);
- client.channel_buffers().retain(|buffer| {
+ client.channel_buffers().deref_mut().retain(|buffer| {
if buffer.read(cx).channel(cx).unwrap().name == channel_name {
left_buffer = Ok(buffer.clone());
false
@@ -179,6 +183,7 @@ impl RandomizedTest for RandomChannelBufferTest {
.read(|cx| {
client
.channel_buffers()
+ .deref()
.iter()
.find(|buffer| {
buffer.read(cx).channel(cx).unwrap().name == channel_name
@@ -215,13 +220,6 @@ impl RandomizedTest for RandomChannelBufferTest {
Ok(())
}
- async fn on_client_added(client: &Rc<TestClient>, cx: &mut TestAppContext) {
- let channel_store = client.channel_store();
- while channel_store.read_with(cx, |store, _| store.channel_count() == 0) {
- channel_store.next_notification(cx).await;
- }
- }
-
async fn on_quiesce(server: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
let channels = server.app_state.db.all_channels().await.unwrap();
@@ -229,6 +227,7 @@ impl RandomizedTest for RandomChannelBufferTest {
client_cx.update(|cx| {
client
.channel_buffers()
+ .deref_mut()
.retain(|b| b.read(cx).is_connected());
});
}
@@ -252,6 +251,7 @@ impl RandomizedTest for RandomChannelBufferTest {
client_cx.read(|cx| {
if let Some(channel_buffer) = client
.channel_buffers()
+ .deref()
.iter()
.find(|b| b.read(cx).channel_id == channel_id.to_proto())
{
@@ -1,5 +1,5 @@
-use super::{run_randomized_test, RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
-use crate::db::UserId;
+use super::{RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
+use crate::{db::UserId, tests::run_randomized_test};
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use call::ActiveCall;
@@ -7,7 +7,7 @@ use collections::{BTreeMap, HashMap};
use editor::Bias;
use fs::{repository::GitFileStatus, FakeFs, Fs as _};
use futures::StreamExt;
-use gpui::{executor::Deterministic, ModelHandle, TestAppContext};
+use gpui::{BackgroundExecutor, Model, TestAppContext};
use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16};
use lsp::FakeLanguageServer;
use pretty_assertions::assert_eq;
@@ -18,7 +18,7 @@ use rand::{
};
use serde::{Deserialize, Serialize};
use std::{
- ops::Range,
+ ops::{Deref, Range},
path::{Path, PathBuf},
rc::Rc,
sync::Arc,
@@ -31,10 +31,10 @@ use util::ResultExt;
)]
async fn test_random_project_collaboration(
cx: &mut TestAppContext,
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
rng: StdRng,
) {
- run_randomized_test::<ProjectCollaborationTest>(cx, deterministic, rng).await;
+ run_randomized_test::<ProjectCollaborationTest>(cx, executor, rng).await;
}
#[derive(Clone, Debug, Serialize, Deserialize)]
@@ -295,7 +295,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let is_local = project.read_with(cx, |project, _| project.is_local());
let worktree = project.read_with(cx, |project, cx| {
project
- .worktrees(cx)
+ .worktrees()
.filter(|worktree| {
let worktree = worktree.read(cx);
worktree.is_visible()
@@ -417,7 +417,7 @@ impl RandomizedTest for ProjectCollaborationTest {
81.. => {
let worktree = project.read_with(cx, |project, cx| {
project
- .worktrees(cx)
+ .worktrees()
.filter(|worktree| {
let worktree = worktree.read(cx);
worktree.is_visible()
@@ -624,7 +624,7 @@ impl RandomizedTest for ProjectCollaborationTest {
room.join_project(
project_id,
client.language_registry().clone(),
- FakeFs::new(cx.background().clone()),
+ FakeFs::new(cx.background_executor().clone()),
cx,
)
}))
@@ -782,6 +782,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.map_err(|err| anyhow!("save request failed: {:?}", err))?;
assert!(buffer
.read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
+ .expect("App should not be dropped")
.observed_all(&requested_version));
anyhow::Ok(())
});
@@ -817,30 +818,30 @@ impl RandomizedTest for ProjectCollaborationTest {
use futures::{FutureExt as _, TryFutureExt as _};
let offset = buffer.read_with(cx, |b, _| b.clip_offset(offset, Bias::Left));
- let request = cx.foreground().spawn(project.update(cx, |project, cx| {
- match kind {
- LspRequestKind::Rename => project
- .prepare_rename(buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::Completion => project
- .completions(&buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::CodeAction => project
- .code_actions(&buffer, offset..offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::Definition => project
- .definition(&buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::Highlights => project
- .document_highlights(&buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- }
- }));
+
+ let process_lsp_request = project.update(cx, |project, cx| match kind {
+ LspRequestKind::Rename => project
+ .prepare_rename(buffer, offset, cx)
+ .map_ok(|_| ())
+ .boxed(),
+ LspRequestKind::Completion => project
+ .completions(&buffer, offset, cx)
+ .map_ok(|_| ())
+ .boxed(),
+ LspRequestKind::CodeAction => project
+ .code_actions(&buffer, offset..offset, cx)
+ .map_ok(|_| ())
+ .boxed(),
+ LspRequestKind::Definition => project
+ .definition(&buffer, offset, cx)
+ .map_ok(|_| ())
+ .boxed(),
+ LspRequestKind::Highlights => project
+ .document_highlights(&buffer, offset, cx)
+ .map_ok(|_| ())
+ .boxed(),
+ });
+ let request = cx.foreground_executor().spawn(process_lsp_request);
if detach {
request.detach();
} else {
@@ -874,7 +875,7 @@ impl RandomizedTest for ProjectCollaborationTest {
)
});
drop(project);
- let search = cx.background().spawn(async move {
+ let search = cx.executor().spawn(async move {
let mut results = HashMap::default();
while let Some((buffer, ranges)) = search.next().await {
results.entry(buffer).or_insert(ranges);
@@ -1075,12 +1076,12 @@ impl RandomizedTest for ProjectCollaborationTest {
fake_server.handle_request::<lsp::request::GotoDefinition, _, _>({
let fs = fs.clone();
move |_, cx| {
- let background = cx.background();
+ let background = cx.background_executor();
let mut rng = background.rng();
let count = rng.gen_range::<usize, _>(1..3);
let files = fs.as_fake().files();
let files = (0..count)
- .map(|_| files.choose(&mut *rng).unwrap().clone())
+ .map(|_| files.choose(&mut rng).unwrap().clone())
.collect::<Vec<_>>();
async move {
log::info!("LSP: Returning definitions in files {:?}", &files);
@@ -1100,7 +1101,7 @@ impl RandomizedTest for ProjectCollaborationTest {
fake_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>(
move |_, cx| {
let mut highlights = Vec::new();
- let background = cx.background();
+ let background = cx.background_executor();
let mut rng = background.rng();
let highlight_count = rng.gen_range(1..=5);
@@ -1153,7 +1154,7 @@ impl RandomizedTest for ProjectCollaborationTest {
let host_worktree_snapshots =
host_project.read_with(host_cx, |host_project, cx| {
host_project
- .worktrees(cx)
+ .worktrees()
.map(|worktree| {
let worktree = worktree.read(cx);
(worktree.id(), worktree.snapshot())
@@ -1161,7 +1162,7 @@ impl RandomizedTest for ProjectCollaborationTest {
.collect::<BTreeMap<_, _>>()
});
let guest_worktree_snapshots = guest_project
- .worktrees(cx)
+ .worktrees()
.map(|worktree| {
let worktree = worktree.read(cx);
(worktree.id(), worktree.snapshot())
@@ -1218,7 +1219,7 @@ impl RandomizedTest for ProjectCollaborationTest {
}
}
- for buffer in guest_project.opened_buffers(cx) {
+ for buffer in guest_project.opened_buffers() {
let buffer = buffer.read(cx);
assert_eq!(
buffer.deferred_ops_len(),
@@ -1268,8 +1269,8 @@ impl RandomizedTest for ProjectCollaborationTest {
for guest_buffer in guest_buffers {
let buffer_id =
guest_buffer.read_with(client_cx, |buffer, _| buffer.remote_id());
- let host_buffer = host_project.read_with(host_cx, |project, cx| {
- project.buffer_for_id(buffer_id, cx).unwrap_or_else(|| {
+ let host_buffer = host_project.read_with(host_cx, |project, _| {
+ project.buffer_for_id(buffer_id).unwrap_or_else(|| {
panic!(
"host does not have buffer for guest:{}, peer:{:?}, id:{}",
client.username,
@@ -1457,10 +1458,10 @@ fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation
fn buffer_for_full_path(
client: &TestClient,
- project: &ModelHandle<Project>,
+ project: &Model<Project>,
full_path: &PathBuf,
cx: &TestAppContext,
-) -> Option<ModelHandle<language::Buffer>> {
+) -> Option<Model<language::Buffer>> {
client
.buffers_for_project(project)
.iter()
@@ -1476,18 +1477,18 @@ fn project_for_root_name(
client: &TestClient,
root_name: &str,
cx: &TestAppContext,
-) -> Option<ModelHandle<Project>> {
- if let Some(ix) = project_ix_for_root_name(&*client.local_projects(), root_name, cx) {
+) -> Option<Model<Project>> {
+ if let Some(ix) = project_ix_for_root_name(&*client.local_projects().deref(), root_name, cx) {
return Some(client.local_projects()[ix].clone());
}
- if let Some(ix) = project_ix_for_root_name(&*client.remote_projects(), root_name, cx) {
+ if let Some(ix) = project_ix_for_root_name(&*client.remote_projects().deref(), root_name, cx) {
return Some(client.remote_projects()[ix].clone());
}
None
}
fn project_ix_for_root_name(
- projects: &[ModelHandle<Project>],
+ projects: &[Model<Project>],
root_name: &str,
cx: &TestAppContext,
) -> Option<usize> {
@@ -1499,7 +1500,7 @@ fn project_ix_for_root_name(
})
}
-fn root_name_for_project(project: &ModelHandle<Project>, cx: &TestAppContext) -> String {
+fn root_name_for_project(project: &Model<Project>, cx: &TestAppContext) -> String {
project.read_with(cx, |project, cx| {
project
.visible_worktrees(cx)
@@ -1512,7 +1513,7 @@ fn root_name_for_project(project: &ModelHandle<Project>, cx: &TestAppContext) ->
}
fn project_path_for_full_path(
- project: &ModelHandle<Project>,
+ project: &Model<Project>,
full_path: &Path,
cx: &TestAppContext,
) -> Option<ProjectPath> {
@@ -1520,7 +1521,7 @@ fn project_path_for_full_path(
let root_name = components.next().unwrap().as_os_str().to_str().unwrap();
let path = components.as_path().into();
let worktree_id = project.read_with(cx, |project, cx| {
- project.worktrees(cx).find_map(|worktree| {
+ project.worktrees().find_map(|worktree| {
let worktree = worktree.read(cx);
if worktree.root_name() == root_name {
Some(worktree.id())
@@ -1533,7 +1534,7 @@ fn project_path_for_full_path(
}
async fn ensure_project_shared(
- project: &ModelHandle<Project>,
+ project: &Model<Project>,
client: &TestClient,
cx: &mut TestAppContext,
) {
@@ -1566,9 +1567,10 @@ async fn ensure_project_shared(
}
}
-fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option<ModelHandle<Project>> {
+fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option<Model<Project>> {
client
.local_projects()
+ .deref()
.iter()
.chain(client.remote_projects().iter())
.choose(rng)
@@ -5,7 +5,7 @@ use crate::{
};
use async_trait::async_trait;
use futures::StreamExt;
-use gpui::{executor::Deterministic, Task, TestAppContext};
+use gpui::{BackgroundExecutor, Task, TestAppContext};
use parking_lot::Mutex;
use rand::prelude::*;
use rpc::RECEIVE_TIMEOUT;
@@ -115,18 +115,17 @@ pub trait RandomizedTest: 'static + Sized {
async fn initialize(server: &mut TestServer, users: &[UserTestPlan]);
- async fn on_client_added(client: &Rc<TestClient>, cx: &mut TestAppContext);
+ async fn on_client_added(_client: &Rc<TestClient>, _cx: &mut TestAppContext) {}
async fn on_quiesce(server: &mut TestServer, client: &mut [(Rc<TestClient>, TestAppContext)]);
}
pub async fn run_randomized_test<T: RandomizedTest>(
cx: &mut TestAppContext,
- deterministic: Arc<Deterministic>,
+ executor: BackgroundExecutor,
rng: StdRng,
) {
- deterministic.forbid_parking();
- let mut server = TestServer::start(&deterministic).await;
+ let mut server = TestServer::start(executor.clone()).await;
let plan = TestPlan::<T>::new(&mut server, rng).await;
LAST_PLAN.lock().replace({
@@ -144,7 +143,7 @@ pub async fn run_randomized_test<T: RandomizedTest>(
applied.store(true, SeqCst);
let did_apply = TestPlan::apply_server_operation(
plan.clone(),
- deterministic.clone(),
+ executor.clone(),
&mut server,
&mut clients,
&mut client_tasks,
@@ -159,14 +158,14 @@ pub async fn run_randomized_test<T: RandomizedTest>(
}
drop(operation_channels);
- deterministic.start_waiting();
+ executor.start_waiting();
futures::future::join_all(client_tasks).await;
- deterministic.finish_waiting();
+ executor.finish_waiting();
- deterministic.run_until_parked();
+ executor.run_until_parked();
T::on_quiesce(&mut server, &mut clients).await;
- for (client, mut cx) in clients {
+ for (client, cx) in clients {
cx.update(|cx| {
let store = cx.remove_global::<SettingsStore>();
cx.clear_globals();
@@ -174,7 +173,7 @@ pub async fn run_randomized_test<T: RandomizedTest>(
drop(client);
});
}
- deterministic.run_until_parked();
+ executor.run_until_parked();
if let Some(path) = &*PLAN_SAVE_PATH {
eprintln!("saved test plan to path {:?}", path);
@@ -450,7 +449,7 @@ impl<T: RandomizedTest> TestPlan<T> {
async fn apply_server_operation(
plan: Arc<Mutex<Self>>,
- deterministic: Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
server: &mut TestServer,
clients: &mut Vec<(Rc<TestClient>, TestAppContext)>,
client_tasks: &mut Vec<Task<()>>,
@@ -471,28 +470,18 @@ impl<T: RandomizedTest> TestPlan<T> {
username = user.username.clone();
};
log::info!("adding new connection for {}", username);
- let next_entity_id = (user_id.0 * 10_000) as usize;
- let mut client_cx = TestAppContext::new(
- cx.foreground_platform(),
- cx.platform(),
- deterministic.build_foreground(user_id.0 as usize),
- deterministic.build_background(),
- cx.font_cache(),
- cx.leak_detector(),
- next_entity_id,
- cx.function_name.clone(),
- );
+
+ let mut client_cx = cx.new_app();
let (operation_tx, operation_rx) = futures::channel::mpsc::unbounded();
let client = Rc::new(server.create_client(&mut client_cx, &username).await);
operation_channels.push(operation_tx);
clients.push((client.clone(), client_cx.clone()));
- client_tasks.push(client_cx.foreground().spawn(Self::simulate_client(
- plan.clone(),
- client,
- operation_rx,
- client_cx,
- )));
+
+ let foreground_executor = client_cx.foreground_executor().clone();
+ let simulate_client =
+ Self::simulate_client(plan.clone(), client, operation_rx, client_cx);
+ client_tasks.push(foreground_executor.spawn(simulate_client));
log::info!("added connection for {}", username);
}
@@ -514,7 +503,7 @@ impl<T: RandomizedTest> TestPlan<T> {
.collect::<Vec<_>>();
assert_eq!(user_connection_ids.len(), 1);
let removed_peer_id = user_connection_ids[0].into();
- let (client, mut client_cx) = clients.remove(client_ix);
+ let (client, client_cx) = clients.remove(client_ix);
let client_task = client_tasks.remove(client_ix);
operation_channels.remove(client_ix);
server.forbid_connections();
@@ -647,7 +636,7 @@ impl<T: RandomizedTest> TestPlan<T> {
log::error!("{} error: {}", client.username, error);
}
}
- cx.background().simulate_random_delay().await;
+ cx.executor().simulate_random_delay().await;
}
log::info!("{}: done", client.username);
}
@@ -13,9 +13,10 @@ use client::{
use collections::{HashMap, HashSet};
use fs::FakeFs;
use futures::{channel::oneshot, StreamExt as _};
-use gpui::{executor::Deterministic, ModelHandle, Task, TestAppContext, WindowHandle};
+use gpui::{BackgroundExecutor, Context, Model, TestAppContext, View, VisualTestContext};
use language::LanguageRegistry;
use node_runtime::FakeNodeRuntime;
+
use notifications::NotificationStore;
use parking_lot::Mutex;
use project::{Project, WorktreeId};
@@ -46,17 +47,17 @@ pub struct TestServer {
pub struct TestClient {
pub username: String,
pub app_state: Arc<workspace::AppState>,
- channel_store: ModelHandle<ChannelStore>,
- notification_store: ModelHandle<NotificationStore>,
+ channel_store: Model<ChannelStore>,
+ notification_store: Model<NotificationStore>,
state: RefCell<TestClientState>,
}
#[derive(Default)]
struct TestClientState {
- local_projects: Vec<ModelHandle<Project>>,
- remote_projects: Vec<ModelHandle<Project>>,
- buffers: HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>,
- channel_buffers: HashSet<ModelHandle<ChannelBuffer>>,
+ local_projects: Vec<Model<Project>>,
+ remote_projects: Vec<Model<Project>>,
+ buffers: HashMap<Model<Project>, HashSet<Model<language::Buffer>>>,
+ channel_buffers: HashSet<Model<ChannelBuffer>>,
}
pub struct ContactsSummary {
@@ -66,22 +67,22 @@ pub struct ContactsSummary {
}
impl TestServer {
- pub async fn start(deterministic: &Arc<Deterministic>) -> Self {
+ pub async fn start(deterministic: BackgroundExecutor) -> Self {
static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
let use_postgres = env::var("USE_POSTGRES").ok();
let use_postgres = use_postgres.as_deref();
let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
- TestDb::postgres(deterministic.build_background())
+ TestDb::postgres(deterministic.clone())
} else {
- TestDb::sqlite(deterministic.build_background())
+ TestDb::sqlite(deterministic.clone())
};
let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
let live_kit_server = live_kit_client::TestServer::create(
format!("http://livekit.{}.test", live_kit_server_id),
format!("devkey-{}", live_kit_server_id),
format!("secret-{}", live_kit_server_id),
- deterministic.build_background(),
+ deterministic.clone(),
)
.unwrap();
let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
@@ -93,7 +94,7 @@ impl TestServer {
let server = Server::new(
epoch,
app_state.clone(),
- Executor::Deterministic(deterministic.build_background()),
+ Executor::Deterministic(deterministic.clone()),
);
server.start().await.unwrap();
// Advance clock to ensure the server's cleanup task is finished.
@@ -124,8 +125,8 @@ impl TestServer {
if cx.has_global::<SettingsStore>() {
panic!("Same cx used to create two test clients")
}
-
- cx.set_global(SettingsStore::test(cx));
+ let settings = SettingsStore::test(cx);
+ cx.set_global(settings);
});
let http = FakeHttpClient::with_404_response();
@@ -148,7 +149,7 @@ impl TestServer {
.user_id
};
let client_name = name.to_string();
- let mut client = cx.read(|cx| Client::new(http.clone(), cx));
+ let mut client = cx.update(|cx| Client::new(http.clone(), cx));
let server = self.server.clone();
let db = self.app_state.db.clone();
let connection_killers = self.connection_killers.clone();
@@ -182,20 +183,20 @@ impl TestServer {
)))
} else {
let (client_conn, server_conn, killed) =
- Connection::in_memory(cx.background());
+ Connection::in_memory(cx.background_executor().clone());
let (connection_id_tx, connection_id_rx) = oneshot::channel();
let user = db
.get_user_by_id(user_id)
.await
.expect("retrieving user failed")
.unwrap();
- cx.background()
+ cx.background_executor()
.spawn(server.handle_connection(
server_conn,
client_name,
user,
Some(connection_id_tx),
- Executor::Deterministic(cx.background()),
+ Executor::Deterministic(cx.background_executor().clone()),
))
.detach();
let connection_id = connection_id_rx.await.unwrap();
@@ -207,11 +208,11 @@ impl TestServer {
})
});
- let fs = FakeFs::new(cx.background());
- let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http, cx));
- let workspace_store = cx.add_model(|cx| WorkspaceStore::new(client.clone(), cx));
+ let fs = FakeFs::new(cx.executor());
+ let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
+ let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
let mut language_registry = LanguageRegistry::test();
- language_registry.set_executor(cx.background());
+ language_registry.set_executor(cx.executor());
let app_state = Arc::new(workspace::AppState {
client: client.clone(),
user_store: user_store.clone(),
@@ -219,13 +220,11 @@ impl TestServer {
languages: Arc::new(language_registry),
fs: fs.clone(),
build_window_options: |_, _, _| Default::default(),
- initialize_workspace: |_, _, _, _| Task::ready(Ok(())),
- background_actions: || &[],
node_runtime: FakeNodeRuntime::new(),
});
cx.update(|cx| {
- theme::init((), cx);
+ theme::init(theme::LoadThemes::JustBase, cx);
Project::init(&client, cx);
client::init(&client, cx);
language::init(cx);
@@ -264,7 +263,7 @@ impl TestServer {
pub fn simulate_long_connection_interruption(
&self,
peer_id: PeerId,
- deterministic: &Arc<Deterministic>,
+ deterministic: BackgroundExecutor,
) {
self.forbid_connections();
self.disconnect_client(peer_id);
@@ -295,7 +294,7 @@ impl TestServer {
})
.await
.unwrap();
- cx_a.foreground().run_until_parked();
+ cx_a.executor().run_until_parked();
client_b
.app_state
.user_store
@@ -338,7 +337,7 @@ impl TestServer {
.await
.unwrap();
- admin_cx.foreground().run_until_parked();
+ admin_cx.executor().run_until_parked();
member_cx
.read(ChannelStore::global)
@@ -399,7 +398,7 @@ impl TestServer {
.await
.unwrap();
- cx_b.foreground().run_until_parked();
+ cx_b.executor().run_until_parked();
let active_call_b = cx_b.read(ActiveCall::global);
active_call_b
.update(*cx_b, |call, cx| call.accept_incoming(cx))
@@ -448,15 +447,15 @@ impl TestClient {
self.app_state.fs.as_fake()
}
- pub fn channel_store(&self) -> &ModelHandle<ChannelStore> {
+ pub fn channel_store(&self) -> &Model<ChannelStore> {
&self.channel_store
}
- pub fn notification_store(&self) -> &ModelHandle<NotificationStore> {
+ pub fn notification_store(&self) -> &Model<NotificationStore> {
&self.notification_store
}
- pub fn user_store(&self) -> &ModelHandle<UserStore> {
+ pub fn user_store(&self) -> &Model<UserStore> {
&self.app_state.user_store
}
@@ -491,30 +490,26 @@ impl TestClient {
.await;
}
- pub fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
+ pub fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<Model<Project>>> + 'a {
Ref::map(self.state.borrow(), |state| &state.local_projects)
}
- pub fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<ModelHandle<Project>>> + 'a {
+ pub fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<Model<Project>>> + 'a {
Ref::map(self.state.borrow(), |state| &state.remote_projects)
}
- pub fn local_projects_mut<'a>(
- &'a self,
- ) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
+ pub fn local_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<Model<Project>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
}
- pub fn remote_projects_mut<'a>(
- &'a self,
- ) -> impl DerefMut<Target = Vec<ModelHandle<Project>>> + 'a {
+ pub fn remote_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<Model<Project>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
}
pub fn buffers_for_project<'a>(
&'a self,
- project: &ModelHandle<Project>,
- ) -> impl DerefMut<Target = HashSet<ModelHandle<language::Buffer>>> + 'a {
+ project: &Model<Project>,
+ ) -> impl DerefMut<Target = HashSet<Model<language::Buffer>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| {
state.buffers.entry(project.clone()).or_default()
})
@@ -522,14 +517,14 @@ impl TestClient {
pub fn buffers<'a>(
&'a self,
- ) -> impl DerefMut<Target = HashMap<ModelHandle<Project>, HashSet<ModelHandle<language::Buffer>>>> + 'a
+ ) -> impl DerefMut<Target = HashMap<Model<Project>, HashSet<Model<language::Buffer>>>> + 'a
{
RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
}
pub fn channel_buffers<'a>(
&'a self,
- ) -> impl DerefMut<Target = HashSet<ModelHandle<ChannelBuffer>>> + 'a {
+ ) -> impl DerefMut<Target = HashSet<Model<ChannelBuffer>>> + 'a {
RefMut::map(self.state.borrow_mut(), |state| &mut state.channel_buffers)
}
@@ -559,7 +554,7 @@ impl TestClient {
&self,
root_path: impl AsRef<Path>,
cx: &mut TestAppContext,
- ) -> (ModelHandle<Project>, WorktreeId) {
+ ) -> (Model<Project>, WorktreeId) {
let project = self.build_empty_local_project(cx);
let (worktree, _) = project
.update(cx, |p, cx| {
@@ -573,7 +568,7 @@ impl TestClient {
(project, worktree.read_with(cx, |tree, _| tree.id()))
}
- pub fn build_empty_local_project(&self, cx: &mut TestAppContext) -> ModelHandle<Project> {
+ pub fn build_empty_local_project(&self, cx: &mut TestAppContext) -> Model<Project> {
cx.update(|cx| {
Project::local(
self.client().clone(),
@@ -590,7 +585,7 @@ impl TestClient {
&self,
host_project_id: u64,
guest_cx: &mut TestAppContext,
- ) -> ModelHandle<Project> {
+ ) -> Model<Project> {
let active_call = guest_cx.read(ActiveCall::global);
let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
room.update(guest_cx, |room, cx| {
@@ -605,12 +600,12 @@ impl TestClient {
.unwrap()
}
- pub fn build_workspace(
- &self,
- project: &ModelHandle<Project>,
- cx: &mut TestAppContext,
- ) -> WindowHandle<Workspace> {
- cx.add_window(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
+ pub fn build_workspace<'a>(
+ &'a self,
+ project: &Model<Project>,
+ cx: &'a mut TestAppContext,
+ ) -> (View<Workspace>, &'a mut VisualTestContext) {
+ cx.add_window_view(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
}
}
@@ -1,12 +0,0 @@
-DATABASE_URL = "postgres://postgres@localhost/zed"
-DATABASE_MAX_CONNECTIONS = 5
-HTTP_PORT = 8080
-API_TOKEN = "secret"
-INVITE_LINK_PREFIX = "http://localhost:3000/invites/"
-ZED_ENVIRONMENT = "development"
-LIVE_KIT_SERVER = "http://localhost:7880"
-LIVE_KIT_KEY = "devkey"
-LIVE_KIT_SECRET = "secret"
-
-# RUST_LOG=info
-# LOG_JSON=true
@@ -1,99 +0,0 @@
-[package]
-authors = ["Nathan Sobo <nathan@zed.dev>"]
-default-run = "collab2"
-edition = "2021"
-name = "collab2"
-version = "0.28.0"
-publish = false
-
-[[bin]]
-name = "collab2"
-
-[[bin]]
-name = "seed2"
-required-features = ["seed-support"]
-
-[dependencies]
-clock = { path = "../clock" }
-collections = { path = "../collections" }
-live_kit_server = { path = "../live_kit_server" }
-text = { package = "text2", path = "../text2" }
-rpc = { path = "../rpc" }
-util = { path = "../util" }
-
-anyhow.workspace = true
-async-tungstenite = "0.16"
-axum = { version = "0.5", features = ["json", "headers", "ws"] }
-axum-extra = { version = "0.3", features = ["erased-json"] }
-base64 = "0.13"
-clap = { version = "3.1", features = ["derive"], optional = true }
-dashmap = "5.4"
-envy = "0.4.2"
-futures.workspace = true
-hyper = "0.14"
-lazy_static.workspace = true
-lipsum = { version = "0.8", optional = true }
-log.workspace = true
-nanoid = "0.4"
-parking_lot.workspace = true
-prometheus = "0.13"
-prost.workspace = true
-rand.workspace = true
-reqwest = { version = "0.11", features = ["json"], optional = true }
-scrypt = "0.7"
-smallvec.workspace = true
-sea-orm = { version = "0.12.x", features = ["sqlx-postgres", "postgres-array", "runtime-tokio-rustls", "with-uuid"] }
-serde.workspace = true
-serde_derive.workspace = true
-serde_json.workspace = true
-sha-1 = "0.9"
-sqlx = { version = "0.7", features = ["runtime-tokio-rustls", "postgres", "json", "time", "uuid", "any"] }
-time.workspace = true
-tokio = { version = "1", features = ["full"] }
-tokio-tungstenite = "0.17"
-tonic = "0.6"
-tower = "0.4"
-toml.workspace = true
-tracing = "0.1.34"
-tracing-log = "0.1.3"
-tracing-subscriber = { version = "0.3.11", features = ["env-filter", "json"] }
-uuid.workspace = true
-
-[dev-dependencies]
-audio = { path = "../audio" }
-collections = { path = "../collections", features = ["test-support"] }
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
-call = { path = "../call", features = ["test-support"] }
-client = { path = "../client", features = ["test-support"] }
-channel = { path = "../channel" }
-editor = { path = "../editor", features = ["test-support"] }
-language = { path = "../language", features = ["test-support"] }
-fs = { path = "../fs", features = ["test-support"] }
-git = { path = "../git", features = ["test-support"] }
-live_kit_client = { package = "live_kit_client2", path = "../live_kit_client2", features = ["test-support"] }
-lsp = { path = "../lsp", features = ["test-support"] }
-node_runtime = { path = "../node_runtime" }
-notifications = { path = "../notifications", features = ["test-support"] }
-
-project = { path = "../project", features = ["test-support"] }
-rpc = { path = "../rpc", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
-theme = { package = "theme2", path = "../theme2" }
-workspace = { path = "../workspace", features = ["test-support"] }
-
-collab_ui = { path = "../collab_ui", features = ["test-support"] }
-
-async-trait.workspace = true
-pretty_assertions.workspace = true
-ctor.workspace = true
-env_logger.workspace = true
-indoc.workspace = true
-util = { path = "../util" }
-lazy_static.workspace = true
-sea-orm = { version = "0.12.x", features = ["sqlx-sqlite"] }
-serde_json.workspace = true
-sqlx = { version = "0.7", features = ["sqlite"] }
-unindent.workspace = true
-
-[features]
-seed-support = ["clap", "lipsum", "reqwest"]
@@ -1,5 +0,0 @@
-# Zed Server
-
-This crate is what we run at https://collab.zed.dev.
-
-It contains our back-end logic for collaboration, to which we connect from the Zed client via a websocket after authenticating via https://zed.dev, which is a separate repo running on Vercel.
@@ -1,4 +0,0 @@
-db-uri = "postgres://postgres@localhost/zed"
-server-port = 8081
-jwt-secret = "the-postgrest-jwt-secret-for-authorization"
-log-level = "info"
@@ -1,12 +0,0 @@
-
-[Interface]
-PrivateKey = B5Fp/yVfP0QYlb+YJv9ea+EMI1mWODPD3akh91cVjvc=
-Address = fdaa:0:2ce3:a7b:bea:0:a:2/120
-DNS = fdaa:0:2ce3::3
-
-[Peer]
-PublicKey = RKAYPljEJiuaELNDdQIEJmQienT9+LRISfIHwH45HAw=
-AllowedIPs = fdaa:0:2ce3::/48
-Endpoint = ord1.gateway.6pn.dev:51820
-PersistentKeepalive = 15
-
@@ -1,344 +0,0 @@
-CREATE TABLE "users" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "github_login" VARCHAR,
- "admin" BOOLEAN,
- "email_address" VARCHAR(255) DEFAULT NULL,
- "invite_code" VARCHAR(64),
- "invite_count" INTEGER NOT NULL DEFAULT 0,
- "inviter_id" INTEGER REFERENCES users (id),
- "connected_once" BOOLEAN NOT NULL DEFAULT false,
- "created_at" TIMESTAMP NOT NULL DEFAULT now,
- "metrics_id" TEXT,
- "github_user_id" INTEGER
-);
-CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
-CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
-CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
-CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
-
-CREATE TABLE "access_tokens" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "user_id" INTEGER REFERENCES users (id),
- "hash" VARCHAR(128)
-);
-CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
-
-CREATE TABLE "contacts" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "user_id_a" INTEGER REFERENCES users (id) NOT NULL,
- "user_id_b" INTEGER REFERENCES users (id) NOT NULL,
- "a_to_b" BOOLEAN NOT NULL,
- "should_notify" BOOLEAN NOT NULL,
- "accepted" BOOLEAN NOT NULL
-);
-CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
-CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
-
-CREATE TABLE "rooms" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "live_kit_room" VARCHAR NOT NULL,
- "enviroment" VARCHAR,
- "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE
-);
-CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
-
-CREATE TABLE "projects" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "room_id" INTEGER REFERENCES rooms (id) ON DELETE CASCADE NOT NULL,
- "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
- "host_connection_id" INTEGER,
- "host_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
- "unregistered" BOOLEAN NOT NULL DEFAULT FALSE
-);
-CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
-CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
-
-CREATE TABLE "worktrees" (
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "id" INTEGER NOT NULL,
- "root_name" VARCHAR NOT NULL,
- "abs_path" VARCHAR NOT NULL,
- "visible" BOOL NOT NULL,
- "scan_id" INTEGER NOT NULL,
- "is_complete" BOOL NOT NULL DEFAULT FALSE,
- "completed_scan_id" INTEGER NOT NULL,
- PRIMARY KEY(project_id, id)
-);
-CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
-
-CREATE TABLE "worktree_entries" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INTEGER NOT NULL,
- "scan_id" INTEGER NOT NULL,
- "id" INTEGER NOT NULL,
- "is_dir" BOOL NOT NULL,
- "path" VARCHAR NOT NULL,
- "inode" INTEGER NOT NULL,
- "mtime_seconds" INTEGER NOT NULL,
- "mtime_nanos" INTEGER NOT NULL,
- "is_symlink" BOOL NOT NULL,
- "is_external" BOOL NOT NULL,
- "is_ignored" BOOL NOT NULL,
- "is_deleted" BOOL NOT NULL,
- "git_status" INTEGER,
- PRIMARY KEY(project_id, worktree_id, id),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
-CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
-
-CREATE TABLE "worktree_repositories" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INTEGER NOT NULL,
- "work_directory_id" INTEGER NOT NULL,
- "branch" VARCHAR,
- "scan_id" INTEGER NOT NULL,
- "is_deleted" BOOL NOT NULL,
- PRIMARY KEY(project_id, worktree_id, work_directory_id),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
- FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
-CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
-
-CREATE TABLE "worktree_settings_files" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INTEGER NOT NULL,
- "path" VARCHAR NOT NULL,
- "content" TEXT,
- PRIMARY KEY(project_id, worktree_id, path),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_settings_files_on_project_id" ON "worktree_settings_files" ("project_id");
-CREATE INDEX "index_worktree_settings_files_on_project_id_and_worktree_id" ON "worktree_settings_files" ("project_id", "worktree_id");
-
-CREATE TABLE "worktree_diagnostic_summaries" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INTEGER NOT NULL,
- "path" VARCHAR NOT NULL,
- "language_server_id" INTEGER NOT NULL,
- "error_count" INTEGER NOT NULL,
- "warning_count" INTEGER NOT NULL,
- PRIMARY KEY(project_id, worktree_id, path),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
-CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
-
-CREATE TABLE "language_servers" (
- "id" INTEGER NOT NULL,
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "name" VARCHAR NOT NULL,
- PRIMARY KEY(project_id, id)
-);
-CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
-
-CREATE TABLE "project_collaborators" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "connection_id" INTEGER NOT NULL,
- "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "user_id" INTEGER NOT NULL,
- "replica_id" INTEGER NOT NULL,
- "is_host" BOOLEAN NOT NULL
-);
-CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
-CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
-CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id");
-CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
-CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id");
-
-CREATE TABLE "room_participants" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "room_id" INTEGER NOT NULL REFERENCES rooms (id),
- "user_id" INTEGER NOT NULL REFERENCES users (id),
- "answering_connection_id" INTEGER,
- "answering_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE CASCADE,
- "answering_connection_lost" BOOLEAN NOT NULL,
- "location_kind" INTEGER,
- "location_project_id" INTEGER,
- "initial_project_id" INTEGER,
- "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
- "calling_connection_id" INTEGER NOT NULL,
- "calling_connection_server_id" INTEGER REFERENCES servers (id) ON DELETE SET NULL,
- "participant_index" INTEGER
-);
-CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
-CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id");
-CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id");
-CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id");
-CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
-CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id");
-
-CREATE TABLE "servers" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "environment" VARCHAR NOT NULL
-);
-
-CREATE TABLE "followers" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "room_id" INTEGER NOT NULL REFERENCES rooms (id) ON DELETE CASCADE,
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "leader_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "leader_connection_id" INTEGER NOT NULL,
- "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "follower_connection_id" INTEGER NOT NULL
-);
-CREATE UNIQUE INDEX
- "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id"
-ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id");
-CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id");
-
-CREATE TABLE "channels" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "name" VARCHAR NOT NULL,
- "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
- "visibility" VARCHAR NOT NULL,
- "parent_path" TEXT
-);
-
-CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path");
-
-CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "user_id" INTEGER NOT NULL REFERENCES users (id),
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "connection_id" INTEGER NOT NULL,
- "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE
-);
-CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");
-
-CREATE TABLE IF NOT EXISTS "channel_messages" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "sender_id" INTEGER NOT NULL REFERENCES users (id),
- "body" TEXT NOT NULL,
- "sent_at" TIMESTAMP,
- "nonce" BLOB NOT NULL
-);
-CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");
-CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");
-
-CREATE TABLE "channel_message_mentions" (
- "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
- "start_offset" INTEGER NOT NULL,
- "end_offset" INTEGER NOT NULL,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- PRIMARY KEY(message_id, start_offset)
-);
-
-CREATE TABLE "channel_members" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "admin" BOOLEAN NOT NULL DEFAULT false,
- "role" VARCHAR,
- "accepted" BOOLEAN NOT NULL DEFAULT false,
- "updated_at" TIMESTAMP NOT NULL DEFAULT now
-);
-
-CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id");
-
-CREATE TABLE "buffers" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL DEFAULT 0
-);
-
-CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
-
-CREATE TABLE "buffer_operations" (
- "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL,
- "replica_id" INTEGER NOT NULL,
- "lamport_timestamp" INTEGER NOT NULL,
- "value" BLOB NOT NULL,
- PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
-);
-
-CREATE TABLE "buffer_snapshots" (
- "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL,
- "text" TEXT NOT NULL,
- "operation_serialization_version" INTEGER NOT NULL,
- PRIMARY KEY(buffer_id, epoch)
-);
-
-CREATE TABLE "channel_buffer_collaborators" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "connection_id" INTEGER NOT NULL,
- "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "connection_lost" BOOLEAN NOT NULL DEFAULT false,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "replica_id" INTEGER NOT NULL
-);
-
-CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
-CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
-CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
-CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
-CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
-
-
-CREATE TABLE "feature_flags" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "flag" TEXT NOT NULL UNIQUE
-);
-
-CREATE INDEX "index_feature_flags" ON "feature_flags" ("id");
-
-
-CREATE TABLE "user_features" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "feature_id" INTEGER NOT NULL REFERENCES feature_flags (id) ON DELETE CASCADE,
- PRIMARY KEY (user_id, feature_id)
-);
-
-CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
-CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
-CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
-
-
-CREATE TABLE "observed_buffer_edits" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL,
- "lamport_timestamp" INTEGER NOT NULL,
- "replica_id" INTEGER NOT NULL,
- PRIMARY KEY (user_id, buffer_id)
-);
-
-CREATE UNIQUE INDEX "index_observed_buffers_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");
-
-CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "channel_message_id" INTEGER NOT NULL,
- PRIMARY KEY (user_id, channel_id)
-);
-
-CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");
-
-CREATE TABLE "notification_kinds" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "name" VARCHAR NOT NULL
-);
-
-CREATE UNIQUE INDEX "index_notification_kinds_on_name" ON "notification_kinds" ("name");
-
-CREATE TABLE "notifications" (
- "id" INTEGER PRIMARY KEY AUTOINCREMENT,
- "created_at" TIMESTAMP NOT NULL default CURRENT_TIMESTAMP,
- "recipient_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "kind" INTEGER NOT NULL REFERENCES notification_kinds (id),
- "entity_id" INTEGER,
- "content" TEXT,
- "is_read" BOOLEAN NOT NULL DEFAULT FALSE,
- "response" BOOLEAN
-);
-
-CREATE INDEX
- "index_notifications_on_recipient_id_is_read_kind_entity_id"
- ON "notifications"
- ("recipient_id", "is_read", "kind", "entity_id");
@@ -1,20 +0,0 @@
-CREATE TABLE IF NOT EXISTS "sessions" (
- "id" VARCHAR NOT NULL PRIMARY KEY,
- "expires" TIMESTAMP WITH TIME ZONE NULL,
- "session" TEXT NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS "users" (
- "id" SERIAL PRIMARY KEY,
- "github_login" VARCHAR,
- "admin" BOOLEAN
-);
-
-CREATE UNIQUE INDEX "index_users_github_login" ON "users" ("github_login");
-
-CREATE TABLE IF NOT EXISTS "signups" (
- "id" SERIAL PRIMARY KEY,
- "github_login" VARCHAR,
- "email_address" VARCHAR,
- "about" TEXT
-);
@@ -1,7 +0,0 @@
-CREATE TABLE IF NOT EXISTS "access_tokens" (
- "id" SERIAL PRIMARY KEY,
- "user_id" INTEGER REFERENCES users (id),
- "hash" VARCHAR(128)
-);
-
-CREATE INDEX "index_access_tokens_user_id" ON "access_tokens" ("user_id");
@@ -1,46 +0,0 @@
-CREATE TABLE IF NOT EXISTS "orgs" (
- "id" SERIAL PRIMARY KEY,
- "name" VARCHAR NOT NULL,
- "slug" VARCHAR NOT NULL
-);
-
-CREATE UNIQUE INDEX "index_orgs_slug" ON "orgs" ("slug");
-
-CREATE TABLE IF NOT EXISTS "org_memberships" (
- "id" SERIAL PRIMARY KEY,
- "org_id" INTEGER REFERENCES orgs (id) NOT NULL,
- "user_id" INTEGER REFERENCES users (id) NOT NULL,
- "admin" BOOLEAN NOT NULL
-);
-
-CREATE INDEX "index_org_memberships_user_id" ON "org_memberships" ("user_id");
-CREATE UNIQUE INDEX "index_org_memberships_org_id_and_user_id" ON "org_memberships" ("org_id", "user_id");
-
-CREATE TABLE IF NOT EXISTS "channels" (
- "id" SERIAL PRIMARY KEY,
- "owner_id" INTEGER NOT NULL,
- "owner_is_user" BOOLEAN NOT NULL,
- "name" VARCHAR NOT NULL
-);
-
-CREATE UNIQUE INDEX "index_channels_owner_and_name" ON "channels" ("owner_is_user", "owner_id", "name");
-
-CREATE TABLE IF NOT EXISTS "channel_memberships" (
- "id" SERIAL PRIMARY KEY,
- "channel_id" INTEGER REFERENCES channels (id) NOT NULL,
- "user_id" INTEGER REFERENCES users (id) NOT NULL,
- "admin" BOOLEAN NOT NULL
-);
-
-CREATE INDEX "index_channel_memberships_user_id" ON "channel_memberships" ("user_id");
-CREATE UNIQUE INDEX "index_channel_memberships_channel_id_and_user_id" ON "channel_memberships" ("channel_id", "user_id");
-
-CREATE TABLE IF NOT EXISTS "channel_messages" (
- "id" SERIAL PRIMARY KEY,
- "channel_id" INTEGER REFERENCES channels (id) NOT NULL,
- "sender_id" INTEGER REFERENCES users (id) NOT NULL,
- "body" TEXT NOT NULL,
- "sent_at" TIMESTAMP
-);
-
-CREATE INDEX "index_channel_messages_channel_id" ON "channel_messages" ("channel_id");
@@ -1,4 +0,0 @@
-ALTER TABLE "channel_messages"
-ADD "nonce" UUID NOT NULL DEFAULT gen_random_uuid();
-
-CREATE UNIQUE INDEX "index_channel_messages_nonce" ON "channel_messages" ("nonce");
@@ -1,4 +0,0 @@
-ALTER TABLE "signups"
- ADD "wants_releases" BOOLEAN,
- ADD "wants_updates" BOOLEAN,
- ADD "wants_community" BOOLEAN;
@@ -1 +0,0 @@
-DROP TABLE IF EXISTS "signups";
@@ -1,2 +0,0 @@
-CREATE EXTENSION IF NOT EXISTS pg_trgm;
-CREATE INDEX trigram_index_users_on_github_login ON users USING GIN(github_login gin_trgm_ops);
@@ -1,11 +0,0 @@
-CREATE TABLE IF NOT EXISTS "contacts" (
- "id" SERIAL PRIMARY KEY,
- "user_id_a" INTEGER REFERENCES users (id) NOT NULL,
- "user_id_b" INTEGER REFERENCES users (id) NOT NULL,
- "a_to_b" BOOLEAN NOT NULL,
- "should_notify" BOOLEAN NOT NULL,
- "accepted" BOOLEAN NOT NULL
-);
-
-CREATE UNIQUE INDEX "index_contacts_user_ids" ON "contacts" ("user_id_a", "user_id_b");
-CREATE INDEX "index_contacts_user_id_b" ON "contacts" ("user_id_b");
@@ -1,9 +0,0 @@
-ALTER TABLE users
-ADD email_address VARCHAR(255) DEFAULT NULL,
-ADD invite_code VARCHAR(64),
-ADD invite_count INTEGER NOT NULL DEFAULT 0,
-ADD inviter_id INTEGER REFERENCES users (id),
-ADD connected_once BOOLEAN NOT NULL DEFAULT false,
-ADD created_at TIMESTAMP NOT NULL DEFAULT NOW();
-
-CREATE UNIQUE INDEX "index_invite_code_users" ON "users" ("invite_code");
@@ -1,6 +0,0 @@
-ALTER TABLE contacts DROP CONSTRAINT contacts_user_id_a_fkey;
-ALTER TABLE contacts DROP CONSTRAINT contacts_user_id_b_fkey;
-ALTER TABLE contacts ADD CONSTRAINT contacts_user_id_a_fkey FOREIGN KEY (user_id_a) REFERENCES users(id) ON DELETE CASCADE;
-ALTER TABLE contacts ADD CONSTRAINT contacts_user_id_b_fkey FOREIGN KEY (user_id_b) REFERENCES users(id) ON DELETE CASCADE;
-ALTER TABLE users DROP CONSTRAINT users_inviter_id_fkey;
-ALTER TABLE users ADD CONSTRAINT users_inviter_id_fkey FOREIGN KEY (inviter_id) REFERENCES users(id) ON DELETE SET NULL;
@@ -1,24 +0,0 @@
-CREATE TABLE IF NOT EXISTS "projects" (
- "id" SERIAL PRIMARY KEY,
- "host_user_id" INTEGER REFERENCES users (id) NOT NULL,
- "unregistered" BOOLEAN NOT NULL DEFAULT false
-);
-
-CREATE TABLE IF NOT EXISTS "worktree_extensions" (
- "id" SERIAL PRIMARY KEY,
- "project_id" INTEGER REFERENCES projects (id) NOT NULL,
- "worktree_id" INTEGER NOT NULL,
- "extension" VARCHAR(255),
- "count" INTEGER NOT NULL
-);
-
-CREATE TABLE IF NOT EXISTS "project_activity_periods" (
- "id" SERIAL PRIMARY KEY,
- "duration_millis" INTEGER NOT NULL,
- "ended_at" TIMESTAMP NOT NULL,
- "user_id" INTEGER REFERENCES users (id) NOT NULL,
- "project_id" INTEGER REFERENCES projects (id) NOT NULL
-);
-
-CREATE INDEX "index_project_activity_periods_on_ended_at" ON "project_activity_periods" ("ended_at");
-CREATE UNIQUE INDEX "index_worktree_extensions_on_project_id_and_worktree_id_and_extension" ON "worktree_extensions" ("project_id", "worktree_id", "extension");
@@ -1,27 +0,0 @@
-CREATE TABLE IF NOT EXISTS "signups" (
- "id" SERIAL PRIMARY KEY,
- "email_address" VARCHAR NOT NULL,
- "email_confirmation_code" VARCHAR(64) NOT NULL,
- "email_confirmation_sent" BOOLEAN NOT NULL,
- "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
- "device_id" VARCHAR,
- "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE,
- "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL,
-
- "platform_mac" BOOLEAN NOT NULL,
- "platform_linux" BOOLEAN NOT NULL,
- "platform_windows" BOOLEAN NOT NULL,
- "platform_unknown" BOOLEAN NOT NULL,
-
- "editor_features" VARCHAR[],
- "programming_languages" VARCHAR[]
-);
-
-CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address");
-CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent");
-
-ALTER TABLE "users"
- ADD "github_user_id" INTEGER;
-
-CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
-CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
@@ -1,2 +0,0 @@
-ALTER TABLE "users"
- ADD "metrics_id" uuid NOT NULL DEFAULT gen_random_uuid();
@@ -1,90 +0,0 @@
-CREATE TABLE IF NOT EXISTS "rooms" (
- "id" SERIAL PRIMARY KEY,
- "live_kit_room" VARCHAR NOT NULL
-);
-
-ALTER TABLE "projects"
- ADD "room_id" INTEGER REFERENCES rooms (id),
- ADD "host_connection_id" INTEGER,
- ADD "host_connection_epoch" UUID;
-CREATE INDEX "index_projects_on_host_connection_epoch" ON "projects" ("host_connection_epoch");
-
-CREATE TABLE "worktrees" (
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "id" INT8 NOT NULL,
- "root_name" VARCHAR NOT NULL,
- "abs_path" VARCHAR NOT NULL,
- "visible" BOOL NOT NULL,
- "scan_id" INT8 NOT NULL,
- "is_complete" BOOL NOT NULL,
- PRIMARY KEY(project_id, id)
-);
-CREATE INDEX "index_worktrees_on_project_id" ON "worktrees" ("project_id");
-
-CREATE TABLE "worktree_entries" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INT8 NOT NULL,
- "id" INT8 NOT NULL,
- "is_dir" BOOL NOT NULL,
- "path" VARCHAR NOT NULL,
- "inode" INT8 NOT NULL,
- "mtime_seconds" INT8 NOT NULL,
- "mtime_nanos" INTEGER NOT NULL,
- "is_symlink" BOOL NOT NULL,
- "is_ignored" BOOL NOT NULL,
- PRIMARY KEY(project_id, worktree_id, id),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_entries_on_project_id" ON "worktree_entries" ("project_id");
-CREATE INDEX "index_worktree_entries_on_project_id_and_worktree_id" ON "worktree_entries" ("project_id", "worktree_id");
-
-CREATE TABLE "worktree_diagnostic_summaries" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INT8 NOT NULL,
- "path" VARCHAR NOT NULL,
- "language_server_id" INT8 NOT NULL,
- "error_count" INTEGER NOT NULL,
- "warning_count" INTEGER NOT NULL,
- PRIMARY KEY(project_id, worktree_id, path),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id" ON "worktree_diagnostic_summaries" ("project_id");
-CREATE INDEX "index_worktree_diagnostic_summaries_on_project_id_and_worktree_id" ON "worktree_diagnostic_summaries" ("project_id", "worktree_id");
-
-CREATE TABLE "language_servers" (
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "id" INT8 NOT NULL,
- "name" VARCHAR NOT NULL,
- PRIMARY KEY(project_id, id)
-);
-CREATE INDEX "index_language_servers_on_project_id" ON "language_servers" ("project_id");
-
-CREATE TABLE "project_collaborators" (
- "id" SERIAL PRIMARY KEY,
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "connection_id" INTEGER NOT NULL,
- "connection_epoch" UUID NOT NULL,
- "user_id" INTEGER NOT NULL,
- "replica_id" INTEGER NOT NULL,
- "is_host" BOOLEAN NOT NULL
-);
-CREATE INDEX "index_project_collaborators_on_project_id" ON "project_collaborators" ("project_id");
-CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_and_replica_id" ON "project_collaborators" ("project_id", "replica_id");
-CREATE INDEX "index_project_collaborators_on_connection_epoch" ON "project_collaborators" ("connection_epoch");
-
-CREATE TABLE "room_participants" (
- "id" SERIAL PRIMARY KEY,
- "room_id" INTEGER NOT NULL REFERENCES rooms (id),
- "user_id" INTEGER NOT NULL REFERENCES users (id),
- "answering_connection_id" INTEGER,
- "answering_connection_epoch" UUID,
- "location_kind" INTEGER,
- "location_project_id" INTEGER,
- "initial_project_id" INTEGER,
- "calling_user_id" INTEGER NOT NULL REFERENCES users (id),
- "calling_connection_id" INTEGER NOT NULL,
- "calling_connection_epoch" UUID NOT NULL
-);
-CREATE UNIQUE INDEX "index_room_participants_on_user_id" ON "room_participants" ("user_id");
-CREATE INDEX "index_room_participants_on_answering_connection_epoch" ON "room_participants" ("answering_connection_epoch");
-CREATE INDEX "index_room_participants_on_calling_connection_epoch" ON "room_participants" ("calling_connection_epoch");
@@ -1,2 +0,0 @@
-ALTER TABLE "signups"
- ADD "added_to_mailing_list" BOOLEAN NOT NULL DEFAULT FALSE;
@@ -1,7 +0,0 @@
-ALTER TABLE "room_participants"
- ADD "answering_connection_lost" BOOLEAN NOT NULL DEFAULT FALSE;
-
-CREATE INDEX "index_project_collaborators_on_connection_id" ON "project_collaborators" ("connection_id");
-CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_epoch" ON "project_collaborators" ("project_id", "connection_id", "connection_epoch");
-CREATE INDEX "index_room_participants_on_answering_connection_id" ON "room_participants" ("answering_connection_id");
-CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_epoch" ON "room_participants" ("answering_connection_id", "answering_connection_epoch");
@@ -1 +0,0 @@
-CREATE INDEX "index_room_participants_on_room_id" ON "room_participants" ("room_id");
@@ -1,30 +0,0 @@
-CREATE TABLE servers (
- id SERIAL PRIMARY KEY,
- environment VARCHAR NOT NULL
-);
-
-DROP TABLE worktree_extensions;
-DROP TABLE project_activity_periods;
-DELETE from projects;
-ALTER TABLE projects
- DROP COLUMN host_connection_epoch,
- ADD COLUMN host_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE;
-CREATE INDEX "index_projects_on_host_connection_server_id" ON "projects" ("host_connection_server_id");
-CREATE INDEX "index_projects_on_host_connection_id_and_host_connection_server_id" ON "projects" ("host_connection_id", "host_connection_server_id");
-
-DELETE FROM project_collaborators;
-ALTER TABLE project_collaborators
- DROP COLUMN connection_epoch,
- ADD COLUMN connection_server_id INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE;
-CREATE INDEX "index_project_collaborators_on_connection_server_id" ON "project_collaborators" ("connection_server_id");
-CREATE UNIQUE INDEX "index_project_collaborators_on_project_id_connection_id_and_server_id" ON "project_collaborators" ("project_id", "connection_id", "connection_server_id");
-
-DELETE FROM room_participants;
-ALTER TABLE room_participants
- DROP COLUMN answering_connection_epoch,
- DROP COLUMN calling_connection_epoch,
- ADD COLUMN answering_connection_server_id INTEGER REFERENCES servers (id) ON DELETE CASCADE,
- ADD COLUMN calling_connection_server_id INTEGER REFERENCES servers (id) ON DELETE SET NULL;
-CREATE INDEX "index_room_participants_on_answering_connection_server_id" ON "room_participants" ("answering_connection_server_id");
-CREATE INDEX "index_room_participants_on_calling_connection_server_id" ON "room_participants" ("calling_connection_server_id");
-CREATE UNIQUE INDEX "index_room_participants_on_answering_connection_id_and_answering_connection_server_id" ON "room_participants" ("answering_connection_id", "answering_connection_server_id");
@@ -1,3 +0,0 @@
-ALTER TABLE "worktree_entries"
- ADD COLUMN "scan_id" INT8,
- ADD COLUMN "is_deleted" BOOL;
@@ -1,3 +0,0 @@
-ALTER TABLE worktrees
- ALTER COLUMN is_complete SET DEFAULT FALSE,
- ADD COLUMN completed_scan_id INT8;
@@ -1,15 +0,0 @@
-CREATE TABLE IF NOT EXISTS "followers" (
- "id" SERIAL PRIMARY KEY,
- "room_id" INTEGER NOT NULL REFERENCES rooms (id) ON DELETE CASCADE,
- "project_id" INTEGER NOT NULL REFERENCES projects (id) ON DELETE CASCADE,
- "leader_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "leader_connection_id" INTEGER NOT NULL,
- "follower_connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "follower_connection_id" INTEGER NOT NULL
-);
-
-CREATE UNIQUE INDEX
- "index_followers_on_project_id_and_leader_connection_server_id_and_leader_connection_id_and_follower_connection_server_id_and_follower_connection_id"
-ON "followers" ("project_id", "leader_connection_server_id", "leader_connection_id", "follower_connection_server_id", "follower_connection_id");
-
-CREATE INDEX "index_followers_on_room_id" ON "followers" ("room_id");
@@ -1,13 +0,0 @@
-CREATE TABLE "worktree_repositories" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INT8 NOT NULL,
- "work_directory_id" INT8 NOT NULL,
- "scan_id" INT8 NOT NULL,
- "branch" VARCHAR,
- "is_deleted" BOOL NOT NULL,
- PRIMARY KEY(project_id, worktree_id, work_directory_id),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
- FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_worktree_repositories_on_project_id" ON "worktree_repositories" ("project_id");
-CREATE INDEX "index_worktree_repositories_on_project_id_and_worktree_id" ON "worktree_repositories" ("project_id", "worktree_id");
@@ -1,15 +0,0 @@
-CREATE TABLE "worktree_repository_statuses" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INT8 NOT NULL,
- "work_directory_id" INT8 NOT NULL,
- "repo_path" VARCHAR NOT NULL,
- "status" INT8 NOT NULL,
- "scan_id" INT8 NOT NULL,
- "is_deleted" BOOL NOT NULL,
- PRIMARY KEY(project_id, worktree_id, work_directory_id, repo_path),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE,
- FOREIGN KEY(project_id, worktree_id, work_directory_id) REFERENCES worktree_entries (project_id, worktree_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_wt_repos_statuses_on_project_id" ON "worktree_repository_statuses" ("project_id");
-CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id" ON "worktree_repository_statuses" ("project_id", "worktree_id");
-CREATE INDEX "index_wt_repos_statuses_on_project_id_and_wt_id_and_wd_id" ON "worktree_repository_statuses" ("project_id", "worktree_id", "work_directory_id");
@@ -1,10 +0,0 @@
-CREATE TABLE "worktree_settings_files" (
- "project_id" INTEGER NOT NULL,
- "worktree_id" INT8 NOT NULL,
- "path" VARCHAR NOT NULL,
- "content" TEXT NOT NULL,
- PRIMARY KEY(project_id, worktree_id, path),
- FOREIGN KEY(project_id, worktree_id) REFERENCES worktrees (project_id, id) ON DELETE CASCADE
-);
-CREATE INDEX "index_settings_files_on_project_id" ON "worktree_settings_files" ("project_id");
-CREATE INDEX "index_settings_files_on_project_id_and_wt_id" ON "worktree_settings_files" ("project_id", "worktree_id");
@@ -1,2 +0,0 @@
-ALTER TABLE "worktree_entries"
-ADD "git_status" INT8;
@@ -1,2 +0,0 @@
-ALTER TABLE "worktree_entries"
-ADD "is_external" BOOL NOT NULL DEFAULT FALSE;
@@ -1,30 +0,0 @@
-DROP TABLE "channel_messages";
-DROP TABLE "channel_memberships";
-DROP TABLE "org_memberships";
-DROP TABLE "orgs";
-DROP TABLE "channels";
-
-CREATE TABLE "channels" (
- "id" SERIAL PRIMARY KEY,
- "name" VARCHAR NOT NULL,
- "created_at" TIMESTAMP NOT NULL DEFAULT now()
-);
-
-CREATE TABLE "channel_paths" (
- "id_path" VARCHAR NOT NULL PRIMARY KEY,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE
-);
-CREATE INDEX "index_channel_paths_on_channel_id" ON "channel_paths" ("channel_id");
-
-CREATE TABLE "channel_members" (
- "id" SERIAL PRIMARY KEY,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "admin" BOOLEAN NOT NULL DEFAULT false,
- "accepted" BOOLEAN NOT NULL DEFAULT false,
- "updated_at" TIMESTAMP NOT NULL DEFAULT now()
-);
-
-CREATE UNIQUE INDEX "index_channel_members_on_channel_id_and_user_id" ON "channel_members" ("channel_id", "user_id");
-
-ALTER TABLE rooms ADD COLUMN "channel_id" INTEGER REFERENCES channels (id) ON DELETE CASCADE;
@@ -1,40 +0,0 @@
-CREATE TABLE "buffers" (
- "id" SERIAL PRIMARY KEY,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL DEFAULT 0
-);
-
-CREATE INDEX "index_buffers_on_channel_id" ON "buffers" ("channel_id");
-
-CREATE TABLE "buffer_operations" (
- "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL,
- "replica_id" INTEGER NOT NULL,
- "lamport_timestamp" INTEGER NOT NULL,
- "value" BYTEA NOT NULL,
- PRIMARY KEY(buffer_id, epoch, lamport_timestamp, replica_id)
-);
-
-CREATE TABLE "buffer_snapshots" (
- "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL,
- "text" TEXT NOT NULL,
- "operation_serialization_version" INTEGER NOT NULL,
- PRIMARY KEY(buffer_id, epoch)
-);
-
-CREATE TABLE "channel_buffer_collaborators" (
- "id" SERIAL PRIMARY KEY,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "connection_id" INTEGER NOT NULL,
- "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE,
- "connection_lost" BOOLEAN NOT NULL DEFAULT FALSE,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "replica_id" INTEGER NOT NULL
-);
-
-CREATE INDEX "index_channel_buffer_collaborators_on_channel_id" ON "channel_buffer_collaborators" ("channel_id");
-CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_and_replica_id" ON "channel_buffer_collaborators" ("channel_id", "replica_id");
-CREATE INDEX "index_channel_buffer_collaborators_on_connection_server_id" ON "channel_buffer_collaborators" ("connection_server_id");
-CREATE INDEX "index_channel_buffer_collaborators_on_connection_id" ON "channel_buffer_collaborators" ("connection_id");
-CREATE UNIQUE INDEX "index_channel_buffer_collaborators_on_channel_id_connection_id_and_server_id" ON "channel_buffer_collaborators" ("channel_id", "connection_id", "connection_server_id");
@@ -1,16 +0,0 @@
-CREATE TABLE "feature_flags" (
- "id" SERIAL PRIMARY KEY,
- "flag" VARCHAR(255) NOT NULL UNIQUE
-);
-
-CREATE UNIQUE INDEX "index_feature_flags" ON "feature_flags" ("id");
-
-CREATE TABLE "user_features" (
- "user_id" INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE,
- "feature_id" INTEGER NOT NULL REFERENCES feature_flags(id) ON DELETE CASCADE,
- PRIMARY KEY (user_id, feature_id)
-);
-
-CREATE UNIQUE INDEX "index_user_features_user_id_and_feature_id" ON "user_features" ("user_id", "feature_id");
-CREATE INDEX "index_user_features_on_user_id" ON "user_features" ("user_id");
-CREATE INDEX "index_user_features_on_feature_id" ON "user_features" ("feature_id");
@@ -1,19 +0,0 @@
-CREATE TABLE IF NOT EXISTS "channel_messages" (
- "id" SERIAL PRIMARY KEY,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "sender_id" INTEGER NOT NULL REFERENCES users (id),
- "body" TEXT NOT NULL,
- "sent_at" TIMESTAMP,
- "nonce" UUID NOT NULL
-);
-CREATE INDEX "index_channel_messages_on_channel_id" ON "channel_messages" ("channel_id");
-CREATE UNIQUE INDEX "index_channel_messages_on_nonce" ON "channel_messages" ("nonce");
-
-CREATE TABLE IF NOT EXISTS "channel_chat_participants" (
- "id" SERIAL PRIMARY KEY,
- "user_id" INTEGER NOT NULL REFERENCES users (id),
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "connection_id" INTEGER NOT NULL,
- "connection_server_id" INTEGER NOT NULL REFERENCES servers (id) ON DELETE CASCADE
-);
-CREATE INDEX "index_channel_chat_participants_on_channel_id" ON "channel_chat_participants" ("channel_id");
@@ -1,19 +0,0 @@
-CREATE TABLE IF NOT EXISTS "observed_buffer_edits" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "buffer_id" INTEGER NOT NULL REFERENCES buffers (id) ON DELETE CASCADE,
- "epoch" INTEGER NOT NULL,
- "lamport_timestamp" INTEGER NOT NULL,
- "replica_id" INTEGER NOT NULL,
- PRIMARY KEY (user_id, buffer_id)
-);
-
-CREATE UNIQUE INDEX "index_observed_buffer_user_and_buffer_id" ON "observed_buffer_edits" ("user_id", "buffer_id");
-
-CREATE TABLE IF NOT EXISTS "observed_channel_messages" (
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "channel_id" INTEGER NOT NULL REFERENCES channels (id) ON DELETE CASCADE,
- "channel_message_id" INTEGER NOT NULL,
- PRIMARY KEY (user_id, channel_id)
-);
-
-CREATE UNIQUE INDEX "index_observed_channel_messages_user_and_channel_id" ON "observed_channel_messages" ("user_id", "channel_id");
@@ -1 +0,0 @@
-ALTER TABLE room_participants ADD COLUMN participant_index INTEGER;
@@ -1,22 +0,0 @@
-CREATE TABLE "notification_kinds" (
- "id" SERIAL PRIMARY KEY,
- "name" VARCHAR NOT NULL
-);
-
-CREATE UNIQUE INDEX "index_notification_kinds_on_name" ON "notification_kinds" ("name");
-
-CREATE TABLE notifications (
- "id" SERIAL PRIMARY KEY,
- "created_at" TIMESTAMP NOT NULL DEFAULT now(),
- "recipient_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- "kind" INTEGER NOT NULL REFERENCES notification_kinds (id),
- "entity_id" INTEGER,
- "content" TEXT,
- "is_read" BOOLEAN NOT NULL DEFAULT FALSE,
- "response" BOOLEAN
-);
-
-CREATE INDEX
- "index_notifications_on_recipient_id_is_read_kind_entity_id"
- ON "notifications"
- ("recipient_id", "is_read", "kind", "entity_id");
@@ -1 +0,0 @@
-ALTER TABLE rooms ADD COLUMN enviroment TEXT;
@@ -1 +0,0 @@
-CREATE UNIQUE INDEX "index_rooms_on_channel_id" ON "rooms" ("channel_id");
@@ -1,4 +0,0 @@
-ALTER TABLE channel_members ADD COLUMN role TEXT;
-UPDATE channel_members SET role = CASE WHEN admin THEN 'admin' ELSE 'member' END;
-
-ALTER TABLE channels ADD COLUMN visibility TEXT NOT NULL DEFAULT 'members';
@@ -1,8 +0,0 @@
--- Add migration script here
-
-ALTER TABLE projects
- DROP CONSTRAINT projects_room_id_fkey,
- ADD CONSTRAINT projects_room_id_fkey
- FOREIGN KEY (room_id)
- REFERENCES rooms (id)
- ON DELETE CASCADE;
@@ -1,11 +0,0 @@
-CREATE TABLE "channel_message_mentions" (
- "message_id" INTEGER NOT NULL REFERENCES channel_messages (id) ON DELETE CASCADE,
- "start_offset" INTEGER NOT NULL,
- "end_offset" INTEGER NOT NULL,
- "user_id" INTEGER NOT NULL REFERENCES users (id) ON DELETE CASCADE,
- PRIMARY KEY(message_id, start_offset)
-);
-
--- We use 'on conflict update' with this index, so it should be per-user.
-CREATE UNIQUE INDEX "index_channel_messages_on_sender_id_nonce" ON "channel_messages" ("sender_id", "nonce");
-DROP INDEX "index_channel_messages_on_nonce";
@@ -1,12 +0,0 @@
-ALTER TABLE channels ADD COLUMN parent_path TEXT;
-
-UPDATE channels
-SET parent_path = substr(
- channel_paths.id_path,
- 2,
- length(channel_paths.id_path) - length('/' || channel_paths.channel_id::text || '/')
-)
-FROM channel_paths
-WHERE channel_paths.channel_id = channels.id;
-
-CREATE INDEX "index_channels_on_parent_path" ON "channels" ("parent_path");
@@ -1,186 +0,0 @@
-use crate::{
- auth,
- db::{User, UserId},
- rpc, AppState, Error, Result,
-};
-use anyhow::anyhow;
-use axum::{
- body::Body,
- extract::{Path, Query},
- http::{self, Request, StatusCode},
- middleware::{self, Next},
- response::IntoResponse,
- routing::{get, post},
- Extension, Json, Router,
-};
-use axum_extra::response::ErasedJson;
-use serde::{Deserialize, Serialize};
-use std::sync::Arc;
-use tower::ServiceBuilder;
-use tracing::instrument;
-
-pub fn routes(rpc_server: Arc<rpc::Server>, state: Arc<AppState>) -> Router<Body> {
- Router::new()
- .route("/user", get(get_authenticated_user))
- .route("/users/:id/access_tokens", post(create_access_token))
- .route("/panic", post(trace_panic))
- .route("/rpc_server_snapshot", get(get_rpc_server_snapshot))
- .layer(
- ServiceBuilder::new()
- .layer(Extension(state))
- .layer(Extension(rpc_server))
- .layer(middleware::from_fn(validate_api_token)),
- )
-}
-
-pub async fn validate_api_token<B>(req: Request<B>, next: Next<B>) -> impl IntoResponse {
- let token = req
- .headers()
- .get(http::header::AUTHORIZATION)
- .and_then(|header| header.to_str().ok())
- .ok_or_else(|| {
- Error::Http(
- StatusCode::BAD_REQUEST,
- "missing authorization header".to_string(),
- )
- })?
- .strip_prefix("token ")
- .ok_or_else(|| {
- Error::Http(
- StatusCode::BAD_REQUEST,
- "invalid authorization header".to_string(),
- )
- })?;
-
- let state = req.extensions().get::<Arc<AppState>>().unwrap();
-
- if token != state.config.api_token {
- Err(Error::Http(
- StatusCode::UNAUTHORIZED,
- "invalid authorization token".to_string(),
- ))?
- }
-
- Ok::<_, Error>(next.run(req).await)
-}
-
-#[derive(Debug, Deserialize)]
-struct AuthenticatedUserParams {
- github_user_id: Option<i32>,
- github_login: String,
- github_email: Option<String>,
-}
-
-#[derive(Debug, Serialize)]
-struct AuthenticatedUserResponse {
- user: User,
- metrics_id: String,
-}
-
-async fn get_authenticated_user(
- Query(params): Query<AuthenticatedUserParams>,
- Extension(app): Extension<Arc<AppState>>,
-) -> Result<Json<AuthenticatedUserResponse>> {
- let user = app
- .db
- .get_or_create_user_by_github_account(
- ¶ms.github_login,
- params.github_user_id,
- params.github_email.as_deref(),
- )
- .await?
- .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "user not found".into()))?;
- let metrics_id = app.db.get_user_metrics_id(user.id).await?;
- return Ok(Json(AuthenticatedUserResponse { user, metrics_id }));
-}
-
-#[derive(Deserialize, Debug)]
-struct CreateUserParams {
- github_user_id: i32,
- github_login: String,
- email_address: String,
- email_confirmation_code: Option<String>,
- #[serde(default)]
- admin: bool,
- #[serde(default)]
- invite_count: i32,
-}
-
-#[derive(Serialize, Debug)]
-struct CreateUserResponse {
- user: User,
- signup_device_id: Option<String>,
- metrics_id: String,
-}
-
-#[derive(Debug, Deserialize)]
-struct Panic {
- version: String,
- release_channel: String,
- backtrace_hash: String,
- text: String,
-}
-
-#[instrument(skip(panic))]
-async fn trace_panic(panic: Json<Panic>) -> Result<()> {
- tracing::error!(version = %panic.version, release_channel = %panic.release_channel, backtrace_hash = %panic.backtrace_hash, text = %panic.text, "panic report");
- Ok(())
-}
-
-async fn get_rpc_server_snapshot(
- Extension(rpc_server): Extension<Arc<rpc::Server>>,
-) -> Result<ErasedJson> {
- Ok(ErasedJson::pretty(rpc_server.snapshot().await))
-}
-
-#[derive(Deserialize)]
-struct CreateAccessTokenQueryParams {
- public_key: String,
- impersonate: Option<String>,
-}
-
-#[derive(Serialize)]
-struct CreateAccessTokenResponse {
- user_id: UserId,
- encrypted_access_token: String,
-}
-
-async fn create_access_token(
- Path(user_id): Path<UserId>,
- Query(params): Query<CreateAccessTokenQueryParams>,
- Extension(app): Extension<Arc<AppState>>,
-) -> Result<Json<CreateAccessTokenResponse>> {
- let user = app
- .db
- .get_user_by_id(user_id)
- .await?
- .ok_or_else(|| anyhow!("user not found"))?;
-
- let mut user_id = user.id;
- if let Some(impersonate) = params.impersonate {
- if user.admin {
- if let Some(impersonated_user) = app.db.get_user_by_github_login(&impersonate).await? {
- user_id = impersonated_user.id;
- } else {
- return Err(Error::Http(
- StatusCode::UNPROCESSABLE_ENTITY,
- format!("user {impersonate} does not exist"),
- ));
- }
- } else {
- return Err(Error::Http(
- StatusCode::UNAUTHORIZED,
- "you do not have permission to impersonate other users".to_string(),
- ));
- }
- }
-
- let access_token = auth::create_access_token(app.db.as_ref(), user_id).await?;
- let encrypted_access_token =
- auth::encrypt_access_token(&access_token, params.public_key.clone())?;
-
- Ok(Json(CreateAccessTokenResponse {
- user_id,
- encrypted_access_token,
- }))
-}
@@ -1,151 +0,0 @@
-use crate::{
- db::{self, AccessTokenId, Database, UserId},
- AppState, Error, Result,
-};
-use anyhow::{anyhow, Context};
-use axum::{
- http::{self, Request, StatusCode},
- middleware::Next,
- response::IntoResponse,
-};
-use lazy_static::lazy_static;
-use prometheus::{exponential_buckets, register_histogram, Histogram};
-use rand::thread_rng;
-use scrypt::{
- password_hash::{PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
- Scrypt,
-};
-use serde::{Deserialize, Serialize};
-use std::{sync::Arc, time::Instant};
-
-lazy_static! {
- static ref METRIC_ACCESS_TOKEN_HASHING_TIME: Histogram = register_histogram!(
- "access_token_hashing_time",
- "time spent hashing access tokens",
- exponential_buckets(10.0, 2.0, 10).unwrap(),
- )
- .unwrap();
-}
-
-pub async fn validate_header<B>(mut req: Request<B>, next: Next<B>) -> impl IntoResponse {
- let mut auth_header = req
- .headers()
- .get(http::header::AUTHORIZATION)
- .and_then(|header| header.to_str().ok())
- .ok_or_else(|| {
- Error::Http(
- StatusCode::UNAUTHORIZED,
- "missing authorization header".to_string(),
- )
- })?
- .split_whitespace();
-
- let user_id = UserId(auth_header.next().unwrap_or("").parse().map_err(|_| {
- Error::Http(
- StatusCode::BAD_REQUEST,
- "missing user id in authorization header".to_string(),
- )
- })?);
-
- let access_token = auth_header.next().ok_or_else(|| {
- Error::Http(
- StatusCode::BAD_REQUEST,
- "missing access token in authorization header".to_string(),
- )
- })?;
-
- let state = req.extensions().get::<Arc<AppState>>().unwrap();
- let credentials_valid = if let Some(admin_token) = access_token.strip_prefix("ADMIN_TOKEN:") {
- state.config.api_token == admin_token
- } else {
- verify_access_token(&access_token, user_id, &state.db)
- .await
- .unwrap_or(false)
- };
-
- if credentials_valid {
- let user = state
- .db
- .get_user_by_id(user_id)
- .await?
- .ok_or_else(|| anyhow!("user {} not found", user_id))?;
- req.extensions_mut().insert(user);
- Ok::<_, Error>(next.run(req).await)
- } else {
- Err(Error::Http(
- StatusCode::UNAUTHORIZED,
- "invalid credentials".to_string(),
- ))
- }
-}
-
-const MAX_ACCESS_TOKENS_TO_STORE: usize = 8;
-
-#[derive(Serialize, Deserialize)]
-struct AccessTokenJson {
- version: usize,
- id: AccessTokenId,
- token: String,
-}
-
-pub async fn create_access_token(db: &db::Database, user_id: UserId) -> Result<String> {
- const VERSION: usize = 1;
- let access_token = rpc::auth::random_token();
- let access_token_hash =
- hash_access_token(&access_token).context("failed to hash access token")?;
- let id = db
- .create_access_token(user_id, &access_token_hash, MAX_ACCESS_TOKENS_TO_STORE)
- .await?;
- Ok(serde_json::to_string(&AccessTokenJson {
- version: VERSION,
- id,
- token: access_token,
- })?)
-}
-
-fn hash_access_token(token: &str) -> Result<String> {
- // Avoid slow hashing in debug mode.
- let params = if cfg!(debug_assertions) {
- scrypt::Params::new(1, 1, 1).unwrap()
- } else {
- scrypt::Params::new(14, 8, 1).unwrap()
- };
-
- Ok(Scrypt
- .hash_password(
- token.as_bytes(),
- None,
- params,
- &SaltString::generate(thread_rng()),
- )
- .map_err(anyhow::Error::new)?
- .to_string())
-}
-
-pub fn encrypt_access_token(access_token: &str, public_key: String) -> Result<String> {
- let native_app_public_key =
- rpc::auth::PublicKey::try_from(public_key).context("failed to parse app public key")?;
- let encrypted_access_token = native_app_public_key
- .encrypt_string(access_token)
- .context("failed to encrypt access token with public key")?;
- Ok(encrypted_access_token)
-}
-
-pub async fn verify_access_token(token: &str, user_id: UserId, db: &Arc<Database>) -> Result<bool> {
- let token: AccessTokenJson = serde_json::from_str(&token)?;
-
- let db_token = db.get_access_token(token.id).await?;
- if db_token.user_id != user_id {
- return Err(anyhow!("no such access token"))?;
- }
-
- let db_hash = PasswordHash::new(&db_token.hash).map_err(anyhow::Error::new)?;
- let t0 = Instant::now();
- let is_valid = Scrypt
- .verify_password(token.token.as_bytes(), &db_hash)
- .is_ok();
- let duration = t0.elapsed();
- log::info!("hashed access token in {:?}", duration);
- METRIC_ACCESS_TOKEN_HASHING_TIME.observe(duration.as_millis() as f64);
- Ok(is_valid)
-}
@@ -1,20 +0,0 @@
-use anyhow::anyhow;
-use std::fs;
-
-fn main() -> anyhow::Result<()> {
- let env: toml::map::Map<String, toml::Value> = toml::de::from_str(
- &fs::read_to_string("./.env.toml").map_err(|_| anyhow!("no .env.toml file found"))?,
- )?;
-
- for (key, value) in env {
- let value = match value {
- toml::Value::String(value) => value,
- toml::Value::Integer(value) => value.to_string(),
- toml::Value::Float(value) => value.to_string(),
- _ => panic!("unsupported TOML value in .env.toml for key {}", key),
- };
- println!("export {}=\"{}\"", key, value);
- }
-
- Ok(())
-}
@@ -1,107 +0,0 @@
-use collab2::{db, executor::Executor};
-use db::{ConnectOptions, Database};
-use serde::{de::DeserializeOwned, Deserialize};
-use std::fmt::Write;
-
-#[derive(Debug, Deserialize)]
-struct GitHubUser {
- id: i32,
- login: String,
- email: Option<String>,
-}
-
-#[tokio::main]
-async fn main() {
- let database_url = std::env::var("DATABASE_URL").expect("missing DATABASE_URL env var");
- let db = Database::new(ConnectOptions::new(database_url), Executor::Production)
- .await
- .expect("failed to connect to postgres database");
- let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var");
- let client = reqwest::Client::new();
-
- let mut current_user =
- fetch_github::<GitHubUser>(&client, &github_token, "https://api.github.com/user").await;
- current_user
- .email
- .get_or_insert_with(|| "placeholder@example.com".to_string());
- let staff_users = fetch_github::<Vec<GitHubUser>>(
- &client,
- &github_token,
- "https://api.github.com/orgs/zed-industries/teams/staff/members",
- )
- .await;
-
- let mut zed_users = Vec::new();
- zed_users.push((current_user, true));
- zed_users.extend(staff_users.into_iter().map(|user| (user, true)));
-
- let user_count = db
- .get_all_users(0, 200)
- .await
- .expect("failed to load users from db")
- .len();
- if user_count < 100 {
- let mut last_user_id = None;
- for _ in 0..10 {
- let mut uri = "https://api.github.com/users?per_page=100".to_string();
- if let Some(last_user_id) = last_user_id {
- write!(&mut uri, "&since={}", last_user_id).unwrap();
- }
- let users = fetch_github::<Vec<GitHubUser>>(&client, &github_token, &uri).await;
- if let Some(last_user) = users.last() {
- last_user_id = Some(last_user.id);
- zed_users.extend(users.into_iter().map(|user| (user, false)));
- } else {
- break;
- }
- }
- }
-
- for (github_user, admin) in zed_users {
- if db
- .get_user_by_github_login(&github_user.login)
- .await
- .expect("failed to fetch user")
- .is_none()
- {
- if admin {
- db.create_user(
- &format!("{}@zed.dev", github_user.login),
- admin,
- db::NewUserParams {
- github_login: github_user.login,
- github_user_id: github_user.id,
- },
- )
- .await
- .expect("failed to insert user");
- } else {
- db.get_or_create_user_by_github_account(
- &github_user.login,
- Some(github_user.id),
- github_user.email.as_deref(),
- )
- .await
- .expect("failed to insert user");
- }
- }
- }
-}
-
-async fn fetch_github<T: DeserializeOwned>(
- client: &reqwest::Client,
- access_token: &str,
- url: &str,
-) -> T {
- let response = client
- .get(url)
- .bearer_auth(&access_token)
- .header("user-agent", "zed")
- .send()
- .await
- .expect(&format!("failed to fetch '{}'", url));
- response
- .json()
- .await
- .expect(&format!("failed to deserialize github user from '{}'", url))
-}
@@ -1,672 +0,0 @@
-#[cfg(test)]
-pub mod tests;
-
-#[cfg(test)]
-pub use tests::TestDb;
-
-mod ids;
-mod queries;
-mod tables;
-
-use crate::{executor::Executor, Error, Result};
-use anyhow::anyhow;
-use collections::{BTreeMap, HashMap, HashSet};
-use dashmap::DashMap;
-use futures::StreamExt;
-use rand::{prelude::StdRng, Rng, SeedableRng};
-use rpc::{
- proto::{self},
- ConnectionId,
-};
-use sea_orm::{
- entity::prelude::*,
- sea_query::{Alias, Expr, OnConflict},
- ActiveValue, Condition, ConnectionTrait, DatabaseConnection, DatabaseTransaction, DbErr,
- FromQueryResult, IntoActiveModel, IsolationLevel, JoinType, QueryOrder, QuerySelect, Statement,
- TransactionTrait,
-};
-use serde::{Deserialize, Serialize};
-use sqlx::{
- migrate::{Migrate, Migration, MigrationSource},
- Connection,
-};
-use std::{
- fmt::Write as _,
- future::Future,
- marker::PhantomData,
- ops::{Deref, DerefMut},
- path::Path,
- rc::Rc,
- sync::Arc,
- time::Duration,
-};
-use tables::*;
-use tokio::sync::{Mutex, OwnedMutexGuard};
-
-pub use ids::*;
-pub use sea_orm::ConnectOptions;
-pub use tables::user::Model as User;
-
-pub struct Database {
- options: ConnectOptions,
- pool: DatabaseConnection,
- rooms: DashMap<RoomId, Arc<Mutex<()>>>,
- rng: Mutex<StdRng>,
- executor: Executor,
- notification_kinds_by_id: HashMap<NotificationKindId, &'static str>,
- notification_kinds_by_name: HashMap<String, NotificationKindId>,
- #[cfg(test)]
- runtime: Option<tokio::runtime::Runtime>,
-}
-
-// The `Database` type has so many methods that its impl blocks are split into
-// separate files in the `queries` folder.
-impl Database {
- pub async fn new(options: ConnectOptions, executor: Executor) -> Result<Self> {
- sqlx::any::install_default_drivers();
- Ok(Self {
- options: options.clone(),
- pool: sea_orm::Database::connect(options).await?,
- rooms: DashMap::with_capacity(16384),
- rng: Mutex::new(StdRng::seed_from_u64(0)),
- notification_kinds_by_id: HashMap::default(),
- notification_kinds_by_name: HashMap::default(),
- executor,
- #[cfg(test)]
- runtime: None,
- })
- }
-
- #[cfg(test)]
- pub fn reset(&self) {
- self.rooms.clear();
- }
-
- pub async fn migrate(
- &self,
- migrations_path: &Path,
- ignore_checksum_mismatch: bool,
- ) -> anyhow::Result<Vec<(Migration, Duration)>> {
- let migrations = MigrationSource::resolve(migrations_path)
- .await
- .map_err(|err| anyhow!("failed to load migrations: {err:?}"))?;
-
- let mut connection = sqlx::AnyConnection::connect(self.options.get_url()).await?;
-
- connection.ensure_migrations_table().await?;
- let applied_migrations: HashMap<_, _> = connection
- .list_applied_migrations()
- .await?
- .into_iter()
- .map(|m| (m.version, m))
- .collect();
-
- let mut new_migrations = Vec::new();
- for migration in migrations {
- match applied_migrations.get(&migration.version) {
- Some(applied_migration) => {
- if migration.checksum != applied_migration.checksum && !ignore_checksum_mismatch
- {
- Err(anyhow!(
- "checksum mismatch for applied migration {}",
- migration.description
- ))?;
- }
- }
- None => {
- let elapsed = connection.apply(&migration).await?;
- new_migrations.push((migration, elapsed));
- }
- }
- }
-
- Ok(new_migrations)
- }
-
- pub async fn initialize_static_data(&mut self) -> Result<()> {
- self.initialize_notification_kinds().await?;
- Ok(())
- }
-
- pub async fn transaction<F, Fut, T>(&self, f: F) -> Result<T>
- where
- F: Send + Fn(TransactionHandle) -> Fut,
- Fut: Send + Future<Output = Result<T>>,
- {
- let body = async {
- let mut i = 0;
- loop {
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(result) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => return Ok(result),
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- }
- i += 1;
- }
- };
-
- self.run(body).await
- }
-
- async fn optional_room_transaction<F, Fut, T>(&self, f: F) -> Result<Option<RoomGuard<T>>>
- where
- F: Send + Fn(TransactionHandle) -> Fut,
- Fut: Send + Future<Output = Result<Option<(RoomId, T)>>>,
- {
- let body = async {
- let mut i = 0;
- loop {
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(Some((room_id, data))) => {
- let lock = self.rooms.entry(room_id).or_default().clone();
- let _guard = lock.lock_owned().await;
- match tx.commit().await.map_err(Into::into) {
- Ok(()) => {
- return Ok(Some(RoomGuard {
- data,
- _guard,
- _not_send: PhantomData,
- }));
- }
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- }
- }
- Ok(None) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => return Ok(None),
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- }
- i += 1;
- }
- };
-
- self.run(body).await
- }
-
- async fn room_transaction<F, Fut, T>(&self, room_id: RoomId, f: F) -> Result<RoomGuard<T>>
- where
- F: Send + Fn(TransactionHandle) -> Fut,
- Fut: Send + Future<Output = Result<T>>,
- {
- let body = async {
- let mut i = 0;
- loop {
- let lock = self.rooms.entry(room_id).or_default().clone();
- let _guard = lock.lock_owned().await;
- let (tx, result) = self.with_transaction(&f).await?;
- match result {
- Ok(data) => match tx.commit().await.map_err(Into::into) {
- Ok(()) => {
- return Ok(RoomGuard {
- data,
- _guard,
- _not_send: PhantomData,
- });
- }
- Err(error) => {
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- },
- Err(error) => {
- tx.rollback().await?;
- if !self.retry_on_serialization_error(&error, i).await {
- return Err(error);
- }
- }
- }
- i += 1;
- }
- };
-
- self.run(body).await
- }
-
- async fn with_transaction<F, Fut, T>(&self, f: &F) -> Result<(DatabaseTransaction, Result<T>)>
- where
- F: Send + Fn(TransactionHandle) -> Fut,
- Fut: Send + Future<Output = Result<T>>,
- {
- let tx = self
- .pool
- .begin_with_config(Some(IsolationLevel::Serializable), None)
- .await?;
-
- let mut tx = Arc::new(Some(tx));
- let result = f(TransactionHandle(tx.clone())).await;
- let Some(tx) = Arc::get_mut(&mut tx).and_then(|tx| tx.take()) else {
- return Err(anyhow!(
- "couldn't complete transaction because it's still in use"
- ))?;
- };
-
- Ok((tx, result))
- }
-
- async fn run<F, T>(&self, future: F) -> Result<T>
- where
- F: Future<Output = Result<T>>,
- {
- #[cfg(test)]
- {
- if let Executor::Deterministic(executor) = &self.executor {
- executor.simulate_random_delay().await;
- }
-
- self.runtime.as_ref().unwrap().block_on(future)
- }
-
- #[cfg(not(test))]
- {
- future.await
- }
- }
-
- async fn retry_on_serialization_error(&self, error: &Error, prev_attempt_count: u32) -> bool {
- // If the error is due to a failure to serialize concurrent transactions, then retry
- // this transaction after a delay. With each subsequent retry, double the delay duration.
- // Also vary the delay randomly in order to ensure different database connections retry
- // at different times.
- if is_serialization_error(error) {
- let base_delay = 4_u64 << prev_attempt_count.min(16);
- let randomized_delay = base_delay as f32 * self.rng.lock().await.gen_range(0.5..=2.0);
- log::info!(
- "retrying transaction after serialization error. delay: {} ms.",
- randomized_delay
- );
- self.executor
- .sleep(Duration::from_millis(randomized_delay as u64))
- .await;
- true
- } else {
- false
- }
- }
-}
-
-fn is_serialization_error(error: &Error) -> bool {
- const SERIALIZATION_FAILURE_CODE: &'static str = "40001";
- match error {
- Error::Database(
- DbErr::Exec(sea_orm::RuntimeErr::SqlxError(error))
- | DbErr::Query(sea_orm::RuntimeErr::SqlxError(error)),
- ) if error
- .as_database_error()
- .and_then(|error| error.code())
- .as_deref()
- == Some(SERIALIZATION_FAILURE_CODE) =>
- {
- true
- }
- _ => false,
- }
-}
-
-pub struct TransactionHandle(Arc<Option<DatabaseTransaction>>);
-
-impl Deref for TransactionHandle {
- type Target = DatabaseTransaction;
-
- fn deref(&self) -> &Self::Target {
- self.0.as_ref().as_ref().unwrap()
- }
-}
-
-pub struct RoomGuard<T> {
- data: T,
- _guard: OwnedMutexGuard<()>,
- _not_send: PhantomData<Rc<()>>,
-}
-
-impl<T> Deref for RoomGuard<T> {
- type Target = T;
-
- fn deref(&self) -> &T {
- &self.data
- }
-}
-
-impl<T> DerefMut for RoomGuard<T> {
- fn deref_mut(&mut self) -> &mut T {
- &mut self.data
- }
-}
-
-impl<T> RoomGuard<T> {
- pub fn into_inner(self) -> T {
- self.data
- }
-}
-
-#[derive(Clone, Debug, PartialEq, Eq)]
-pub enum Contact {
- Accepted { user_id: UserId, busy: bool },
- Outgoing { user_id: UserId },
- Incoming { user_id: UserId },
-}
-
-impl Contact {
- pub fn user_id(&self) -> UserId {
- match self {
- Contact::Accepted { user_id, .. } => *user_id,
- Contact::Outgoing { user_id } => *user_id,
- Contact::Incoming { user_id, .. } => *user_id,
- }
- }
-}
-
-pub type NotificationBatch = Vec<(UserId, proto::Notification)>;
-
-pub struct CreatedChannelMessage {
- pub message_id: MessageId,
- pub participant_connection_ids: Vec<ConnectionId>,
- pub channel_members: Vec<UserId>,
- pub notifications: NotificationBatch,
-}
-
-#[derive(Clone, Debug, PartialEq, Eq, FromQueryResult, Serialize, Deserialize)]
-pub struct Invite {
- pub email_address: String,
- pub email_confirmation_code: String,
-}
-
-#[derive(Clone, Debug, Deserialize)]
-pub struct NewSignup {
- pub email_address: String,
- pub platform_mac: bool,
- pub platform_windows: bool,
- pub platform_linux: bool,
- pub editor_features: Vec<String>,
- pub programming_languages: Vec<String>,
- pub device_id: Option<String>,
- pub added_to_mailing_list: bool,
- pub created_at: Option<DateTime>,
-}
-
-#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromQueryResult)]
-pub struct WaitlistSummary {
- pub count: i64,
- pub linux_count: i64,
- pub mac_count: i64,
- pub windows_count: i64,
- pub unknown_count: i64,
-}
-
-#[derive(Debug, Serialize, Deserialize)]
-pub struct NewUserParams {
- pub github_login: String,
- pub github_user_id: i32,
-}
-
-#[derive(Debug)]
-pub struct NewUserResult {
- pub user_id: UserId,
- pub metrics_id: String,
- pub inviting_user_id: Option<UserId>,
- pub signup_device_id: Option<String>,
-}
-
-#[derive(Debug)]
-pub struct MoveChannelResult {
- pub participants_to_update: HashMap<UserId, ChannelsForUser>,
- pub participants_to_remove: HashSet<UserId>,
- pub moved_channels: HashSet<ChannelId>,
-}
-
-#[derive(Debug)]
-pub struct RenameChannelResult {
- pub channel: Channel,
- pub participants_to_update: HashMap<UserId, Channel>,
-}
-
-#[derive(Debug)]
-pub struct CreateChannelResult {
- pub channel: Channel,
- pub participants_to_update: Vec<(UserId, ChannelsForUser)>,
-}
-
-#[derive(Debug)]
-pub struct SetChannelVisibilityResult {
- pub participants_to_update: HashMap<UserId, ChannelsForUser>,
- pub participants_to_remove: HashSet<UserId>,
- pub channels_to_remove: Vec<ChannelId>,
-}
-
-#[derive(Debug)]
-pub struct MembershipUpdated {
- pub channel_id: ChannelId,
- pub new_channels: ChannelsForUser,
- pub removed_channels: Vec<ChannelId>,
-}
-
-#[derive(Debug)]
-pub enum SetMemberRoleResult {
- InviteUpdated(Channel),
- MembershipUpdated(MembershipUpdated),
-}
-
-#[derive(Debug)]
-pub struct InviteMemberResult {
- pub channel: Channel,
- pub notifications: NotificationBatch,
-}
-
-#[derive(Debug)]
-pub struct RespondToChannelInvite {
- pub membership_update: Option<MembershipUpdated>,
- pub notifications: NotificationBatch,
-}
-
-#[derive(Debug)]
-pub struct RemoveChannelMemberResult {
- pub membership_update: MembershipUpdated,
- pub notification_id: Option<NotificationId>,
-}
-
-#[derive(Debug, PartialEq, Eq, Hash)]
-pub struct Channel {
- pub id: ChannelId,
- pub name: String,
- pub visibility: ChannelVisibility,
- pub role: ChannelRole,
- pub parent_path: Vec<ChannelId>,
-}
-
-impl Channel {
- fn from_model(value: channel::Model, role: ChannelRole) -> Self {
- Channel {
- id: value.id,
- visibility: value.visibility,
- name: value.clone().name,
- role,
- parent_path: value.ancestors().collect(),
- }
- }
-
- pub fn to_proto(&self) -> proto::Channel {
- proto::Channel {
- id: self.id.to_proto(),
- name: self.name.clone(),
- visibility: self.visibility.into(),
- role: self.role.into(),
- parent_path: self.parent_path.iter().map(|c| c.to_proto()).collect(),
- }
- }
-}
-
-#[derive(Debug, PartialEq, Eq, Hash)]
-pub struct ChannelMember {
- pub role: ChannelRole,
- pub user_id: UserId,
- pub kind: proto::channel_member::Kind,
-}
-
-impl ChannelMember {
- pub fn to_proto(&self) -> proto::ChannelMember {
- proto::ChannelMember {
- role: self.role.into(),
- user_id: self.user_id.to_proto(),
- kind: self.kind.into(),
- }
- }
-}
-
-#[derive(Debug, PartialEq)]
-pub struct ChannelsForUser {
- pub channels: Vec<Channel>,
- pub channel_participants: HashMap<ChannelId, Vec<UserId>>,
- pub unseen_buffer_changes: Vec<proto::UnseenChannelBufferChange>,
- pub channel_messages: Vec<proto::UnseenChannelMessage>,
-}
-
-#[derive(Debug)]
-pub struct RejoinedChannelBuffer {
- pub buffer: proto::RejoinedChannelBuffer,
- pub old_connection_id: ConnectionId,
-}
-
-#[derive(Clone)]
-pub struct JoinRoom {
- pub room: proto::Room,
- pub channel_id: Option<ChannelId>,
- pub channel_members: Vec<UserId>,
-}
-
-pub struct RejoinedRoom {
- pub room: proto::Room,
- pub rejoined_projects: Vec<RejoinedProject>,
- pub reshared_projects: Vec<ResharedProject>,
- pub channel_id: Option<ChannelId>,
- pub channel_members: Vec<UserId>,
-}
-
-pub struct ResharedProject {
- pub id: ProjectId,
- pub old_connection_id: ConnectionId,
- pub collaborators: Vec<ProjectCollaborator>,
- pub worktrees: Vec<proto::WorktreeMetadata>,
-}
-
-pub struct RejoinedProject {
- pub id: ProjectId,
- pub old_connection_id: ConnectionId,
- pub collaborators: Vec<ProjectCollaborator>,
- pub worktrees: Vec<RejoinedWorktree>,
- pub language_servers: Vec<proto::LanguageServer>,
-}
-
-#[derive(Debug)]
-pub struct RejoinedWorktree {
- pub id: u64,
- pub abs_path: String,
- pub root_name: String,
- pub visible: bool,
- pub updated_entries: Vec<proto::Entry>,
- pub removed_entries: Vec<u64>,
- pub updated_repositories: Vec<proto::RepositoryEntry>,
- pub removed_repositories: Vec<u64>,
- pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
- pub settings_files: Vec<WorktreeSettingsFile>,
- pub scan_id: u64,
- pub completed_scan_id: u64,
-}
-
-pub struct LeftRoom {
- pub room: proto::Room,
- pub channel_id: Option<ChannelId>,
- pub channel_members: Vec<UserId>,
- pub left_projects: HashMap<ProjectId, LeftProject>,
- pub canceled_calls_to_user_ids: Vec<UserId>,
- pub deleted: bool,
-}
-
-pub struct RefreshedRoom {
- pub room: proto::Room,
- pub channel_id: Option<ChannelId>,
- pub channel_members: Vec<UserId>,
- pub stale_participant_user_ids: Vec<UserId>,
- pub canceled_calls_to_user_ids: Vec<UserId>,
-}
-
-pub struct RefreshedChannelBuffer {
- pub connection_ids: Vec<ConnectionId>,
- pub collaborators: Vec<proto::Collaborator>,
-}
-
-pub struct Project {
- pub collaborators: Vec<ProjectCollaborator>,
- pub worktrees: BTreeMap<u64, Worktree>,
- pub language_servers: Vec<proto::LanguageServer>,
-}
-
-pub struct ProjectCollaborator {
- pub connection_id: ConnectionId,
- pub user_id: UserId,
- pub replica_id: ReplicaId,
- pub is_host: bool,
-}
-
-impl ProjectCollaborator {
- pub fn to_proto(&self) -> proto::Collaborator {
- proto::Collaborator {
- peer_id: Some(self.connection_id.into()),
- replica_id: self.replica_id.0 as u32,
- user_id: self.user_id.to_proto(),
- }
- }
-}
-
-#[derive(Debug)]
-pub struct LeftProject {
- pub id: ProjectId,
- pub host_user_id: UserId,
- pub host_connection_id: ConnectionId,
- pub connection_ids: Vec<ConnectionId>,
-}
-
-pub struct Worktree {
- pub id: u64,
- pub abs_path: String,
- pub root_name: String,
- pub visible: bool,
- pub entries: Vec<proto::Entry>,
- pub repository_entries: BTreeMap<u64, proto::RepositoryEntry>,
- pub diagnostic_summaries: Vec<proto::DiagnosticSummary>,
- pub settings_files: Vec<WorktreeSettingsFile>,
- pub scan_id: u64,
- pub completed_scan_id: u64,
-}
-
-#[derive(Debug)]
-pub struct WorktreeSettingsFile {
- pub path: String,
- pub content: String,
-}
@@ -1,199 +0,0 @@
-use crate::Result;
-use rpc::proto;
-use sea_orm::{entity::prelude::*, DbErr};
-use serde::{Deserialize, Serialize};
-
-macro_rules! id_type {
- ($name:ident) => {
- #[derive(
- Clone,
- Copy,
- Debug,
- Default,
- PartialEq,
- Eq,
- PartialOrd,
- Ord,
- Hash,
- Serialize,
- Deserialize,
- DeriveValueType,
- )]
- #[serde(transparent)]
- pub struct $name(pub i32);
-
- impl $name {
- #[allow(unused)]
- pub const MAX: Self = Self(i32::MAX);
-
- #[allow(unused)]
- pub fn from_proto(value: u64) -> Self {
- Self(value as i32)
- }
-
- #[allow(unused)]
- pub fn to_proto(self) -> u64 {
- self.0 as u64
- }
- }
-
- impl std::fmt::Display for $name {
- fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
- self.0.fmt(f)
- }
- }
-
- impl sea_orm::TryFromU64 for $name {
- fn try_from_u64(n: u64) -> Result<Self, DbErr> {
- Ok(Self(n.try_into().map_err(|_| {
- DbErr::ConvertFromU64(concat!(
- "error converting ",
- stringify!($name),
- " to u64"
- ))
- })?))
- }
- }
-
- impl sea_orm::sea_query::Nullable for $name {
- fn null() -> Value {
- Value::Int(None)
- }
- }
- };
-}
-
-id_type!(BufferId);
-id_type!(AccessTokenId);
-id_type!(ChannelChatParticipantId);
-id_type!(ChannelId);
-id_type!(ChannelMemberId);
-id_type!(MessageId);
-id_type!(ContactId);
-id_type!(FollowerId);
-id_type!(RoomId);
-id_type!(RoomParticipantId);
-id_type!(ProjectId);
-id_type!(ProjectCollaboratorId);
-id_type!(ReplicaId);
-id_type!(ServerId);
-id_type!(SignupId);
-id_type!(UserId);
-id_type!(ChannelBufferCollaboratorId);
-id_type!(FlagId);
-id_type!(NotificationId);
-id_type!(NotificationKindId);
-
-#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash)]
-#[sea_orm(rs_type = "String", db_type = "String(None)")]
-pub enum ChannelRole {
- #[sea_orm(string_value = "admin")]
- Admin,
- #[sea_orm(string_value = "member")]
- #[default]
- Member,
- #[sea_orm(string_value = "guest")]
- Guest,
- #[sea_orm(string_value = "banned")]
- Banned,
-}
-
-impl ChannelRole {
- pub fn should_override(&self, other: Self) -> bool {
- use ChannelRole::*;
- match self {
- Admin => matches!(other, Member | Banned | Guest),
- Member => matches!(other, Banned | Guest),
- Banned => matches!(other, Guest),
- Guest => false,
- }
- }
-
- pub fn max(&self, other: Self) -> Self {
- if self.should_override(other) {
- *self
- } else {
- other
- }
- }
-
- pub fn can_see_all_descendants(&self) -> bool {
- use ChannelRole::*;
- match self {
- Admin | Member => true,
- Guest | Banned => false,
- }
- }
-
- pub fn can_only_see_public_descendants(&self) -> bool {
- use ChannelRole::*;
- match self {
- Guest => true,
- Admin | Member | Banned => false,
- }
- }
-}
-
-impl From<proto::ChannelRole> for ChannelRole {
- fn from(value: proto::ChannelRole) -> Self {
- match value {
- proto::ChannelRole::Admin => ChannelRole::Admin,
- proto::ChannelRole::Member => ChannelRole::Member,
- proto::ChannelRole::Guest => ChannelRole::Guest,
- proto::ChannelRole::Banned => ChannelRole::Banned,
- }
- }
-}
-
-impl Into<proto::ChannelRole> for ChannelRole {
- fn into(self) -> proto::ChannelRole {
- match self {
- ChannelRole::Admin => proto::ChannelRole::Admin,
- ChannelRole::Member => proto::ChannelRole::Member,
- ChannelRole::Guest => proto::ChannelRole::Guest,
- ChannelRole::Banned => proto::ChannelRole::Banned,
- }
- }
-}
-
-impl Into<i32> for ChannelRole {
- fn into(self) -> i32 {
- let proto: proto::ChannelRole = self.into();
- proto.into()
- }
-}
-
-#[derive(Eq, PartialEq, Copy, Clone, Debug, EnumIter, DeriveActiveEnum, Default, Hash)]
-#[sea_orm(rs_type = "String", db_type = "String(None)")]
-pub enum ChannelVisibility {
- #[sea_orm(string_value = "public")]
- Public,
- #[sea_orm(string_value = "members")]
- #[default]
- Members,
-}
-
-impl From<proto::ChannelVisibility> for ChannelVisibility {
- fn from(value: proto::ChannelVisibility) -> Self {
- match value {
- proto::ChannelVisibility::Public => ChannelVisibility::Public,
- proto::ChannelVisibility::Members => ChannelVisibility::Members,
- }
- }
-}
-
-impl Into<proto::ChannelVisibility> for ChannelVisibility {
- fn into(self) -> proto::ChannelVisibility {
- match self {
- ChannelVisibility::Public => proto::ChannelVisibility::Public,
- ChannelVisibility::Members => proto::ChannelVisibility::Members,
- }
- }
-}
-
-impl Into<i32> for ChannelVisibility {
- fn into(self) -> i32 {
- let proto: proto::ChannelVisibility = self.into();
- proto.into()
- }
-}
@@ -1,12 +0,0 @@
-use super::*;
-
-pub mod access_tokens;
-pub mod buffers;
-pub mod channels;
-pub mod contacts;
-pub mod messages;
-pub mod notifications;
-pub mod projects;
-pub mod rooms;
-pub mod servers;
-pub mod users;
@@ -1,54 +0,0 @@
-use super::*;
-use sea_orm::sea_query::Query;
-
-impl Database {
- pub async fn create_access_token(
- &self,
- user_id: UserId,
- access_token_hash: &str,
- max_access_token_count: usize,
- ) -> Result<AccessTokenId> {
- self.transaction(|tx| async {
- let tx = tx;
-
- let token = access_token::ActiveModel {
- user_id: ActiveValue::set(user_id),
- hash: ActiveValue::set(access_token_hash.into()),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
-
- access_token::Entity::delete_many()
- .filter(
- access_token::Column::Id.in_subquery(
- Query::select()
- .column(access_token::Column::Id)
- .from(access_token::Entity)
- .and_where(access_token::Column::UserId.eq(user_id))
- .order_by(access_token::Column::Id, sea_orm::Order::Desc)
- .limit(10000)
- .offset(max_access_token_count as u64)
- .to_owned(),
- ),
- )
- .exec(&*tx)
- .await?;
- Ok(token.id)
- })
- .await
- }
-
- pub async fn get_access_token(
- &self,
- access_token_id: AccessTokenId,
- ) -> Result<access_token::Model> {
- self.transaction(|tx| async move {
- Ok(access_token::Entity::find_by_id(access_token_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such access token"))?)
- })
- .await
- }
-}
@@ -1,1078 +0,0 @@
-use super::*;
-use prost::Message;
-use text::{EditOperation, UndoOperation};
-
-pub struct LeftChannelBuffer {
- pub channel_id: ChannelId,
- pub collaborators: Vec<proto::Collaborator>,
- pub connections: Vec<ConnectionId>,
-}
-
-impl Database {
- pub async fn join_channel_buffer(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- connection: ConnectionId,
- ) -> Result<proto::JoinChannelBufferResponse> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_participant(&channel, user_id, &tx)
- .await?;
-
- let buffer = channel::Model {
- id: channel_id,
- ..Default::default()
- }
- .find_related(buffer::Entity)
- .one(&*tx)
- .await?;
-
- let buffer = if let Some(buffer) = buffer {
- buffer
- } else {
- let buffer = buffer::ActiveModel {
- channel_id: ActiveValue::Set(channel_id),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
- buffer_snapshot::ActiveModel {
- buffer_id: ActiveValue::Set(buffer.id),
- epoch: ActiveValue::Set(0),
- text: ActiveValue::Set(String::new()),
- operation_serialization_version: ActiveValue::Set(
- storage::SERIALIZATION_VERSION,
- ),
- }
- .insert(&*tx)
- .await?;
- buffer
- };
-
- // Join the collaborators
- let mut collaborators = channel_buffer_collaborator::Entity::find()
- .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
- .all(&*tx)
- .await?;
- let replica_ids = collaborators
- .iter()
- .map(|c| c.replica_id)
- .collect::<HashSet<_>>();
- let mut replica_id = ReplicaId(0);
- while replica_ids.contains(&replica_id) {
- replica_id.0 += 1;
- }
- let collaborator = channel_buffer_collaborator::ActiveModel {
- channel_id: ActiveValue::Set(channel_id),
- connection_id: ActiveValue::Set(connection.id as i32),
- connection_server_id: ActiveValue::Set(ServerId(connection.owner_id as i32)),
- user_id: ActiveValue::Set(user_id),
- replica_id: ActiveValue::Set(replica_id),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
- collaborators.push(collaborator);
-
- let (base_text, operations, max_operation) =
- self.get_buffer_state(&buffer, &tx).await?;
-
- // Save the last observed operation
- if let Some(op) = max_operation {
- observed_buffer_edits::Entity::insert(observed_buffer_edits::ActiveModel {
- user_id: ActiveValue::Set(user_id),
- buffer_id: ActiveValue::Set(buffer.id),
- epoch: ActiveValue::Set(op.epoch),
- lamport_timestamp: ActiveValue::Set(op.lamport_timestamp),
- replica_id: ActiveValue::Set(op.replica_id),
- })
- .on_conflict(
- OnConflict::columns([
- observed_buffer_edits::Column::UserId,
- observed_buffer_edits::Column::BufferId,
- ])
- .update_columns([
- observed_buffer_edits::Column::Epoch,
- observed_buffer_edits::Column::LamportTimestamp,
- ])
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- }
-
- Ok(proto::JoinChannelBufferResponse {
- buffer_id: buffer.id.to_proto(),
- replica_id: replica_id.to_proto() as u32,
- base_text,
- operations,
- epoch: buffer.epoch as u64,
- collaborators: collaborators
- .into_iter()
- .map(|collaborator| proto::Collaborator {
- peer_id: Some(collaborator.connection().into()),
- user_id: collaborator.user_id.to_proto(),
- replica_id: collaborator.replica_id.0 as u32,
- })
- .collect(),
- })
- })
- .await
- }
-
- pub async fn rejoin_channel_buffers(
- &self,
- buffers: &[proto::ChannelBufferVersion],
- user_id: UserId,
- connection_id: ConnectionId,
- ) -> Result<Vec<RejoinedChannelBuffer>> {
- self.transaction(|tx| async move {
- let mut results = Vec::new();
- for client_buffer in buffers {
- let channel = self
- .get_channel_internal(ChannelId::from_proto(client_buffer.channel_id), &*tx)
- .await?;
- if self
- .check_user_is_channel_participant(&channel, user_id, &*tx)
- .await
- .is_err()
- {
- log::info!("user is not a member of channel");
- continue;
- }
-
- let buffer = self.get_channel_buffer(channel.id, &*tx).await?;
- let mut collaborators = channel_buffer_collaborator::Entity::find()
- .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel.id))
- .all(&*tx)
- .await?;
-
- // If the buffer epoch hasn't changed since the client lost
- // connection, then the client's buffer can be syncronized with
- // the server's buffer.
- if buffer.epoch as u64 != client_buffer.epoch {
- log::info!("can't rejoin buffer, epoch has changed");
- continue;
- }
-
- // Find the collaborator record for this user's previous lost
- // connection. Update it with the new connection id.
- let server_id = ServerId(connection_id.owner_id as i32);
- let Some(self_collaborator) = collaborators.iter_mut().find(|c| {
- c.user_id == user_id
- && (c.connection_lost || c.connection_server_id != server_id)
- }) else {
- log::info!("can't rejoin buffer, no previous collaborator found");
- continue;
- };
- let old_connection_id = self_collaborator.connection();
- *self_collaborator = channel_buffer_collaborator::ActiveModel {
- id: ActiveValue::Unchanged(self_collaborator.id),
- connection_id: ActiveValue::Set(connection_id.id as i32),
- connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)),
- connection_lost: ActiveValue::Set(false),
- ..Default::default()
- }
- .update(&*tx)
- .await?;
-
- let client_version = version_from_wire(&client_buffer.version);
- let serialization_version = self
- .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
- .await?;
-
- let mut rows = buffer_operation::Entity::find()
- .filter(
- buffer_operation::Column::BufferId
- .eq(buffer.id)
- .and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
- )
- .stream(&*tx)
- .await?;
-
- // Find the server's version vector and any operations
- // that the client has not seen.
- let mut server_version = clock::Global::new();
- let mut operations = Vec::new();
- while let Some(row) = rows.next().await {
- let row = row?;
- let timestamp = clock::Lamport {
- replica_id: row.replica_id as u16,
- value: row.lamport_timestamp as u32,
- };
- server_version.observe(timestamp);
- if !client_version.observed(timestamp) {
- operations.push(proto::Operation {
- variant: Some(operation_from_storage(row, serialization_version)?),
- })
- }
- }
-
- results.push(RejoinedChannelBuffer {
- old_connection_id,
- buffer: proto::RejoinedChannelBuffer {
- channel_id: client_buffer.channel_id,
- version: version_to_wire(&server_version),
- operations,
- collaborators: collaborators
- .into_iter()
- .map(|collaborator| proto::Collaborator {
- peer_id: Some(collaborator.connection().into()),
- user_id: collaborator.user_id.to_proto(),
- replica_id: collaborator.replica_id.0 as u32,
- })
- .collect(),
- },
- });
- }
-
- Ok(results)
- })
- .await
- }
-
- pub async fn clear_stale_channel_buffer_collaborators(
- &self,
- channel_id: ChannelId,
- server_id: ServerId,
- ) -> Result<RefreshedChannelBuffer> {
- self.transaction(|tx| async move {
- let db_collaborators = channel_buffer_collaborator::Entity::find()
- .filter(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
- .all(&*tx)
- .await?;
-
- let mut connection_ids = Vec::new();
- let mut collaborators = Vec::new();
- let mut collaborator_ids_to_remove = Vec::new();
- for db_collaborator in &db_collaborators {
- if !db_collaborator.connection_lost
- && db_collaborator.connection_server_id == server_id
- {
- connection_ids.push(db_collaborator.connection());
- collaborators.push(proto::Collaborator {
- peer_id: Some(db_collaborator.connection().into()),
- replica_id: db_collaborator.replica_id.0 as u32,
- user_id: db_collaborator.user_id.to_proto(),
- })
- } else {
- collaborator_ids_to_remove.push(db_collaborator.id);
- }
- }
-
- channel_buffer_collaborator::Entity::delete_many()
- .filter(channel_buffer_collaborator::Column::Id.is_in(collaborator_ids_to_remove))
- .exec(&*tx)
- .await?;
-
- Ok(RefreshedChannelBuffer {
- connection_ids,
- collaborators,
- })
- })
- .await
- }
-
- pub async fn leave_channel_buffer(
- &self,
- channel_id: ChannelId,
- connection: ConnectionId,
- ) -> Result<LeftChannelBuffer> {
- self.transaction(|tx| async move {
- self.leave_channel_buffer_internal(channel_id, connection, &*tx)
- .await
- })
- .await
- }
-
- pub async fn channel_buffer_connection_lost(
- &self,
- connection: ConnectionId,
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- channel_buffer_collaborator::Entity::update_many()
- .filter(
- Condition::all()
- .add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
- .add(
- channel_buffer_collaborator::Column::ConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .set(channel_buffer_collaborator::ActiveModel {
- connection_lost: ActiveValue::set(true),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- Ok(())
- }
-
- pub async fn leave_channel_buffers(
- &self,
- connection: ConnectionId,
- ) -> Result<Vec<LeftChannelBuffer>> {
- self.transaction(|tx| async move {
- #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
- enum QueryChannelIds {
- ChannelId,
- }
-
- let channel_ids: Vec<ChannelId> = channel_buffer_collaborator::Entity::find()
- .select_only()
- .column(channel_buffer_collaborator::Column::ChannelId)
- .filter(Condition::all().add(
- channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32),
- ))
- .into_values::<_, QueryChannelIds>()
- .all(&*tx)
- .await?;
-
- let mut result = Vec::new();
- for channel_id in channel_ids {
- let left_channel_buffer = self
- .leave_channel_buffer_internal(channel_id, connection, &*tx)
- .await?;
- result.push(left_channel_buffer);
- }
-
- Ok(result)
- })
- .await
- }
-
- pub async fn leave_channel_buffer_internal(
- &self,
- channel_id: ChannelId,
- connection: ConnectionId,
- tx: &DatabaseTransaction,
- ) -> Result<LeftChannelBuffer> {
- let result = channel_buffer_collaborator::Entity::delete_many()
- .filter(
- Condition::all()
- .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id))
- .add(channel_buffer_collaborator::Column::ConnectionId.eq(connection.id as i32))
- .add(
- channel_buffer_collaborator::Column::ConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .exec(&*tx)
- .await?;
- if result.rows_affected == 0 {
- Err(anyhow!("not a collaborator on this project"))?;
- }
-
- let mut collaborators = Vec::new();
- let mut connections = Vec::new();
- let mut rows = channel_buffer_collaborator::Entity::find()
- .filter(
- Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
- )
- .stream(&*tx)
- .await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- let connection = row.connection();
- connections.push(connection);
- collaborators.push(proto::Collaborator {
- peer_id: Some(connection.into()),
- replica_id: row.replica_id.0 as u32,
- user_id: row.user_id.to_proto(),
- });
- }
-
- drop(rows);
-
- if collaborators.is_empty() {
- self.snapshot_channel_buffer(channel_id, &tx).await?;
- }
-
- Ok(LeftChannelBuffer {
- channel_id,
- collaborators,
- connections,
- })
- }
-
- pub async fn get_channel_buffer_collaborators(
- &self,
- channel_id: ChannelId,
- ) -> Result<Vec<UserId>> {
- self.transaction(|tx| async move {
- self.get_channel_buffer_collaborators_internal(channel_id, &*tx)
- .await
- })
- .await
- }
-
- async fn get_channel_buffer_collaborators_internal(
- &self,
- channel_id: ChannelId,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<UserId>> {
- #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
- enum QueryUserIds {
- UserId,
- }
-
- let users: Vec<UserId> = channel_buffer_collaborator::Entity::find()
- .select_only()
- .column(channel_buffer_collaborator::Column::UserId)
- .filter(
- Condition::all().add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
- )
- .into_values::<_, QueryUserIds>()
- .all(&*tx)
- .await?;
-
- Ok(users)
- }
-
- pub async fn update_channel_buffer(
- &self,
- channel_id: ChannelId,
- user: UserId,
- operations: &[proto::Operation],
- ) -> Result<(
- Vec<ConnectionId>,
- Vec<UserId>,
- i32,
- Vec<proto::VectorClockEntry>,
- )> {
- self.transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_member(&channel, user, &*tx)
- .await?;
-
- let buffer = buffer::Entity::find()
- .filter(buffer::Column::ChannelId.eq(channel_id))
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such buffer"))?;
-
- let serialization_version = self
- .get_buffer_operation_serialization_version(buffer.id, buffer.epoch, &*tx)
- .await?;
-
- let operations = operations
- .iter()
- .filter_map(|op| operation_to_storage(op, &buffer, serialization_version))
- .collect::<Vec<_>>();
-
- let mut channel_members;
- let max_version;
-
- if !operations.is_empty() {
- let max_operation = operations
- .iter()
- .max_by_key(|op| (op.lamport_timestamp.as_ref(), op.replica_id.as_ref()))
- .unwrap();
-
- max_version = vec![proto::VectorClockEntry {
- replica_id: *max_operation.replica_id.as_ref() as u32,
- timestamp: *max_operation.lamport_timestamp.as_ref() as u32,
- }];
-
- // get current channel participants and save the max operation above
- self.save_max_operation(
- user,
- buffer.id,
- buffer.epoch,
- *max_operation.replica_id.as_ref(),
- *max_operation.lamport_timestamp.as_ref(),
- &*tx,
- )
- .await?;
-
- channel_members = self.get_channel_participants(&channel, &*tx).await?;
- let collaborators = self
- .get_channel_buffer_collaborators_internal(channel_id, &*tx)
- .await?;
- channel_members.retain(|member| !collaborators.contains(member));
-
- buffer_operation::Entity::insert_many(operations)
- .on_conflict(
- OnConflict::columns([
- buffer_operation::Column::BufferId,
- buffer_operation::Column::Epoch,
- buffer_operation::Column::LamportTimestamp,
- buffer_operation::Column::ReplicaId,
- ])
- .do_nothing()
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- } else {
- channel_members = Vec::new();
- max_version = Vec::new();
- }
-
- let mut connections = Vec::new();
- let mut rows = channel_buffer_collaborator::Entity::find()
- .filter(
- Condition::all()
- .add(channel_buffer_collaborator::Column::ChannelId.eq(channel_id)),
- )
- .stream(&*tx)
- .await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- connections.push(ConnectionId {
- id: row.connection_id as u32,
- owner_id: row.connection_server_id.0 as u32,
- });
- }
-
- Ok((connections, channel_members, buffer.epoch, max_version))
- })
- .await
- }
-
- async fn save_max_operation(
- &self,
- user_id: UserId,
- buffer_id: BufferId,
- epoch: i32,
- replica_id: i32,
- lamport_timestamp: i32,
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- use observed_buffer_edits::Column;
-
- observed_buffer_edits::Entity::insert(observed_buffer_edits::ActiveModel {
- user_id: ActiveValue::Set(user_id),
- buffer_id: ActiveValue::Set(buffer_id),
- epoch: ActiveValue::Set(epoch),
- replica_id: ActiveValue::Set(replica_id),
- lamport_timestamp: ActiveValue::Set(lamport_timestamp),
- })
- .on_conflict(
- OnConflict::columns([Column::UserId, Column::BufferId])
- .update_columns([Column::Epoch, Column::LamportTimestamp, Column::ReplicaId])
- .action_cond_where(
- Condition::any().add(Column::Epoch.lt(epoch)).add(
- Condition::all().add(Column::Epoch.eq(epoch)).add(
- Condition::any()
- .add(Column::LamportTimestamp.lt(lamport_timestamp))
- .add(
- Column::LamportTimestamp
- .eq(lamport_timestamp)
- .and(Column::ReplicaId.lt(replica_id)),
- ),
- ),
- ),
- )
- .to_owned(),
- )
- .exec_without_returning(tx)
- .await?;
-
- Ok(())
- }
-
- async fn get_buffer_operation_serialization_version(
- &self,
- buffer_id: BufferId,
- epoch: i32,
- tx: &DatabaseTransaction,
- ) -> Result<i32> {
- Ok(buffer_snapshot::Entity::find()
- .filter(buffer_snapshot::Column::BufferId.eq(buffer_id))
- .filter(buffer_snapshot::Column::Epoch.eq(epoch))
- .select_only()
- .column(buffer_snapshot::Column::OperationSerializationVersion)
- .into_values::<_, QueryOperationSerializationVersion>()
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("missing buffer snapshot"))?)
- }
-
- pub async fn get_channel_buffer(
- &self,
- channel_id: ChannelId,
- tx: &DatabaseTransaction,
- ) -> Result<buffer::Model> {
- Ok(channel::Model {
- id: channel_id,
- ..Default::default()
- }
- .find_related(buffer::Entity)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such buffer"))?)
- }
-
- async fn get_buffer_state(
- &self,
- buffer: &buffer::Model,
- tx: &DatabaseTransaction,
- ) -> Result<(
- String,
- Vec<proto::Operation>,
- Option<buffer_operation::Model>,
- )> {
- let id = buffer.id;
- let (base_text, version) = if buffer.epoch > 0 {
- let snapshot = buffer_snapshot::Entity::find()
- .filter(
- buffer_snapshot::Column::BufferId
- .eq(id)
- .and(buffer_snapshot::Column::Epoch.eq(buffer.epoch)),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such snapshot"))?;
-
- let version = snapshot.operation_serialization_version;
- (snapshot.text, version)
- } else {
- (String::new(), storage::SERIALIZATION_VERSION)
- };
-
- let mut rows = buffer_operation::Entity::find()
- .filter(
- buffer_operation::Column::BufferId
- .eq(id)
- .and(buffer_operation::Column::Epoch.eq(buffer.epoch)),
- )
- .order_by_asc(buffer_operation::Column::LamportTimestamp)
- .order_by_asc(buffer_operation::Column::ReplicaId)
- .stream(&*tx)
- .await?;
-
- let mut operations = Vec::new();
- let mut last_row = None;
- while let Some(row) = rows.next().await {
- let row = row?;
- last_row = Some(buffer_operation::Model {
- buffer_id: row.buffer_id,
- epoch: row.epoch,
- lamport_timestamp: row.lamport_timestamp,
- replica_id: row.lamport_timestamp,
- value: Default::default(),
- });
- operations.push(proto::Operation {
- variant: Some(operation_from_storage(row, version)?),
- });
- }
-
- Ok((base_text, operations, last_row))
- }
-
- async fn snapshot_channel_buffer(
- &self,
- channel_id: ChannelId,
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- let buffer = self.get_channel_buffer(channel_id, tx).await?;
- let (base_text, operations, _) = self.get_buffer_state(&buffer, tx).await?;
- if operations.is_empty() {
- return Ok(());
- }
-
- let mut text_buffer = text::Buffer::new(0, 0, base_text);
- text_buffer
- .apply_ops(operations.into_iter().filter_map(operation_from_wire))
- .unwrap();
-
- let base_text = text_buffer.text();
- let epoch = buffer.epoch + 1;
-
- buffer_snapshot::Model {
- buffer_id: buffer.id,
- epoch,
- text: base_text,
- operation_serialization_version: storage::SERIALIZATION_VERSION,
- }
- .into_active_model()
- .insert(tx)
- .await?;
-
- buffer::ActiveModel {
- id: ActiveValue::Unchanged(buffer.id),
- epoch: ActiveValue::Set(epoch),
- ..Default::default()
- }
- .save(tx)
- .await?;
-
- Ok(())
- }
-
- pub async fn observe_buffer_version(
- &self,
- buffer_id: BufferId,
- user_id: UserId,
- epoch: i32,
- version: &[proto::VectorClockEntry],
- ) -> Result<()> {
- self.transaction(|tx| async move {
- // For now, combine concurrent operations.
- let Some(component) = version.iter().max_by_key(|version| version.timestamp) else {
- return Ok(());
- };
- self.save_max_operation(
- user_id,
- buffer_id,
- epoch,
- component.replica_id as i32,
- component.timestamp as i32,
- &*tx,
- )
- .await?;
- Ok(())
- })
- .await
- }
-
- pub async fn unseen_channel_buffer_changes(
- &self,
- user_id: UserId,
- channel_ids: &[ChannelId],
- tx: &DatabaseTransaction,
- ) -> Result<Vec<proto::UnseenChannelBufferChange>> {
- #[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
- enum QueryIds {
- ChannelId,
- Id,
- }
-
- let mut channel_ids_by_buffer_id = HashMap::default();
- let mut rows = buffer::Entity::find()
- .filter(buffer::Column::ChannelId.is_in(channel_ids.iter().copied()))
- .stream(&*tx)
- .await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- channel_ids_by_buffer_id.insert(row.id, row.channel_id);
- }
- drop(rows);
-
- let mut observed_edits_by_buffer_id = HashMap::default();
- let mut rows = observed_buffer_edits::Entity::find()
- .filter(observed_buffer_edits::Column::UserId.eq(user_id))
- .filter(
- observed_buffer_edits::Column::BufferId
- .is_in(channel_ids_by_buffer_id.keys().copied()),
- )
- .stream(&*tx)
- .await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- observed_edits_by_buffer_id.insert(row.buffer_id, row);
- }
- drop(rows);
-
- let latest_operations = self
- .get_latest_operations_for_buffers(channel_ids_by_buffer_id.keys().copied(), &*tx)
- .await?;
-
- let mut changes = Vec::default();
- for latest in latest_operations {
- if let Some(observed) = observed_edits_by_buffer_id.get(&latest.buffer_id) {
- if (
- observed.epoch,
- observed.lamport_timestamp,
- observed.replica_id,
- ) >= (latest.epoch, latest.lamport_timestamp, latest.replica_id)
- {
- continue;
- }
- }
-
- if let Some(channel_id) = channel_ids_by_buffer_id.get(&latest.buffer_id) {
- changes.push(proto::UnseenChannelBufferChange {
- channel_id: channel_id.to_proto(),
- epoch: latest.epoch as u64,
- version: vec![proto::VectorClockEntry {
- replica_id: latest.replica_id as u32,
- timestamp: latest.lamport_timestamp as u32,
- }],
- });
- }
- }
-
- Ok(changes)
- }
-
- pub async fn get_latest_operations_for_buffers(
- &self,
- buffer_ids: impl IntoIterator<Item = BufferId>,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<buffer_operation::Model>> {
- let mut values = String::new();
- for id in buffer_ids {
- if !values.is_empty() {
- values.push_str(", ");
- }
- write!(&mut values, "({})", id).unwrap();
- }
-
- if values.is_empty() {
- return Ok(Vec::default());
- }
-
- let sql = format!(
- r#"
- SELECT
- *
- FROM
- (
- SELECT
- *,
- row_number() OVER (
- PARTITION BY buffer_id
- ORDER BY
- epoch DESC,
- lamport_timestamp DESC,
- replica_id DESC
- ) as row_number
- FROM buffer_operations
- WHERE
- buffer_id in ({values})
- ) AS last_operations
- WHERE
- row_number = 1
- "#,
- );
-
- let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
- Ok(buffer_operation::Entity::find()
- .from_raw_sql(stmt)
- .all(&*tx)
- .await?)
- }
-}
-
-fn operation_to_storage(
- operation: &proto::Operation,
- buffer: &buffer::Model,
- _format: i32,
-) -> Option<buffer_operation::ActiveModel> {
- let (replica_id, lamport_timestamp, value) = match operation.variant.as_ref()? {
- proto::operation::Variant::Edit(operation) => (
- operation.replica_id,
- operation.lamport_timestamp,
- storage::Operation {
- version: version_to_storage(&operation.version),
- is_undo: false,
- edit_ranges: operation
- .ranges
- .iter()
- .map(|range| storage::Range {
- start: range.start,
- end: range.end,
- })
- .collect(),
- edit_texts: operation.new_text.clone(),
- undo_counts: Vec::new(),
- },
- ),
- proto::operation::Variant::Undo(operation) => (
- operation.replica_id,
- operation.lamport_timestamp,
- storage::Operation {
- version: version_to_storage(&operation.version),
- is_undo: true,
- edit_ranges: Vec::new(),
- edit_texts: Vec::new(),
- undo_counts: operation
- .counts
- .iter()
- .map(|entry| storage::UndoCount {
- replica_id: entry.replica_id,
- lamport_timestamp: entry.lamport_timestamp,
- count: entry.count,
- })
- .collect(),
- },
- ),
- _ => None?,
- };
-
- Some(buffer_operation::ActiveModel {
- buffer_id: ActiveValue::Set(buffer.id),
- epoch: ActiveValue::Set(buffer.epoch),
- replica_id: ActiveValue::Set(replica_id as i32),
- lamport_timestamp: ActiveValue::Set(lamport_timestamp as i32),
- value: ActiveValue::Set(value.encode_to_vec()),
- })
-}
-
-fn operation_from_storage(
- row: buffer_operation::Model,
- _format_version: i32,
-) -> Result<proto::operation::Variant, Error> {
- let operation =
- storage::Operation::decode(row.value.as_slice()).map_err(|error| anyhow!("{}", error))?;
- let version = version_from_storage(&operation.version);
- Ok(if operation.is_undo {
- proto::operation::Variant::Undo(proto::operation::Undo {
- replica_id: row.replica_id as u32,
- lamport_timestamp: row.lamport_timestamp as u32,
- version,
- counts: operation
- .undo_counts
- .iter()
- .map(|entry| proto::UndoCount {
- replica_id: entry.replica_id,
- lamport_timestamp: entry.lamport_timestamp,
- count: entry.count,
- })
- .collect(),
- })
- } else {
- proto::operation::Variant::Edit(proto::operation::Edit {
- replica_id: row.replica_id as u32,
- lamport_timestamp: row.lamport_timestamp as u32,
- version,
- ranges: operation
- .edit_ranges
- .into_iter()
- .map(|range| proto::Range {
- start: range.start,
- end: range.end,
- })
- .collect(),
- new_text: operation.edit_texts,
- })
- })
-}
-
-fn version_to_storage(version: &Vec<proto::VectorClockEntry>) -> Vec<storage::VectorClockEntry> {
- version
- .iter()
- .map(|entry| storage::VectorClockEntry {
- replica_id: entry.replica_id,
- timestamp: entry.timestamp,
- })
- .collect()
-}
-
-fn version_from_storage(version: &Vec<storage::VectorClockEntry>) -> Vec<proto::VectorClockEntry> {
- version
- .iter()
- .map(|entry| proto::VectorClockEntry {
- replica_id: entry.replica_id,
- timestamp: entry.timestamp,
- })
- .collect()
-}
-
-// This is currently a manual copy of the deserialization code in the client's langauge crate
-pub fn operation_from_wire(operation: proto::Operation) -> Option<text::Operation> {
- match operation.variant? {
- proto::operation::Variant::Edit(edit) => Some(text::Operation::Edit(EditOperation {
- timestamp: clock::Lamport {
- replica_id: edit.replica_id as text::ReplicaId,
- value: edit.lamport_timestamp,
- },
- version: version_from_wire(&edit.version),
- ranges: edit
- .ranges
- .into_iter()
- .map(|range| {
- text::FullOffset(range.start as usize)..text::FullOffset(range.end as usize)
- })
- .collect(),
- new_text: edit.new_text.into_iter().map(Arc::from).collect(),
- })),
- proto::operation::Variant::Undo(undo) => Some(text::Operation::Undo(UndoOperation {
- timestamp: clock::Lamport {
- replica_id: undo.replica_id as text::ReplicaId,
- value: undo.lamport_timestamp,
- },
- version: version_from_wire(&undo.version),
- counts: undo
- .counts
- .into_iter()
- .map(|c| {
- (
- clock::Lamport {
- replica_id: c.replica_id as text::ReplicaId,
- value: c.lamport_timestamp,
- },
- c.count,
- )
- })
- .collect(),
- })),
- _ => None,
- }
-}
-
-fn version_from_wire(message: &[proto::VectorClockEntry]) -> clock::Global {
- let mut version = clock::Global::new();
- for entry in message {
- version.observe(clock::Lamport {
- replica_id: entry.replica_id as text::ReplicaId,
- value: entry.timestamp,
- });
- }
- version
-}
-
-fn version_to_wire(version: &clock::Global) -> Vec<proto::VectorClockEntry> {
- let mut message = Vec::new();
- for entry in version.iter() {
- message.push(proto::VectorClockEntry {
- replica_id: entry.replica_id as u32,
- timestamp: entry.value,
- });
- }
- message
-}
-
-#[derive(Debug, Clone, Copy, EnumIter, DeriveColumn)]
-enum QueryOperationSerializationVersion {
- OperationSerializationVersion,
-}
-
-mod storage {
- #![allow(non_snake_case)]
- use prost::Message;
- pub const SERIALIZATION_VERSION: i32 = 1;
-
- #[derive(Message)]
- pub struct Operation {
- #[prost(message, repeated, tag = "2")]
- pub version: Vec<VectorClockEntry>,
- #[prost(bool, tag = "3")]
- pub is_undo: bool,
- #[prost(message, repeated, tag = "4")]
- pub edit_ranges: Vec<Range>,
- #[prost(string, repeated, tag = "5")]
- pub edit_texts: Vec<String>,
- #[prost(message, repeated, tag = "6")]
- pub undo_counts: Vec<UndoCount>,
- }
-
- #[derive(Message)]
- pub struct VectorClockEntry {
- #[prost(uint32, tag = "1")]
- pub replica_id: u32,
- #[prost(uint32, tag = "2")]
- pub timestamp: u32,
- }
-
- #[derive(Message)]
- pub struct Range {
- #[prost(uint64, tag = "1")]
- pub start: u64,
- #[prost(uint64, tag = "2")]
- pub end: u64,
- }
-
- #[derive(Message)]
- pub struct UndoCount {
- #[prost(uint32, tag = "1")]
- pub replica_id: u32,
- #[prost(uint32, tag = "2")]
- pub lamport_timestamp: u32,
- #[prost(uint32, tag = "3")]
- pub count: u32,
- }
-}
@@ -1,1319 +0,0 @@
-use super::*;
-use rpc::proto::channel_member::Kind;
-use sea_orm::TryGetableMany;
-
-impl Database {
- #[cfg(test)]
- pub async fn all_channels(&self) -> Result<Vec<(ChannelId, String)>> {
- self.transaction(move |tx| async move {
- let mut channels = Vec::new();
- let mut rows = channel::Entity::find().stream(&*tx).await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- channels.push((row.id, row.name));
- }
- Ok(channels)
- })
- .await
- }
-
- #[cfg(test)]
- pub async fn create_root_channel(&self, name: &str, creator_id: UserId) -> Result<ChannelId> {
- Ok(self
- .create_channel(name, None, creator_id)
- .await?
- .channel
- .id)
- }
-
- #[cfg(test)]
- pub async fn create_sub_channel(
- &self,
- name: &str,
- parent: ChannelId,
- creator_id: UserId,
- ) -> Result<ChannelId> {
- Ok(self
- .create_channel(name, Some(parent), creator_id)
- .await?
- .channel
- .id)
- }
-
- pub async fn create_channel(
- &self,
- name: &str,
- parent_channel_id: Option<ChannelId>,
- admin_id: UserId,
- ) -> Result<CreateChannelResult> {
- let name = Self::sanitize_channel_name(name)?;
- self.transaction(move |tx| async move {
- let mut parent = None;
-
- if let Some(parent_channel_id) = parent_channel_id {
- let parent_channel = self.get_channel_internal(parent_channel_id, &*tx).await?;
- self.check_user_is_channel_admin(&parent_channel, admin_id, &*tx)
- .await?;
- parent = Some(parent_channel);
- }
-
- let channel = channel::ActiveModel {
- id: ActiveValue::NotSet,
- name: ActiveValue::Set(name.to_string()),
- visibility: ActiveValue::Set(ChannelVisibility::Members),
- parent_path: ActiveValue::Set(
- parent
- .as_ref()
- .map_or(String::new(), |parent| parent.path()),
- ),
- }
- .insert(&*tx)
- .await?;
-
- let participants_to_update;
- if let Some(parent) = &parent {
- participants_to_update = self
- .participants_to_notify_for_channel_change(parent, &*tx)
- .await?;
- } else {
- participants_to_update = vec![];
-
- channel_member::ActiveModel {
- id: ActiveValue::NotSet,
- channel_id: ActiveValue::Set(channel.id),
- user_id: ActiveValue::Set(admin_id),
- accepted: ActiveValue::Set(true),
- role: ActiveValue::Set(ChannelRole::Admin),
- }
- .insert(&*tx)
- .await?;
- };
-
- Ok(CreateChannelResult {
- channel: Channel::from_model(channel, ChannelRole::Admin),
- participants_to_update,
- })
- })
- .await
- }
-
- pub async fn join_channel(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- connection: ConnectionId,
- environment: &str,
- ) -> Result<(JoinRoom, Option<MembershipUpdated>, ChannelRole)> {
- self.transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- let mut role = self.channel_role_for_user(&channel, user_id, &*tx).await?;
-
- let mut accept_invite_result = None;
-
- if role.is_none() {
- if let Some(invitation) = self
- .pending_invite_for_channel(&channel, user_id, &*tx)
- .await?
- {
- // note, this may be a parent channel
- role = Some(invitation.role);
- channel_member::Entity::update(channel_member::ActiveModel {
- accepted: ActiveValue::Set(true),
- ..invitation.into_active_model()
- })
- .exec(&*tx)
- .await?;
-
- accept_invite_result = Some(
- self.calculate_membership_updated(&channel, user_id, &*tx)
- .await?,
- );
-
- debug_assert!(
- self.channel_role_for_user(&channel, user_id, &*tx).await? == role
- );
- }
- }
-
- if channel.visibility == ChannelVisibility::Public {
- role = Some(ChannelRole::Guest);
- let channel_to_join = self
- .public_ancestors_including_self(&channel, &*tx)
- .await?
- .first()
- .cloned()
- .unwrap_or(channel.clone());
-
- channel_member::Entity::insert(channel_member::ActiveModel {
- id: ActiveValue::NotSet,
- channel_id: ActiveValue::Set(channel_to_join.id),
- user_id: ActiveValue::Set(user_id),
- accepted: ActiveValue::Set(true),
- role: ActiveValue::Set(ChannelRole::Guest),
- })
- .exec(&*tx)
- .await?;
-
- accept_invite_result = Some(
- self.calculate_membership_updated(&channel_to_join, user_id, &*tx)
- .await?,
- );
-
- debug_assert!(self.channel_role_for_user(&channel, user_id, &*tx).await? == role);
- }
-
- if role.is_none() || role == Some(ChannelRole::Banned) {
- Err(anyhow!("not allowed"))?
- }
-
- let live_kit_room = format!("channel-{}", nanoid::nanoid!(30));
- let room_id = self
- .get_or_create_channel_room(channel_id, &live_kit_room, environment, &*tx)
- .await?;
-
- self.join_channel_room_internal(room_id, user_id, connection, &*tx)
- .await
- .map(|jr| (jr, accept_invite_result, role.unwrap()))
- })
- .await
- }
-
- pub async fn set_channel_visibility(
- &self,
- channel_id: ChannelId,
- visibility: ChannelVisibility,
- admin_id: UserId,
- ) -> Result<SetChannelVisibilityResult> {
- self.transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
-
- self.check_user_is_channel_admin(&channel, admin_id, &*tx)
- .await?;
-
- let previous_members = self
- .get_channel_participant_details_internal(&channel, &*tx)
- .await?;
-
- let mut model = channel.into_active_model();
- model.visibility = ActiveValue::Set(visibility);
- let channel = model.update(&*tx).await?;
-
- let mut participants_to_update: HashMap<UserId, ChannelsForUser> = self
- .participants_to_notify_for_channel_change(&channel, &*tx)
- .await?
- .into_iter()
- .collect();
-
- let mut channels_to_remove: Vec<ChannelId> = vec![];
- let mut participants_to_remove: HashSet<UserId> = HashSet::default();
- match visibility {
- ChannelVisibility::Members => {
- let all_descendents: Vec<ChannelId> = self
- .get_channel_descendants_including_self(vec![channel_id], &*tx)
- .await?
- .into_iter()
- .map(|channel| channel.id)
- .collect();
-
- channels_to_remove = channel::Entity::find()
- .filter(
- channel::Column::Id
- .is_in(all_descendents)
- .and(channel::Column::Visibility.eq(ChannelVisibility::Public)),
- )
- .all(&*tx)
- .await?
- .into_iter()
- .map(|channel| channel.id)
- .collect();
-
- channels_to_remove.push(channel_id);
-
- for member in previous_members {
- if member.role.can_only_see_public_descendants() {
- participants_to_remove.insert(member.user_id);
- }
- }
- }
- ChannelVisibility::Public => {
- if let Some(public_parent) = self.public_parent_channel(&channel, &*tx).await? {
- let parent_updates = self
- .participants_to_notify_for_channel_change(&public_parent, &*tx)
- .await?;
-
- for (user_id, channels) in parent_updates {
- participants_to_update.insert(user_id, channels);
- }
- }
- }
- }
-
- Ok(SetChannelVisibilityResult {
- participants_to_update,
- participants_to_remove,
- channels_to_remove,
- })
- })
- .await
- }
-
- pub async fn delete_channel(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- ) -> Result<(Vec<ChannelId>, Vec<UserId>)> {
- self.transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_admin(&channel, user_id, &*tx)
- .await?;
-
- let members_to_notify: Vec<UserId> = channel_member::Entity::find()
- .filter(channel_member::Column::ChannelId.is_in(channel.ancestors_including_self()))
- .select_only()
- .column(channel_member::Column::UserId)
- .distinct()
- .into_values::<_, QueryUserIds>()
- .all(&*tx)
- .await?;
-
- let channels_to_remove = self
- .get_channel_descendants_including_self(vec![channel.id], &*tx)
- .await?
- .into_iter()
- .map(|channel| channel.id)
- .collect::<Vec<_>>();
-
- channel::Entity::delete_many()
- .filter(channel::Column::Id.is_in(channels_to_remove.iter().copied()))
- .exec(&*tx)
- .await?;
-
- Ok((channels_to_remove, members_to_notify))
- })
- .await
- }
-
- pub async fn invite_channel_member(
- &self,
- channel_id: ChannelId,
- invitee_id: UserId,
- inviter_id: UserId,
- role: ChannelRole,
- ) -> Result<InviteMemberResult> {
- self.transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_admin(&channel, inviter_id, &*tx)
- .await?;
-
- channel_member::ActiveModel {
- id: ActiveValue::NotSet,
- channel_id: ActiveValue::Set(channel_id),
- user_id: ActiveValue::Set(invitee_id),
- accepted: ActiveValue::Set(false),
- role: ActiveValue::Set(role),
- }
- .insert(&*tx)
- .await?;
-
- let channel = Channel::from_model(channel, role);
-
- let notifications = self
- .create_notification(
- invitee_id,
- rpc::Notification::ChannelInvitation {
- channel_id: channel_id.to_proto(),
- channel_name: channel.name.clone(),
- inviter_id: inviter_id.to_proto(),
- },
- true,
- &*tx,
- )
- .await?
- .into_iter()
- .collect();
-
- Ok(InviteMemberResult {
- channel,
- notifications,
- })
- })
- .await
- }
-
- fn sanitize_channel_name(name: &str) -> Result<&str> {
- let new_name = name.trim().trim_start_matches('#');
- if new_name == "" {
- Err(anyhow!("channel name can't be blank"))?;
- }
- Ok(new_name)
- }
-
- pub async fn rename_channel(
- &self,
- channel_id: ChannelId,
- admin_id: UserId,
- new_name: &str,
- ) -> Result<RenameChannelResult> {
- self.transaction(move |tx| async move {
- let new_name = Self::sanitize_channel_name(new_name)?.to_string();
-
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- let role = self
- .check_user_is_channel_admin(&channel, admin_id, &*tx)
- .await?;
-
- let mut model = channel.into_active_model();
- model.name = ActiveValue::Set(new_name.clone());
- let channel = model.update(&*tx).await?;
-
- let participants = self
- .get_channel_participant_details_internal(&channel, &*tx)
- .await?;
-
- Ok(RenameChannelResult {
- channel: Channel::from_model(channel.clone(), role),
- participants_to_update: participants
- .iter()
- .map(|participant| {
- (
- participant.user_id,
- Channel::from_model(channel.clone(), participant.role),
- )
- })
- .collect(),
- })
- })
- .await
- }
-
- pub async fn respond_to_channel_invite(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- accept: bool,
- ) -> Result<RespondToChannelInvite> {
- self.transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
-
- let membership_update = if accept {
- let rows_affected = channel_member::Entity::update_many()
- .set(channel_member::ActiveModel {
- accepted: ActiveValue::Set(accept),
- ..Default::default()
- })
- .filter(
- channel_member::Column::ChannelId
- .eq(channel_id)
- .and(channel_member::Column::UserId.eq(user_id))
- .and(channel_member::Column::Accepted.eq(false)),
- )
- .exec(&*tx)
- .await?
- .rows_affected;
-
- if rows_affected == 0 {
- Err(anyhow!("no such invitation"))?;
- }
-
- Some(
- self.calculate_membership_updated(&channel, user_id, &*tx)
- .await?,
- )
- } else {
- let rows_affected = channel_member::Entity::delete_many()
- .filter(
- channel_member::Column::ChannelId
- .eq(channel_id)
- .and(channel_member::Column::UserId.eq(user_id))
- .and(channel_member::Column::Accepted.eq(false)),
- )
- .exec(&*tx)
- .await?
- .rows_affected;
- if rows_affected == 0 {
- Err(anyhow!("no such invitation"))?;
- }
-
- None
- };
-
- Ok(RespondToChannelInvite {
- membership_update,
- notifications: self
- .mark_notification_as_read_with_response(
- user_id,
- &rpc::Notification::ChannelInvitation {
- channel_id: channel_id.to_proto(),
- channel_name: Default::default(),
- inviter_id: Default::default(),
- },
- accept,
- &*tx,
- )
- .await?
- .into_iter()
- .collect(),
- })
- })
- .await
- }
-
- async fn calculate_membership_updated(
- &self,
- channel: &channel::Model,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<MembershipUpdated> {
- let new_channels = self.get_user_channels(user_id, Some(channel), &*tx).await?;
- let removed_channels = self
- .get_channel_descendants_including_self(vec![channel.id], &*tx)
- .await?
- .into_iter()
- .filter_map(|channel| {
- if !new_channels.channels.iter().any(|c| c.id == channel.id) {
- Some(channel.id)
- } else {
- None
- }
- })
- .collect::<Vec<_>>();
-
- Ok(MembershipUpdated {
- channel_id: channel.id,
- new_channels,
- removed_channels,
- })
- }
-
- pub async fn remove_channel_member(
- &self,
- channel_id: ChannelId,
- member_id: UserId,
- admin_id: UserId,
- ) -> Result<RemoveChannelMemberResult> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_admin(&channel, admin_id, &*tx)
- .await?;
-
- let result = channel_member::Entity::delete_many()
- .filter(
- channel_member::Column::ChannelId
- .eq(channel_id)
- .and(channel_member::Column::UserId.eq(member_id)),
- )
- .exec(&*tx)
- .await?;
-
- if result.rows_affected == 0 {
- Err(anyhow!("no such member"))?;
- }
-
- Ok(RemoveChannelMemberResult {
- membership_update: self
- .calculate_membership_updated(&channel, member_id, &*tx)
- .await?,
- notification_id: self
- .remove_notification(
- member_id,
- rpc::Notification::ChannelInvitation {
- channel_id: channel_id.to_proto(),
- channel_name: Default::default(),
- inviter_id: Default::default(),
- },
- &*tx,
- )
- .await?,
- })
- })
- .await
- }
-
- pub async fn get_channel_invites_for_user(&self, user_id: UserId) -> Result<Vec<Channel>> {
- self.transaction(|tx| async move {
- let mut role_for_channel: HashMap<ChannelId, ChannelRole> = HashMap::default();
-
- let channel_invites = channel_member::Entity::find()
- .filter(
- channel_member::Column::UserId
- .eq(user_id)
- .and(channel_member::Column::Accepted.eq(false)),
- )
- .all(&*tx)
- .await?;
-
- for invite in channel_invites {
- role_for_channel.insert(invite.channel_id, invite.role);
- }
-
- let channels = channel::Entity::find()
- .filter(channel::Column::Id.is_in(role_for_channel.keys().copied()))
- .all(&*tx)
- .await?;
-
- let channels = channels
- .into_iter()
- .filter_map(|channel| {
- let role = *role_for_channel.get(&channel.id)?;
- Some(Channel::from_model(channel, role))
- })
- .collect();
-
- Ok(channels)
- })
- .await
- }
-
- pub async fn get_channels_for_user(&self, user_id: UserId) -> Result<ChannelsForUser> {
- self.transaction(|tx| async move {
- let tx = tx;
-
- self.get_user_channels(user_id, None, &tx).await
- })
- .await
- }
-
- pub async fn get_user_channels(
- &self,
- user_id: UserId,
- ancestor_channel: Option<&channel::Model>,
- tx: &DatabaseTransaction,
- ) -> Result<ChannelsForUser> {
- let channel_memberships = channel_member::Entity::find()
- .filter(
- channel_member::Column::UserId
- .eq(user_id)
- .and(channel_member::Column::Accepted.eq(true)),
- )
- .all(&*tx)
- .await?;
-
- let descendants = self
- .get_channel_descendants_including_self(
- channel_memberships.iter().map(|m| m.channel_id),
- &*tx,
- )
- .await?;
-
- let mut roles_by_channel_id: HashMap<ChannelId, ChannelRole> = HashMap::default();
- for membership in channel_memberships.iter() {
- roles_by_channel_id.insert(membership.channel_id, membership.role);
- }
-
- let mut visible_channel_ids: HashSet<ChannelId> = HashSet::default();
-
- let channels: Vec<Channel> = descendants
- .into_iter()
- .filter_map(|channel| {
- let parent_role = channel
- .parent_id()
- .and_then(|parent_id| roles_by_channel_id.get(&parent_id));
-
- let role = if let Some(parent_role) = parent_role {
- let role = if let Some(existing_role) = roles_by_channel_id.get(&channel.id) {
- existing_role.max(*parent_role)
- } else {
- *parent_role
- };
- roles_by_channel_id.insert(channel.id, role);
- role
- } else {
- *roles_by_channel_id.get(&channel.id)?
- };
-
- let can_see_parent_paths = role.can_see_all_descendants()
- || role.can_only_see_public_descendants()
- && channel.visibility == ChannelVisibility::Public;
- if !can_see_parent_paths {
- return None;
- }
-
- visible_channel_ids.insert(channel.id);
-
- if let Some(ancestor) = ancestor_channel {
- if !channel
- .ancestors_including_self()
- .any(|id| id == ancestor.id)
- {
- return None;
- }
- }
-
- let mut channel = Channel::from_model(channel, role);
- channel
- .parent_path
- .retain(|id| visible_channel_ids.contains(&id));
-
- Some(channel)
- })
- .collect();
-
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryUserIdsAndChannelIds {
- ChannelId,
- UserId,
- }
-
- let mut channel_participants: HashMap<ChannelId, Vec<UserId>> = HashMap::default();
- {
- let mut rows = room_participant::Entity::find()
- .inner_join(room::Entity)
- .filter(room::Column::ChannelId.is_in(channels.iter().map(|c| c.id)))
- .select_only()
- .column(room::Column::ChannelId)
- .column(room_participant::Column::UserId)
- .into_values::<_, QueryUserIdsAndChannelIds>()
- .stream(&*tx)
- .await?;
- while let Some(row) = rows.next().await {
- let row: (ChannelId, UserId) = row?;
- channel_participants.entry(row.0).or_default().push(row.1)
- }
- }
-
- let channel_ids = channels.iter().map(|c| c.id).collect::<Vec<_>>();
- let channel_buffer_changes = self
- .unseen_channel_buffer_changes(user_id, &channel_ids, &*tx)
- .await?;
-
- let unseen_messages = self
- .unseen_channel_messages(user_id, &channel_ids, &*tx)
- .await?;
-
- Ok(ChannelsForUser {
- channels,
- channel_participants,
- unseen_buffer_changes: channel_buffer_changes,
- channel_messages: unseen_messages,
- })
- }
-
- async fn participants_to_notify_for_channel_change(
- &self,
- new_parent: &channel::Model,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<(UserId, ChannelsForUser)>> {
- let mut results: Vec<(UserId, ChannelsForUser)> = Vec::new();
-
- let members = self
- .get_channel_participant_details_internal(new_parent, &*tx)
- .await?;
-
- for member in members.iter() {
- if !member.role.can_see_all_descendants() {
- continue;
- }
- results.push((
- member.user_id,
- self.get_user_channels(member.user_id, Some(new_parent), &*tx)
- .await?,
- ))
- }
-
- let public_parents = self
- .public_ancestors_including_self(new_parent, &*tx)
- .await?;
- let public_parent = public_parents.last();
-
- let Some(public_parent) = public_parent else {
- return Ok(results);
- };
-
- // could save some time in the common case by skipping this if the
- // new channel is not public and has no public descendants.
- let public_members = if public_parent == new_parent {
- members
- } else {
- self.get_channel_participant_details_internal(public_parent, &*tx)
- .await?
- };
-
- for member in public_members {
- if !member.role.can_only_see_public_descendants() {
- continue;
- };
- results.push((
- member.user_id,
- self.get_user_channels(member.user_id, Some(public_parent), &*tx)
- .await?,
- ))
- }
-
- Ok(results)
- }
-
- pub async fn set_channel_member_role(
- &self,
- channel_id: ChannelId,
- admin_id: UserId,
- for_user: UserId,
- role: ChannelRole,
- ) -> Result<SetMemberRoleResult> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_admin(&channel, admin_id, &*tx)
- .await?;
-
- let membership = channel_member::Entity::find()
- .filter(
- channel_member::Column::ChannelId
- .eq(channel_id)
- .and(channel_member::Column::UserId.eq(for_user)),
- )
- .one(&*tx)
- .await?;
-
- let Some(membership) = membership else {
- Err(anyhow!("no such member"))?
- };
-
- let mut update = membership.into_active_model();
- update.role = ActiveValue::Set(role);
- let updated = channel_member::Entity::update(update).exec(&*tx).await?;
-
- if updated.accepted {
- Ok(SetMemberRoleResult::MembershipUpdated(
- self.calculate_membership_updated(&channel, for_user, &*tx)
- .await?,
- ))
- } else {
- Ok(SetMemberRoleResult::InviteUpdated(Channel::from_model(
- channel, role,
- )))
- }
- })
- .await
- }
-
- pub async fn get_channel_participant_details(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- ) -> Result<Vec<proto::ChannelMember>> {
- let (role, members) = self
- .transaction(move |tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- let role = self
- .check_user_is_channel_participant(&channel, user_id, &*tx)
- .await?;
- Ok((
- role,
- self.get_channel_participant_details_internal(&channel, &*tx)
- .await?,
- ))
- })
- .await?;
-
- if role == ChannelRole::Admin {
- Ok(members
- .into_iter()
- .map(|channel_member| channel_member.to_proto())
- .collect())
- } else {
- return Ok(members
- .into_iter()
- .filter_map(|member| {
- if member.kind == proto::channel_member::Kind::Invitee {
- return None;
- }
- Some(ChannelMember {
- role: member.role,
- user_id: member.user_id,
- kind: proto::channel_member::Kind::Member,
- })
- })
- .map(|channel_member| channel_member.to_proto())
- .collect());
- }
- }
-
- async fn get_channel_participant_details_internal(
- &self,
- channel: &channel::Model,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<ChannelMember>> {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryMemberDetails {
- UserId,
- Role,
- IsDirectMember,
- Accepted,
- Visibility,
- }
-
- let mut stream = channel_member::Entity::find()
- .left_join(channel::Entity)
- .filter(channel_member::Column::ChannelId.is_in(channel.ancestors_including_self()))
- .select_only()
- .column(channel_member::Column::UserId)
- .column(channel_member::Column::Role)
- .column_as(
- channel_member::Column::ChannelId.eq(channel.id),
- QueryMemberDetails::IsDirectMember,
- )
- .column(channel_member::Column::Accepted)
- .column(channel::Column::Visibility)
- .into_values::<_, QueryMemberDetails>()
- .stream(&*tx)
- .await?;
-
- let mut user_details: HashMap<UserId, ChannelMember> = HashMap::default();
-
- while let Some(user_membership) = stream.next().await {
- let (user_id, channel_role, is_direct_member, is_invite_accepted, visibility): (
- UserId,
- ChannelRole,
- bool,
- bool,
- ChannelVisibility,
- ) = user_membership?;
- let kind = match (is_direct_member, is_invite_accepted) {
- (true, true) => proto::channel_member::Kind::Member,
- (true, false) => proto::channel_member::Kind::Invitee,
- (false, true) => proto::channel_member::Kind::AncestorMember,
- (false, false) => continue,
- };
-
- if channel_role == ChannelRole::Guest
- && visibility != ChannelVisibility::Public
- && channel.visibility != ChannelVisibility::Public
- {
- continue;
- }
-
- if let Some(details_mut) = user_details.get_mut(&user_id) {
- if channel_role.should_override(details_mut.role) {
- details_mut.role = channel_role;
- }
- if kind == Kind::Member {
- details_mut.kind = kind;
- // the UI is going to be a bit confusing if you already have permissions
- // that are greater than or equal to the ones you're being invited to.
- } else if kind == Kind::Invitee && details_mut.kind == Kind::AncestorMember {
- details_mut.kind = kind;
- }
- } else {
- user_details.insert(
- user_id,
- ChannelMember {
- user_id,
- kind,
- role: channel_role,
- },
- );
- }
- }
-
- Ok(user_details
- .into_iter()
- .map(|(_, details)| details)
- .collect())
- }
-
- pub async fn get_channel_participants(
- &self,
- channel: &channel::Model,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<UserId>> {
- let participants = self
- .get_channel_participant_details_internal(channel, &*tx)
- .await?;
- Ok(participants
- .into_iter()
- .map(|member| member.user_id)
- .collect())
- }
-
- pub async fn check_user_is_channel_admin(
- &self,
- channel: &channel::Model,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<ChannelRole> {
- let role = self.channel_role_for_user(channel, user_id, tx).await?;
- match role {
- Some(ChannelRole::Admin) => Ok(role.unwrap()),
- Some(ChannelRole::Member)
- | Some(ChannelRole::Banned)
- | Some(ChannelRole::Guest)
- | None => Err(anyhow!(
- "user is not a channel admin or channel does not exist"
- ))?,
- }
- }
-
- pub async fn check_user_is_channel_member(
- &self,
- channel: &channel::Model,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<ChannelRole> {
- let channel_role = self.channel_role_for_user(channel, user_id, tx).await?;
- match channel_role {
- Some(ChannelRole::Admin) | Some(ChannelRole::Member) => Ok(channel_role.unwrap()),
- Some(ChannelRole::Banned) | Some(ChannelRole::Guest) | None => Err(anyhow!(
- "user is not a channel member or channel does not exist"
- ))?,
- }
- }
-
- pub async fn check_user_is_channel_participant(
- &self,
- channel: &channel::Model,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<ChannelRole> {
- let role = self.channel_role_for_user(channel, user_id, tx).await?;
- match role {
- Some(ChannelRole::Admin) | Some(ChannelRole::Member) | Some(ChannelRole::Guest) => {
- Ok(role.unwrap())
- }
- Some(ChannelRole::Banned) | None => Err(anyhow!(
- "user is not a channel participant or channel does not exist"
- ))?,
- }
- }
-
- pub async fn pending_invite_for_channel(
- &self,
- channel: &channel::Model,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<Option<channel_member::Model>> {
- let row = channel_member::Entity::find()
- .filter(channel_member::Column::ChannelId.is_in(channel.ancestors_including_self()))
- .filter(channel_member::Column::UserId.eq(user_id))
- .filter(channel_member::Column::Accepted.eq(false))
- .one(&*tx)
- .await?;
-
- Ok(row)
- }
-
- pub async fn public_parent_channel(
- &self,
- channel: &channel::Model,
- tx: &DatabaseTransaction,
- ) -> Result<Option<channel::Model>> {
- let mut path = self.public_ancestors_including_self(channel, &*tx).await?;
- if path.last().unwrap().id == channel.id {
- path.pop();
- }
- Ok(path.pop())
- }
-
- pub async fn public_ancestors_including_self(
- &self,
- channel: &channel::Model,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<channel::Model>> {
- let visible_channels = channel::Entity::find()
- .filter(channel::Column::Id.is_in(channel.ancestors_including_self()))
- .filter(channel::Column::Visibility.eq(ChannelVisibility::Public))
- .order_by_asc(channel::Column::ParentPath)
- .all(&*tx)
- .await?;
-
- Ok(visible_channels)
- }
-
- pub async fn channel_role_for_user(
- &self,
- channel: &channel::Model,
- user_id: UserId,
- tx: &DatabaseTransaction,
- ) -> Result<Option<ChannelRole>> {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryChannelMembership {
- ChannelId,
- Role,
- Visibility,
- }
-
- let mut rows = channel_member::Entity::find()
- .left_join(channel::Entity)
- .filter(
- channel_member::Column::ChannelId
- .is_in(channel.ancestors_including_self())
- .and(channel_member::Column::UserId.eq(user_id))
- .and(channel_member::Column::Accepted.eq(true)),
- )
- .select_only()
- .column(channel_member::Column::ChannelId)
- .column(channel_member::Column::Role)
- .column(channel::Column::Visibility)
- .into_values::<_, QueryChannelMembership>()
- .stream(&*tx)
- .await?;
-
- let mut user_role: Option<ChannelRole> = None;
-
- let mut is_participant = false;
- let mut current_channel_visibility = None;
-
- // note these channels are not iterated in any particular order,
- // our current logic takes the highest permission available.
- while let Some(row) = rows.next().await {
- let (membership_channel, role, visibility): (
- ChannelId,
- ChannelRole,
- ChannelVisibility,
- ) = row?;
-
- match role {
- ChannelRole::Admin | ChannelRole::Member | ChannelRole::Banned => {
- if let Some(users_role) = user_role {
- user_role = Some(users_role.max(role));
- } else {
- user_role = Some(role)
- }
- }
- ChannelRole::Guest if visibility == ChannelVisibility::Public => {
- is_participant = true
- }
- ChannelRole::Guest => {}
- }
- if channel.id == membership_channel {
- current_channel_visibility = Some(visibility);
- }
- }
- // free up database connection
- drop(rows);
-
- if is_participant && user_role.is_none() {
- if current_channel_visibility.is_none() {
- current_channel_visibility = channel::Entity::find()
- .filter(channel::Column::Id.eq(channel.id))
- .one(&*tx)
- .await?
- .map(|channel| channel.visibility);
- }
- if current_channel_visibility == Some(ChannelVisibility::Public) {
- user_role = Some(ChannelRole::Guest);
- }
- }
-
- Ok(user_role)
- }
-
- // Get the descendants of the given set if channels, ordered by their
- // path.
- async fn get_channel_descendants_including_self(
- &self,
- channel_ids: impl IntoIterator<Item = ChannelId>,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<channel::Model>> {
- let mut values = String::new();
- for id in channel_ids {
- if !values.is_empty() {
- values.push_str(", ");
- }
- write!(&mut values, "({})", id).unwrap();
- }
-
- if values.is_empty() {
- return Ok(vec![]);
- }
-
- let sql = format!(
- r#"
- SELECT DISTINCT
- descendant_channels.*,
- descendant_channels.parent_path || descendant_channels.id as full_path
- FROM
- channels parent_channels, channels descendant_channels
- WHERE
- descendant_channels.id IN ({values}) OR
- (
- parent_channels.id IN ({values}) AND
- descendant_channels.parent_path LIKE (parent_channels.parent_path || parent_channels.id || '/%')
- )
- ORDER BY
- full_path ASC
- "#
- );
-
- Ok(channel::Entity::find()
- .from_raw_sql(Statement::from_string(
- self.pool.get_database_backend(),
- sql,
- ))
- .all(tx)
- .await?)
- }
-
- /// Returns the channel with the given ID
- pub async fn get_channel(&self, channel_id: ChannelId, user_id: UserId) -> Result<Channel> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- let role = self
- .check_user_is_channel_participant(&channel, user_id, &*tx)
- .await?;
-
- Ok(Channel::from_model(channel, role))
- })
- .await
- }
-
- pub async fn get_channel_internal(
- &self,
- channel_id: ChannelId,
- tx: &DatabaseTransaction,
- ) -> Result<channel::Model> {
- Ok(channel::Entity::find_by_id(channel_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such channel"))?)
- }
-
- pub(crate) async fn get_or_create_channel_room(
- &self,
- channel_id: ChannelId,
- live_kit_room: &str,
- environment: &str,
- tx: &DatabaseTransaction,
- ) -> Result<RoomId> {
- let room = room::Entity::find()
- .filter(room::Column::ChannelId.eq(channel_id))
- .one(&*tx)
- .await?;
-
- let room_id = if let Some(room) = room {
- if let Some(env) = room.enviroment {
- if &env != environment {
- Err(anyhow!("must join using the {} release", env))?;
- }
- }
- room.id
- } else {
- let result = room::Entity::insert(room::ActiveModel {
- channel_id: ActiveValue::Set(Some(channel_id)),
- live_kit_room: ActiveValue::Set(live_kit_room.to_string()),
- enviroment: ActiveValue::Set(Some(environment.to_string())),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
-
- result.last_insert_id
- };
-
- Ok(room_id)
- }
-
- /// Move a channel from one parent to another
- pub async fn move_channel(
- &self,
- channel_id: ChannelId,
- new_parent_id: Option<ChannelId>,
- admin_id: UserId,
- ) -> Result<Option<MoveChannelResult>> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_admin(&channel, admin_id, &*tx)
- .await?;
-
- let new_parent_path;
- let new_parent_channel;
- if let Some(new_parent_id) = new_parent_id {
- let new_parent = self.get_channel_internal(new_parent_id, &*tx).await?;
- self.check_user_is_channel_admin(&new_parent, admin_id, &*tx)
- .await?;
-
- if new_parent
- .ancestors_including_self()
- .any(|id| id == channel.id)
- {
- Err(anyhow!("cannot move a channel into one of its descendants"))?;
- }
-
- new_parent_path = new_parent.path();
- new_parent_channel = Some(new_parent);
- } else {
- new_parent_path = String::new();
- new_parent_channel = None;
- };
-
- let previous_participants = self
- .get_channel_participant_details_internal(&channel, &*tx)
- .await?;
-
- let old_path = format!("{}{}/", channel.parent_path, channel.id);
- let new_path = format!("{}{}/", new_parent_path, channel.id);
-
- if old_path == new_path {
- return Ok(None);
- }
-
- let mut model = channel.into_active_model();
- model.parent_path = ActiveValue::Set(new_parent_path);
- let channel = model.update(&*tx).await?;
-
- if new_parent_channel.is_none() {
- channel_member::ActiveModel {
- id: ActiveValue::NotSet,
- channel_id: ActiveValue::Set(channel_id),
- user_id: ActiveValue::Set(admin_id),
- accepted: ActiveValue::Set(true),
- role: ActiveValue::Set(ChannelRole::Admin),
- }
- .insert(&*tx)
- .await?;
- }
-
- let descendent_ids =
- ChannelId::find_by_statement::<QueryIds>(Statement::from_sql_and_values(
- self.pool.get_database_backend(),
- "
- UPDATE channels SET parent_path = REPLACE(parent_path, $1, $2)
- WHERE parent_path LIKE $3 || '%'
- RETURNING id
- ",
- [old_path.clone().into(), new_path.into(), old_path.into()],
- ))
- .all(&*tx)
- .await?;
-
- let participants_to_update: HashMap<_, _> = self
- .participants_to_notify_for_channel_change(
- new_parent_channel.as_ref().unwrap_or(&channel),
- &*tx,
- )
- .await?
- .into_iter()
- .collect();
-
- let mut moved_channels: HashSet<ChannelId> = HashSet::default();
- for id in descendent_ids {
- moved_channels.insert(id);
- }
- moved_channels.insert(channel_id);
-
- let mut participants_to_remove: HashSet<UserId> = HashSet::default();
- for participant in previous_participants {
- if participant.kind == proto::channel_member::Kind::AncestorMember {
- if !participants_to_update.contains_key(&participant.user_id) {
- participants_to_remove.insert(participant.user_id);
- }
- }
- }
-
- Ok(Some(MoveChannelResult {
- participants_to_remove,
- participants_to_update,
- moved_channels,
- }))
- })
- .await
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
-enum QueryIds {
- Id,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
-enum QueryUserIds {
- UserId,
-}
@@ -1,353 +0,0 @@
-use super::*;
-
-impl Database {
- pub async fn get_contacts(&self, user_id: UserId) -> Result<Vec<Contact>> {
- #[derive(Debug, FromQueryResult)]
- struct ContactWithUserBusyStatuses {
- user_id_a: UserId,
- user_id_b: UserId,
- a_to_b: bool,
- accepted: bool,
- user_a_busy: bool,
- user_b_busy: bool,
- }
-
- self.transaction(|tx| async move {
- let user_a_participant = Alias::new("user_a_participant");
- let user_b_participant = Alias::new("user_b_participant");
- let mut db_contacts = contact::Entity::find()
- .column_as(
- Expr::col((user_a_participant.clone(), room_participant::Column::Id))
- .is_not_null(),
- "user_a_busy",
- )
- .column_as(
- Expr::col((user_b_participant.clone(), room_participant::Column::Id))
- .is_not_null(),
- "user_b_busy",
- )
- .filter(
- contact::Column::UserIdA
- .eq(user_id)
- .or(contact::Column::UserIdB.eq(user_id)),
- )
- .join_as(
- JoinType::LeftJoin,
- contact::Relation::UserARoomParticipant.def(),
- user_a_participant,
- )
- .join_as(
- JoinType::LeftJoin,
- contact::Relation::UserBRoomParticipant.def(),
- user_b_participant,
- )
- .into_model::<ContactWithUserBusyStatuses>()
- .stream(&*tx)
- .await?;
-
- let mut contacts = Vec::new();
- while let Some(db_contact) = db_contacts.next().await {
- let db_contact = db_contact?;
- if db_contact.user_id_a == user_id {
- if db_contact.accepted {
- contacts.push(Contact::Accepted {
- user_id: db_contact.user_id_b,
- busy: db_contact.user_b_busy,
- });
- } else if db_contact.a_to_b {
- contacts.push(Contact::Outgoing {
- user_id: db_contact.user_id_b,
- })
- } else {
- contacts.push(Contact::Incoming {
- user_id: db_contact.user_id_b,
- });
- }
- } else if db_contact.accepted {
- contacts.push(Contact::Accepted {
- user_id: db_contact.user_id_a,
- busy: db_contact.user_a_busy,
- });
- } else if db_contact.a_to_b {
- contacts.push(Contact::Incoming {
- user_id: db_contact.user_id_a,
- });
- } else {
- contacts.push(Contact::Outgoing {
- user_id: db_contact.user_id_a,
- });
- }
- }
-
- contacts.sort_unstable_by_key(|contact| contact.user_id());
-
- Ok(contacts)
- })
- .await
- }
-
- pub async fn is_user_busy(&self, user_id: UserId) -> Result<bool> {
- self.transaction(|tx| async move {
- let participant = room_participant::Entity::find()
- .filter(room_participant::Column::UserId.eq(user_id))
- .one(&*tx)
- .await?;
- Ok(participant.is_some())
- })
- .await
- }
-
- pub async fn has_contact(&self, user_id_1: UserId, user_id_2: UserId) -> Result<bool> {
- self.transaction(|tx| async move {
- let (id_a, id_b) = if user_id_1 < user_id_2 {
- (user_id_1, user_id_2)
- } else {
- (user_id_2, user_id_1)
- };
-
- Ok(contact::Entity::find()
- .filter(
- contact::Column::UserIdA
- .eq(id_a)
- .and(contact::Column::UserIdB.eq(id_b))
- .and(contact::Column::Accepted.eq(true)),
- )
- .one(&*tx)
- .await?
- .is_some())
- })
- .await
- }
-
- pub async fn send_contact_request(
- &self,
- sender_id: UserId,
- receiver_id: UserId,
- ) -> Result<NotificationBatch> {
- self.transaction(|tx| async move {
- let (id_a, id_b, a_to_b) = if sender_id < receiver_id {
- (sender_id, receiver_id, true)
- } else {
- (receiver_id, sender_id, false)
- };
-
- let rows_affected = contact::Entity::insert(contact::ActiveModel {
- user_id_a: ActiveValue::set(id_a),
- user_id_b: ActiveValue::set(id_b),
- a_to_b: ActiveValue::set(a_to_b),
- accepted: ActiveValue::set(false),
- should_notify: ActiveValue::set(true),
- ..Default::default()
- })
- .on_conflict(
- OnConflict::columns([contact::Column::UserIdA, contact::Column::UserIdB])
- .values([
- (contact::Column::Accepted, true.into()),
- (contact::Column::ShouldNotify, false.into()),
- ])
- .action_and_where(
- contact::Column::Accepted.eq(false).and(
- contact::Column::AToB
- .eq(a_to_b)
- .and(contact::Column::UserIdA.eq(id_b))
- .or(contact::Column::AToB
- .ne(a_to_b)
- .and(contact::Column::UserIdA.eq(id_a))),
- ),
- )
- .to_owned(),
- )
- .exec_without_returning(&*tx)
- .await?;
-
- if rows_affected == 0 {
- Err(anyhow!("contact already requested"))?;
- }
-
- Ok(self
- .create_notification(
- receiver_id,
- rpc::Notification::ContactRequest {
- sender_id: sender_id.to_proto(),
- },
- true,
- &*tx,
- )
- .await?
- .into_iter()
- .collect())
- })
- .await
- }
-
- /// Returns a bool indicating whether the removed contact had originally accepted or not
- ///
- /// Deletes the contact identified by the requester and responder ids, and then returns
- /// whether the deleted contact had originally accepted or was a pending contact request.
- ///
- /// # Arguments
- ///
- /// * `requester_id` - The user that initiates this request
- /// * `responder_id` - The user that will be removed
- pub async fn remove_contact(
- &self,
- requester_id: UserId,
- responder_id: UserId,
- ) -> Result<(bool, Option<NotificationId>)> {
- self.transaction(|tx| async move {
- let (id_a, id_b) = if responder_id < requester_id {
- (responder_id, requester_id)
- } else {
- (requester_id, responder_id)
- };
-
- let contact = contact::Entity::find()
- .filter(
- contact::Column::UserIdA
- .eq(id_a)
- .and(contact::Column::UserIdB.eq(id_b)),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such contact"))?;
-
- contact::Entity::delete_by_id(contact.id).exec(&*tx).await?;
-
- let mut deleted_notification_id = None;
- if !contact.accepted {
- deleted_notification_id = self
- .remove_notification(
- responder_id,
- rpc::Notification::ContactRequest {
- sender_id: requester_id.to_proto(),
- },
- &*tx,
- )
- .await?;
- }
-
- Ok((contact.accepted, deleted_notification_id))
- })
- .await
- }
-
- pub async fn dismiss_contact_notification(
- &self,
- user_id: UserId,
- contact_user_id: UserId,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- let (id_a, id_b, a_to_b) = if user_id < contact_user_id {
- (user_id, contact_user_id, true)
- } else {
- (contact_user_id, user_id, false)
- };
-
- let result = contact::Entity::update_many()
- .set(contact::ActiveModel {
- should_notify: ActiveValue::set(false),
- ..Default::default()
- })
- .filter(
- contact::Column::UserIdA
- .eq(id_a)
- .and(contact::Column::UserIdB.eq(id_b))
- .and(
- contact::Column::AToB
- .eq(a_to_b)
- .and(contact::Column::Accepted.eq(true))
- .or(contact::Column::AToB
- .ne(a_to_b)
- .and(contact::Column::Accepted.eq(false))),
- ),
- )
- .exec(&*tx)
- .await?;
- if result.rows_affected == 0 {
- Err(anyhow!("no such contact request"))?
- } else {
- Ok(())
- }
- })
- .await
- }
-
- pub async fn respond_to_contact_request(
- &self,
- responder_id: UserId,
- requester_id: UserId,
- accept: bool,
- ) -> Result<NotificationBatch> {
- self.transaction(|tx| async move {
- let (id_a, id_b, a_to_b) = if responder_id < requester_id {
- (responder_id, requester_id, false)
- } else {
- (requester_id, responder_id, true)
- };
- let rows_affected = if accept {
- let result = contact::Entity::update_many()
- .set(contact::ActiveModel {
- accepted: ActiveValue::set(true),
- should_notify: ActiveValue::set(true),
- ..Default::default()
- })
- .filter(
- contact::Column::UserIdA
- .eq(id_a)
- .and(contact::Column::UserIdB.eq(id_b))
- .and(contact::Column::AToB.eq(a_to_b)),
- )
- .exec(&*tx)
- .await?;
- result.rows_affected
- } else {
- let result = contact::Entity::delete_many()
- .filter(
- contact::Column::UserIdA
- .eq(id_a)
- .and(contact::Column::UserIdB.eq(id_b))
- .and(contact::Column::AToB.eq(a_to_b))
- .and(contact::Column::Accepted.eq(false)),
- )
- .exec(&*tx)
- .await?;
-
- result.rows_affected
- };
-
- if rows_affected == 0 {
- Err(anyhow!("no such contact request"))?
- }
-
- let mut notifications = Vec::new();
- notifications.extend(
- self.mark_notification_as_read_with_response(
- responder_id,
- &rpc::Notification::ContactRequest {
- sender_id: requester_id.to_proto(),
- },
- accept,
- &*tx,
- )
- .await?,
- );
-
- if accept {
- notifications.extend(
- self.create_notification(
- requester_id,
- rpc::Notification::ContactRequestAccepted {
- responder_id: responder_id.to_proto(),
- },
- true,
- &*tx,
- )
- .await?,
- );
- }
-
- Ok(notifications)
- })
- .await
- }
-}
@@ -1,505 +0,0 @@
-use super::*;
-use rpc::Notification;
-use sea_orm::TryInsertResult;
-use time::OffsetDateTime;
-
-impl Database {
- pub async fn join_channel_chat(
- &self,
- channel_id: ChannelId,
- connection_id: ConnectionId,
- user_id: UserId,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_participant(&channel, user_id, &*tx)
- .await?;
- channel_chat_participant::ActiveModel {
- id: ActiveValue::NotSet,
- channel_id: ActiveValue::Set(channel_id),
- user_id: ActiveValue::Set(user_id),
- connection_id: ActiveValue::Set(connection_id.id as i32),
- connection_server_id: ActiveValue::Set(ServerId(connection_id.owner_id as i32)),
- }
- .insert(&*tx)
- .await?;
- Ok(())
- })
- .await
- }
-
- pub async fn channel_chat_connection_lost(
- &self,
- connection_id: ConnectionId,
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- channel_chat_participant::Entity::delete_many()
- .filter(
- Condition::all()
- .add(
- channel_chat_participant::Column::ConnectionServerId
- .eq(connection_id.owner_id),
- )
- .add(channel_chat_participant::Column::ConnectionId.eq(connection_id.id)),
- )
- .exec(tx)
- .await?;
- Ok(())
- }
-
- pub async fn leave_channel_chat(
- &self,
- channel_id: ChannelId,
- connection_id: ConnectionId,
- _user_id: UserId,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- channel_chat_participant::Entity::delete_many()
- .filter(
- Condition::all()
- .add(
- channel_chat_participant::Column::ConnectionServerId
- .eq(connection_id.owner_id),
- )
- .add(channel_chat_participant::Column::ConnectionId.eq(connection_id.id))
- .add(channel_chat_participant::Column::ChannelId.eq(channel_id)),
- )
- .exec(&*tx)
- .await?;
-
- Ok(())
- })
- .await
- }
-
- pub async fn get_channel_messages(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- count: usize,
- before_message_id: Option<MessageId>,
- ) -> Result<Vec<proto::ChannelMessage>> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_participant(&channel, user_id, &*tx)
- .await?;
-
- let mut condition =
- Condition::all().add(channel_message::Column::ChannelId.eq(channel_id));
-
- if let Some(before_message_id) = before_message_id {
- condition = condition.add(channel_message::Column::Id.lt(before_message_id));
- }
-
- let rows = channel_message::Entity::find()
- .filter(condition)
- .order_by_desc(channel_message::Column::Id)
- .limit(count as u64)
- .all(&*tx)
- .await?;
-
- self.load_channel_messages(rows, &*tx).await
- })
- .await
- }
-
- pub async fn get_channel_messages_by_id(
- &self,
- user_id: UserId,
- message_ids: &[MessageId],
- ) -> Result<Vec<proto::ChannelMessage>> {
- self.transaction(|tx| async move {
- let rows = channel_message::Entity::find()
- .filter(channel_message::Column::Id.is_in(message_ids.iter().copied()))
- .order_by_desc(channel_message::Column::Id)
- .all(&*tx)
- .await?;
-
- let mut channels = HashMap::<ChannelId, channel::Model>::default();
- for row in &rows {
- channels.insert(
- row.channel_id,
- self.get_channel_internal(row.channel_id, &*tx).await?,
- );
- }
-
- for (_, channel) in channels {
- self.check_user_is_channel_participant(&channel, user_id, &*tx)
- .await?;
- }
-
- let messages = self.load_channel_messages(rows, &*tx).await?;
- Ok(messages)
- })
- .await
- }
-
- async fn load_channel_messages(
- &self,
- rows: Vec<channel_message::Model>,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<proto::ChannelMessage>> {
- let mut messages = rows
- .into_iter()
- .map(|row| {
- let nonce = row.nonce.as_u64_pair();
- proto::ChannelMessage {
- id: row.id.to_proto(),
- sender_id: row.sender_id.to_proto(),
- body: row.body,
- timestamp: row.sent_at.assume_utc().unix_timestamp() as u64,
- mentions: vec![],
- nonce: Some(proto::Nonce {
- upper_half: nonce.0,
- lower_half: nonce.1,
- }),
- }
- })
- .collect::<Vec<_>>();
- messages.reverse();
-
- let mut mentions = channel_message_mention::Entity::find()
- .filter(channel_message_mention::Column::MessageId.is_in(messages.iter().map(|m| m.id)))
- .order_by_asc(channel_message_mention::Column::MessageId)
- .order_by_asc(channel_message_mention::Column::StartOffset)
- .stream(&*tx)
- .await?;
-
- let mut message_ix = 0;
- while let Some(mention) = mentions.next().await {
- let mention = mention?;
- let message_id = mention.message_id.to_proto();
- while let Some(message) = messages.get_mut(message_ix) {
- if message.id < message_id {
- message_ix += 1;
- } else {
- if message.id == message_id {
- message.mentions.push(proto::ChatMention {
- range: Some(proto::Range {
- start: mention.start_offset as u64,
- end: mention.end_offset as u64,
- }),
- user_id: mention.user_id.to_proto(),
- });
- }
- break;
- }
- }
- }
-
- Ok(messages)
- }
-
- pub async fn create_channel_message(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- body: &str,
- mentions: &[proto::ChatMention],
- timestamp: OffsetDateTime,
- nonce: u128,
- ) -> Result<CreatedChannelMessage> {
- self.transaction(|tx| async move {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- self.check_user_is_channel_participant(&channel, user_id, &*tx)
- .await?;
-
- let mut rows = channel_chat_participant::Entity::find()
- .filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
- .stream(&*tx)
- .await?;
-
- let mut is_participant = false;
- let mut participant_connection_ids = Vec::new();
- let mut participant_user_ids = Vec::new();
- while let Some(row) = rows.next().await {
- let row = row?;
- if row.user_id == user_id {
- is_participant = true;
- }
- participant_user_ids.push(row.user_id);
- participant_connection_ids.push(row.connection());
- }
- drop(rows);
-
- if !is_participant {
- Err(anyhow!("not a chat participant"))?;
- }
-
- let timestamp = timestamp.to_offset(time::UtcOffset::UTC);
- let timestamp = time::PrimitiveDateTime::new(timestamp.date(), timestamp.time());
-
- let result = channel_message::Entity::insert(channel_message::ActiveModel {
- channel_id: ActiveValue::Set(channel_id),
- sender_id: ActiveValue::Set(user_id),
- body: ActiveValue::Set(body.to_string()),
- sent_at: ActiveValue::Set(timestamp),
- nonce: ActiveValue::Set(Uuid::from_u128(nonce)),
- id: ActiveValue::NotSet,
- })
- .on_conflict(
- OnConflict::columns([
- channel_message::Column::SenderId,
- channel_message::Column::Nonce,
- ])
- .do_nothing()
- .to_owned(),
- )
- .do_nothing()
- .exec(&*tx)
- .await?;
-
- let message_id;
- let mut notifications = Vec::new();
- match result {
- TryInsertResult::Inserted(result) => {
- message_id = result.last_insert_id;
- let mentioned_user_ids =
- mentions.iter().map(|m| m.user_id).collect::<HashSet<_>>();
- let mentions = mentions
- .iter()
- .filter_map(|mention| {
- let range = mention.range.as_ref()?;
- if !body.is_char_boundary(range.start as usize)
- || !body.is_char_boundary(range.end as usize)
- {
- return None;
- }
- Some(channel_message_mention::ActiveModel {
- message_id: ActiveValue::Set(message_id),
- start_offset: ActiveValue::Set(range.start as i32),
- end_offset: ActiveValue::Set(range.end as i32),
- user_id: ActiveValue::Set(UserId::from_proto(mention.user_id)),
- })
- })
- .collect::<Vec<_>>();
- if !mentions.is_empty() {
- channel_message_mention::Entity::insert_many(mentions)
- .exec(&*tx)
- .await?;
- }
-
- for mentioned_user in mentioned_user_ids {
- notifications.extend(
- self.create_notification(
- UserId::from_proto(mentioned_user),
- rpc::Notification::ChannelMessageMention {
- message_id: message_id.to_proto(),
- sender_id: user_id.to_proto(),
- channel_id: channel_id.to_proto(),
- },
- false,
- &*tx,
- )
- .await?,
- );
- }
-
- self.observe_channel_message_internal(channel_id, user_id, message_id, &*tx)
- .await?;
- }
- _ => {
- message_id = channel_message::Entity::find()
- .filter(channel_message::Column::Nonce.eq(Uuid::from_u128(nonce)))
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("failed to insert message"))?
- .id;
- }
- }
-
- let mut channel_members = self.get_channel_participants(&channel, &*tx).await?;
- channel_members.retain(|member| !participant_user_ids.contains(member));
-
- Ok(CreatedChannelMessage {
- message_id,
- participant_connection_ids,
- channel_members,
- notifications,
- })
- })
- .await
- }
-
- pub async fn observe_channel_message(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- message_id: MessageId,
- ) -> Result<NotificationBatch> {
- self.transaction(|tx| async move {
- self.observe_channel_message_internal(channel_id, user_id, message_id, &*tx)
- .await?;
- let mut batch = NotificationBatch::default();
- batch.extend(
- self.mark_notification_as_read(
- user_id,
- &Notification::ChannelMessageMention {
- message_id: message_id.to_proto(),
- sender_id: Default::default(),
- channel_id: Default::default(),
- },
- &*tx,
- )
- .await?,
- );
- Ok(batch)
- })
- .await
- }
-
- async fn observe_channel_message_internal(
- &self,
- channel_id: ChannelId,
- user_id: UserId,
- message_id: MessageId,
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- observed_channel_messages::Entity::insert(observed_channel_messages::ActiveModel {
- user_id: ActiveValue::Set(user_id),
- channel_id: ActiveValue::Set(channel_id),
- channel_message_id: ActiveValue::Set(message_id),
- })
- .on_conflict(
- OnConflict::columns([
- observed_channel_messages::Column::ChannelId,
- observed_channel_messages::Column::UserId,
- ])
- .update_column(observed_channel_messages::Column::ChannelMessageId)
- .action_cond_where(observed_channel_messages::Column::ChannelMessageId.lt(message_id))
- .to_owned(),
- )
- // TODO: Try to upgrade SeaORM so we don't have to do this hack around their bug
- .exec_without_returning(&*tx)
- .await?;
- Ok(())
- }
-
- pub async fn unseen_channel_messages(
- &self,
- user_id: UserId,
- channel_ids: &[ChannelId],
- tx: &DatabaseTransaction,
- ) -> Result<Vec<proto::UnseenChannelMessage>> {
- let mut observed_messages_by_channel_id = HashMap::default();
- let mut rows = observed_channel_messages::Entity::find()
- .filter(observed_channel_messages::Column::UserId.eq(user_id))
- .filter(observed_channel_messages::Column::ChannelId.is_in(channel_ids.iter().copied()))
- .stream(&*tx)
- .await?;
-
- while let Some(row) = rows.next().await {
- let row = row?;
- observed_messages_by_channel_id.insert(row.channel_id, row);
- }
- drop(rows);
- let mut values = String::new();
- for id in channel_ids {
- if !values.is_empty() {
- values.push_str(", ");
- }
- write!(&mut values, "({})", id).unwrap();
- }
-
- if values.is_empty() {
- return Ok(Default::default());
- }
-
- let sql = format!(
- r#"
- SELECT
- *
- FROM (
- SELECT
- *,
- row_number() OVER (
- PARTITION BY channel_id
- ORDER BY id DESC
- ) as row_number
- FROM channel_messages
- WHERE
- channel_id in ({values})
- ) AS messages
- WHERE
- row_number = 1
- "#,
- );
-
- let stmt = Statement::from_string(self.pool.get_database_backend(), sql);
- let last_messages = channel_message::Model::find_by_statement(stmt)
- .all(&*tx)
- .await?;
-
- let mut changes = Vec::new();
- for last_message in last_messages {
- if let Some(observed_message) =
- observed_messages_by_channel_id.get(&last_message.channel_id)
- {
- if observed_message.channel_message_id == last_message.id {
- continue;
- }
- }
- changes.push(proto::UnseenChannelMessage {
- channel_id: last_message.channel_id.to_proto(),
- message_id: last_message.id.to_proto(),
- });
- }
-
- Ok(changes)
- }
-
- pub async fn remove_channel_message(
- &self,
- channel_id: ChannelId,
- message_id: MessageId,
- user_id: UserId,
- ) -> Result<Vec<ConnectionId>> {
- self.transaction(|tx| async move {
- let mut rows = channel_chat_participant::Entity::find()
- .filter(channel_chat_participant::Column::ChannelId.eq(channel_id))
- .stream(&*tx)
- .await?;
-
- let mut is_participant = false;
- let mut participant_connection_ids = Vec::new();
- while let Some(row) = rows.next().await {
- let row = row?;
- if row.user_id == user_id {
- is_participant = true;
- }
- participant_connection_ids.push(row.connection());
- }
- drop(rows);
-
- if !is_participant {
- Err(anyhow!("not a chat participant"))?;
- }
-
- let result = channel_message::Entity::delete_by_id(message_id)
- .filter(channel_message::Column::SenderId.eq(user_id))
- .exec(&*tx)
- .await?;
-
- if result.rows_affected == 0 {
- let channel = self.get_channel_internal(channel_id, &*tx).await?;
- if self
- .check_user_is_channel_admin(&channel, user_id, &*tx)
- .await
- .is_ok()
- {
- let result = channel_message::Entity::delete_by_id(message_id)
- .exec(&*tx)
- .await?;
- if result.rows_affected == 0 {
- Err(anyhow!("no such message"))?;
- }
- } else {
- Err(anyhow!("operation could not be completed"))?;
- }
- }
-
- Ok(participant_connection_ids)
- })
- .await
- }
-}
@@ -1,262 +0,0 @@
-use super::*;
-use rpc::Notification;
-
-impl Database {
- pub async fn initialize_notification_kinds(&mut self) -> Result<()> {
- notification_kind::Entity::insert_many(Notification::all_variant_names().iter().map(
- |kind| notification_kind::ActiveModel {
- name: ActiveValue::Set(kind.to_string()),
- ..Default::default()
- },
- ))
- .on_conflict(OnConflict::new().do_nothing().to_owned())
- .exec_without_returning(&self.pool)
- .await?;
-
- let mut rows = notification_kind::Entity::find().stream(&self.pool).await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- self.notification_kinds_by_name.insert(row.name, row.id);
- }
-
- for name in Notification::all_variant_names() {
- if let Some(id) = self.notification_kinds_by_name.get(*name).copied() {
- self.notification_kinds_by_id.insert(id, name);
- }
- }
-
- Ok(())
- }
-
- pub async fn get_notifications(
- &self,
- recipient_id: UserId,
- limit: usize,
- before_id: Option<NotificationId>,
- ) -> Result<Vec<proto::Notification>> {
- self.transaction(|tx| async move {
- let mut result = Vec::new();
- let mut condition =
- Condition::all().add(notification::Column::RecipientId.eq(recipient_id));
-
- if let Some(before_id) = before_id {
- condition = condition.add(notification::Column::Id.lt(before_id));
- }
-
- let mut rows = notification::Entity::find()
- .filter(condition)
- .order_by_desc(notification::Column::Id)
- .limit(limit as u64)
- .stream(&*tx)
- .await?;
- while let Some(row) = rows.next().await {
- let row = row?;
- let kind = row.kind;
- if let Some(proto) = model_to_proto(self, row) {
- result.push(proto);
- } else {
- log::warn!("unknown notification kind {:?}", kind);
- }
- }
- result.reverse();
- Ok(result)
- })
- .await
- }
-
- /// Create a notification. If `avoid_duplicates` is set to true, then avoid
- /// creating a new notification if the given recipient already has an
- /// unread notification with the given kind and entity id.
- pub async fn create_notification(
- &self,
- recipient_id: UserId,
- notification: Notification,
- avoid_duplicates: bool,
- tx: &DatabaseTransaction,
- ) -> Result<Option<(UserId, proto::Notification)>> {
- if avoid_duplicates {
- if self
- .find_notification(recipient_id, ¬ification, tx)
- .await?
- .is_some()
- {
- return Ok(None);
- }
- }
-
- let proto = notification.to_proto();
- let kind = notification_kind_from_proto(self, &proto)?;
- let model = notification::ActiveModel {
- recipient_id: ActiveValue::Set(recipient_id),
- kind: ActiveValue::Set(kind),
- entity_id: ActiveValue::Set(proto.entity_id.map(|id| id as i32)),
- content: ActiveValue::Set(proto.content.clone()),
- ..Default::default()
- }
- .save(&*tx)
- .await?;
-
- Ok(Some((
- recipient_id,
- proto::Notification {
- id: model.id.as_ref().to_proto(),
- kind: proto.kind,
- timestamp: model.created_at.as_ref().assume_utc().unix_timestamp() as u64,
- is_read: false,
- response: None,
- content: proto.content,
- entity_id: proto.entity_id,
- },
- )))
- }
-
- /// Remove an unread notification with the given recipient, kind and
- /// entity id.
- pub async fn remove_notification(
- &self,
- recipient_id: UserId,
- notification: Notification,
- tx: &DatabaseTransaction,
- ) -> Result<Option<NotificationId>> {
- let id = self
- .find_notification(recipient_id, ¬ification, tx)
- .await?;
- if let Some(id) = id {
- notification::Entity::delete_by_id(id).exec(tx).await?;
- }
- Ok(id)
- }
-
- /// Populate the response for the notification with the given kind and
- /// entity id.
- pub async fn mark_notification_as_read_with_response(
- &self,
- recipient_id: UserId,
- notification: &Notification,
- response: bool,
- tx: &DatabaseTransaction,
- ) -> Result<Option<(UserId, proto::Notification)>> {
- self.mark_notification_as_read_internal(recipient_id, notification, Some(response), tx)
- .await
- }
-
- pub async fn mark_notification_as_read(
- &self,
- recipient_id: UserId,
- notification: &Notification,
- tx: &DatabaseTransaction,
- ) -> Result<Option<(UserId, proto::Notification)>> {
- self.mark_notification_as_read_internal(recipient_id, notification, None, tx)
- .await
- }
-
- pub async fn mark_notification_as_read_by_id(
- &self,
- recipient_id: UserId,
- notification_id: NotificationId,
- ) -> Result<NotificationBatch> {
- self.transaction(|tx| async move {
- let row = notification::Entity::update(notification::ActiveModel {
- id: ActiveValue::Unchanged(notification_id),
- recipient_id: ActiveValue::Unchanged(recipient_id),
- is_read: ActiveValue::Set(true),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- Ok(model_to_proto(self, row)
- .map(|notification| (recipient_id, notification))
- .into_iter()
- .collect())
- })
- .await
- }
-
- async fn mark_notification_as_read_internal(
- &self,
- recipient_id: UserId,
- notification: &Notification,
- response: Option<bool>,
- tx: &DatabaseTransaction,
- ) -> Result<Option<(UserId, proto::Notification)>> {
- if let Some(id) = self
- .find_notification(recipient_id, notification, &*tx)
- .await?
- {
- let row = notification::Entity::update(notification::ActiveModel {
- id: ActiveValue::Unchanged(id),
- recipient_id: ActiveValue::Unchanged(recipient_id),
- is_read: ActiveValue::Set(true),
- response: if let Some(response) = response {
- ActiveValue::Set(Some(response))
- } else {
- ActiveValue::NotSet
- },
- ..Default::default()
- })
- .exec(tx)
- .await?;
- Ok(model_to_proto(self, row).map(|notification| (recipient_id, notification)))
- } else {
- Ok(None)
- }
- }
-
- /// Find an unread notification by its recipient, kind and entity id.
- async fn find_notification(
- &self,
- recipient_id: UserId,
- notification: &Notification,
- tx: &DatabaseTransaction,
- ) -> Result<Option<NotificationId>> {
- let proto = notification.to_proto();
- let kind = notification_kind_from_proto(self, &proto)?;
-
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryIds {
- Id,
- }
-
- Ok(notification::Entity::find()
- .select_only()
- .column(notification::Column::Id)
- .filter(
- Condition::all()
- .add(notification::Column::RecipientId.eq(recipient_id))
- .add(notification::Column::IsRead.eq(false))
- .add(notification::Column::Kind.eq(kind))
- .add(if proto.entity_id.is_some() {
- notification::Column::EntityId.eq(proto.entity_id)
- } else {
- notification::Column::EntityId.is_null()
- }),
- )
- .into_values::<_, QueryIds>()
- .one(&*tx)
- .await?)
- }
-}
-
-fn model_to_proto(this: &Database, row: notification::Model) -> Option<proto::Notification> {
- let kind = this.notification_kinds_by_id.get(&row.kind)?;
- Some(proto::Notification {
- id: row.id.to_proto(),
- kind: kind.to_string(),
- timestamp: row.created_at.assume_utc().unix_timestamp() as u64,
- is_read: row.is_read,
- response: row.response,
- content: row.content,
- entity_id: row.entity_id.map(|id| id as u64),
- })
-}
-
-fn notification_kind_from_proto(
- this: &Database,
- proto: &proto::Notification,
-) -> Result<NotificationKindId> {
- Ok(this
- .notification_kinds_by_name
- .get(&proto.kind)
- .copied()
- .ok_or_else(|| anyhow!("invalid notification kind {:?}", proto.kind))?)
-}
@@ -1,960 +0,0 @@
-use super::*;
-
-impl Database {
- pub async fn project_count_excluding_admins(&self) -> Result<usize> {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryAs {
- Count,
- }
-
- self.transaction(|tx| async move {
- Ok(project::Entity::find()
- .select_only()
- .column_as(project::Column::Id.count(), QueryAs::Count)
- .inner_join(user::Entity)
- .filter(user::Column::Admin.eq(false))
- .into_values::<_, QueryAs>()
- .one(&*tx)
- .await?
- .unwrap_or(0i64) as usize)
- })
- .await
- }
-
- pub async fn share_project(
- &self,
- room_id: RoomId,
- connection: ConnectionId,
- worktrees: &[proto::WorktreeMetadata],
- ) -> Result<RoomGuard<(ProjectId, proto::Room)>> {
- self.room_transaction(room_id, |tx| async move {
- let participant = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(
- room_participant::Column::AnsweringConnectionId
- .eq(connection.id as i32),
- )
- .add(
- room_participant::Column::AnsweringConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("could not find participant"))?;
- if participant.room_id != room_id {
- return Err(anyhow!("shared project on unexpected room"))?;
- }
-
- let project = project::ActiveModel {
- room_id: ActiveValue::set(participant.room_id),
- host_user_id: ActiveValue::set(participant.user_id),
- host_connection_id: ActiveValue::set(Some(connection.id as i32)),
- host_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
-
- if !worktrees.is_empty() {
- worktree::Entity::insert_many(worktrees.iter().map(|worktree| {
- worktree::ActiveModel {
- id: ActiveValue::set(worktree.id as i64),
- project_id: ActiveValue::set(project.id),
- abs_path: ActiveValue::set(worktree.abs_path.clone()),
- root_name: ActiveValue::set(worktree.root_name.clone()),
- visible: ActiveValue::set(worktree.visible),
- scan_id: ActiveValue::set(0),
- completed_scan_id: ActiveValue::set(0),
- }
- }))
- .exec(&*tx)
- .await?;
- }
-
- project_collaborator::ActiveModel {
- project_id: ActiveValue::set(project.id),
- connection_id: ActiveValue::set(connection.id as i32),
- connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
- user_id: ActiveValue::set(participant.user_id),
- replica_id: ActiveValue::set(ReplicaId(0)),
- is_host: ActiveValue::set(true),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &tx).await?;
- Ok((project.id, room))
- })
- .await
- }
-
- pub async fn unshare_project(
- &self,
- project_id: ProjectId,
- connection: ConnectionId,
- ) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let guest_connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
-
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("project not found"))?;
- if project.host_connection()? == connection {
- project::Entity::delete(project.into_active_model())
- .exec(&*tx)
- .await?;
- let room = self.get_room(room_id, &tx).await?;
- Ok((room, guest_connection_ids))
- } else {
- Err(anyhow!("cannot unshare a project hosted by another user"))?
- }
- })
- .await
- }
-
- pub async fn update_project(
- &self,
- project_id: ProjectId,
- connection: ConnectionId,
- worktrees: &[proto::WorktreeMetadata],
- ) -> Result<RoomGuard<(proto::Room, Vec<ConnectionId>)>> {
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let project = project::Entity::find_by_id(project_id)
- .filter(
- Condition::all()
- .add(project::Column::HostConnectionId.eq(connection.id as i32))
- .add(
- project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
- ),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
-
- self.update_project_worktrees(project.id, worktrees, &tx)
- .await?;
-
- let guest_connection_ids = self.project_guest_connection_ids(project.id, &tx).await?;
- let room = self.get_room(project.room_id, &tx).await?;
- Ok((room, guest_connection_ids))
- })
- .await
- }
-
- pub(in crate::db) async fn update_project_worktrees(
- &self,
- project_id: ProjectId,
- worktrees: &[proto::WorktreeMetadata],
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- if !worktrees.is_empty() {
- worktree::Entity::insert_many(worktrees.iter().map(|worktree| worktree::ActiveModel {
- id: ActiveValue::set(worktree.id as i64),
- project_id: ActiveValue::set(project_id),
- abs_path: ActiveValue::set(worktree.abs_path.clone()),
- root_name: ActiveValue::set(worktree.root_name.clone()),
- visible: ActiveValue::set(worktree.visible),
- scan_id: ActiveValue::set(0),
- completed_scan_id: ActiveValue::set(0),
- }))
- .on_conflict(
- OnConflict::columns([worktree::Column::ProjectId, worktree::Column::Id])
- .update_column(worktree::Column::RootName)
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- }
-
- worktree::Entity::delete_many()
- .filter(worktree::Column::ProjectId.eq(project_id).and(
- worktree::Column::Id.is_not_in(worktrees.iter().map(|worktree| worktree.id as i64)),
- ))
- .exec(&*tx)
- .await?;
-
- Ok(())
- }
-
- pub async fn update_worktree(
- &self,
- update: &proto::UpdateWorktree,
- connection: ConnectionId,
- ) -> Result<RoomGuard<Vec<ConnectionId>>> {
- let project_id = ProjectId::from_proto(update.project_id);
- let worktree_id = update.worktree_id as i64;
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- // Ensure the update comes from the host.
- let _project = project::Entity::find_by_id(project_id)
- .filter(
- Condition::all()
- .add(project::Column::HostConnectionId.eq(connection.id as i32))
- .add(
- project::Column::HostConnectionServerId.eq(connection.owner_id as i32),
- ),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
-
- // Update metadata.
- worktree::Entity::update(worktree::ActiveModel {
- id: ActiveValue::set(worktree_id),
- project_id: ActiveValue::set(project_id),
- root_name: ActiveValue::set(update.root_name.clone()),
- scan_id: ActiveValue::set(update.scan_id as i64),
- completed_scan_id: if update.is_last_update {
- ActiveValue::set(update.scan_id as i64)
- } else {
- ActiveValue::default()
- },
- abs_path: ActiveValue::set(update.abs_path.clone()),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
-
- if !update.updated_entries.is_empty() {
- worktree_entry::Entity::insert_many(update.updated_entries.iter().map(|entry| {
- let mtime = entry.mtime.clone().unwrap_or_default();
- worktree_entry::ActiveModel {
- project_id: ActiveValue::set(project_id),
- worktree_id: ActiveValue::set(worktree_id),
- id: ActiveValue::set(entry.id as i64),
- is_dir: ActiveValue::set(entry.is_dir),
- path: ActiveValue::set(entry.path.clone()),
- inode: ActiveValue::set(entry.inode as i64),
- mtime_seconds: ActiveValue::set(mtime.seconds as i64),
- mtime_nanos: ActiveValue::set(mtime.nanos as i32),
- is_symlink: ActiveValue::set(entry.is_symlink),
- is_ignored: ActiveValue::set(entry.is_ignored),
- is_external: ActiveValue::set(entry.is_external),
- git_status: ActiveValue::set(entry.git_status.map(|status| status as i64)),
- is_deleted: ActiveValue::set(false),
- scan_id: ActiveValue::set(update.scan_id as i64),
- }
- }))
- .on_conflict(
- OnConflict::columns([
- worktree_entry::Column::ProjectId,
- worktree_entry::Column::WorktreeId,
- worktree_entry::Column::Id,
- ])
- .update_columns([
- worktree_entry::Column::IsDir,
- worktree_entry::Column::Path,
- worktree_entry::Column::Inode,
- worktree_entry::Column::MtimeSeconds,
- worktree_entry::Column::MtimeNanos,
- worktree_entry::Column::IsSymlink,
- worktree_entry::Column::IsIgnored,
- worktree_entry::Column::GitStatus,
- worktree_entry::Column::ScanId,
- ])
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- }
-
- if !update.removed_entries.is_empty() {
- worktree_entry::Entity::update_many()
- .filter(
- worktree_entry::Column::ProjectId
- .eq(project_id)
- .and(worktree_entry::Column::WorktreeId.eq(worktree_id))
- .and(
- worktree_entry::Column::Id
- .is_in(update.removed_entries.iter().map(|id| *id as i64)),
- ),
- )
- .set(worktree_entry::ActiveModel {
- is_deleted: ActiveValue::Set(true),
- scan_id: ActiveValue::Set(update.scan_id as i64),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- }
-
- if !update.updated_repositories.is_empty() {
- worktree_repository::Entity::insert_many(update.updated_repositories.iter().map(
- |repository| worktree_repository::ActiveModel {
- project_id: ActiveValue::set(project_id),
- worktree_id: ActiveValue::set(worktree_id),
- work_directory_id: ActiveValue::set(repository.work_directory_id as i64),
- scan_id: ActiveValue::set(update.scan_id as i64),
- branch: ActiveValue::set(repository.branch.clone()),
- is_deleted: ActiveValue::set(false),
- },
- ))
- .on_conflict(
- OnConflict::columns([
- worktree_repository::Column::ProjectId,
- worktree_repository::Column::WorktreeId,
- worktree_repository::Column::WorkDirectoryId,
- ])
- .update_columns([
- worktree_repository::Column::ScanId,
- worktree_repository::Column::Branch,
- ])
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- }
-
- if !update.removed_repositories.is_empty() {
- worktree_repository::Entity::update_many()
- .filter(
- worktree_repository::Column::ProjectId
- .eq(project_id)
- .and(worktree_repository::Column::WorktreeId.eq(worktree_id))
- .and(
- worktree_repository::Column::WorkDirectoryId
- .is_in(update.removed_repositories.iter().map(|id| *id as i64)),
- ),
- )
- .set(worktree_repository::ActiveModel {
- is_deleted: ActiveValue::Set(true),
- scan_id: ActiveValue::Set(update.scan_id as i64),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- }
-
- let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
- Ok(connection_ids)
- })
- .await
- }
-
- pub async fn update_diagnostic_summary(
- &self,
- update: &proto::UpdateDiagnosticSummary,
- connection: ConnectionId,
- ) -> Result<RoomGuard<Vec<ConnectionId>>> {
- let project_id = ProjectId::from_proto(update.project_id);
- let worktree_id = update.worktree_id as i64;
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let summary = update
- .summary
- .as_ref()
- .ok_or_else(|| anyhow!("invalid summary"))?;
-
- // Ensure the update comes from the host.
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
- if project.host_connection()? != connection {
- return Err(anyhow!("can't update a project hosted by someone else"))?;
- }
-
- // Update summary.
- worktree_diagnostic_summary::Entity::insert(worktree_diagnostic_summary::ActiveModel {
- project_id: ActiveValue::set(project_id),
- worktree_id: ActiveValue::set(worktree_id),
- path: ActiveValue::set(summary.path.clone()),
- language_server_id: ActiveValue::set(summary.language_server_id as i64),
- error_count: ActiveValue::set(summary.error_count as i32),
- warning_count: ActiveValue::set(summary.warning_count as i32),
- ..Default::default()
- })
- .on_conflict(
- OnConflict::columns([
- worktree_diagnostic_summary::Column::ProjectId,
- worktree_diagnostic_summary::Column::WorktreeId,
- worktree_diagnostic_summary::Column::Path,
- ])
- .update_columns([
- worktree_diagnostic_summary::Column::LanguageServerId,
- worktree_diagnostic_summary::Column::ErrorCount,
- worktree_diagnostic_summary::Column::WarningCount,
- ])
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
-
- let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
- Ok(connection_ids)
- })
- .await
- }
-
- pub async fn start_language_server(
- &self,
- update: &proto::StartLanguageServer,
- connection: ConnectionId,
- ) -> Result<RoomGuard<Vec<ConnectionId>>> {
- let project_id = ProjectId::from_proto(update.project_id);
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let server = update
- .server
- .as_ref()
- .ok_or_else(|| anyhow!("invalid language server"))?;
-
- // Ensure the update comes from the host.
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
- if project.host_connection()? != connection {
- return Err(anyhow!("can't update a project hosted by someone else"))?;
- }
-
- // Add the newly-started language server.
- language_server::Entity::insert(language_server::ActiveModel {
- project_id: ActiveValue::set(project_id),
- id: ActiveValue::set(server.id as i64),
- name: ActiveValue::set(server.name.clone()),
- ..Default::default()
- })
- .on_conflict(
- OnConflict::columns([
- language_server::Column::ProjectId,
- language_server::Column::Id,
- ])
- .update_column(language_server::Column::Name)
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
-
- let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
- Ok(connection_ids)
- })
- .await
- }
-
- pub async fn update_worktree_settings(
- &self,
- update: &proto::UpdateWorktreeSettings,
- connection: ConnectionId,
- ) -> Result<RoomGuard<Vec<ConnectionId>>> {
- let project_id = ProjectId::from_proto(update.project_id);
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- // Ensure the update comes from the host.
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
- if project.host_connection()? != connection {
- return Err(anyhow!("can't update a project hosted by someone else"))?;
- }
-
- if let Some(content) = &update.content {
- worktree_settings_file::Entity::insert(worktree_settings_file::ActiveModel {
- project_id: ActiveValue::Set(project_id),
- worktree_id: ActiveValue::Set(update.worktree_id as i64),
- path: ActiveValue::Set(update.path.clone()),
- content: ActiveValue::Set(content.clone()),
- })
- .on_conflict(
- OnConflict::columns([
- worktree_settings_file::Column::ProjectId,
- worktree_settings_file::Column::WorktreeId,
- worktree_settings_file::Column::Path,
- ])
- .update_column(worktree_settings_file::Column::Content)
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
- } else {
- worktree_settings_file::Entity::delete(worktree_settings_file::ActiveModel {
- project_id: ActiveValue::Set(project_id),
- worktree_id: ActiveValue::Set(update.worktree_id as i64),
- path: ActiveValue::Set(update.path.clone()),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- }
-
- let connection_ids = self.project_guest_connection_ids(project_id, &tx).await?;
- Ok(connection_ids)
- })
- .await
- }
-
- pub async fn join_project(
- &self,
- project_id: ProjectId,
- connection: ConnectionId,
- ) -> Result<RoomGuard<(Project, ReplicaId)>> {
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let participant = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(
- room_participant::Column::AnsweringConnectionId
- .eq(connection.id as i32),
- )
- .add(
- room_participant::Column::AnsweringConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("must join a room first"))?;
-
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
- if project.room_id != participant.room_id {
- return Err(anyhow!("no such project"))?;
- }
-
- let mut collaborators = project
- .find_related(project_collaborator::Entity)
- .all(&*tx)
- .await?;
- let replica_ids = collaborators
- .iter()
- .map(|c| c.replica_id)
- .collect::<HashSet<_>>();
- let mut replica_id = ReplicaId(1);
- while replica_ids.contains(&replica_id) {
- replica_id.0 += 1;
- }
- let new_collaborator = project_collaborator::ActiveModel {
- project_id: ActiveValue::set(project_id),
- connection_id: ActiveValue::set(connection.id as i32),
- connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
- user_id: ActiveValue::set(participant.user_id),
- replica_id: ActiveValue::set(replica_id),
- is_host: ActiveValue::set(false),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
- collaborators.push(new_collaborator);
-
- let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
- let mut worktrees = db_worktrees
- .into_iter()
- .map(|db_worktree| {
- (
- db_worktree.id as u64,
- Worktree {
- id: db_worktree.id as u64,
- abs_path: db_worktree.abs_path,
- root_name: db_worktree.root_name,
- visible: db_worktree.visible,
- entries: Default::default(),
- repository_entries: Default::default(),
- diagnostic_summaries: Default::default(),
- settings_files: Default::default(),
- scan_id: db_worktree.scan_id as u64,
- completed_scan_id: db_worktree.completed_scan_id as u64,
- },
- )
- })
- .collect::<BTreeMap<_, _>>();
-
- // Populate worktree entries.
- {
- let mut db_entries = worktree_entry::Entity::find()
- .filter(
- Condition::all()
- .add(worktree_entry::Column::ProjectId.eq(project_id))
- .add(worktree_entry::Column::IsDeleted.eq(false)),
- )
- .stream(&*tx)
- .await?;
- while let Some(db_entry) = db_entries.next().await {
- let db_entry = db_entry?;
- if let Some(worktree) = worktrees.get_mut(&(db_entry.worktree_id as u64)) {
- worktree.entries.push(proto::Entry {
- id: db_entry.id as u64,
- is_dir: db_entry.is_dir,
- path: db_entry.path,
- inode: db_entry.inode as u64,
- mtime: Some(proto::Timestamp {
- seconds: db_entry.mtime_seconds as u64,
- nanos: db_entry.mtime_nanos as u32,
- }),
- is_symlink: db_entry.is_symlink,
- is_ignored: db_entry.is_ignored,
- is_external: db_entry.is_external,
- git_status: db_entry.git_status.map(|status| status as i32),
- });
- }
- }
- }
-
- // Populate repository entries.
- {
- let mut db_repository_entries = worktree_repository::Entity::find()
- .filter(
- Condition::all()
- .add(worktree_repository::Column::ProjectId.eq(project_id))
- .add(worktree_repository::Column::IsDeleted.eq(false)),
- )
- .stream(&*tx)
- .await?;
- while let Some(db_repository_entry) = db_repository_entries.next().await {
- let db_repository_entry = db_repository_entry?;
- if let Some(worktree) =
- worktrees.get_mut(&(db_repository_entry.worktree_id as u64))
- {
- worktree.repository_entries.insert(
- db_repository_entry.work_directory_id as u64,
- proto::RepositoryEntry {
- work_directory_id: db_repository_entry.work_directory_id as u64,
- branch: db_repository_entry.branch,
- },
- );
- }
- }
- }
-
- // Populate worktree diagnostic summaries.
- {
- let mut db_summaries = worktree_diagnostic_summary::Entity::find()
- .filter(worktree_diagnostic_summary::Column::ProjectId.eq(project_id))
- .stream(&*tx)
- .await?;
- while let Some(db_summary) = db_summaries.next().await {
- let db_summary = db_summary?;
- if let Some(worktree) = worktrees.get_mut(&(db_summary.worktree_id as u64)) {
- worktree
- .diagnostic_summaries
- .push(proto::DiagnosticSummary {
- path: db_summary.path,
- language_server_id: db_summary.language_server_id as u64,
- error_count: db_summary.error_count as u32,
- warning_count: db_summary.warning_count as u32,
- });
- }
- }
- }
-
- // Populate worktree settings files
- {
- let mut db_settings_files = worktree_settings_file::Entity::find()
- .filter(worktree_settings_file::Column::ProjectId.eq(project_id))
- .stream(&*tx)
- .await?;
- while let Some(db_settings_file) = db_settings_files.next().await {
- let db_settings_file = db_settings_file?;
- if let Some(worktree) =
- worktrees.get_mut(&(db_settings_file.worktree_id as u64))
- {
- worktree.settings_files.push(WorktreeSettingsFile {
- path: db_settings_file.path,
- content: db_settings_file.content,
- });
- }
- }
- }
-
- // Populate language servers.
- let language_servers = project
- .find_related(language_server::Entity)
- .all(&*tx)
- .await?;
-
- let project = Project {
- collaborators: collaborators
- .into_iter()
- .map(|collaborator| ProjectCollaborator {
- connection_id: collaborator.connection(),
- user_id: collaborator.user_id,
- replica_id: collaborator.replica_id,
- is_host: collaborator.is_host,
- })
- .collect(),
- worktrees,
- language_servers: language_servers
- .into_iter()
- .map(|language_server| proto::LanguageServer {
- id: language_server.id as u64,
- name: language_server.name,
- })
- .collect(),
- };
- Ok((project, replica_id as ReplicaId))
- })
- .await
- }
-
- pub async fn leave_project(
- &self,
- project_id: ProjectId,
- connection: ConnectionId,
- ) -> Result<RoomGuard<(proto::Room, LeftProject)>> {
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let result = project_collaborator::Entity::delete_many()
- .filter(
- Condition::all()
- .add(project_collaborator::Column::ProjectId.eq(project_id))
- .add(project_collaborator::Column::ConnectionId.eq(connection.id as i32))
- .add(
- project_collaborator::Column::ConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .exec(&*tx)
- .await?;
- if result.rows_affected == 0 {
- Err(anyhow!("not a collaborator on this project"))?;
- }
-
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such project"))?;
- let collaborators = project
- .find_related(project_collaborator::Entity)
- .all(&*tx)
- .await?;
- let connection_ids = collaborators
- .into_iter()
- .map(|collaborator| collaborator.connection())
- .collect();
-
- follower::Entity::delete_many()
- .filter(
- Condition::any()
- .add(
- Condition::all()
- .add(follower::Column::ProjectId.eq(Some(project_id)))
- .add(
- follower::Column::LeaderConnectionServerId
- .eq(connection.owner_id),
- )
- .add(follower::Column::LeaderConnectionId.eq(connection.id)),
- )
- .add(
- Condition::all()
- .add(follower::Column::ProjectId.eq(Some(project_id)))
- .add(
- follower::Column::FollowerConnectionServerId
- .eq(connection.owner_id),
- )
- .add(follower::Column::FollowerConnectionId.eq(connection.id)),
- ),
- )
- .exec(&*tx)
- .await?;
-
- let room = self.get_room(project.room_id, &tx).await?;
- let left_project = LeftProject {
- id: project_id,
- host_user_id: project.host_user_id,
- host_connection_id: project.host_connection()?,
- connection_ids,
- };
- Ok((room, left_project))
- })
- .await
- }
-
- pub async fn project_collaborators(
- &self,
- project_id: ProjectId,
- connection_id: ConnectionId,
- ) -> Result<RoomGuard<Vec<ProjectCollaborator>>> {
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let collaborators = project_collaborator::Entity::find()
- .filter(project_collaborator::Column::ProjectId.eq(project_id))
- .all(&*tx)
- .await?
- .into_iter()
- .map(|collaborator| ProjectCollaborator {
- connection_id: collaborator.connection(),
- user_id: collaborator.user_id,
- replica_id: collaborator.replica_id,
- is_host: collaborator.is_host,
- })
- .collect::<Vec<_>>();
-
- if collaborators
- .iter()
- .any(|collaborator| collaborator.connection_id == connection_id)
- {
- Ok(collaborators)
- } else {
- Err(anyhow!("no such project"))?
- }
- })
- .await
- }
-
- pub async fn project_connection_ids(
- &self,
- project_id: ProjectId,
- connection_id: ConnectionId,
- ) -> Result<RoomGuard<HashSet<ConnectionId>>> {
- let room_id = self.room_id_for_project(project_id).await?;
- self.room_transaction(room_id, |tx| async move {
- let mut collaborators = project_collaborator::Entity::find()
- .filter(project_collaborator::Column::ProjectId.eq(project_id))
- .stream(&*tx)
- .await?;
-
- let mut connection_ids = HashSet::default();
- while let Some(collaborator) = collaborators.next().await {
- let collaborator = collaborator?;
- connection_ids.insert(collaborator.connection());
- }
-
- if connection_ids.contains(&connection_id) {
- Ok(connection_ids)
- } else {
- Err(anyhow!("no such project"))?
- }
- })
- .await
- }
-
- async fn project_guest_connection_ids(
- &self,
- project_id: ProjectId,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<ConnectionId>> {
- let mut collaborators = project_collaborator::Entity::find()
- .filter(
- project_collaborator::Column::ProjectId
- .eq(project_id)
- .and(project_collaborator::Column::IsHost.eq(false)),
- )
- .stream(tx)
- .await?;
-
- let mut guest_connection_ids = Vec::new();
- while let Some(collaborator) = collaborators.next().await {
- let collaborator = collaborator?;
- guest_connection_ids.push(collaborator.connection());
- }
- Ok(guest_connection_ids)
- }
-
- pub async fn room_id_for_project(&self, project_id: ProjectId) -> Result<RoomId> {
- self.transaction(|tx| async move {
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("project {} not found", project_id))?;
- Ok(project.room_id)
- })
- .await
- }
-
- pub async fn check_room_participants(
- &self,
- room_id: RoomId,
- leader_id: ConnectionId,
- follower_id: ConnectionId,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- use room_participant::Column;
-
- let count = room_participant::Entity::find()
- .filter(
- Condition::all().add(Column::RoomId.eq(room_id)).add(
- Condition::any()
- .add(Column::AnsweringConnectionId.eq(leader_id.id as i32).and(
- Column::AnsweringConnectionServerId.eq(leader_id.owner_id as i32),
- ))
- .add(Column::AnsweringConnectionId.eq(follower_id.id as i32).and(
- Column::AnsweringConnectionServerId.eq(follower_id.owner_id as i32),
- )),
- ),
- )
- .count(&*tx)
- .await?;
-
- if count < 2 {
- Err(anyhow!("not room participants"))?;
- }
-
- Ok(())
- })
- .await
- }
-
- pub async fn follow(
- &self,
- room_id: RoomId,
- project_id: ProjectId,
- leader_connection: ConnectionId,
- follower_connection: ConnectionId,
- ) -> Result<RoomGuard<proto::Room>> {
- self.room_transaction(room_id, |tx| async move {
- follower::ActiveModel {
- room_id: ActiveValue::set(room_id),
- project_id: ActiveValue::set(project_id),
- leader_connection_server_id: ActiveValue::set(ServerId(
- leader_connection.owner_id as i32,
- )),
- leader_connection_id: ActiveValue::set(leader_connection.id as i32),
- follower_connection_server_id: ActiveValue::set(ServerId(
- follower_connection.owner_id as i32,
- )),
- follower_connection_id: ActiveValue::set(follower_connection.id as i32),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &*tx).await?;
- Ok(room)
- })
- .await
- }
-
- pub async fn unfollow(
- &self,
- room_id: RoomId,
- project_id: ProjectId,
- leader_connection: ConnectionId,
- follower_connection: ConnectionId,
- ) -> Result<RoomGuard<proto::Room>> {
- self.room_transaction(room_id, |tx| async move {
- follower::Entity::delete_many()
- .filter(
- Condition::all()
- .add(follower::Column::RoomId.eq(room_id))
- .add(follower::Column::ProjectId.eq(project_id))
- .add(
- follower::Column::LeaderConnectionServerId
- .eq(leader_connection.owner_id),
- )
- .add(follower::Column::LeaderConnectionId.eq(leader_connection.id))
- .add(
- follower::Column::FollowerConnectionServerId
- .eq(follower_connection.owner_id),
- )
- .add(follower::Column::FollowerConnectionId.eq(follower_connection.id)),
- )
- .exec(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &*tx).await?;
- Ok(room)
- })
- .await
- }
-}
@@ -1,1203 +0,0 @@
-use super::*;
-
-impl Database {
- pub async fn clear_stale_room_participants(
- &self,
- room_id: RoomId,
- new_server_id: ServerId,
- ) -> Result<RoomGuard<RefreshedRoom>> {
- self.room_transaction(room_id, |tx| async move {
- let stale_participant_filter = Condition::all()
- .add(room_participant::Column::RoomId.eq(room_id))
- .add(room_participant::Column::AnsweringConnectionId.is_not_null())
- .add(room_participant::Column::AnsweringConnectionServerId.ne(new_server_id));
-
- let stale_participant_user_ids = room_participant::Entity::find()
- .filter(stale_participant_filter.clone())
- .all(&*tx)
- .await?
- .into_iter()
- .map(|participant| participant.user_id)
- .collect::<Vec<_>>();
-
- // Delete participants who failed to reconnect and cancel their calls.
- let mut canceled_calls_to_user_ids = Vec::new();
- room_participant::Entity::delete_many()
- .filter(stale_participant_filter)
- .exec(&*tx)
- .await?;
- let called_participants = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(
- room_participant::Column::CallingUserId
- .is_in(stale_participant_user_ids.iter().copied()),
- )
- .add(room_participant::Column::AnsweringConnectionId.is_null()),
- )
- .all(&*tx)
- .await?;
- room_participant::Entity::delete_many()
- .filter(
- room_participant::Column::Id
- .is_in(called_participants.iter().map(|participant| participant.id)),
- )
- .exec(&*tx)
- .await?;
- canceled_calls_to_user_ids.extend(
- called_participants
- .into_iter()
- .map(|participant| participant.user_id),
- );
-
- let (channel, room) = self.get_channel_room(room_id, &tx).await?;
- let channel_members;
- if let Some(channel) = &channel {
- channel_members = self.get_channel_participants(channel, &tx).await?;
- } else {
- channel_members = Vec::new();
-
- // Delete the room if it becomes empty.
- if room.participants.is_empty() {
- project::Entity::delete_many()
- .filter(project::Column::RoomId.eq(room_id))
- .exec(&*tx)
- .await?;
- room::Entity::delete_by_id(room_id).exec(&*tx).await?;
- }
- };
-
- Ok(RefreshedRoom {
- room,
- channel_id: channel.map(|channel| channel.id),
- channel_members,
- stale_participant_user_ids,
- canceled_calls_to_user_ids,
- })
- })
- .await
- }
-
- pub async fn incoming_call_for_user(
- &self,
- user_id: UserId,
- ) -> Result<Option<proto::IncomingCall>> {
- self.transaction(|tx| async move {
- let pending_participant = room_participant::Entity::find()
- .filter(
- room_participant::Column::UserId
- .eq(user_id)
- .and(room_participant::Column::AnsweringConnectionId.is_null()),
- )
- .one(&*tx)
- .await?;
-
- if let Some(pending_participant) = pending_participant {
- let room = self.get_room(pending_participant.room_id, &tx).await?;
- Ok(Self::build_incoming_call(&room, user_id))
- } else {
- Ok(None)
- }
- })
- .await
- }
-
- pub async fn create_room(
- &self,
- user_id: UserId,
- connection: ConnectionId,
- live_kit_room: &str,
- release_channel: &str,
- ) -> Result<proto::Room> {
- self.transaction(|tx| async move {
- let room = room::ActiveModel {
- live_kit_room: ActiveValue::set(live_kit_room.into()),
- enviroment: ActiveValue::set(Some(release_channel.to_string())),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
- room_participant::ActiveModel {
- room_id: ActiveValue::set(room.id),
- user_id: ActiveValue::set(user_id),
- answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
- answering_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- answering_connection_lost: ActiveValue::set(false),
- calling_user_id: ActiveValue::set(user_id),
- calling_connection_id: ActiveValue::set(connection.id as i32),
- calling_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- participant_index: ActiveValue::set(Some(0)),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
-
- let room = self.get_room(room.id, &tx).await?;
- Ok(room)
- })
- .await
- }
-
- pub async fn call(
- &self,
- room_id: RoomId,
- calling_user_id: UserId,
- calling_connection: ConnectionId,
- called_user_id: UserId,
- initial_project_id: Option<ProjectId>,
- ) -> Result<RoomGuard<(proto::Room, proto::IncomingCall)>> {
- self.room_transaction(room_id, |tx| async move {
- room_participant::ActiveModel {
- room_id: ActiveValue::set(room_id),
- user_id: ActiveValue::set(called_user_id),
- answering_connection_lost: ActiveValue::set(false),
- participant_index: ActiveValue::NotSet,
- calling_user_id: ActiveValue::set(calling_user_id),
- calling_connection_id: ActiveValue::set(calling_connection.id as i32),
- calling_connection_server_id: ActiveValue::set(Some(ServerId(
- calling_connection.owner_id as i32,
- ))),
- initial_project_id: ActiveValue::set(initial_project_id),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &tx).await?;
- let incoming_call = Self::build_incoming_call(&room, called_user_id)
- .ok_or_else(|| anyhow!("failed to build incoming call"))?;
- Ok((room, incoming_call))
- })
- .await
- }
-
- pub async fn call_failed(
- &self,
- room_id: RoomId,
- called_user_id: UserId,
- ) -> Result<RoomGuard<proto::Room>> {
- self.room_transaction(room_id, |tx| async move {
- room_participant::Entity::delete_many()
- .filter(
- room_participant::Column::RoomId
- .eq(room_id)
- .and(room_participant::Column::UserId.eq(called_user_id)),
- )
- .exec(&*tx)
- .await?;
- let room = self.get_room(room_id, &tx).await?;
- Ok(room)
- })
- .await
- }
-
- pub async fn decline_call(
- &self,
- expected_room_id: Option<RoomId>,
- user_id: UserId,
- ) -> Result<Option<RoomGuard<proto::Room>>> {
- self.optional_room_transaction(|tx| async move {
- let mut filter = Condition::all()
- .add(room_participant::Column::UserId.eq(user_id))
- .add(room_participant::Column::AnsweringConnectionId.is_null());
- if let Some(room_id) = expected_room_id {
- filter = filter.add(room_participant::Column::RoomId.eq(room_id));
- }
- let participant = room_participant::Entity::find()
- .filter(filter)
- .one(&*tx)
- .await?;
-
- let participant = if let Some(participant) = participant {
- participant
- } else if expected_room_id.is_some() {
- return Err(anyhow!("could not find call to decline"))?;
- } else {
- return Ok(None);
- };
-
- let room_id = participant.room_id;
- room_participant::Entity::delete(participant.into_active_model())
- .exec(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &tx).await?;
- Ok(Some((room_id, room)))
- })
- .await
- }
-
- pub async fn cancel_call(
- &self,
- room_id: RoomId,
- calling_connection: ConnectionId,
- called_user_id: UserId,
- ) -> Result<RoomGuard<proto::Room>> {
- self.room_transaction(room_id, |tx| async move {
- let participant = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(room_participant::Column::UserId.eq(called_user_id))
- .add(room_participant::Column::RoomId.eq(room_id))
- .add(
- room_participant::Column::CallingConnectionId
- .eq(calling_connection.id as i32),
- )
- .add(
- room_participant::Column::CallingConnectionServerId
- .eq(calling_connection.owner_id as i32),
- )
- .add(room_participant::Column::AnsweringConnectionId.is_null()),
- )
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no call to cancel"))?;
-
- room_participant::Entity::delete(participant.into_active_model())
- .exec(&*tx)
- .await?;
-
- let room = self.get_room(room_id, &tx).await?;
- Ok(room)
- })
- .await
- }
-
- pub async fn join_room(
- &self,
- room_id: RoomId,
- user_id: UserId,
- connection: ConnectionId,
- enviroment: &str,
- ) -> Result<RoomGuard<JoinRoom>> {
- self.room_transaction(room_id, |tx| async move {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryChannelIdAndEnviroment {
- ChannelId,
- Enviroment,
- }
-
- let (channel_id, release_channel): (Option<ChannelId>, Option<String>) =
- room::Entity::find()
- .select_only()
- .column(room::Column::ChannelId)
- .column(room::Column::Enviroment)
- .filter(room::Column::Id.eq(room_id))
- .into_values::<_, QueryChannelIdAndEnviroment>()
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("no such room"))?;
-
- if let Some(release_channel) = release_channel {
- if &release_channel != enviroment {
- Err(anyhow!("must join using the {} release", release_channel))?;
- }
- }
-
- if channel_id.is_some() {
- Err(anyhow!("tried to join channel call directly"))?
- }
-
- let participant_index = self
- .get_next_participant_index_internal(room_id, &*tx)
- .await?;
-
- let result = room_participant::Entity::update_many()
- .filter(
- Condition::all()
- .add(room_participant::Column::RoomId.eq(room_id))
- .add(room_participant::Column::UserId.eq(user_id))
- .add(room_participant::Column::AnsweringConnectionId.is_null()),
- )
- .set(room_participant::ActiveModel {
- participant_index: ActiveValue::Set(Some(participant_index)),
- answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
- answering_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- answering_connection_lost: ActiveValue::set(false),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- if result.rows_affected == 0 {
- Err(anyhow!("room does not exist or was already joined"))?;
- }
-
- let room = self.get_room(room_id, &tx).await?;
- Ok(JoinRoom {
- room,
- channel_id: None,
- channel_members: vec![],
- })
- })
- .await
- }
-
- async fn get_next_participant_index_internal(
- &self,
- room_id: RoomId,
- tx: &DatabaseTransaction,
- ) -> Result<i32> {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryParticipantIndices {
- ParticipantIndex,
- }
- let existing_participant_indices: Vec<i32> = room_participant::Entity::find()
- .filter(
- room_participant::Column::RoomId
- .eq(room_id)
- .and(room_participant::Column::ParticipantIndex.is_not_null()),
- )
- .select_only()
- .column(room_participant::Column::ParticipantIndex)
- .into_values::<_, QueryParticipantIndices>()
- .all(&*tx)
- .await?;
-
- let mut participant_index = 0;
- while existing_participant_indices.contains(&participant_index) {
- participant_index += 1;
- }
-
- Ok(participant_index)
- }
-
- pub async fn channel_id_for_room(&self, room_id: RoomId) -> Result<Option<ChannelId>> {
- self.transaction(|tx| async move {
- let room: Option<room::Model> = room::Entity::find()
- .filter(room::Column::Id.eq(room_id))
- .one(&*tx)
- .await?;
-
- Ok(room.and_then(|room| room.channel_id))
- })
- .await
- }
-
- pub(crate) async fn join_channel_room_internal(
- &self,
- room_id: RoomId,
- user_id: UserId,
- connection: ConnectionId,
- tx: &DatabaseTransaction,
- ) -> Result<JoinRoom> {
- let participant_index = self
- .get_next_participant_index_internal(room_id, &*tx)
- .await?;
-
- room_participant::Entity::insert_many([room_participant::ActiveModel {
- room_id: ActiveValue::set(room_id),
- user_id: ActiveValue::set(user_id),
- answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
- answering_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- answering_connection_lost: ActiveValue::set(false),
- calling_user_id: ActiveValue::set(user_id),
- calling_connection_id: ActiveValue::set(connection.id as i32),
- calling_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- participant_index: ActiveValue::Set(Some(participant_index)),
- ..Default::default()
- }])
- .on_conflict(
- OnConflict::columns([room_participant::Column::UserId])
- .update_columns([
- room_participant::Column::AnsweringConnectionId,
- room_participant::Column::AnsweringConnectionServerId,
- room_participant::Column::AnsweringConnectionLost,
- room_participant::Column::ParticipantIndex,
- ])
- .to_owned(),
- )
- .exec(&*tx)
- .await?;
-
- let (channel, room) = self.get_channel_room(room_id, &tx).await?;
- let channel = channel.ok_or_else(|| anyhow!("no channel for room"))?;
- let channel_members = self.get_channel_participants(&channel, &*tx).await?;
- Ok(JoinRoom {
- room,
- channel_id: Some(channel.id),
- channel_members,
- })
- }
-
- pub async fn rejoin_room(
- &self,
- rejoin_room: proto::RejoinRoom,
- user_id: UserId,
- connection: ConnectionId,
- ) -> Result<RoomGuard<RejoinedRoom>> {
- let room_id = RoomId::from_proto(rejoin_room.id);
- self.room_transaction(room_id, |tx| async {
- let tx = tx;
- let participant_update = room_participant::Entity::update_many()
- .filter(
- Condition::all()
- .add(room_participant::Column::RoomId.eq(room_id))
- .add(room_participant::Column::UserId.eq(user_id))
- .add(room_participant::Column::AnsweringConnectionId.is_not_null())
- .add(
- Condition::any()
- .add(room_participant::Column::AnsweringConnectionLost.eq(true))
- .add(
- room_participant::Column::AnsweringConnectionServerId
- .ne(connection.owner_id as i32),
- ),
- ),
- )
- .set(room_participant::ActiveModel {
- answering_connection_id: ActiveValue::set(Some(connection.id as i32)),
- answering_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- answering_connection_lost: ActiveValue::set(false),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- if participant_update.rows_affected == 0 {
- return Err(anyhow!("room does not exist or was already joined"))?;
- }
-
- let mut reshared_projects = Vec::new();
- for reshared_project in &rejoin_room.reshared_projects {
- let project_id = ProjectId::from_proto(reshared_project.project_id);
- let project = project::Entity::find_by_id(project_id)
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("project does not exist"))?;
- if project.host_user_id != user_id {
- return Err(anyhow!("no such project"))?;
- }
-
- let mut collaborators = project
- .find_related(project_collaborator::Entity)
- .all(&*tx)
- .await?;
- let host_ix = collaborators
- .iter()
- .position(|collaborator| {
- collaborator.user_id == user_id && collaborator.is_host
- })
- .ok_or_else(|| anyhow!("host not found among collaborators"))?;
- let host = collaborators.swap_remove(host_ix);
- let old_connection_id = host.connection();
-
- project::Entity::update(project::ActiveModel {
- host_connection_id: ActiveValue::set(Some(connection.id as i32)),
- host_connection_server_id: ActiveValue::set(Some(ServerId(
- connection.owner_id as i32,
- ))),
- ..project.into_active_model()
- })
- .exec(&*tx)
- .await?;
- project_collaborator::Entity::update(project_collaborator::ActiveModel {
- connection_id: ActiveValue::set(connection.id as i32),
- connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
- ..host.into_active_model()
- })
- .exec(&*tx)
- .await?;
-
- self.update_project_worktrees(project_id, &reshared_project.worktrees, &tx)
- .await?;
-
- reshared_projects.push(ResharedProject {
- id: project_id,
- old_connection_id,
- collaborators: collaborators
- .iter()
- .map(|collaborator| ProjectCollaborator {
- connection_id: collaborator.connection(),
- user_id: collaborator.user_id,
- replica_id: collaborator.replica_id,
- is_host: collaborator.is_host,
- })
- .collect(),
- worktrees: reshared_project.worktrees.clone(),
- });
- }
-
- project::Entity::delete_many()
- .filter(
- Condition::all()
- .add(project::Column::RoomId.eq(room_id))
- .add(project::Column::HostUserId.eq(user_id))
- .add(
- project::Column::Id
- .is_not_in(reshared_projects.iter().map(|project| project.id)),
- ),
- )
- .exec(&*tx)
- .await?;
-
- let mut rejoined_projects = Vec::new();
- for rejoined_project in &rejoin_room.rejoined_projects {
- let project_id = ProjectId::from_proto(rejoined_project.id);
- let Some(project) = project::Entity::find_by_id(project_id).one(&*tx).await? else {
- continue;
- };
-
- let mut worktrees = Vec::new();
- let db_worktrees = project.find_related(worktree::Entity).all(&*tx).await?;
- for db_worktree in db_worktrees {
- let mut worktree = RejoinedWorktree {
- id: db_worktree.id as u64,
- abs_path: db_worktree.abs_path,
- root_name: db_worktree.root_name,
- visible: db_worktree.visible,
- updated_entries: Default::default(),
- removed_entries: Default::default(),
- updated_repositories: Default::default(),
- removed_repositories: Default::default(),
- diagnostic_summaries: Default::default(),
- settings_files: Default::default(),
- scan_id: db_worktree.scan_id as u64,
- completed_scan_id: db_worktree.completed_scan_id as u64,
- };
-
- let rejoined_worktree = rejoined_project
- .worktrees
- .iter()
- .find(|worktree| worktree.id == db_worktree.id as u64);
-
- // File entries
- {
- let entry_filter = if let Some(rejoined_worktree) = rejoined_worktree {
- worktree_entry::Column::ScanId.gt(rejoined_worktree.scan_id)
- } else {
- worktree_entry::Column::IsDeleted.eq(false)
- };
-
- let mut db_entries = worktree_entry::Entity::find()
- .filter(
- Condition::all()
- .add(worktree_entry::Column::ProjectId.eq(project.id))
- .add(worktree_entry::Column::WorktreeId.eq(worktree.id))
- .add(entry_filter),
- )
- .stream(&*tx)
- .await?;
-
- while let Some(db_entry) = db_entries.next().await {
- let db_entry = db_entry?;
- if db_entry.is_deleted {
- worktree.removed_entries.push(db_entry.id as u64);
- } else {
- worktree.updated_entries.push(proto::Entry {
- id: db_entry.id as u64,
- is_dir: db_entry.is_dir,
- path: db_entry.path,
- inode: db_entry.inode as u64,
- mtime: Some(proto::Timestamp {
- seconds: db_entry.mtime_seconds as u64,
- nanos: db_entry.mtime_nanos as u32,
- }),
- is_symlink: db_entry.is_symlink,
- is_ignored: db_entry.is_ignored,
- is_external: db_entry.is_external,
- git_status: db_entry.git_status.map(|status| status as i32),
- });
- }
- }
- }
-
- // Repository Entries
- {
- let repository_entry_filter =
- if let Some(rejoined_worktree) = rejoined_worktree {
- worktree_repository::Column::ScanId.gt(rejoined_worktree.scan_id)
- } else {
- worktree_repository::Column::IsDeleted.eq(false)
- };
-
- let mut db_repositories = worktree_repository::Entity::find()
- .filter(
- Condition::all()
- .add(worktree_repository::Column::ProjectId.eq(project.id))
- .add(worktree_repository::Column::WorktreeId.eq(worktree.id))
- .add(repository_entry_filter),
- )
- .stream(&*tx)
- .await?;
-
- while let Some(db_repository) = db_repositories.next().await {
- let db_repository = db_repository?;
- if db_repository.is_deleted {
- worktree
- .removed_repositories
- .push(db_repository.work_directory_id as u64);
- } else {
- worktree.updated_repositories.push(proto::RepositoryEntry {
- work_directory_id: db_repository.work_directory_id as u64,
- branch: db_repository.branch,
- });
- }
- }
- }
-
- worktrees.push(worktree);
- }
-
- let language_servers = project
- .find_related(language_server::Entity)
- .all(&*tx)
- .await?
- .into_iter()
- .map(|language_server| proto::LanguageServer {
- id: language_server.id as u64,
- name: language_server.name,
- })
- .collect::<Vec<_>>();
-
- {
- let mut db_settings_files = worktree_settings_file::Entity::find()
- .filter(worktree_settings_file::Column::ProjectId.eq(project_id))
- .stream(&*tx)
- .await?;
- while let Some(db_settings_file) = db_settings_files.next().await {
- let db_settings_file = db_settings_file?;
- if let Some(worktree) = worktrees
- .iter_mut()
- .find(|w| w.id == db_settings_file.worktree_id as u64)
- {
- worktree.settings_files.push(WorktreeSettingsFile {
- path: db_settings_file.path,
- content: db_settings_file.content,
- });
- }
- }
- }
-
- let mut collaborators = project
- .find_related(project_collaborator::Entity)
- .all(&*tx)
- .await?;
- let self_collaborator = if let Some(self_collaborator_ix) = collaborators
- .iter()
- .position(|collaborator| collaborator.user_id == user_id)
- {
- collaborators.swap_remove(self_collaborator_ix)
- } else {
- continue;
- };
- let old_connection_id = self_collaborator.connection();
- project_collaborator::Entity::update(project_collaborator::ActiveModel {
- connection_id: ActiveValue::set(connection.id as i32),
- connection_server_id: ActiveValue::set(ServerId(connection.owner_id as i32)),
- ..self_collaborator.into_active_model()
- })
- .exec(&*tx)
- .await?;
-
- let collaborators = collaborators
- .into_iter()
- .map(|collaborator| ProjectCollaborator {
- connection_id: collaborator.connection(),
- user_id: collaborator.user_id,
- replica_id: collaborator.replica_id,
- is_host: collaborator.is_host,
- })
- .collect::<Vec<_>>();
-
- rejoined_projects.push(RejoinedProject {
- id: project_id,
- old_connection_id,
- collaborators,
- worktrees,
- language_servers,
- });
- }
-
- let (channel, room) = self.get_channel_room(room_id, &tx).await?;
- let channel_members = if let Some(channel) = &channel {
- self.get_channel_participants(&channel, &tx).await?
- } else {
- Vec::new()
- };
-
- Ok(RejoinedRoom {
- room,
- channel_id: channel.map(|channel| channel.id),
- channel_members,
- rejoined_projects,
- reshared_projects,
- })
- })
- .await
- }
-
- pub async fn leave_room(
- &self,
- connection: ConnectionId,
- ) -> Result<Option<RoomGuard<LeftRoom>>> {
- self.optional_room_transaction(|tx| async move {
- let leaving_participant = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(
- room_participant::Column::AnsweringConnectionId
- .eq(connection.id as i32),
- )
- .add(
- room_participant::Column::AnsweringConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .one(&*tx)
- .await?;
-
- if let Some(leaving_participant) = leaving_participant {
- // Leave room.
- let room_id = leaving_participant.room_id;
- room_participant::Entity::delete_by_id(leaving_participant.id)
- .exec(&*tx)
- .await?;
-
- // Cancel pending calls initiated by the leaving user.
- let called_participants = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(
- room_participant::Column::CallingUserId
- .eq(leaving_participant.user_id),
- )
- .add(room_participant::Column::AnsweringConnectionId.is_null()),
- )
- .all(&*tx)
- .await?;
- room_participant::Entity::delete_many()
- .filter(
- room_participant::Column::Id
- .is_in(called_participants.iter().map(|participant| participant.id)),
- )
- .exec(&*tx)
- .await?;
- let canceled_calls_to_user_ids = called_participants
- .into_iter()
- .map(|participant| participant.user_id)
- .collect();
-
- // Detect left projects.
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryProjectIds {
- ProjectId,
- }
- let project_ids: Vec<ProjectId> = project_collaborator::Entity::find()
- .select_only()
- .column_as(
- project_collaborator::Column::ProjectId,
- QueryProjectIds::ProjectId,
- )
- .filter(
- Condition::all()
- .add(
- project_collaborator::Column::ConnectionId.eq(connection.id as i32),
- )
- .add(
- project_collaborator::Column::ConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .into_values::<_, QueryProjectIds>()
- .all(&*tx)
- .await?;
- let mut left_projects = HashMap::default();
- let mut collaborators = project_collaborator::Entity::find()
- .filter(project_collaborator::Column::ProjectId.is_in(project_ids))
- .stream(&*tx)
- .await?;
- while let Some(collaborator) = collaborators.next().await {
- let collaborator = collaborator?;
- let left_project =
- left_projects
- .entry(collaborator.project_id)
- .or_insert(LeftProject {
- id: collaborator.project_id,
- host_user_id: Default::default(),
- connection_ids: Default::default(),
- host_connection_id: Default::default(),
- });
-
- let collaborator_connection_id = collaborator.connection();
- if collaborator_connection_id != connection {
- left_project.connection_ids.push(collaborator_connection_id);
- }
-
- if collaborator.is_host {
- left_project.host_user_id = collaborator.user_id;
- left_project.host_connection_id = collaborator_connection_id;
- }
- }
- drop(collaborators);
-
- // Leave projects.
- project_collaborator::Entity::delete_many()
- .filter(
- Condition::all()
- .add(
- project_collaborator::Column::ConnectionId.eq(connection.id as i32),
- )
- .add(
- project_collaborator::Column::ConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .exec(&*tx)
- .await?;
-
- // Unshare projects.
- project::Entity::delete_many()
- .filter(
- Condition::all()
- .add(project::Column::RoomId.eq(room_id))
- .add(project::Column::HostConnectionId.eq(connection.id as i32))
- .add(
- project::Column::HostConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .exec(&*tx)
- .await?;
-
- let (channel, room) = self.get_channel_room(room_id, &tx).await?;
- let deleted = if room.participants.is_empty() {
- let result = room::Entity::delete_by_id(room_id).exec(&*tx).await?;
- result.rows_affected > 0
- } else {
- false
- };
-
- let channel_members = if let Some(channel) = &channel {
- self.get_channel_participants(channel, &tx).await?
- } else {
- Vec::new()
- };
- let left_room = LeftRoom {
- room,
- channel_id: channel.map(|channel| channel.id),
- channel_members,
- left_projects,
- canceled_calls_to_user_ids,
- deleted,
- };
-
- if left_room.room.participants.is_empty() {
- self.rooms.remove(&room_id);
- }
-
- Ok(Some((room_id, left_room)))
- } else {
- Ok(None)
- }
- })
- .await
- }
-
- pub async fn update_room_participant_location(
- &self,
- room_id: RoomId,
- connection: ConnectionId,
- location: proto::ParticipantLocation,
- ) -> Result<RoomGuard<proto::Room>> {
- self.room_transaction(room_id, |tx| async {
- let tx = tx;
- let location_kind;
- let location_project_id;
- match location
- .variant
- .as_ref()
- .ok_or_else(|| anyhow!("invalid location"))?
- {
- proto::participant_location::Variant::SharedProject(project) => {
- location_kind = 0;
- location_project_id = Some(ProjectId::from_proto(project.id));
- }
- proto::participant_location::Variant::UnsharedProject(_) => {
- location_kind = 1;
- location_project_id = None;
- }
- proto::participant_location::Variant::External(_) => {
- location_kind = 2;
- location_project_id = None;
- }
- }
-
- let result = room_participant::Entity::update_many()
- .filter(
- Condition::all()
- .add(room_participant::Column::RoomId.eq(room_id))
- .add(
- room_participant::Column::AnsweringConnectionId
- .eq(connection.id as i32),
- )
- .add(
- room_participant::Column::AnsweringConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .set(room_participant::ActiveModel {
- location_kind: ActiveValue::set(Some(location_kind)),
- location_project_id: ActiveValue::set(location_project_id),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
-
- if result.rows_affected == 1 {
- let room = self.get_room(room_id, &tx).await?;
- Ok(room)
- } else {
- Err(anyhow!("could not update room participant location"))?
- }
- })
- .await
- }
-
- pub async fn connection_lost(&self, connection: ConnectionId) -> Result<()> {
- self.transaction(|tx| async move {
- self.room_connection_lost(connection, &*tx).await?;
- self.channel_buffer_connection_lost(connection, &*tx)
- .await?;
- self.channel_chat_connection_lost(connection, &*tx).await?;
- Ok(())
- })
- .await
- }
-
- pub async fn room_connection_lost(
- &self,
- connection: ConnectionId,
- tx: &DatabaseTransaction,
- ) -> Result<()> {
- let participant = room_participant::Entity::find()
- .filter(
- Condition::all()
- .add(room_participant::Column::AnsweringConnectionId.eq(connection.id as i32))
- .add(
- room_participant::Column::AnsweringConnectionServerId
- .eq(connection.owner_id as i32),
- ),
- )
- .one(&*tx)
- .await?;
-
- if let Some(participant) = participant {
- room_participant::Entity::update(room_participant::ActiveModel {
- answering_connection_lost: ActiveValue::set(true),
- ..participant.into_active_model()
- })
- .exec(&*tx)
- .await?;
- }
- Ok(())
- }
-
- fn build_incoming_call(
- room: &proto::Room,
- called_user_id: UserId,
- ) -> Option<proto::IncomingCall> {
- let pending_participant = room
- .pending_participants
- .iter()
- .find(|participant| participant.user_id == called_user_id.to_proto())?;
-
- Some(proto::IncomingCall {
- room_id: room.id,
- calling_user_id: pending_participant.calling_user_id,
- participant_user_ids: room
- .participants
- .iter()
- .map(|participant| participant.user_id)
- .collect(),
- initial_project: room.participants.iter().find_map(|participant| {
- let initial_project_id = pending_participant.initial_project_id?;
- participant
- .projects
- .iter()
- .find(|project| project.id == initial_project_id)
- .cloned()
- }),
- })
- }
-
- pub async fn get_room(&self, room_id: RoomId, tx: &DatabaseTransaction) -> Result<proto::Room> {
- let (_, room) = self.get_channel_room(room_id, tx).await?;
- Ok(room)
- }
-
- pub async fn room_connection_ids(
- &self,
- room_id: RoomId,
- connection_id: ConnectionId,
- ) -> Result<RoomGuard<HashSet<ConnectionId>>> {
- self.room_transaction(room_id, |tx| async move {
- let mut participants = room_participant::Entity::find()
- .filter(room_participant::Column::RoomId.eq(room_id))
- .stream(&*tx)
- .await?;
-
- let mut is_participant = false;
- let mut connection_ids = HashSet::default();
- while let Some(participant) = participants.next().await {
- let participant = participant?;
- if let Some(answering_connection) = participant.answering_connection() {
- if answering_connection == connection_id {
- is_participant = true;
- } else {
- connection_ids.insert(answering_connection);
- }
- }
- }
-
- if !is_participant {
- Err(anyhow!("not a room participant"))?;
- }
-
- Ok(connection_ids)
- })
- .await
- }
-
- async fn get_channel_room(
- &self,
- room_id: RoomId,
- tx: &DatabaseTransaction,
- ) -> Result<(Option<channel::Model>, proto::Room)> {
- let db_room = room::Entity::find_by_id(room_id)
- .one(tx)
- .await?
- .ok_or_else(|| anyhow!("could not find room"))?;
-
- let mut db_participants = db_room
- .find_related(room_participant::Entity)
- .stream(tx)
- .await?;
- let mut participants = HashMap::default();
- let mut pending_participants = Vec::new();
- while let Some(db_participant) = db_participants.next().await {
- let db_participant = db_participant?;
- if let (
- Some(answering_connection_id),
- Some(answering_connection_server_id),
- Some(participant_index),
- ) = (
- db_participant.answering_connection_id,
- db_participant.answering_connection_server_id,
- db_participant.participant_index,
- ) {
- let location = match (
- db_participant.location_kind,
- db_participant.location_project_id,
- ) {
- (Some(0), Some(project_id)) => {
- Some(proto::participant_location::Variant::SharedProject(
- proto::participant_location::SharedProject {
- id: project_id.to_proto(),
- },
- ))
- }
- (Some(1), _) => Some(proto::participant_location::Variant::UnsharedProject(
- Default::default(),
- )),
- _ => Some(proto::participant_location::Variant::External(
- Default::default(),
- )),
- };
-
- let answering_connection = ConnectionId {
- owner_id: answering_connection_server_id.0 as u32,
- id: answering_connection_id as u32,
- };
- participants.insert(
- answering_connection,
- proto::Participant {
- user_id: db_participant.user_id.to_proto(),
- peer_id: Some(answering_connection.into()),
- projects: Default::default(),
- location: Some(proto::ParticipantLocation { variant: location }),
- participant_index: participant_index as u32,
- },
- );
- } else {
- pending_participants.push(proto::PendingParticipant {
- user_id: db_participant.user_id.to_proto(),
- calling_user_id: db_participant.calling_user_id.to_proto(),
- initial_project_id: db_participant.initial_project_id.map(|id| id.to_proto()),
- });
- }
- }
- drop(db_participants);
-
- let mut db_projects = db_room
- .find_related(project::Entity)
- .find_with_related(worktree::Entity)
- .stream(tx)
- .await?;
-
- while let Some(row) = db_projects.next().await {
- let (db_project, db_worktree) = row?;
- let host_connection = db_project.host_connection()?;
- if let Some(participant) = participants.get_mut(&host_connection) {
- let project = if let Some(project) = participant
- .projects
- .iter_mut()
- .find(|project| project.id == db_project.id.to_proto())
- {
- project
- } else {
- participant.projects.push(proto::ParticipantProject {
- id: db_project.id.to_proto(),
- worktree_root_names: Default::default(),
- });
- participant.projects.last_mut().unwrap()
- };
-
- if let Some(db_worktree) = db_worktree {
- if db_worktree.visible {
- project.worktree_root_names.push(db_worktree.root_name);
- }
- }
- }
- }
- drop(db_projects);
-
- let mut db_followers = db_room.find_related(follower::Entity).stream(tx).await?;
- let mut followers = Vec::new();
- while let Some(db_follower) = db_followers.next().await {
- let db_follower = db_follower?;
- followers.push(proto::Follower {
- leader_id: Some(db_follower.leader_connection().into()),
- follower_id: Some(db_follower.follower_connection().into()),
- project_id: db_follower.project_id.to_proto(),
- });
- }
- drop(db_followers);
-
- let channel = if let Some(channel_id) = db_room.channel_id {
- Some(self.get_channel_internal(channel_id, &*tx).await?)
- } else {
- None
- };
-
- Ok((
- channel,
- proto::Room {
- id: db_room.id.to_proto(),
- live_kit_room: db_room.live_kit_room,
- participants: participants.into_values().collect(),
- pending_participants,
- followers,
- },
- ))
- }
-}
@@ -1,99 +0,0 @@
-use super::*;
-
-impl Database {
- pub async fn create_server(&self, environment: &str) -> Result<ServerId> {
- self.transaction(|tx| async move {
- let server = server::ActiveModel {
- environment: ActiveValue::set(environment.into()),
- ..Default::default()
- }
- .insert(&*tx)
- .await?;
- Ok(server.id)
- })
- .await
- }
-
- pub async fn stale_server_resource_ids(
- &self,
- environment: &str,
- new_server_id: ServerId,
- ) -> Result<(Vec<RoomId>, Vec<ChannelId>)> {
- self.transaction(|tx| async move {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryRoomIds {
- RoomId,
- }
-
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryChannelIds {
- ChannelId,
- }
-
- let stale_server_epochs = self
- .stale_server_ids(environment, new_server_id, &tx)
- .await?;
- let room_ids = room_participant::Entity::find()
- .select_only()
- .column(room_participant::Column::RoomId)
- .distinct()
- .filter(
- room_participant::Column::AnsweringConnectionServerId
- .is_in(stale_server_epochs.iter().copied()),
- )
- .into_values::<_, QueryRoomIds>()
- .all(&*tx)
- .await?;
- let channel_ids = channel_buffer_collaborator::Entity::find()
- .select_only()
- .column(channel_buffer_collaborator::Column::ChannelId)
- .distinct()
- .filter(
- channel_buffer_collaborator::Column::ConnectionServerId
- .is_in(stale_server_epochs.iter().copied()),
- )
- .into_values::<_, QueryChannelIds>()
- .all(&*tx)
- .await?;
-
- Ok((room_ids, channel_ids))
- })
- .await
- }
-
- pub async fn delete_stale_servers(
- &self,
- environment: &str,
- new_server_id: ServerId,
- ) -> Result<()> {
- self.transaction(|tx| async move {
- server::Entity::delete_many()
- .filter(
- Condition::all()
- .add(server::Column::Environment.eq(environment))
- .add(server::Column::Id.ne(new_server_id)),
- )
- .exec(&*tx)
- .await?;
- Ok(())
- })
- .await
- }
-
- async fn stale_server_ids(
- &self,
- environment: &str,
- new_server_id: ServerId,
- tx: &DatabaseTransaction,
- ) -> Result<Vec<ServerId>> {
- let stale_servers = server::Entity::find()
- .filter(
- Condition::all()
- .add(server::Column::Environment.eq(environment))
- .add(server::Column::Id.ne(new_server_id)),
- )
- .all(&*tx)
- .await?;
- Ok(stale_servers.into_iter().map(|server| server.id).collect())
- }
-}
@@ -1,259 +0,0 @@
-use super::*;
-
-impl Database {
- pub async fn create_user(
- &self,
- email_address: &str,
- admin: bool,
- params: NewUserParams,
- ) -> Result<NewUserResult> {
- self.transaction(|tx| async {
- let tx = tx;
- let user = user::Entity::insert(user::ActiveModel {
- email_address: ActiveValue::set(Some(email_address.into())),
- github_login: ActiveValue::set(params.github_login.clone()),
- github_user_id: ActiveValue::set(Some(params.github_user_id)),
- admin: ActiveValue::set(admin),
- metrics_id: ActiveValue::set(Uuid::new_v4()),
- ..Default::default()
- })
- .on_conflict(
- OnConflict::column(user::Column::GithubLogin)
- .update_column(user::Column::GithubLogin)
- .to_owned(),
- )
- .exec_with_returning(&*tx)
- .await?;
-
- Ok(NewUserResult {
- user_id: user.id,
- metrics_id: user.metrics_id.to_string(),
- signup_device_id: None,
- inviting_user_id: None,
- })
- })
- .await
- }
-
- pub async fn get_user_by_id(&self, id: UserId) -> Result<Option<user::Model>> {
- self.transaction(|tx| async move { Ok(user::Entity::find_by_id(id).one(&*tx).await?) })
- .await
- }
-
- pub async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<user::Model>> {
- self.transaction(|tx| async {
- let tx = tx;
- Ok(user::Entity::find()
- .filter(user::Column::Id.is_in(ids.iter().copied()))
- .all(&*tx)
- .await?)
- })
- .await
- }
-
- pub async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .filter(user::Column::GithubLogin.eq(github_login))
- .one(&*tx)
- .await?)
- })
- .await
- }
-
- pub async fn get_or_create_user_by_github_account(
- &self,
- github_login: &str,
- github_user_id: Option<i32>,
- github_email: Option<&str>,
- ) -> Result<Option<User>> {
- self.transaction(|tx| async move {
- let tx = &*tx;
- if let Some(github_user_id) = github_user_id {
- if let Some(user_by_github_user_id) = user::Entity::find()
- .filter(user::Column::GithubUserId.eq(github_user_id))
- .one(tx)
- .await?
- {
- let mut user_by_github_user_id = user_by_github_user_id.into_active_model();
- user_by_github_user_id.github_login = ActiveValue::set(github_login.into());
- Ok(Some(user_by_github_user_id.update(tx).await?))
- } else if let Some(user_by_github_login) = user::Entity::find()
- .filter(user::Column::GithubLogin.eq(github_login))
- .one(tx)
- .await?
- {
- let mut user_by_github_login = user_by_github_login.into_active_model();
- user_by_github_login.github_user_id = ActiveValue::set(Some(github_user_id));
- Ok(Some(user_by_github_login.update(tx).await?))
- } else {
- let user = user::Entity::insert(user::ActiveModel {
- email_address: ActiveValue::set(github_email.map(|email| email.into())),
- github_login: ActiveValue::set(github_login.into()),
- github_user_id: ActiveValue::set(Some(github_user_id)),
- admin: ActiveValue::set(false),
- invite_count: ActiveValue::set(0),
- invite_code: ActiveValue::set(None),
- metrics_id: ActiveValue::set(Uuid::new_v4()),
- ..Default::default()
- })
- .exec_with_returning(&*tx)
- .await?;
- Ok(Some(user))
- }
- } else {
- Ok(user::Entity::find()
- .filter(user::Column::GithubLogin.eq(github_login))
- .one(tx)
- .await?)
- }
- })
- .await
- }
-
- pub async fn get_all_users(&self, page: u32, limit: u32) -> Result<Vec<User>> {
- self.transaction(|tx| async move {
- Ok(user::Entity::find()
- .order_by_asc(user::Column::GithubLogin)
- .limit(limit as u64)
- .offset(page as u64 * limit as u64)
- .all(&*tx)
- .await?)
- })
- .await
- }
-
- pub async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryAs {
- MetricsId,
- }
-
- self.transaction(|tx| async move {
- let metrics_id: Uuid = user::Entity::find_by_id(id)
- .select_only()
- .column(user::Column::MetricsId)
- .into_values::<_, QueryAs>()
- .one(&*tx)
- .await?
- .ok_or_else(|| anyhow!("could not find user"))?;
- Ok(metrics_id.to_string())
- })
- .await
- }
-
- pub async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()> {
- self.transaction(|tx| async move {
- user::Entity::update_many()
- .filter(user::Column::Id.eq(id))
- .set(user::ActiveModel {
- connected_once: ActiveValue::set(connected_once),
- ..Default::default()
- })
- .exec(&*tx)
- .await?;
- Ok(())
- })
- .await
- }
-
- pub async fn destroy_user(&self, id: UserId) -> Result<()> {
- self.transaction(|tx| async move {
- access_token::Entity::delete_many()
- .filter(access_token::Column::UserId.eq(id))
- .exec(&*tx)
- .await?;
- user::Entity::delete_by_id(id).exec(&*tx).await?;
- Ok(())
- })
- .await
- }
-
- pub async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
- self.transaction(|tx| async {
- let tx = tx;
- let like_string = Self::fuzzy_like_string(name_query);
- let query = "
- SELECT users.*
- FROM users
- WHERE github_login ILIKE $1
- ORDER BY github_login <-> $2
- LIMIT $3
- ";
-
- Ok(user::Entity::find()
- .from_raw_sql(Statement::from_sql_and_values(
- self.pool.get_database_backend(),
- query,
- vec![like_string.into(), name_query.into(), limit.into()],
- ))
- .all(&*tx)
- .await?)
- })
- .await
- }
-
- pub fn fuzzy_like_string(string: &str) -> String {
- let mut result = String::with_capacity(string.len() * 2 + 1);
- for c in string.chars() {
- if c.is_alphanumeric() {
- result.push('%');
- result.push(c);
- }
- }
- result.push('%');
- result
- }
-
- pub async fn create_user_flag(&self, flag: &str) -> Result<FlagId> {
- self.transaction(|tx| async move {
- let flag = feature_flag::Entity::insert(feature_flag::ActiveModel {
- flag: ActiveValue::set(flag.to_string()),
- ..Default::default()
- })
- .exec(&*tx)
- .await?
- .last_insert_id;
-
- Ok(flag)
- })
- .await
- }
-
- pub async fn add_user_flag(&self, user: UserId, flag: FlagId) -> Result<()> {
- self.transaction(|tx| async move {
- user_feature::Entity::insert(user_feature::ActiveModel {
- user_id: ActiveValue::set(user),
- feature_id: ActiveValue::set(flag),
- })
- .exec(&*tx)
- .await?;
-
- Ok(())
- })
- .await
- }
-
- pub async fn get_user_flags(&self, user: UserId) -> Result<Vec<String>> {
- self.transaction(|tx| async move {
- #[derive(Copy, Clone, Debug, EnumIter, DeriveColumn)]
- enum QueryAs {
- Flag,
- }
-
- let flags = user::Model {
- id: user,
- ..Default::default()
- }
- .find_linked(user::UserFlags)
- .select_only()
- .column(feature_flag::Column::Flag)
- .into_values::<_, QueryAs>()
- .all(&*tx)
- .await?;
-
- Ok(flags)
- })
- .await
- }
-}
@@ -1,32 +0,0 @@
-pub mod access_token;
-pub mod buffer;
-pub mod buffer_operation;
-pub mod buffer_snapshot;
-pub mod channel;
-pub mod channel_buffer_collaborator;
-pub mod channel_chat_participant;
-pub mod channel_member;
-pub mod channel_message;
-pub mod channel_message_mention;
-pub mod contact;
-pub mod feature_flag;
-pub mod follower;
-pub mod language_server;
-pub mod notification;
-pub mod notification_kind;
-pub mod observed_buffer_edits;
-pub mod observed_channel_messages;
-pub mod project;
-pub mod project_collaborator;
-pub mod room;
-pub mod room_participant;
-pub mod server;
-pub mod signup;
-pub mod user;
-pub mod user_feature;
-pub mod worktree;
-pub mod worktree_diagnostic_summary;
-pub mod worktree_entry;
-pub mod worktree_repository;
-pub mod worktree_repository_statuses;
-pub mod worktree_settings_file;
@@ -1,29 +0,0 @@
-use crate::db::{AccessTokenId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "access_tokens")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: AccessTokenId,
- pub user_id: UserId,
- pub hash: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,45 +0,0 @@
-use crate::db::{BufferId, ChannelId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "buffers")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: BufferId,
- pub epoch: i32,
- pub channel_id: ChannelId,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_many = "super::buffer_operation::Entity")]
- Operations,
- #[sea_orm(has_many = "super::buffer_snapshot::Entity")]
- Snapshots,
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
-}
-
-impl Related<super::buffer_operation::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Operations.def()
- }
-}
-
-impl Related<super::buffer_snapshot::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Snapshots.def()
- }
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,34 +0,0 @@
-use crate::db::BufferId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "buffer_operations")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub buffer_id: BufferId,
- #[sea_orm(primary_key)]
- pub epoch: i32,
- #[sea_orm(primary_key)]
- pub lamport_timestamp: i32,
- #[sea_orm(primary_key)]
- pub replica_id: i32,
- pub value: Vec<u8>,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::buffer::Entity",
- from = "Column::BufferId",
- to = "super::buffer::Column::Id"
- )]
- Buffer,
-}
-
-impl Related<super::buffer::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Buffer.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,31 +0,0 @@
-use crate::db::BufferId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "buffer_snapshots")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub buffer_id: BufferId,
- #[sea_orm(primary_key)]
- pub epoch: i32,
- pub text: String,
- pub operation_serialization_version: i32,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::buffer::Entity",
- from = "Column::BufferId",
- to = "super::buffer::Column::Id"
- )]
- Buffer,
-}
-
-impl Related<super::buffer::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Buffer.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,79 +0,0 @@
-use crate::db::{ChannelId, ChannelVisibility};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channels")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ChannelId,
- pub name: String,
- pub visibility: ChannelVisibility,
- pub parent_path: String,
-}
-
-impl Model {
- pub fn parent_id(&self) -> Option<ChannelId> {
- self.ancestors().last()
- }
-
- pub fn ancestors(&self) -> impl Iterator<Item = ChannelId> + '_ {
- self.parent_path
- .trim_end_matches('/')
- .split('/')
- .filter_map(|id| Some(ChannelId::from_proto(id.parse().ok()?)))
- }
-
- pub fn ancestors_including_self(&self) -> impl Iterator<Item = ChannelId> + '_ {
- self.ancestors().chain(Some(self.id))
- }
-
- pub fn path(&self) -> String {
- format!("{}{}/", self.parent_path, self.id)
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_one = "super::room::Entity")]
- Room,
- #[sea_orm(has_one = "super::buffer::Entity")]
- Buffer,
- #[sea_orm(has_many = "super::channel_member::Entity")]
- Member,
- #[sea_orm(has_many = "super::channel_buffer_collaborator::Entity")]
- BufferCollaborators,
- #[sea_orm(has_many = "super::channel_chat_participant::Entity")]
- ChatParticipants,
-}
-
-impl Related<super::channel_member::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Member.def()
- }
-}
-
-impl Related<super::room::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Room.def()
- }
-}
-
-impl Related<super::buffer::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Buffer.def()
- }
-}
-
-impl Related<super::channel_buffer_collaborator::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::BufferCollaborators.def()
- }
-}
-
-impl Related<super::channel_chat_participant::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::ChatParticipants.def()
- }
-}
@@ -1,43 +0,0 @@
-use crate::db::{ChannelBufferCollaboratorId, ChannelId, ReplicaId, ServerId, UserId};
-use rpc::ConnectionId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_buffer_collaborators")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ChannelBufferCollaboratorId,
- pub channel_id: ChannelId,
- pub connection_id: i32,
- pub connection_server_id: ServerId,
- pub connection_lost: bool,
- pub user_id: UserId,
- pub replica_id: ReplicaId,
-}
-
-impl Model {
- pub fn connection(&self) -> ConnectionId {
- ConnectionId {
- owner_id: self.connection_server_id.0 as u32,
- id: self.connection_id as u32,
- }
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,41 +0,0 @@
-use crate::db::{ChannelChatParticipantId, ChannelId, ServerId, UserId};
-use rpc::ConnectionId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_chat_participants")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ChannelChatParticipantId,
- pub channel_id: ChannelId,
- pub user_id: UserId,
- pub connection_id: i32,
- pub connection_server_id: ServerId,
-}
-
-impl Model {
- pub fn connection(&self) -> ConnectionId {
- ConnectionId {
- owner_id: self.connection_server_id.0 as u32,
- id: self.connection_id as u32,
- }
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,59 +0,0 @@
-use crate::db::{channel_member, ChannelId, ChannelMemberId, ChannelRole, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_members")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ChannelMemberId,
- pub channel_id: ChannelId,
- pub user_id: UserId,
- pub accepted: bool,
- pub role: ChannelRole,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-#[derive(Debug)]
-pub struct UserToChannel;
-
-impl Linked for UserToChannel {
- type FromEntity = super::user::Entity;
-
- type ToEntity = super::channel::Entity;
-
- fn link(&self) -> Vec<RelationDef> {
- vec![
- channel_member::Relation::User.def().rev(),
- channel_member::Relation::Channel.def(),
- ]
- }
-}
@@ -1,45 +0,0 @@
-use crate::db::{ChannelId, MessageId, UserId};
-use sea_orm::entity::prelude::*;
-use time::PrimitiveDateTime;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_messages")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: MessageId,
- pub channel_id: ChannelId,
- pub sender_id: UserId,
- pub body: String,
- pub sent_at: PrimitiveDateTime,
- pub nonce: Uuid,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::SenderId",
- to = "super::user::Column::Id"
- )]
- Sender,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Sender.def()
- }
-}
@@ -1,43 +0,0 @@
-use crate::db::{MessageId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "channel_message_mentions")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub message_id: MessageId,
- #[sea_orm(primary_key)]
- pub start_offset: i32,
- pub end_offset: i32,
- pub user_id: UserId,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel_message::Entity",
- from = "Column::MessageId",
- to = "super::channel_message::Column::Id"
- )]
- Message,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- MentionedUser,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Message.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::MentionedUser.def()
- }
-}
@@ -1,32 +0,0 @@
-use crate::db::{ContactId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "contacts")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ContactId,
- pub user_id_a: UserId,
- pub user_id_b: UserId,
- pub a_to_b: bool,
- pub should_notify: bool,
- pub accepted: bool,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::room_participant::Entity",
- from = "Column::UserIdA",
- to = "super::room_participant::Column::UserId"
- )]
- UserARoomParticipant,
- #[sea_orm(
- belongs_to = "super::room_participant::Entity",
- from = "Column::UserIdB",
- to = "super::room_participant::Column::UserId"
- )]
- UserBRoomParticipant,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,40 +0,0 @@
-use sea_orm::entity::prelude::*;
-
-use crate::db::FlagId;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "feature_flags")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: FlagId,
- pub flag: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_many = "super::user_feature::Entity")]
- UserFeature,
-}
-
-impl Related<super::user_feature::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::UserFeature.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-pub struct FlaggedUsers;
-
-impl Linked for FlaggedUsers {
- type FromEntity = Entity;
-
- type ToEntity = super::user::Entity;
-
- fn link(&self) -> Vec<RelationDef> {
- vec![
- super::user_feature::Relation::Flag.def().rev(),
- super::user_feature::Relation::User.def(),
- ]
- }
-}
@@ -1,50 +0,0 @@
-use crate::db::{FollowerId, ProjectId, RoomId, ServerId};
-use rpc::ConnectionId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "followers")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: FollowerId,
- pub room_id: RoomId,
- pub project_id: ProjectId,
- pub leader_connection_server_id: ServerId,
- pub leader_connection_id: i32,
- pub follower_connection_server_id: ServerId,
- pub follower_connection_id: i32,
-}
-
-impl Model {
- pub fn leader_connection(&self) -> ConnectionId {
- ConnectionId {
- owner_id: self.leader_connection_server_id.0 as u32,
- id: self.leader_connection_id as u32,
- }
- }
-
- pub fn follower_connection(&self) -> ConnectionId {
- ConnectionId {
- owner_id: self.follower_connection_server_id.0 as u32,
- id: self.follower_connection_id as u32,
- }
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::room::Entity",
- from = "Column::RoomId",
- to = "super::room::Column::Id"
- )]
- Room,
-}
-
-impl Related<super::room::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Room.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,30 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "language_servers")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- #[sea_orm(primary_key)]
- pub id: i64,
- pub name: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::project::Entity",
- from = "Column::ProjectId",
- to = "super::project::Column::Id"
- )]
- Project,
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Project.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,29 +0,0 @@
-use crate::db::{NotificationId, NotificationKindId, UserId};
-use sea_orm::entity::prelude::*;
-use time::PrimitiveDateTime;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "notifications")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: NotificationId,
- pub created_at: PrimitiveDateTime,
- pub recipient_id: UserId,
- pub kind: NotificationKindId,
- pub entity_id: Option<i32>,
- pub content: String,
- pub is_read: bool,
- pub response: Option<bool>,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::RecipientId",
- to = "super::user::Column::Id"
- )]
- Recipient,
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,15 +0,0 @@
-use crate::db::NotificationKindId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "notification_kinds")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: NotificationKindId,
- pub name: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,43 +0,0 @@
-use crate::db::{BufferId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "observed_buffer_edits")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub user_id: UserId,
- pub buffer_id: BufferId,
- pub epoch: i32,
- pub lamport_timestamp: i32,
- pub replica_id: i32,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::buffer::Entity",
- from = "Column::BufferId",
- to = "super::buffer::Column::Id"
- )]
- Buffer,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::buffer::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Buffer.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,41 +0,0 @@
-use crate::db::{ChannelId, MessageId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "observed_channel_messages")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub user_id: UserId,
- pub channel_id: ChannelId,
- pub channel_message_id: MessageId,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,84 +0,0 @@
-use crate::db::{ProjectId, Result, RoomId, ServerId, UserId};
-use anyhow::anyhow;
-use rpc::ConnectionId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "projects")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ProjectId,
- pub room_id: RoomId,
- pub host_user_id: UserId,
- pub host_connection_id: Option<i32>,
- pub host_connection_server_id: Option<ServerId>,
-}
-
-impl Model {
- pub fn host_connection(&self) -> Result<ConnectionId> {
- let host_connection_server_id = self
- .host_connection_server_id
- .ok_or_else(|| anyhow!("empty host_connection_server_id"))?;
- let host_connection_id = self
- .host_connection_id
- .ok_or_else(|| anyhow!("empty host_connection_id"))?;
- Ok(ConnectionId {
- owner_id: host_connection_server_id.0 as u32,
- id: host_connection_id as u32,
- })
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::HostUserId",
- to = "super::user::Column::Id"
- )]
- HostUser,
- #[sea_orm(
- belongs_to = "super::room::Entity",
- from = "Column::RoomId",
- to = "super::room::Column::Id"
- )]
- Room,
- #[sea_orm(has_many = "super::worktree::Entity")]
- Worktrees,
- #[sea_orm(has_many = "super::project_collaborator::Entity")]
- Collaborators,
- #[sea_orm(has_many = "super::language_server::Entity")]
- LanguageServers,
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::HostUser.def()
- }
-}
-
-impl Related<super::room::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Room.def()
- }
-}
-
-impl Related<super::worktree::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Worktrees.def()
- }
-}
-
-impl Related<super::project_collaborator::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Collaborators.def()
- }
-}
-
-impl Related<super::language_server::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::LanguageServers.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,43 +0,0 @@
-use crate::db::{ProjectCollaboratorId, ProjectId, ReplicaId, ServerId, UserId};
-use rpc::ConnectionId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "project_collaborators")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ProjectCollaboratorId,
- pub project_id: ProjectId,
- pub connection_id: i32,
- pub connection_server_id: ServerId,
- pub user_id: UserId,
- pub replica_id: ReplicaId,
- pub is_host: bool,
-}
-
-impl Model {
- pub fn connection(&self) -> ConnectionId {
- ConnectionId {
- owner_id: self.connection_server_id.0 as u32,
- id: self.connection_id as u32,
- }
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::project::Entity",
- from = "Column::ProjectId",
- to = "super::project::Column::Id"
- )]
- Project,
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Project.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,54 +0,0 @@
-use crate::db::{ChannelId, RoomId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Default, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "rooms")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: RoomId,
- pub live_kit_room: String,
- pub channel_id: Option<ChannelId>,
- pub enviroment: Option<String>,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_many = "super::room_participant::Entity")]
- RoomParticipant,
- #[sea_orm(has_many = "super::project::Entity")]
- Project,
- #[sea_orm(has_many = "super::follower::Entity")]
- Follower,
- #[sea_orm(
- belongs_to = "super::channel::Entity",
- from = "Column::ChannelId",
- to = "super::channel::Column::Id"
- )]
- Channel,
-}
-
-impl Related<super::room_participant::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::RoomParticipant.def()
- }
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Project.def()
- }
-}
-
-impl Related<super::follower::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Follower.def()
- }
-}
-
-impl Related<super::channel::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Channel.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,61 +0,0 @@
-use crate::db::{ProjectId, RoomId, RoomParticipantId, ServerId, UserId};
-use rpc::ConnectionId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "room_participants")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: RoomParticipantId,
- pub room_id: RoomId,
- pub user_id: UserId,
- pub answering_connection_id: Option<i32>,
- pub answering_connection_server_id: Option<ServerId>,
- pub answering_connection_lost: bool,
- pub location_kind: Option<i32>,
- pub location_project_id: Option<ProjectId>,
- pub initial_project_id: Option<ProjectId>,
- pub calling_user_id: UserId,
- pub calling_connection_id: i32,
- pub calling_connection_server_id: Option<ServerId>,
- pub participant_index: Option<i32>,
-}
-
-impl Model {
- pub fn answering_connection(&self) -> Option<ConnectionId> {
- Some(ConnectionId {
- owner_id: self.answering_connection_server_id?.0 as u32,
- id: self.answering_connection_id? as u32,
- })
- }
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
- #[sea_orm(
- belongs_to = "super::room::Entity",
- from = "Column::RoomId",
- to = "super::room::Column::Id"
- )]
- Room,
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl Related<super::room::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Room.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,15 +0,0 @@
-use crate::db::ServerId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "servers")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: ServerId,
- pub environment: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,28 +0,0 @@
-use crate::db::{SignupId, UserId};
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "signups")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: SignupId,
- pub email_address: String,
- pub email_confirmation_code: String,
- pub email_confirmation_sent: bool,
- pub created_at: DateTime,
- pub device_id: Option<String>,
- pub user_id: Option<UserId>,
- pub inviting_user_id: Option<UserId>,
- pub platform_mac: bool,
- pub platform_linux: bool,
- pub platform_windows: bool,
- pub platform_unknown: bool,
- pub editor_features: Option<Vec<String>>,
- pub programming_languages: Option<Vec<String>>,
- pub added_to_mailing_list: bool,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,80 +0,0 @@
-use crate::db::UserId;
-use sea_orm::entity::prelude::*;
-use serde::Serialize;
-
-#[derive(Clone, Debug, Default, PartialEq, Eq, DeriveEntityModel, Serialize)]
-#[sea_orm(table_name = "users")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: UserId,
- pub github_login: String,
- pub github_user_id: Option<i32>,
- pub email_address: Option<String>,
- pub admin: bool,
- pub invite_code: Option<String>,
- pub invite_count: i32,
- pub inviter_id: Option<UserId>,
- pub connected_once: bool,
- pub metrics_id: Uuid,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(has_many = "super::access_token::Entity")]
- AccessToken,
- #[sea_orm(has_one = "super::room_participant::Entity")]
- RoomParticipant,
- #[sea_orm(has_many = "super::project::Entity")]
- HostedProjects,
- #[sea_orm(has_many = "super::channel_member::Entity")]
- ChannelMemberships,
- #[sea_orm(has_many = "super::user_feature::Entity")]
- UserFeatures,
-}
-
-impl Related<super::access_token::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::AccessToken.def()
- }
-}
-
-impl Related<super::room_participant::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::RoomParticipant.def()
- }
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::HostedProjects.def()
- }
-}
-
-impl Related<super::channel_member::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::ChannelMemberships.def()
- }
-}
-
-impl Related<super::user_feature::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::UserFeatures.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
-
-pub struct UserFlags;
-
-impl Linked for UserFlags {
- type FromEntity = Entity;
-
- type ToEntity = super::feature_flag::Entity;
-
- fn link(&self) -> Vec<RelationDef> {
- vec![
- super::user_feature::Relation::User.def().rev(),
- super::user_feature::Relation::Flag.def(),
- ]
- }
-}
@@ -1,42 +0,0 @@
-use sea_orm::entity::prelude::*;
-
-use crate::db::{FlagId, UserId};
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "user_features")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub user_id: UserId,
- #[sea_orm(primary_key)]
- pub feature_id: FlagId,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::feature_flag::Entity",
- from = "Column::FeatureId",
- to = "super::feature_flag::Column::Id"
- )]
- Flag,
- #[sea_orm(
- belongs_to = "super::user::Entity",
- from = "Column::UserId",
- to = "super::user::Column::Id"
- )]
- User,
-}
-
-impl Related<super::feature_flag::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Flag.def()
- }
-}
-
-impl Related<super::user::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::User.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,36 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "worktrees")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub id: i64,
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- pub abs_path: String,
- pub root_name: String,
- pub visible: bool,
- /// The last scan for which we've observed entries. It may be in progress.
- pub scan_id: i64,
- /// The last scan that fully completed.
- pub completed_scan_id: i64,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {
- #[sea_orm(
- belongs_to = "super::project::Entity",
- from = "Column::ProjectId",
- to = "super::project::Column::Id"
- )]
- Project,
-}
-
-impl Related<super::project::Entity> for Entity {
- fn to() -> RelationDef {
- Relation::Project.def()
- }
-}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,21 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "worktree_diagnostic_summaries")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- #[sea_orm(primary_key)]
- pub worktree_id: i64,
- #[sea_orm(primary_key)]
- pub path: String,
- pub language_server_id: i64,
- pub error_count: i32,
- pub warning_count: i32,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,29 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "worktree_entries")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- #[sea_orm(primary_key)]
- pub worktree_id: i64,
- #[sea_orm(primary_key)]
- pub id: i64,
- pub is_dir: bool,
- pub path: String,
- pub inode: i64,
- pub mtime_seconds: i64,
- pub mtime_nanos: i32,
- pub git_status: Option<i64>,
- pub is_symlink: bool,
- pub is_ignored: bool,
- pub is_external: bool,
- pub is_deleted: bool,
- pub scan_id: i64,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,21 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "worktree_repositories")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- #[sea_orm(primary_key)]
- pub worktree_id: i64,
- #[sea_orm(primary_key)]
- pub work_directory_id: i64,
- pub scan_id: i64,
- pub branch: Option<String>,
- pub is_deleted: bool,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,23 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "worktree_repository_statuses")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- #[sea_orm(primary_key)]
- pub worktree_id: i64,
- #[sea_orm(primary_key)]
- pub work_directory_id: i64,
- #[sea_orm(primary_key)]
- pub repo_path: String,
- pub status: i64,
- pub scan_id: i64,
- pub is_deleted: bool,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,19 +0,0 @@
-use crate::db::ProjectId;
-use sea_orm::entity::prelude::*;
-
-#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel)]
-#[sea_orm(table_name = "worktree_settings_files")]
-pub struct Model {
- #[sea_orm(primary_key)]
- pub project_id: ProjectId,
- #[sea_orm(primary_key)]
- pub worktree_id: i64,
- #[sea_orm(primary_key)]
- pub path: String,
- pub content: String,
-}
-
-#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
-pub enum Relation {}
-
-impl ActiveModelBehavior for ActiveModel {}
@@ -1,187 +0,0 @@
-mod buffer_tests;
-mod channel_tests;
-mod db_tests;
-mod feature_flag_tests;
-mod message_tests;
-
-use super::*;
-use gpui::BackgroundExecutor;
-use parking_lot::Mutex;
-use sea_orm::ConnectionTrait;
-use sqlx::migrate::MigrateDatabase;
-use std::sync::{
- atomic::{AtomicI32, AtomicU32, Ordering::SeqCst},
- Arc,
-};
-
-const TEST_RELEASE_CHANNEL: &'static str = "test";
-
-pub struct TestDb {
- pub db: Option<Arc<Database>>,
- pub connection: Option<sqlx::AnyConnection>,
-}
-
-impl TestDb {
- pub fn sqlite(background: BackgroundExecutor) -> Self {
- let url = format!("sqlite::memory:");
- let runtime = tokio::runtime::Builder::new_current_thread()
- .enable_io()
- .enable_time()
- .build()
- .unwrap();
-
- let mut db = runtime.block_on(async {
- let mut options = ConnectOptions::new(url);
- options.max_connections(5);
- let mut db = Database::new(options, Executor::Deterministic(background))
- .await
- .unwrap();
- let sql = include_str!(concat!(
- env!("CARGO_MANIFEST_DIR"),
- "/migrations.sqlite/20221109000000_test_schema.sql"
- ));
- db.pool
- .execute(sea_orm::Statement::from_string(
- db.pool.get_database_backend(),
- sql,
- ))
- .await
- .unwrap();
- db.initialize_notification_kinds().await.unwrap();
- db
- });
-
- db.runtime = Some(runtime);
-
- Self {
- db: Some(Arc::new(db)),
- connection: None,
- }
- }
-
- pub fn postgres(background: BackgroundExecutor) -> Self {
- static LOCK: Mutex<()> = Mutex::new(());
-
- let _guard = LOCK.lock();
- let mut rng = StdRng::from_entropy();
- let url = format!(
- "postgres://postgres@localhost/zed-test-{}",
- rng.gen::<u128>()
- );
- let runtime = tokio::runtime::Builder::new_current_thread()
- .enable_io()
- .enable_time()
- .build()
- .unwrap();
-
- let mut db = runtime.block_on(async {
- sqlx::Postgres::create_database(&url)
- .await
- .expect("failed to create test db");
- let mut options = ConnectOptions::new(url);
- options
- .max_connections(5)
- .idle_timeout(Duration::from_secs(0));
- let mut db = Database::new(options, Executor::Deterministic(background))
- .await
- .unwrap();
- let migrations_path = concat!(env!("CARGO_MANIFEST_DIR"), "/migrations");
- db.migrate(Path::new(migrations_path), false).await.unwrap();
- db.initialize_notification_kinds().await.unwrap();
- db
- });
-
- db.runtime = Some(runtime);
-
- Self {
- db: Some(Arc::new(db)),
- connection: None,
- }
- }
-
- pub fn db(&self) -> &Arc<Database> {
- self.db.as_ref().unwrap()
- }
-}
-
-#[macro_export]
-macro_rules! test_both_dbs {
- ($test_name:ident, $postgres_test_name:ident, $sqlite_test_name:ident) => {
- #[gpui::test]
- async fn $postgres_test_name(cx: &mut gpui::TestAppContext) {
- let test_db = crate::db::TestDb::postgres(cx.executor().clone());
- $test_name(test_db.db()).await;
- }
-
- #[gpui::test]
- async fn $sqlite_test_name(cx: &mut gpui::TestAppContext) {
- let test_db = crate::db::TestDb::sqlite(cx.executor().clone());
- $test_name(test_db.db()).await;
- }
- };
-}
-
-impl Drop for TestDb {
- fn drop(&mut self) {
- let db = self.db.take().unwrap();
- if let sea_orm::DatabaseBackend::Postgres = db.pool.get_database_backend() {
- db.runtime.as_ref().unwrap().block_on(async {
- use util::ResultExt;
- let query = "
- SELECT pg_terminate_backend(pg_stat_activity.pid)
- FROM pg_stat_activity
- WHERE
- pg_stat_activity.datname = current_database() AND
- pid <> pg_backend_pid();
- ";
- db.pool
- .execute(sea_orm::Statement::from_string(
- db.pool.get_database_backend(),
- query,
- ))
- .await
- .log_err();
- sqlx::Postgres::drop_database(db.options.get_url())
- .await
- .log_err();
- })
- }
- }
-}
-
-fn channel_tree(channels: &[(ChannelId, &[ChannelId], &'static str, ChannelRole)]) -> Vec<Channel> {
- channels
- .iter()
- .map(|(id, parent_path, name, role)| Channel {
- id: *id,
- name: name.to_string(),
- visibility: ChannelVisibility::Members,
- role: *role,
- parent_path: parent_path.to_vec(),
- })
- .collect()
-}
-
-static GITHUB_USER_ID: AtomicI32 = AtomicI32::new(5);
-
-async fn new_test_user(db: &Arc<Database>, email: &str) -> UserId {
- db.create_user(
- email,
- false,
- NewUserParams {
- github_login: email[0..email.find("@").unwrap()].to_string(),
- github_user_id: GITHUB_USER_ID.fetch_add(1, SeqCst),
- },
- )
- .await
- .unwrap()
- .user_id
-}
-
-static TEST_CONNECTION_ID: AtomicU32 = AtomicU32::new(1);
-fn new_test_connection(server: ServerId) -> ConnectionId {
- ConnectionId {
- id: TEST_CONNECTION_ID.fetch_add(1, SeqCst),
- owner_id: server.0 as u32,
- }
-}
@@ -1,506 +0,0 @@
-use super::*;
-use crate::test_both_dbs;
-use language::proto::{self, serialize_version};
-use text::Buffer;
-
-test_both_dbs!(
- test_channel_buffers,
- test_channel_buffers_postgres,
- test_channel_buffers_sqlite
-);
-
-async fn test_channel_buffers(db: &Arc<Database>) {
- let a_id = db
- .create_user(
- "user_a@example.com",
- false,
- NewUserParams {
- github_login: "user_a".into(),
- github_user_id: 101,
- },
- )
- .await
- .unwrap()
- .user_id;
- let b_id = db
- .create_user(
- "user_b@example.com",
- false,
- NewUserParams {
- github_login: "user_b".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- // This user will not be a part of the channel
- let c_id = db
- .create_user(
- "user_c@example.com",
- false,
- NewUserParams {
- github_login: "user_c".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let owner_id = db.create_server("production").await.unwrap().0 as u32;
-
- let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
-
- db.invite_channel_member(zed_id, b_id, a_id, ChannelRole::Member)
- .await
- .unwrap();
-
- db.respond_to_channel_invite(zed_id, b_id, true)
- .await
- .unwrap();
-
- let connection_id_a = ConnectionId { owner_id, id: 1 };
- let _ = db
- .join_channel_buffer(zed_id, a_id, connection_id_a)
- .await
- .unwrap();
-
- let mut buffer_a = Buffer::new(0, 0, "".to_string());
- let mut operations = Vec::new();
- operations.push(buffer_a.edit([(0..0, "hello world")]));
- operations.push(buffer_a.edit([(5..5, ", cruel")]));
- operations.push(buffer_a.edit([(0..5, "goodbye")]));
- operations.push(buffer_a.undo().unwrap().1);
- assert_eq!(buffer_a.text(), "hello, cruel world");
-
- let operations = operations
- .into_iter()
- .map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
- .collect::<Vec<_>>();
-
- db.update_channel_buffer(zed_id, a_id, &operations)
- .await
- .unwrap();
-
- let connection_id_b = ConnectionId { owner_id, id: 2 };
- let buffer_response_b = db
- .join_channel_buffer(zed_id, b_id, connection_id_b)
- .await
- .unwrap();
-
- let mut buffer_b = Buffer::new(0, 0, buffer_response_b.base_text);
- buffer_b
- .apply_ops(buffer_response_b.operations.into_iter().map(|operation| {
- let operation = proto::deserialize_operation(operation).unwrap();
- if let language::Operation::Buffer(operation) = operation {
- operation
- } else {
- unreachable!()
- }
- }))
- .unwrap();
-
- assert_eq!(buffer_b.text(), "hello, cruel world");
-
- // Ensure that C fails to open the buffer
- assert!(db
- .join_channel_buffer(zed_id, c_id, ConnectionId { owner_id, id: 3 })
- .await
- .is_err());
-
- // Ensure that both collaborators have shown up
- assert_eq!(
- buffer_response_b.collaborators,
- &[
- rpc::proto::Collaborator {
- user_id: a_id.to_proto(),
- peer_id: Some(rpc::proto::PeerId { id: 1, owner_id }),
- replica_id: 0,
- },
- rpc::proto::Collaborator {
- user_id: b_id.to_proto(),
- peer_id: Some(rpc::proto::PeerId { id: 2, owner_id }),
- replica_id: 1,
- }
- ]
- );
-
- // Ensure that get_channel_buffer_collaborators works
- let zed_collaborats = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
- assert_eq!(zed_collaborats, &[a_id, b_id]);
-
- let left_buffer = db
- .leave_channel_buffer(zed_id, connection_id_b)
- .await
- .unwrap();
-
- assert_eq!(left_buffer.connections, &[connection_id_a],);
-
- let cargo_id = db.create_root_channel("cargo", a_id).await.unwrap();
- let _ = db
- .join_channel_buffer(cargo_id, a_id, connection_id_a)
- .await
- .unwrap();
-
- db.leave_channel_buffers(connection_id_a).await.unwrap();
-
- let zed_collaborators = db.get_channel_buffer_collaborators(zed_id).await.unwrap();
- let cargo_collaborators = db.get_channel_buffer_collaborators(cargo_id).await.unwrap();
- assert_eq!(zed_collaborators, &[]);
- assert_eq!(cargo_collaborators, &[]);
-
- // When everyone has left the channel, the operations are collapsed into
- // a new base text.
- let buffer_response_b = db
- .join_channel_buffer(zed_id, b_id, connection_id_b)
- .await
- .unwrap();
- assert_eq!(buffer_response_b.base_text, "hello, cruel world");
- assert_eq!(buffer_response_b.operations, &[]);
-}
-
-test_both_dbs!(
- test_channel_buffers_last_operations,
- test_channel_buffers_last_operations_postgres,
- test_channel_buffers_last_operations_sqlite
-);
-
-async fn test_channel_buffers_last_operations(db: &Database) {
- let user_id = db
- .create_user(
- "user_a@example.com",
- false,
- NewUserParams {
- github_login: "user_a".into(),
- github_user_id: 101,
- },
- )
- .await
- .unwrap()
- .user_id;
- let observer_id = db
- .create_user(
- "user_b@example.com",
- false,
- NewUserParams {
- github_login: "user_b".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap()
- .user_id;
- let owner_id = db.create_server("production").await.unwrap().0 as u32;
- let connection_id = ConnectionId {
- owner_id,
- id: user_id.0 as u32,
- };
-
- let mut buffers = Vec::new();
- let mut text_buffers = Vec::new();
- for i in 0..3 {
- let channel = db
- .create_root_channel(&format!("channel-{i}"), user_id)
- .await
- .unwrap();
-
- db.invite_channel_member(channel, observer_id, user_id, ChannelRole::Member)
- .await
- .unwrap();
- db.respond_to_channel_invite(channel, observer_id, true)
- .await
- .unwrap();
-
- db.join_channel_buffer(channel, user_id, connection_id)
- .await
- .unwrap();
-
- buffers.push(
- db.transaction(|tx| async move { db.get_channel_buffer(channel, &*tx).await })
- .await
- .unwrap(),
- );
-
- text_buffers.push(Buffer::new(0, 0, "".to_string()));
- }
-
- let operations = db
- .transaction(|tx| {
- let buffers = &buffers;
- async move {
- db.get_latest_operations_for_buffers([buffers[0].id, buffers[2].id], &*tx)
- .await
- }
- })
- .await
- .unwrap();
-
- assert!(operations.is_empty());
-
- update_buffer(
- buffers[0].channel_id,
- user_id,
- db,
- vec![
- text_buffers[0].edit([(0..0, "a")]),
- text_buffers[0].edit([(0..0, "b")]),
- text_buffers[0].edit([(0..0, "c")]),
- ],
- )
- .await;
-
- update_buffer(
- buffers[1].channel_id,
- user_id,
- db,
- vec![
- text_buffers[1].edit([(0..0, "d")]),
- text_buffers[1].edit([(1..1, "e")]),
- text_buffers[1].edit([(2..2, "f")]),
- ],
- )
- .await;
-
- // cause buffer 1's epoch to increment.
- db.leave_channel_buffer(buffers[1].channel_id, connection_id)
- .await
- .unwrap();
- db.join_channel_buffer(buffers[1].channel_id, user_id, connection_id)
- .await
- .unwrap();
- text_buffers[1] = Buffer::new(1, 0, "def".to_string());
- update_buffer(
- buffers[1].channel_id,
- user_id,
- db,
- vec![
- text_buffers[1].edit([(0..0, "g")]),
- text_buffers[1].edit([(0..0, "h")]),
- ],
- )
- .await;
-
- update_buffer(
- buffers[2].channel_id,
- user_id,
- db,
- vec![text_buffers[2].edit([(0..0, "i")])],
- )
- .await;
-
- let operations = db
- .transaction(|tx| {
- let buffers = &buffers;
- async move {
- db.get_latest_operations_for_buffers([buffers[1].id, buffers[2].id], &*tx)
- .await
- }
- })
- .await
- .unwrap();
- assert_operations(
- &operations,
- &[
- (buffers[1].id, 1, &text_buffers[1]),
- (buffers[2].id, 0, &text_buffers[2]),
- ],
- );
-
- let operations = db
- .transaction(|tx| {
- let buffers = &buffers;
- async move {
- db.get_latest_operations_for_buffers([buffers[0].id, buffers[1].id], &*tx)
- .await
- }
- })
- .await
- .unwrap();
- assert_operations(
- &operations,
- &[
- (buffers[0].id, 0, &text_buffers[0]),
- (buffers[1].id, 1, &text_buffers[1]),
- ],
- );
-
- let buffer_changes = db
- .transaction(|tx| {
- let buffers = &buffers;
- async move {
- db.unseen_channel_buffer_changes(
- observer_id,
- &[
- buffers[0].channel_id,
- buffers[1].channel_id,
- buffers[2].channel_id,
- ],
- &*tx,
- )
- .await
- }
- })
- .await
- .unwrap();
-
- pretty_assertions::assert_eq!(
- buffer_changes,
- [
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[0].channel_id.to_proto(),
- epoch: 0,
- version: serialize_version(&text_buffers[0].version()),
- },
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[1].channel_id.to_proto(),
- epoch: 1,
- version: serialize_version(&text_buffers[1].version())
- .into_iter()
- .filter(|vector| vector.replica_id
- == buffer_changes[1].version.first().unwrap().replica_id)
- .collect::<Vec<_>>(),
- },
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[2].channel_id.to_proto(),
- epoch: 0,
- version: serialize_version(&text_buffers[2].version()),
- },
- ]
- );
-
- db.observe_buffer_version(
- buffers[1].id,
- observer_id,
- 1,
- serialize_version(&text_buffers[1].version()).as_slice(),
- )
- .await
- .unwrap();
-
- let buffer_changes = db
- .transaction(|tx| {
- let buffers = &buffers;
- async move {
- db.unseen_channel_buffer_changes(
- observer_id,
- &[
- buffers[0].channel_id,
- buffers[1].channel_id,
- buffers[2].channel_id,
- ],
- &*tx,
- )
- .await
- }
- })
- .await
- .unwrap();
-
- assert_eq!(
- buffer_changes,
- [
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[0].channel_id.to_proto(),
- epoch: 0,
- version: serialize_version(&text_buffers[0].version()),
- },
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[2].channel_id.to_proto(),
- epoch: 0,
- version: serialize_version(&text_buffers[2].version()),
- },
- ]
- );
-
- // Observe an earlier version of the buffer.
- db.observe_buffer_version(
- buffers[1].id,
- observer_id,
- 1,
- &[rpc::proto::VectorClockEntry {
- replica_id: 0,
- timestamp: 0,
- }],
- )
- .await
- .unwrap();
-
- let buffer_changes = db
- .transaction(|tx| {
- let buffers = &buffers;
- async move {
- db.unseen_channel_buffer_changes(
- observer_id,
- &[
- buffers[0].channel_id,
- buffers[1].channel_id,
- buffers[2].channel_id,
- ],
- &*tx,
- )
- .await
- }
- })
- .await
- .unwrap();
-
- assert_eq!(
- buffer_changes,
- [
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[0].channel_id.to_proto(),
- epoch: 0,
- version: serialize_version(&text_buffers[0].version()),
- },
- rpc::proto::UnseenChannelBufferChange {
- channel_id: buffers[2].channel_id.to_proto(),
- epoch: 0,
- version: serialize_version(&text_buffers[2].version()),
- },
- ]
- );
-}
-
-async fn update_buffer(
- channel_id: ChannelId,
- user_id: UserId,
- db: &Database,
- operations: Vec<text::Operation>,
-) {
- let operations = operations
- .into_iter()
- .map(|op| proto::serialize_operation(&language::Operation::Buffer(op)))
- .collect::<Vec<_>>();
- db.update_channel_buffer(channel_id, user_id, &operations)
- .await
- .unwrap();
-}
-
-fn assert_operations(
- operations: &[buffer_operation::Model],
- expected: &[(BufferId, i32, &text::Buffer)],
-) {
- let actual = operations
- .iter()
- .map(|op| buffer_operation::Model {
- buffer_id: op.buffer_id,
- epoch: op.epoch,
- lamport_timestamp: op.lamport_timestamp,
- replica_id: op.replica_id,
- value: vec![],
- })
- .collect::<Vec<_>>();
- let expected = expected
- .iter()
- .map(|(buffer_id, epoch, buffer)| buffer_operation::Model {
- buffer_id: *buffer_id,
- epoch: *epoch,
- lamport_timestamp: buffer.lamport_clock.value as i32 - 1,
- replica_id: buffer.replica_id() as i32,
- value: vec![],
- })
- .collect::<Vec<_>>();
- assert_eq!(actual, expected, "unexpected operations")
-}
@@ -1,831 +0,0 @@
-use crate::{
- db::{
- tests::{channel_tree, new_test_connection, new_test_user, TEST_RELEASE_CHANNEL},
- Channel, ChannelId, ChannelRole, Database, NewUserParams, RoomId,
- },
- test_both_dbs,
-};
-use rpc::{
- proto::{self},
- ConnectionId,
-};
-use std::sync::Arc;
-
-test_both_dbs!(test_channels, test_channels_postgres, test_channels_sqlite);
-
-async fn test_channels(db: &Arc<Database>) {
- let a_id = new_test_user(db, "user1@example.com").await;
- let b_id = new_test_user(db, "user2@example.com").await;
-
- let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
-
- // Make sure that people cannot read channels they haven't been invited to
- assert!(db.get_channel(zed_id, b_id).await.is_err());
-
- db.invite_channel_member(zed_id, b_id, a_id, ChannelRole::Member)
- .await
- .unwrap();
-
- db.respond_to_channel_invite(zed_id, b_id, true)
- .await
- .unwrap();
-
- let crdb_id = db.create_sub_channel("crdb", zed_id, a_id).await.unwrap();
- let livestreaming_id = db
- .create_sub_channel("livestreaming", zed_id, a_id)
- .await
- .unwrap();
- let replace_id = db
- .create_sub_channel("replace", zed_id, a_id)
- .await
- .unwrap();
-
- let mut members = db
- .transaction(|tx| async move {
- let channel = db.get_channel_internal(replace_id, &*tx).await?;
- Ok(db.get_channel_participants(&channel, &*tx).await?)
- })
- .await
- .unwrap();
- members.sort();
- assert_eq!(members, &[a_id, b_id]);
-
- let rust_id = db.create_root_channel("rust", a_id).await.unwrap();
- let cargo_id = db.create_sub_channel("cargo", rust_id, a_id).await.unwrap();
-
- let cargo_ra_id = db
- .create_sub_channel("cargo-ra", cargo_id, a_id)
- .await
- .unwrap();
-
- let result = db.get_channels_for_user(a_id).await.unwrap();
- assert_eq!(
- result.channels,
- channel_tree(&[
- (zed_id, &[], "zed", ChannelRole::Admin),
- (crdb_id, &[zed_id], "crdb", ChannelRole::Admin),
- (
- livestreaming_id,
- &[zed_id],
- "livestreaming",
- ChannelRole::Admin
- ),
- (replace_id, &[zed_id], "replace", ChannelRole::Admin),
- (rust_id, &[], "rust", ChannelRole::Admin),
- (cargo_id, &[rust_id], "cargo", ChannelRole::Admin),
- (
- cargo_ra_id,
- &[rust_id, cargo_id],
- "cargo-ra",
- ChannelRole::Admin
- )
- ],)
- );
-
- let result = db.get_channels_for_user(b_id).await.unwrap();
- assert_eq!(
- result.channels,
- channel_tree(&[
- (zed_id, &[], "zed", ChannelRole::Member),
- (crdb_id, &[zed_id], "crdb", ChannelRole::Member),
- (
- livestreaming_id,
- &[zed_id],
- "livestreaming",
- ChannelRole::Member
- ),
- (replace_id, &[zed_id], "replace", ChannelRole::Member)
- ],)
- );
-
- // Update member permissions
- let set_subchannel_admin = db
- .set_channel_member_role(crdb_id, a_id, b_id, ChannelRole::Admin)
- .await;
- assert!(set_subchannel_admin.is_err());
- let set_channel_admin = db
- .set_channel_member_role(zed_id, a_id, b_id, ChannelRole::Admin)
- .await;
- assert!(set_channel_admin.is_ok());
-
- let result = db.get_channels_for_user(b_id).await.unwrap();
- assert_eq!(
- result.channels,
- channel_tree(&[
- (zed_id, &[], "zed", ChannelRole::Admin),
- (crdb_id, &[zed_id], "crdb", ChannelRole::Admin),
- (
- livestreaming_id,
- &[zed_id],
- "livestreaming",
- ChannelRole::Admin
- ),
- (replace_id, &[zed_id], "replace", ChannelRole::Admin)
- ],)
- );
-
- // Remove a single channel
- db.delete_channel(crdb_id, a_id).await.unwrap();
- assert!(db.get_channel(crdb_id, a_id).await.is_err());
-
- // Remove a channel tree
- let (mut channel_ids, user_ids) = db.delete_channel(rust_id, a_id).await.unwrap();
- channel_ids.sort();
- assert_eq!(channel_ids, &[rust_id, cargo_id, cargo_ra_id]);
- assert_eq!(user_ids, &[a_id]);
-
- assert!(db.get_channel(rust_id, a_id).await.is_err());
- assert!(db.get_channel(cargo_id, a_id).await.is_err());
- assert!(db.get_channel(cargo_ra_id, a_id).await.is_err());
-}
-
-test_both_dbs!(
- test_joining_channels,
- test_joining_channels_postgres,
- test_joining_channels_sqlite
-);
-
-async fn test_joining_channels(db: &Arc<Database>) {
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
-
- let user_1 = new_test_user(db, "user1@example.com").await;
- let user_2 = new_test_user(db, "user2@example.com").await;
-
- let channel_1 = db.create_root_channel("channel_1", user_1).await.unwrap();
-
- // can join a room with membership to its channel
- let (joined_room, _, _) = db
- .join_channel(
- channel_1,
- user_1,
- ConnectionId { owner_id, id: 1 },
- TEST_RELEASE_CHANNEL,
- )
- .await
- .unwrap();
- assert_eq!(joined_room.room.participants.len(), 1);
-
- let room_id = RoomId::from_proto(joined_room.room.id);
- drop(joined_room);
- // cannot join a room without membership to its channel
- assert!(db
- .join_room(
- room_id,
- user_2,
- ConnectionId { owner_id, id: 1 },
- TEST_RELEASE_CHANNEL
- )
- .await
- .is_err());
-}
-
-test_both_dbs!(
- test_channel_invites,
- test_channel_invites_postgres,
- test_channel_invites_sqlite
-);
-
-async fn test_channel_invites(db: &Arc<Database>) {
- db.create_server("test").await.unwrap();
-
- let user_1 = new_test_user(db, "user1@example.com").await;
- let user_2 = new_test_user(db, "user2@example.com").await;
- let user_3 = new_test_user(db, "user3@example.com").await;
-
- let channel_1_1 = db.create_root_channel("channel_1", user_1).await.unwrap();
-
- let channel_1_2 = db.create_root_channel("channel_2", user_1).await.unwrap();
-
- db.invite_channel_member(channel_1_1, user_2, user_1, ChannelRole::Member)
- .await
- .unwrap();
- db.invite_channel_member(channel_1_2, user_2, user_1, ChannelRole::Member)
- .await
- .unwrap();
- db.invite_channel_member(channel_1_1, user_3, user_1, ChannelRole::Admin)
- .await
- .unwrap();
-
- let user_2_invites = db
- .get_channel_invites_for_user(user_2) // -> [channel_1_1, channel_1_2]
- .await
- .unwrap()
- .into_iter()
- .map(|channel| channel.id)
- .collect::<Vec<_>>();
-
- assert_eq!(user_2_invites, &[channel_1_1, channel_1_2]);
-
- let user_3_invites = db
- .get_channel_invites_for_user(user_3) // -> [channel_1_1]
- .await
- .unwrap()
- .into_iter()
- .map(|channel| channel.id)
- .collect::<Vec<_>>();
-
- assert_eq!(user_3_invites, &[channel_1_1]);
-
- let mut members = db
- .get_channel_participant_details(channel_1_1, user_1)
- .await
- .unwrap();
-
- members.sort_by_key(|member| member.user_id);
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: user_1.to_proto(),
- kind: proto::channel_member::Kind::Member.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- proto::ChannelMember {
- user_id: user_2.to_proto(),
- kind: proto::channel_member::Kind::Invitee.into(),
- role: proto::ChannelRole::Member.into(),
- },
- proto::ChannelMember {
- user_id: user_3.to_proto(),
- kind: proto::channel_member::Kind::Invitee.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- ]
- );
-
- db.respond_to_channel_invite(channel_1_1, user_2, true)
- .await
- .unwrap();
-
- let channel_1_3 = db
- .create_sub_channel("channel_3", channel_1_1, user_1)
- .await
- .unwrap();
-
- let members = db
- .get_channel_participant_details(channel_1_3, user_1)
- .await
- .unwrap();
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: user_1.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- proto::ChannelMember {
- user_id: user_2.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Member.into(),
- },
- ]
- );
-}
-
-test_both_dbs!(
- test_channel_renames,
- test_channel_renames_postgres,
- test_channel_renames_sqlite
-);
-
-async fn test_channel_renames(db: &Arc<Database>) {
- db.create_server("test").await.unwrap();
-
- let user_1 = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "user1".into(),
- github_user_id: 5,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user_2 = db
- .create_user(
- "user2@example.com",
- false,
- NewUserParams {
- github_login: "user2".into(),
- github_user_id: 6,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let zed_id = db.create_root_channel("zed", user_1).await.unwrap();
-
- db.rename_channel(zed_id, user_1, "#zed-archive")
- .await
- .unwrap();
-
- let channel = db.get_channel(zed_id, user_1).await.unwrap();
- assert_eq!(channel.name, "zed-archive");
-
- let non_permissioned_rename = db.rename_channel(zed_id, user_2, "hacked-lol").await;
- assert!(non_permissioned_rename.is_err());
-
- let bad_name_rename = db.rename_channel(zed_id, user_1, "#").await;
- assert!(bad_name_rename.is_err())
-}
-
-test_both_dbs!(
- test_db_channel_moving,
- test_channels_moving_postgres,
- test_channels_moving_sqlite
-);
-
-async fn test_db_channel_moving(db: &Arc<Database>) {
- let a_id = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "user1".into(),
- github_user_id: 5,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let zed_id = db.create_root_channel("zed", a_id).await.unwrap();
-
- let crdb_id = db.create_sub_channel("crdb", zed_id, a_id).await.unwrap();
-
- let gpui2_id = db.create_sub_channel("gpui2", zed_id, a_id).await.unwrap();
-
- let livestreaming_id = db
- .create_sub_channel("livestreaming", crdb_id, a_id)
- .await
- .unwrap();
-
- let livestreaming_dag_id = db
- .create_sub_channel("livestreaming_dag", livestreaming_id, a_id)
- .await
- .unwrap();
-
- // ========================================================================
- // sanity check
- // Initial DAG:
- // /- gpui2
- // zed -- crdb - livestreaming - livestreaming_dag
- let result = db.get_channels_for_user(a_id).await.unwrap();
- assert_channel_tree(
- result.channels,
- &[
- (zed_id, &[]),
- (crdb_id, &[zed_id]),
- (livestreaming_id, &[zed_id, crdb_id]),
- (livestreaming_dag_id, &[zed_id, crdb_id, livestreaming_id]),
- (gpui2_id, &[zed_id]),
- ],
- );
-}
-
-test_both_dbs!(
- test_db_channel_moving_bugs,
- test_db_channel_moving_bugs_postgres,
- test_db_channel_moving_bugs_sqlite
-);
-
-async fn test_db_channel_moving_bugs(db: &Arc<Database>) {
- let user_id = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "user1".into(),
- github_user_id: 5,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let zed_id = db.create_root_channel("zed", user_id).await.unwrap();
-
- let projects_id = db
- .create_sub_channel("projects", zed_id, user_id)
- .await
- .unwrap();
-
- let livestreaming_id = db
- .create_sub_channel("livestreaming", projects_id, user_id)
- .await
- .unwrap();
-
- // Move to same parent should be a no-op
- assert!(db
- .move_channel(projects_id, Some(zed_id), user_id)
- .await
- .unwrap()
- .is_none());
-
- let result = db.get_channels_for_user(user_id).await.unwrap();
- assert_channel_tree(
- result.channels,
- &[
- (zed_id, &[]),
- (projects_id, &[zed_id]),
- (livestreaming_id, &[zed_id, projects_id]),
- ],
- );
-
- // Move the project channel to the root
- db.move_channel(projects_id, None, user_id).await.unwrap();
- let result = db.get_channels_for_user(user_id).await.unwrap();
- assert_channel_tree(
- result.channels,
- &[
- (zed_id, &[]),
- (projects_id, &[]),
- (livestreaming_id, &[projects_id]),
- ],
- );
-
- // Can't move a channel into its ancestor
- db.move_channel(projects_id, Some(livestreaming_id), user_id)
- .await
- .unwrap_err();
- let result = db.get_channels_for_user(user_id).await.unwrap();
- assert_channel_tree(
- result.channels,
- &[
- (zed_id, &[]),
- (projects_id, &[]),
- (livestreaming_id, &[projects_id]),
- ],
- );
-}
-
-test_both_dbs!(
- test_user_is_channel_participant,
- test_user_is_channel_participant_postgres,
- test_user_is_channel_participant_sqlite
-);
-
-async fn test_user_is_channel_participant(db: &Arc<Database>) {
- let admin = new_test_user(db, "admin@example.com").await;
- let member = new_test_user(db, "member@example.com").await;
- let guest = new_test_user(db, "guest@example.com").await;
-
- let zed_channel = db.create_root_channel("zed", admin).await.unwrap();
- let active_channel_id = db
- .create_sub_channel("active", zed_channel, admin)
- .await
- .unwrap();
- let vim_channel_id = db
- .create_sub_channel("vim", active_channel_id, admin)
- .await
- .unwrap();
-
- db.set_channel_visibility(vim_channel_id, crate::db::ChannelVisibility::Public, admin)
- .await
- .unwrap();
- db.invite_channel_member(active_channel_id, member, admin, ChannelRole::Member)
- .await
- .unwrap();
- db.invite_channel_member(vim_channel_id, guest, admin, ChannelRole::Guest)
- .await
- .unwrap();
-
- db.respond_to_channel_invite(active_channel_id, member, true)
- .await
- .unwrap();
-
- db.transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(vim_channel_id, &*tx).await?,
- admin,
- &*tx,
- )
- .await
- })
- .await
- .unwrap();
- db.transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(vim_channel_id, &*tx).await?,
- member,
- &*tx,
- )
- .await
- })
- .await
- .unwrap();
-
- let mut members = db
- .get_channel_participant_details(vim_channel_id, admin)
- .await
- .unwrap();
-
- members.sort_by_key(|member| member.user_id);
-
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: admin.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- proto::ChannelMember {
- user_id: member.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Member.into(),
- },
- proto::ChannelMember {
- user_id: guest.to_proto(),
- kind: proto::channel_member::Kind::Invitee.into(),
- role: proto::ChannelRole::Guest.into(),
- },
- ]
- );
-
- db.respond_to_channel_invite(vim_channel_id, guest, true)
- .await
- .unwrap();
-
- db.transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(vim_channel_id, &*tx).await?,
- guest,
- &*tx,
- )
- .await
- })
- .await
- .unwrap();
-
- let channels = db.get_channels_for_user(guest).await.unwrap().channels;
- assert_channel_tree(channels, &[(vim_channel_id, &[])]);
- let channels = db.get_channels_for_user(member).await.unwrap().channels;
- assert_channel_tree(
- channels,
- &[
- (active_channel_id, &[]),
- (vim_channel_id, &[active_channel_id]),
- ],
- );
-
- db.set_channel_member_role(vim_channel_id, admin, guest, ChannelRole::Banned)
- .await
- .unwrap();
- assert!(db
- .transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(vim_channel_id, &*tx).await.unwrap(),
- guest,
- &*tx,
- )
- .await
- })
- .await
- .is_err());
-
- let mut members = db
- .get_channel_participant_details(vim_channel_id, admin)
- .await
- .unwrap();
-
- members.sort_by_key(|member| member.user_id);
-
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: admin.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- proto::ChannelMember {
- user_id: member.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Member.into(),
- },
- proto::ChannelMember {
- user_id: guest.to_proto(),
- kind: proto::channel_member::Kind::Member.into(),
- role: proto::ChannelRole::Banned.into(),
- },
- ]
- );
-
- db.remove_channel_member(vim_channel_id, guest, admin)
- .await
- .unwrap();
-
- db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin)
- .await
- .unwrap();
-
- db.invite_channel_member(zed_channel, guest, admin, ChannelRole::Guest)
- .await
- .unwrap();
-
- // currently people invited to parent channels are not shown here
- let mut members = db
- .get_channel_participant_details(vim_channel_id, admin)
- .await
- .unwrap();
-
- members.sort_by_key(|member| member.user_id);
-
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: admin.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- proto::ChannelMember {
- user_id: member.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Member.into(),
- },
- ]
- );
-
- db.respond_to_channel_invite(zed_channel, guest, true)
- .await
- .unwrap();
-
- db.transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(zed_channel, &*tx).await.unwrap(),
- guest,
- &*tx,
- )
- .await
- })
- .await
- .unwrap();
- assert!(db
- .transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(active_channel_id, &*tx)
- .await
- .unwrap(),
- guest,
- &*tx,
- )
- .await
- })
- .await
- .is_err(),);
-
- db.transaction(|tx| async move {
- db.check_user_is_channel_participant(
- &db.get_channel_internal(vim_channel_id, &*tx).await.unwrap(),
- guest,
- &*tx,
- )
- .await
- })
- .await
- .unwrap();
-
- let mut members = db
- .get_channel_participant_details(vim_channel_id, admin)
- .await
- .unwrap();
-
- members.sort_by_key(|member| member.user_id);
-
- assert_eq!(
- members,
- &[
- proto::ChannelMember {
- user_id: admin.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Admin.into(),
- },
- proto::ChannelMember {
- user_id: member.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Member.into(),
- },
- proto::ChannelMember {
- user_id: guest.to_proto(),
- kind: proto::channel_member::Kind::AncestorMember.into(),
- role: proto::ChannelRole::Guest.into(),
- },
- ]
- );
-
- let channels = db.get_channels_for_user(guest).await.unwrap().channels;
- assert_channel_tree(
- channels,
- &[(zed_channel, &[]), (vim_channel_id, &[zed_channel])],
- )
-}
-
-test_both_dbs!(
- test_user_joins_correct_channel,
- test_user_joins_correct_channel_postgres,
- test_user_joins_correct_channel_sqlite
-);
-
-async fn test_user_joins_correct_channel(db: &Arc<Database>) {
- let admin = new_test_user(db, "admin@example.com").await;
-
- let zed_channel = db.create_root_channel("zed", admin).await.unwrap();
-
- let active_channel = db
- .create_sub_channel("active", zed_channel, admin)
- .await
- .unwrap();
-
- let vim_channel = db
- .create_sub_channel("vim", active_channel, admin)
- .await
- .unwrap();
-
- let vim2_channel = db
- .create_sub_channel("vim2", vim_channel, admin)
- .await
- .unwrap();
-
- db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin)
- .await
- .unwrap();
-
- db.set_channel_visibility(vim_channel, crate::db::ChannelVisibility::Public, admin)
- .await
- .unwrap();
-
- db.set_channel_visibility(vim2_channel, crate::db::ChannelVisibility::Public, admin)
- .await
- .unwrap();
-
- let most_public = db
- .transaction(|tx| async move {
- Ok(db
- .public_ancestors_including_self(
- &db.get_channel_internal(vim_channel, &*tx).await.unwrap(),
- &tx,
- )
- .await?
- .first()
- .cloned())
- })
- .await
- .unwrap()
- .unwrap()
- .id;
-
- assert_eq!(most_public, zed_channel)
-}
-
-test_both_dbs!(
- test_guest_access,
- test_guest_access_postgres,
- test_guest_access_sqlite
-);
-
-async fn test_guest_access(db: &Arc<Database>) {
- let server = db.create_server("test").await.unwrap();
-
- let admin = new_test_user(db, "admin@example.com").await;
- let guest = new_test_user(db, "guest@example.com").await;
- let guest_connection = new_test_connection(server);
-
- let zed_channel = db.create_root_channel("zed", admin).await.unwrap();
- db.set_channel_visibility(zed_channel, crate::db::ChannelVisibility::Public, admin)
- .await
- .unwrap();
-
- assert!(db
- .join_channel_chat(zed_channel, guest_connection, guest)
- .await
- .is_err());
-
- db.join_channel(zed_channel, guest, guest_connection, TEST_RELEASE_CHANNEL)
- .await
- .unwrap();
-
- assert!(db
- .join_channel_chat(zed_channel, guest_connection, guest)
- .await
- .is_ok())
-}
-
-#[track_caller]
-fn assert_channel_tree(actual: Vec<Channel>, expected: &[(ChannelId, &[ChannelId])]) {
- let actual = actual
- .iter()
- .map(|channel| (channel.id, channel.parent_path.as_slice()))
- .collect::<Vec<_>>();
- pretty_assertions::assert_eq!(
- actual,
- expected.to_vec(),
- "wrong channel ids and parent paths"
- );
-}
@@ -1,633 +0,0 @@
-use super::*;
-use crate::test_both_dbs;
-use gpui::TestAppContext;
-use pretty_assertions::{assert_eq, assert_ne};
-use std::sync::Arc;
-use tests::TestDb;
-
-test_both_dbs!(
- test_get_users,
- test_get_users_by_ids_postgres,
- test_get_users_by_ids_sqlite
-);
-
-async fn test_get_users(db: &Arc<Database>) {
- let mut user_ids = Vec::new();
- let mut user_metric_ids = Vec::new();
- for i in 1..=4 {
- let user = db
- .create_user(
- &format!("user{i}@example.com"),
- false,
- NewUserParams {
- github_login: format!("user{i}"),
- github_user_id: i,
- },
- )
- .await
- .unwrap();
- user_ids.push(user.user_id);
- user_metric_ids.push(user.metrics_id);
- }
-
- assert_eq!(
- db.get_users_by_ids(user_ids.clone()).await.unwrap(),
- vec![
- User {
- id: user_ids[0],
- github_login: "user1".to_string(),
- github_user_id: Some(1),
- email_address: Some("user1@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[0].parse().unwrap(),
- ..Default::default()
- },
- User {
- id: user_ids[1],
- github_login: "user2".to_string(),
- github_user_id: Some(2),
- email_address: Some("user2@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[1].parse().unwrap(),
- ..Default::default()
- },
- User {
- id: user_ids[2],
- github_login: "user3".to_string(),
- github_user_id: Some(3),
- email_address: Some("user3@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[2].parse().unwrap(),
- ..Default::default()
- },
- User {
- id: user_ids[3],
- github_login: "user4".to_string(),
- github_user_id: Some(4),
- email_address: Some("user4@example.com".to_string()),
- admin: false,
- metrics_id: user_metric_ids[3].parse().unwrap(),
- ..Default::default()
- }
- ]
- );
-}
-
-test_both_dbs!(
- test_get_or_create_user_by_github_account,
- test_get_or_create_user_by_github_account_postgres,
- test_get_or_create_user_by_github_account_sqlite
-);
-
-async fn test_get_or_create_user_by_github_account(db: &Arc<Database>) {
- let user_id1 = db
- .create_user(
- "user1@example.com",
- false,
- NewUserParams {
- github_login: "login1".into(),
- github_user_id: 101,
- },
- )
- .await
- .unwrap()
- .user_id;
- let user_id2 = db
- .create_user(
- "user2@example.com",
- false,
- NewUserParams {
- github_login: "login2".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user = db
- .get_or_create_user_by_github_account("login1", None, None)
- .await
- .unwrap()
- .unwrap();
- assert_eq!(user.id, user_id1);
- assert_eq!(&user.github_login, "login1");
- assert_eq!(user.github_user_id, Some(101));
-
- assert!(db
- .get_or_create_user_by_github_account("non-existent-login", None, None)
- .await
- .unwrap()
- .is_none());
-
- let user = db
- .get_or_create_user_by_github_account("the-new-login2", Some(102), None)
- .await
- .unwrap()
- .unwrap();
- assert_eq!(user.id, user_id2);
- assert_eq!(&user.github_login, "the-new-login2");
- assert_eq!(user.github_user_id, Some(102));
-
- let user = db
- .get_or_create_user_by_github_account("login3", Some(103), Some("user3@example.com"))
- .await
- .unwrap()
- .unwrap();
- assert_eq!(&user.github_login, "login3");
- assert_eq!(user.github_user_id, Some(103));
- assert_eq!(user.email_address, Some("user3@example.com".into()));
-}
-
-test_both_dbs!(
- test_create_access_tokens,
- test_create_access_tokens_postgres,
- test_create_access_tokens_sqlite
-);
-
-async fn test_create_access_tokens(db: &Arc<Database>) {
- let user = db
- .create_user(
- "u1@example.com",
- false,
- NewUserParams {
- github_login: "u1".into(),
- github_user_id: 1,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let token_1 = db.create_access_token(user, "h1", 2).await.unwrap();
- let token_2 = db.create_access_token(user, "h2", 2).await.unwrap();
- assert_eq!(
- db.get_access_token(token_1).await.unwrap(),
- access_token::Model {
- id: token_1,
- user_id: user,
- hash: "h1".into(),
- }
- );
- assert_eq!(
- db.get_access_token(token_2).await.unwrap(),
- access_token::Model {
- id: token_2,
- user_id: user,
- hash: "h2".into()
- }
- );
-
- let token_3 = db.create_access_token(user, "h3", 2).await.unwrap();
- assert_eq!(
- db.get_access_token(token_3).await.unwrap(),
- access_token::Model {
- id: token_3,
- user_id: user,
- hash: "h3".into()
- }
- );
- assert_eq!(
- db.get_access_token(token_2).await.unwrap(),
- access_token::Model {
- id: token_2,
- user_id: user,
- hash: "h2".into()
- }
- );
- assert!(db.get_access_token(token_1).await.is_err());
-
- let token_4 = db.create_access_token(user, "h4", 2).await.unwrap();
- assert_eq!(
- db.get_access_token(token_4).await.unwrap(),
- access_token::Model {
- id: token_4,
- user_id: user,
- hash: "h4".into()
- }
- );
- assert_eq!(
- db.get_access_token(token_3).await.unwrap(),
- access_token::Model {
- id: token_3,
- user_id: user,
- hash: "h3".into()
- }
- );
- assert!(db.get_access_token(token_2).await.is_err());
- assert!(db.get_access_token(token_1).await.is_err());
-}
-
-test_both_dbs!(
- test_add_contacts,
- test_add_contacts_postgres,
- test_add_contacts_sqlite
-);
-
-async fn test_add_contacts(db: &Arc<Database>) {
- let mut user_ids = Vec::new();
- for i in 0..3 {
- user_ids.push(
- db.create_user(
- &format!("user{i}@example.com"),
- false,
- NewUserParams {
- github_login: format!("user{i}"),
- github_user_id: i,
- },
- )
- .await
- .unwrap()
- .user_id,
- );
- }
-
- let user_1 = user_ids[0];
- let user_2 = user_ids[1];
- let user_3 = user_ids[2];
-
- // User starts with no contacts
- assert_eq!(db.get_contacts(user_1).await.unwrap(), &[]);
-
- // User requests a contact. Both users see the pending request.
- db.send_contact_request(user_1, user_2).await.unwrap();
- assert!(!db.has_contact(user_1, user_2).await.unwrap());
- assert!(!db.has_contact(user_2, user_1).await.unwrap());
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Outgoing { user_id: user_2 }],
- );
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Incoming { user_id: user_1 }]
- );
-
- // User 2 dismisses the contact request notification without accepting or rejecting.
- // We shouldn't notify them again.
- db.dismiss_contact_notification(user_1, user_2)
- .await
- .unwrap_err();
- db.dismiss_contact_notification(user_2, user_1)
- .await
- .unwrap();
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Incoming { user_id: user_1 }]
- );
-
- // User can't accept their own contact request
- db.respond_to_contact_request(user_1, user_2, true)
- .await
- .unwrap_err();
-
- // User accepts a contact request. Both users see the contact.
- db.respond_to_contact_request(user_2, user_1, true)
- .await
- .unwrap();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_2,
- busy: false,
- }],
- );
- assert!(db.has_contact(user_1, user_2).await.unwrap());
- assert!(db.has_contact(user_2, user_1).await.unwrap());
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- busy: false,
- }]
- );
-
- // Users cannot re-request existing contacts.
- db.send_contact_request(user_1, user_2).await.unwrap_err();
- db.send_contact_request(user_2, user_1).await.unwrap_err();
-
- // Users can't dismiss notifications of them accepting other users' requests.
- db.dismiss_contact_notification(user_2, user_1)
- .await
- .unwrap_err();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_2,
- busy: false,
- }]
- );
-
- // Users can dismiss notifications of other users accepting their requests.
- db.dismiss_contact_notification(user_1, user_2)
- .await
- .unwrap();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_2,
- busy: false,
- }]
- );
-
- // Users send each other concurrent contact requests and
- // see that they are immediately accepted.
- db.send_contact_request(user_1, user_3).await.unwrap();
- db.send_contact_request(user_3, user_1).await.unwrap();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[
- Contact::Accepted {
- user_id: user_2,
- busy: false,
- },
- Contact::Accepted {
- user_id: user_3,
- busy: false,
- }
- ]
- );
- assert_eq!(
- db.get_contacts(user_3).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- busy: false,
- }],
- );
-
- // User declines a contact request. Both users see that it is gone.
- db.send_contact_request(user_2, user_3).await.unwrap();
- db.respond_to_contact_request(user_3, user_2, false)
- .await
- .unwrap();
- assert!(!db.has_contact(user_2, user_3).await.unwrap());
- assert!(!db.has_contact(user_3, user_2).await.unwrap());
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- busy: false,
- }]
- );
- assert_eq!(
- db.get_contacts(user_3).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- busy: false,
- }],
- );
-}
-
-test_both_dbs!(
- test_metrics_id,
- test_metrics_id_postgres,
- test_metrics_id_sqlite
-);
-
-async fn test_metrics_id(db: &Arc<Database>) {
- let NewUserResult {
- user_id: user1,
- metrics_id: metrics_id1,
- ..
- } = db
- .create_user(
- "person1@example.com",
- false,
- NewUserParams {
- github_login: "person1".into(),
- github_user_id: 101,
- },
- )
- .await
- .unwrap();
- let NewUserResult {
- user_id: user2,
- metrics_id: metrics_id2,
- ..
- } = db
- .create_user(
- "person2@example.com",
- false,
- NewUserParams {
- github_login: "person2".into(),
- github_user_id: 102,
- },
- )
- .await
- .unwrap();
-
- assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1);
- assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2);
- assert_eq!(metrics_id1.len(), 36);
- assert_eq!(metrics_id2.len(), 36);
- assert_ne!(metrics_id1, metrics_id2);
-}
-
-test_both_dbs!(
- test_project_count,
- test_project_count_postgres,
- test_project_count_sqlite
-);
-
-async fn test_project_count(db: &Arc<Database>) {
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
-
- let user1 = db
- .create_user(
- &format!("admin@example.com"),
- true,
- NewUserParams {
- github_login: "admin".into(),
- github_user_id: 0,
- },
- )
- .await
- .unwrap();
- let user2 = db
- .create_user(
- &format!("user@example.com"),
- false,
- NewUserParams {
- github_login: "user".into(),
- github_user_id: 1,
- },
- )
- .await
- .unwrap();
-
- let room_id = RoomId::from_proto(
- db.create_room(user1.user_id, ConnectionId { owner_id, id: 0 }, "", "dev")
- .await
- .unwrap()
- .id,
- );
- db.call(
- room_id,
- user1.user_id,
- ConnectionId { owner_id, id: 0 },
- user2.user_id,
- None,
- )
- .await
- .unwrap();
- db.join_room(
- room_id,
- user2.user_id,
- ConnectionId { owner_id, id: 1 },
- "dev",
- )
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
-
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 1);
-
- db.share_project(room_id, ConnectionId { owner_id, id: 1 }, &[])
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
-
- // Projects shared by admins aren't counted.
- db.share_project(room_id, ConnectionId { owner_id, id: 0 }, &[])
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 2);
-
- db.leave_room(ConnectionId { owner_id, id: 1 })
- .await
- .unwrap();
- assert_eq!(db.project_count_excluding_admins().await.unwrap(), 0);
-}
-
-#[test]
-fn test_fuzzy_like_string() {
- assert_eq!(Database::fuzzy_like_string("abcd"), "%a%b%c%d%");
- assert_eq!(Database::fuzzy_like_string("x y"), "%x%y%");
- assert_eq!(Database::fuzzy_like_string(" z "), "%z%");
-}
-
-#[gpui::test]
-async fn test_fuzzy_search_users(cx: &mut TestAppContext) {
- let test_db = TestDb::postgres(cx.executor());
- let db = test_db.db();
- for (i, github_login) in [
- "California",
- "colorado",
- "oregon",
- "washington",
- "florida",
- "delaware",
- "rhode-island",
- ]
- .into_iter()
- .enumerate()
- {
- db.create_user(
- &format!("{github_login}@example.com"),
- false,
- NewUserParams {
- github_login: github_login.into(),
- github_user_id: i as i32,
- },
- )
- .await
- .unwrap();
- }
-
- assert_eq!(
- fuzzy_search_user_names(db, "clr").await,
- &["colorado", "California"]
- );
- assert_eq!(
- fuzzy_search_user_names(db, "ro").await,
- &["rhode-island", "colorado", "oregon"],
- );
-
- async fn fuzzy_search_user_names(db: &Database, query: &str) -> Vec<String> {
- db.fuzzy_search_users(query, 10)
- .await
- .unwrap()
- .into_iter()
- .map(|user| user.github_login)
- .collect::<Vec<_>>()
- }
-}
-
-test_both_dbs!(
- test_non_matching_release_channels,
- test_non_matching_release_channels_postgres,
- test_non_matching_release_channels_sqlite
-);
-
-async fn test_non_matching_release_channels(db: &Arc<Database>) {
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
-
- let user1 = db
- .create_user(
- &format!("admin@example.com"),
- true,
- NewUserParams {
- github_login: "admin".into(),
- github_user_id: 0,
- },
- )
- .await
- .unwrap();
- let user2 = db
- .create_user(
- &format!("user@example.com"),
- false,
- NewUserParams {
- github_login: "user".into(),
- github_user_id: 1,
- },
- )
- .await
- .unwrap();
-
- let room = db
- .create_room(
- user1.user_id,
- ConnectionId { owner_id, id: 0 },
- "",
- "stable",
- )
- .await
- .unwrap();
-
- db.call(
- RoomId::from_proto(room.id),
- user1.user_id,
- ConnectionId { owner_id, id: 0 },
- user2.user_id,
- None,
- )
- .await
- .unwrap();
-
- // User attempts to join from preview
- let result = db
- .join_room(
- RoomId::from_proto(room.id),
- user2.user_id,
- ConnectionId { owner_id, id: 1 },
- "preview",
- )
- .await;
-
- assert!(result.is_err());
-
- // User switches to stable
- let result = db
- .join_room(
- RoomId::from_proto(room.id),
- user2.user_id,
- ConnectionId { owner_id, id: 1 },
- "stable",
- )
- .await;
-
- assert!(result.is_ok())
-}
@@ -1,58 +0,0 @@
-use crate::{
- db::{Database, NewUserParams},
- test_both_dbs,
-};
-use std::sync::Arc;
-
-test_both_dbs!(
- test_get_user_flags,
- test_get_user_flags_postgres,
- test_get_user_flags_sqlite
-);
-
-async fn test_get_user_flags(db: &Arc<Database>) {
- let user_1 = db
- .create_user(
- &format!("user1@example.com"),
- false,
- NewUserParams {
- github_login: format!("user1"),
- github_user_id: 1,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- let user_2 = db
- .create_user(
- &format!("user2@example.com"),
- false,
- NewUserParams {
- github_login: format!("user2"),
- github_user_id: 2,
- },
- )
- .await
- .unwrap()
- .user_id;
-
- const CHANNELS_ALPHA: &'static str = "channels-alpha";
- const NEW_SEARCH: &'static str = "new-search";
-
- let channels_flag = db.create_user_flag(CHANNELS_ALPHA).await.unwrap();
- let search_flag = db.create_user_flag(NEW_SEARCH).await.unwrap();
-
- db.add_user_flag(user_1, channels_flag).await.unwrap();
- db.add_user_flag(user_1, search_flag).await.unwrap();
-
- db.add_user_flag(user_2, channels_flag).await.unwrap();
-
- let mut user_1_flags = db.get_user_flags(user_1).await.unwrap();
- user_1_flags.sort();
- assert_eq!(user_1_flags, &[CHANNELS_ALPHA, NEW_SEARCH]);
-
- let mut user_2_flags = db.get_user_flags(user_2).await.unwrap();
- user_2_flags.sort();
- assert_eq!(user_2_flags, &[CHANNELS_ALPHA]);
-}
@@ -1,454 +0,0 @@
-use super::new_test_user;
-use crate::{
- db::{ChannelRole, Database, MessageId},
- test_both_dbs,
-};
-use channel::mentions_to_proto;
-use std::sync::Arc;
-use time::OffsetDateTime;
-
-test_both_dbs!(
- test_channel_message_retrieval,
- test_channel_message_retrieval_postgres,
- test_channel_message_retrieval_sqlite
-);
-
-async fn test_channel_message_retrieval(db: &Arc<Database>) {
- let user = new_test_user(db, "user@example.com").await;
- let result = db.create_channel("channel", None, user).await.unwrap();
-
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
- db.join_channel_chat(
- result.channel.id,
- rpc::ConnectionId { owner_id, id: 0 },
- user,
- )
- .await
- .unwrap();
-
- let mut all_messages = Vec::new();
- for i in 0..10 {
- all_messages.push(
- db.create_channel_message(
- result.channel.id,
- user,
- &i.to_string(),
- &[],
- OffsetDateTime::now_utc(),
- i,
- )
- .await
- .unwrap()
- .message_id
- .to_proto(),
- );
- }
-
- let messages = db
- .get_channel_messages(result.channel.id, user, 3, None)
- .await
- .unwrap()
- .into_iter()
- .map(|message| message.id)
- .collect::<Vec<_>>();
- assert_eq!(messages, &all_messages[7..10]);
-
- let messages = db
- .get_channel_messages(
- result.channel.id,
- user,
- 4,
- Some(MessageId::from_proto(all_messages[6])),
- )
- .await
- .unwrap()
- .into_iter()
- .map(|message| message.id)
- .collect::<Vec<_>>();
- assert_eq!(messages, &all_messages[2..6]);
-}
-
-test_both_dbs!(
- test_channel_message_nonces,
- test_channel_message_nonces_postgres,
- test_channel_message_nonces_sqlite
-);
-
-async fn test_channel_message_nonces(db: &Arc<Database>) {
- let user_a = new_test_user(db, "user_a@example.com").await;
- let user_b = new_test_user(db, "user_b@example.com").await;
- let user_c = new_test_user(db, "user_c@example.com").await;
- let channel = db.create_root_channel("channel", user_a).await.unwrap();
- db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member)
- .await
- .unwrap();
- db.invite_channel_member(channel, user_c, user_a, ChannelRole::Member)
- .await
- .unwrap();
- db.respond_to_channel_invite(channel, user_b, true)
- .await
- .unwrap();
- db.respond_to_channel_invite(channel, user_c, true)
- .await
- .unwrap();
-
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
- db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 0 }, user_a)
- .await
- .unwrap();
- db.join_channel_chat(channel, rpc::ConnectionId { owner_id, id: 1 }, user_b)
- .await
- .unwrap();
-
- // As user A, create messages that re-use the same nonces. The requests
- // succeed, but return the same ids.
- let id1 = db
- .create_channel_message(
- channel,
- user_a,
- "hi @user_b",
- &mentions_to_proto(&[(3..10, user_b.to_proto())]),
- OffsetDateTime::now_utc(),
- 100,
- )
- .await
- .unwrap()
- .message_id;
- let id2 = db
- .create_channel_message(
- channel,
- user_a,
- "hello, fellow users",
- &mentions_to_proto(&[]),
- OffsetDateTime::now_utc(),
- 200,
- )
- .await
- .unwrap()
- .message_id;
- let id3 = db
- .create_channel_message(
- channel,
- user_a,
- "bye @user_c (same nonce as first message)",
- &mentions_to_proto(&[(4..11, user_c.to_proto())]),
- OffsetDateTime::now_utc(),
- 100,
- )
- .await
- .unwrap()
- .message_id;
- let id4 = db
- .create_channel_message(
- channel,
- user_a,
- "omg (same nonce as second message)",
- &mentions_to_proto(&[]),
- OffsetDateTime::now_utc(),
- 200,
- )
- .await
- .unwrap()
- .message_id;
-
- // As a different user, reuse one of the same nonces. This request succeeds
- // and returns a different id.
- let id5 = db
- .create_channel_message(
- channel,
- user_b,
- "omg @user_a (same nonce as user_a's first message)",
- &mentions_to_proto(&[(4..11, user_a.to_proto())]),
- OffsetDateTime::now_utc(),
- 100,
- )
- .await
- .unwrap()
- .message_id;
-
- assert_ne!(id1, id2);
- assert_eq!(id1, id3);
- assert_eq!(id2, id4);
- assert_ne!(id5, id1);
-
- let messages = db
- .get_channel_messages(channel, user_a, 5, None)
- .await
- .unwrap()
- .into_iter()
- .map(|m| (m.id, m.body, m.mentions))
- .collect::<Vec<_>>();
- assert_eq!(
- messages,
- &[
- (
- id1.to_proto(),
- "hi @user_b".into(),
- mentions_to_proto(&[(3..10, user_b.to_proto())]),
- ),
- (
- id2.to_proto(),
- "hello, fellow users".into(),
- mentions_to_proto(&[])
- ),
- (
- id5.to_proto(),
- "omg @user_a (same nonce as user_a's first message)".into(),
- mentions_to_proto(&[(4..11, user_a.to_proto())]),
- ),
- ]
- );
-}
-
-test_both_dbs!(
- test_unseen_channel_messages,
- test_unseen_channel_messages_postgres,
- test_unseen_channel_messages_sqlite
-);
-
-async fn test_unseen_channel_messages(db: &Arc<Database>) {
- let user = new_test_user(db, "user_a@example.com").await;
- let observer = new_test_user(db, "user_b@example.com").await;
-
- let channel_1 = db.create_root_channel("channel", user).await.unwrap();
- let channel_2 = db.create_root_channel("channel-2", user).await.unwrap();
-
- db.invite_channel_member(channel_1, observer, user, ChannelRole::Member)
- .await
- .unwrap();
- db.invite_channel_member(channel_2, observer, user, ChannelRole::Member)
- .await
- .unwrap();
-
- db.respond_to_channel_invite(channel_1, observer, true)
- .await
- .unwrap();
- db.respond_to_channel_invite(channel_2, observer, true)
- .await
- .unwrap();
-
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
- let user_connection_id = rpc::ConnectionId { owner_id, id: 0 };
-
- db.join_channel_chat(channel_1, user_connection_id, user)
- .await
- .unwrap();
-
- let _ = db
- .create_channel_message(channel_1, user, "1_1", &[], OffsetDateTime::now_utc(), 1)
- .await
- .unwrap();
-
- let second_message = db
- .create_channel_message(channel_1, user, "1_2", &[], OffsetDateTime::now_utc(), 2)
- .await
- .unwrap()
- .message_id;
-
- let third_message = db
- .create_channel_message(channel_1, user, "1_3", &[], OffsetDateTime::now_utc(), 3)
- .await
- .unwrap()
- .message_id;
-
- db.join_channel_chat(channel_2, user_connection_id, user)
- .await
- .unwrap();
-
- let fourth_message = db
- .create_channel_message(channel_2, user, "2_1", &[], OffsetDateTime::now_utc(), 4)
- .await
- .unwrap()
- .message_id;
-
- // Check that observer has new messages
- let unseen_messages = db
- .transaction(|tx| async move {
- db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
- .await
- })
- .await
- .unwrap();
-
- assert_eq!(
- unseen_messages,
- [
- rpc::proto::UnseenChannelMessage {
- channel_id: channel_1.to_proto(),
- message_id: third_message.to_proto(),
- },
- rpc::proto::UnseenChannelMessage {
- channel_id: channel_2.to_proto(),
- message_id: fourth_message.to_proto(),
- },
- ]
- );
-
- // Observe the second message
- db.observe_channel_message(channel_1, observer, second_message)
- .await
- .unwrap();
-
- // Make sure the observer still has a new message
- let unseen_messages = db
- .transaction(|tx| async move {
- db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
- .await
- })
- .await
- .unwrap();
- assert_eq!(
- unseen_messages,
- [
- rpc::proto::UnseenChannelMessage {
- channel_id: channel_1.to_proto(),
- message_id: third_message.to_proto(),
- },
- rpc::proto::UnseenChannelMessage {
- channel_id: channel_2.to_proto(),
- message_id: fourth_message.to_proto(),
- },
- ]
- );
-
- // Observe the third message,
- db.observe_channel_message(channel_1, observer, third_message)
- .await
- .unwrap();
-
- // Make sure the observer does not have a new method
- let unseen_messages = db
- .transaction(|tx| async move {
- db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
- .await
- })
- .await
- .unwrap();
-
- assert_eq!(
- unseen_messages,
- [rpc::proto::UnseenChannelMessage {
- channel_id: channel_2.to_proto(),
- message_id: fourth_message.to_proto(),
- }]
- );
-
- // Observe the second message again, should not regress our observed state
- db.observe_channel_message(channel_1, observer, second_message)
- .await
- .unwrap();
-
- // Make sure the observer does not have a new message
- let unseen_messages = db
- .transaction(|tx| async move {
- db.unseen_channel_messages(observer, &[channel_1, channel_2], &*tx)
- .await
- })
- .await
- .unwrap();
- assert_eq!(
- unseen_messages,
- [rpc::proto::UnseenChannelMessage {
- channel_id: channel_2.to_proto(),
- message_id: fourth_message.to_proto(),
- }]
- );
-}
-
-test_both_dbs!(
- test_channel_message_mentions,
- test_channel_message_mentions_postgres,
- test_channel_message_mentions_sqlite
-);
-
-async fn test_channel_message_mentions(db: &Arc<Database>) {
- let user_a = new_test_user(db, "user_a@example.com").await;
- let user_b = new_test_user(db, "user_b@example.com").await;
- let user_c = new_test_user(db, "user_c@example.com").await;
-
- let channel = db
- .create_channel("channel", None, user_a)
- .await
- .unwrap()
- .channel
- .id;
- db.invite_channel_member(channel, user_b, user_a, ChannelRole::Member)
- .await
- .unwrap();
- db.respond_to_channel_invite(channel, user_b, true)
- .await
- .unwrap();
-
- let owner_id = db.create_server("test").await.unwrap().0 as u32;
- let connection_id = rpc::ConnectionId { owner_id, id: 0 };
- db.join_channel_chat(channel, connection_id, user_a)
- .await
- .unwrap();
-
- db.create_channel_message(
- channel,
- user_a,
- "hi @user_b and @user_c",
- &mentions_to_proto(&[(3..10, user_b.to_proto()), (15..22, user_c.to_proto())]),
- OffsetDateTime::now_utc(),
- 1,
- )
- .await
- .unwrap();
- db.create_channel_message(
- channel,
- user_a,
- "bye @user_c",
- &mentions_to_proto(&[(4..11, user_c.to_proto())]),
- OffsetDateTime::now_utc(),
- 2,
- )
- .await
- .unwrap();
- db.create_channel_message(
- channel,
- user_a,
- "umm",
- &mentions_to_proto(&[]),
- OffsetDateTime::now_utc(),
- 3,
- )
- .await
- .unwrap();
- db.create_channel_message(
- channel,
- user_a,
- "@user_b, stop.",
- &mentions_to_proto(&[(0..7, user_b.to_proto())]),
- OffsetDateTime::now_utc(),
- 4,
- )
- .await
- .unwrap();
-
- let messages = db
- .get_channel_messages(channel, user_b, 5, None)
- .await
- .unwrap()
- .into_iter()
- .map(|m| (m.body, m.mentions))
- .collect::<Vec<_>>();
- assert_eq!(
- &messages,
- &[
- (
- "hi @user_b and @user_c".into(),
- mentions_to_proto(&[(3..10, user_b.to_proto()), (15..22, user_c.to_proto())]),
- ),
- (
- "bye @user_c".into(),
- mentions_to_proto(&[(4..11, user_c.to_proto())]),
- ),
- ("umm".into(), mentions_to_proto(&[]),),
- (
- "@user_b, stop.".into(),
- mentions_to_proto(&[(0..7, user_b.to_proto())]),
- ),
- ]
- );
-}
@@ -1,20 +0,0 @@
-use anyhow::anyhow;
-use std::fs;
-
-pub fn load_dotenv() -> anyhow::Result<()> {
- let env: toml::map::Map<String, toml::Value> = toml::de::from_str(
- &fs::read_to_string("./.env.toml").map_err(|_| anyhow!("no .env.toml file found"))?,
- )?;
-
- for (key, value) in env {
- let value = match value {
- toml::Value::String(value) => value,
- toml::Value::Integer(value) => value.to_string(),
- toml::Value::Float(value) => value.to_string(),
- _ => panic!("unsupported TOML value in .env.toml for key {}", key),
- };
- std::env::set_var(key, value);
- }
-
- Ok(())
-}
@@ -1,29 +0,0 @@
-// Allow tide Results to accept context like other Results do when
-// using anyhow.
-pub trait TideResultExt {
- fn context<C>(self, cx: C) -> Self
- where
- C: std::fmt::Display + Send + Sync + 'static;
-
- fn with_context<C, F>(self, f: F) -> Self
- where
- C: std::fmt::Display + Send + Sync + 'static,
- F: FnOnce() -> C;
-}
-
-impl<T> TideResultExt for tide::Result<T> {
- fn context<C>(self, cx: C) -> Self
- where
- C: std::fmt::Display + Send + Sync + 'static,
- {
- self.map_err(|e| tide::Error::new(e.status(), e.into_inner().context(cx)))
- }
-
- fn with_context<C, F>(self, f: F) -> Self
- where
- C: std::fmt::Display + Send + Sync + 'static,
- F: FnOnce() -> C,
- {
- self.map_err(|e| tide::Error::new(e.status(), e.into_inner().context(f())))
- }
-}
@@ -1,39 +0,0 @@
-use std::{future::Future, time::Duration};
-
-#[cfg(test)]
-use gpui::BackgroundExecutor;
-
-#[derive(Clone)]
-pub enum Executor {
- Production,
- #[cfg(test)]
- Deterministic(BackgroundExecutor),
-}
-
-impl Executor {
- pub fn spawn_detached<F>(&self, future: F)
- where
- F: 'static + Send + Future<Output = ()>,
- {
- match self {
- Executor::Production => {
- tokio::spawn(future);
- }
- #[cfg(test)]
- Executor::Deterministic(background) => {
- background.spawn(future).detach();
- }
- }
- }
-
- pub fn sleep(&self, duration: Duration) -> impl Future<Output = ()> {
- let this = self.clone();
- async move {
- match this {
- Executor::Production => tokio::time::sleep(duration).await,
- #[cfg(test)]
- Executor::Deterministic(background) => background.timer(duration).await,
- }
- }
- }
-}
@@ -1,147 +0,0 @@
-pub mod api;
-pub mod auth;
-pub mod db;
-pub mod env;
-pub mod executor;
-pub mod rpc;
-
-#[cfg(test)]
-mod tests;
-
-use axum::{http::StatusCode, response::IntoResponse};
-use db::Database;
-use executor::Executor;
-use serde::Deserialize;
-use std::{path::PathBuf, sync::Arc};
-
-pub type Result<T, E = Error> = std::result::Result<T, E>;
-
-pub enum Error {
- Http(StatusCode, String),
- Database(sea_orm::error::DbErr),
- Internal(anyhow::Error),
-}
-
-impl From<anyhow::Error> for Error {
- fn from(error: anyhow::Error) -> Self {
- Self::Internal(error)
- }
-}
-
-impl From<sea_orm::error::DbErr> for Error {
- fn from(error: sea_orm::error::DbErr) -> Self {
- Self::Database(error)
- }
-}
-
-impl From<axum::Error> for Error {
- fn from(error: axum::Error) -> Self {
- Self::Internal(error.into())
- }
-}
-
-impl From<hyper::Error> for Error {
- fn from(error: hyper::Error) -> Self {
- Self::Internal(error.into())
- }
-}
-
-impl From<serde_json::Error> for Error {
- fn from(error: serde_json::Error) -> Self {
- Self::Internal(error.into())
- }
-}
-
-impl IntoResponse for Error {
- fn into_response(self) -> axum::response::Response {
- match self {
- Error::Http(code, message) => (code, message).into_response(),
- Error::Database(error) => {
- (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
- }
- Error::Internal(error) => {
- (StatusCode::INTERNAL_SERVER_ERROR, format!("{}", &error)).into_response()
- }
- }
- }
-}
-
-impl std::fmt::Debug for Error {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- Error::Http(code, message) => (code, message).fmt(f),
- Error::Database(error) => error.fmt(f),
- Error::Internal(error) => error.fmt(f),
- }
- }
-}
-
-impl std::fmt::Display for Error {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- match self {
- Error::Http(code, message) => write!(f, "{code}: {message}"),
- Error::Database(error) => error.fmt(f),
- Error::Internal(error) => error.fmt(f),
- }
- }
-}
-
-impl std::error::Error for Error {}
-
-#[derive(Default, Deserialize)]
-pub struct Config {
- pub http_port: u16,
- pub database_url: String,
- pub database_max_connections: u32,
- pub api_token: String,
- pub invite_link_prefix: String,
- pub live_kit_server: Option<String>,
- pub live_kit_key: Option<String>,
- pub live_kit_secret: Option<String>,
- pub rust_log: Option<String>,
- pub log_json: Option<bool>,
- pub zed_environment: String,
-}
-
-#[derive(Default, Deserialize)]
-pub struct MigrateConfig {
- pub database_url: String,
- pub migrations_path: Option<PathBuf>,
-}
-
-pub struct AppState {
- pub db: Arc<Database>,
- pub live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
- pub config: Config,
-}
-
-impl AppState {
- pub async fn new(config: Config) -> Result<Arc<Self>> {
- let mut db_options = db::ConnectOptions::new(config.database_url.clone());
- db_options.max_connections(config.database_max_connections);
- let mut db = Database::new(db_options, Executor::Production).await?;
- db.initialize_notification_kinds().await?;
-
- let live_kit_client = if let Some(((server, key), secret)) = config
- .live_kit_server
- .as_ref()
- .zip(config.live_kit_key.as_ref())
- .zip(config.live_kit_secret.as_ref())
- {
- Some(Arc::new(live_kit_server::api::LiveKitClient::new(
- server.clone(),
- key.clone(),
- secret.clone(),
- )) as Arc<dyn live_kit_server::api::Client>)
- } else {
- None
- };
-
- let this = Self {
- db: Arc::new(db),
- live_kit_client,
- config,
- };
- Ok(Arc::new(this))
- }
-}
@@ -1,139 +0,0 @@
-use anyhow::anyhow;
-use axum::{routing::get, Extension, Router};
-use collab2::{db, env, executor::Executor, AppState, Config, MigrateConfig, Result};
-use db::Database;
-use std::{
- env::args,
- net::{SocketAddr, TcpListener},
- path::Path,
- sync::Arc,
-};
-use tokio::signal::unix::SignalKind;
-use tracing_log::LogTracer;
-use tracing_subscriber::{filter::EnvFilter, fmt::format::JsonFields, Layer};
-use util::ResultExt;
-
-const VERSION: &'static str = env!("CARGO_PKG_VERSION");
-
-#[tokio::main]
-async fn main() -> Result<()> {
- if let Err(error) = env::load_dotenv() {
- eprintln!(
- "error loading .env.toml (this is expected in production): {}",
- error
- );
- }
-
- match args().skip(1).next().as_deref() {
- Some("version") => {
- println!("collab v{VERSION}");
- }
- Some("migrate") => {
- let config = envy::from_env::<MigrateConfig>().expect("error loading config");
- let mut db_options = db::ConnectOptions::new(config.database_url.clone());
- db_options.max_connections(5);
- let db = Database::new(db_options, Executor::Production).await?;
-
- let migrations_path = config
- .migrations_path
- .as_deref()
- .unwrap_or_else(|| Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/migrations")));
-
- let migrations = db.migrate(&migrations_path, false).await?;
- for (migration, duration) in migrations {
- println!(
- "Ran {} {} {:?}",
- migration.version, migration.description, duration
- );
- }
-
- return Ok(());
- }
- Some("serve") => {
- let config = envy::from_env::<Config>().expect("error loading config");
- init_tracing(&config);
-
- let state = AppState::new(config).await?;
-
- let listener = TcpListener::bind(&format!("0.0.0.0:{}", state.config.http_port))
- .expect("failed to bind TCP listener");
-
- let epoch = state
- .db
- .create_server(&state.config.zed_environment)
- .await?;
- let rpc_server = collab2::rpc::Server::new(epoch, state.clone(), Executor::Production);
- rpc_server.start().await?;
-
- let app = collab2::api::routes(rpc_server.clone(), state.clone())
- .merge(collab2::rpc::routes(rpc_server.clone()))
- .merge(
- Router::new()
- .route("/", get(handle_root))
- .route("/healthz", get(handle_liveness_probe))
- .layer(Extension(state.clone())),
- );
-
- axum::Server::from_tcp(listener)?
- .serve(app.into_make_service_with_connect_info::<SocketAddr>())
- .with_graceful_shutdown(async move {
- let mut sigterm = tokio::signal::unix::signal(SignalKind::terminate())
- .expect("failed to listen for interrupt signal");
- let mut sigint = tokio::signal::unix::signal(SignalKind::interrupt())
- .expect("failed to listen for interrupt signal");
- let sigterm = sigterm.recv();
- let sigint = sigint.recv();
- futures::pin_mut!(sigterm, sigint);
- futures::future::select(sigterm, sigint).await;
- tracing::info!("Received interrupt signal");
- rpc_server.teardown();
- })
- .await?;
- }
- _ => {
- Err(anyhow!("usage: collab <version | migrate | serve>"))?;
- }
- }
- Ok(())
-}
-
-async fn handle_root() -> String {
- format!("collab v{VERSION}")
-}
-
-async fn handle_liveness_probe(Extension(state): Extension<Arc<AppState>>) -> Result<String> {
- state.db.get_all_users(0, 1).await?;
- Ok("ok".to_string())
-}
-
-pub fn init_tracing(config: &Config) -> Option<()> {
- use std::str::FromStr;
- use tracing_subscriber::layer::SubscriberExt;
- let rust_log = config.rust_log.clone()?;
-
- LogTracer::init().log_err()?;
-
- let subscriber = tracing_subscriber::Registry::default()
- .with(if config.log_json.unwrap_or(false) {
- Box::new(
- tracing_subscriber::fmt::layer()
- .fmt_fields(JsonFields::default())
- .event_format(
- tracing_subscriber::fmt::format()
- .json()
- .flatten_event(true)
- .with_span_list(true),
- ),
- ) as Box<dyn Layer<_> + Send + Sync>
- } else {
- Box::new(
- tracing_subscriber::fmt::layer()
- .event_format(tracing_subscriber::fmt::format().pretty()),
- )
- })
- .with(EnvFilter::from_str(rust_log.as_str()).log_err()?);
-
- tracing::subscriber::set_global_default(subscriber).unwrap();
-
- None
-}
@@ -1,3495 +0,0 @@
-mod connection_pool;
-
-use crate::{
- auth,
- db::{
- self, BufferId, ChannelId, ChannelRole, ChannelsForUser, CreateChannelResult,
- CreatedChannelMessage, Database, InviteMemberResult, MembershipUpdated, MessageId,
- MoveChannelResult, NotificationId, ProjectId, RemoveChannelMemberResult,
- RenameChannelResult, RespondToChannelInvite, RoomId, ServerId, SetChannelVisibilityResult,
- User, UserId,
- },
- executor::Executor,
- AppState, Result,
-};
-use anyhow::anyhow;
-use async_tungstenite::tungstenite::{
- protocol::CloseFrame as TungsteniteCloseFrame, Message as TungsteniteMessage,
-};
-use axum::{
- body::Body,
- extract::{
- ws::{CloseFrame as AxumCloseFrame, Message as AxumMessage},
- ConnectInfo, WebSocketUpgrade,
- },
- headers::{Header, HeaderName},
- http::StatusCode,
- middleware,
- response::IntoResponse,
- routing::get,
- Extension, Router, TypedHeader,
-};
-use collections::{HashMap, HashSet};
-pub use connection_pool::ConnectionPool;
-use futures::{
- channel::oneshot,
- future::{self, BoxFuture},
- stream::FuturesUnordered,
- FutureExt, SinkExt, StreamExt, TryStreamExt,
-};
-use lazy_static::lazy_static;
-use prometheus::{register_int_gauge, IntGauge};
-use rpc::{
- proto::{
- self, Ack, AnyTypedEnvelope, EntityMessage, EnvelopedMessage, LiveKitConnectionInfo,
- RequestMessage, UpdateChannelBufferCollaborators,
- },
- Connection, ConnectionId, Peer, Receipt, TypedEnvelope,
-};
-use serde::{Serialize, Serializer};
-use std::{
- any::TypeId,
- fmt,
- future::Future,
- marker::PhantomData,
- mem,
- net::SocketAddr,
- ops::{Deref, DerefMut},
- rc::Rc,
- sync::{
- atomic::{AtomicBool, Ordering::SeqCst},
- Arc,
- },
- time::{Duration, Instant},
-};
-use time::OffsetDateTime;
-use tokio::sync::{watch, Semaphore};
-use tower::ServiceBuilder;
-use tracing::{info_span, instrument, Instrument};
-use util::channel::RELEASE_CHANNEL_NAME;
-
-pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
-pub const CLEANUP_TIMEOUT: Duration = Duration::from_secs(10);
-
-const MESSAGE_COUNT_PER_PAGE: usize = 100;
-const MAX_MESSAGE_LEN: usize = 1024;
-const NOTIFICATION_COUNT_PER_PAGE: usize = 50;
-
-lazy_static! {
- static ref METRIC_CONNECTIONS: IntGauge =
- register_int_gauge!("connections", "number of connections").unwrap();
- static ref METRIC_SHARED_PROJECTS: IntGauge = register_int_gauge!(
- "shared_projects",
- "number of open projects with one or more guests"
- )
- .unwrap();
-}
-
-type MessageHandler =
- Box<dyn Send + Sync + Fn(Box<dyn AnyTypedEnvelope>, Session) -> BoxFuture<'static, ()>>;
-
-struct Response<R> {
- peer: Arc<Peer>,
- receipt: Receipt<R>,
- responded: Arc<AtomicBool>,
-}
-
-impl<R: RequestMessage> Response<R> {
- fn send(self, payload: R::Response) -> Result<()> {
- self.responded.store(true, SeqCst);
- self.peer.respond(self.receipt, payload)?;
- Ok(())
- }
-}
-
-#[derive(Clone)]
-struct Session {
- user_id: UserId,
- connection_id: ConnectionId,
- db: Arc<tokio::sync::Mutex<DbHandle>>,
- peer: Arc<Peer>,
- connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
- live_kit_client: Option<Arc<dyn live_kit_server::api::Client>>,
- _executor: Executor,
-}
-
-impl Session {
- async fn db(&self) -> tokio::sync::MutexGuard<DbHandle> {
- #[cfg(test)]
- tokio::task::yield_now().await;
- let guard = self.db.lock().await;
- #[cfg(test)]
- tokio::task::yield_now().await;
- guard
- }
-
- async fn connection_pool(&self) -> ConnectionPoolGuard<'_> {
- #[cfg(test)]
- tokio::task::yield_now().await;
- let guard = self.connection_pool.lock();
- ConnectionPoolGuard {
- guard,
- _not_send: PhantomData,
- }
- }
-}
-
-impl fmt::Debug for Session {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- f.debug_struct("Session")
- .field("user_id", &self.user_id)
- .field("connection_id", &self.connection_id)
- .finish()
- }
-}
-
-struct DbHandle(Arc<Database>);
-
-impl Deref for DbHandle {
- type Target = Database;
-
- fn deref(&self) -> &Self::Target {
- self.0.as_ref()
- }
-}
-
-pub struct Server {
- id: parking_lot::Mutex<ServerId>,
- peer: Arc<Peer>,
- pub(crate) connection_pool: Arc<parking_lot::Mutex<ConnectionPool>>,
- app_state: Arc<AppState>,
- executor: Executor,
- handlers: HashMap<TypeId, MessageHandler>,
- teardown: watch::Sender<()>,
-}
-
-pub(crate) struct ConnectionPoolGuard<'a> {
- guard: parking_lot::MutexGuard<'a, ConnectionPool>,
- _not_send: PhantomData<Rc<()>>,
-}
-
-#[derive(Serialize)]
-pub struct ServerSnapshot<'a> {
- peer: &'a Peer,
- #[serde(serialize_with = "serialize_deref")]
- connection_pool: ConnectionPoolGuard<'a>,
-}
-
-pub fn serialize_deref<S, T, U>(value: &T, serializer: S) -> Result<S::Ok, S::Error>
-where
- S: Serializer,
- T: Deref<Target = U>,
- U: Serialize,
-{
- Serialize::serialize(value.deref(), serializer)
-}
-
-impl Server {
- pub fn new(id: ServerId, app_state: Arc<AppState>, executor: Executor) -> Arc<Self> {
- let mut server = Self {
- id: parking_lot::Mutex::new(id),
- peer: Peer::new(id.0 as u32),
- app_state,
- executor,
- connection_pool: Default::default(),
- handlers: Default::default(),
- teardown: watch::channel(()).0,
- };
-
- server
- .add_request_handler(ping)
- .add_request_handler(create_room)
- .add_request_handler(join_room)
- .add_request_handler(rejoin_room)
- .add_request_handler(leave_room)
- .add_request_handler(call)
- .add_request_handler(cancel_call)
- .add_message_handler(decline_call)
- .add_request_handler(update_participant_location)
- .add_request_handler(share_project)
- .add_message_handler(unshare_project)
- .add_request_handler(join_project)
- .add_message_handler(leave_project)
- .add_request_handler(update_project)
- .add_request_handler(update_worktree)
- .add_message_handler(start_language_server)
- .add_message_handler(update_language_server)
- .add_message_handler(update_diagnostic_summary)
- .add_message_handler(update_worktree_settings)
- .add_message_handler(refresh_inlay_hints)
- .add_request_handler(forward_project_request::<proto::GetHover>)
- .add_request_handler(forward_project_request::<proto::GetDefinition>)
- .add_request_handler(forward_project_request::<proto::GetTypeDefinition>)
- .add_request_handler(forward_project_request::<proto::GetReferences>)
- .add_request_handler(forward_project_request::<proto::SearchProject>)
- .add_request_handler(forward_project_request::<proto::GetDocumentHighlights>)
- .add_request_handler(forward_project_request::<proto::GetProjectSymbols>)
- .add_request_handler(forward_project_request::<proto::OpenBufferForSymbol>)
- .add_request_handler(forward_project_request::<proto::OpenBufferById>)
- .add_request_handler(forward_project_request::<proto::OpenBufferByPath>)
- .add_request_handler(forward_project_request::<proto::GetCompletions>)
- .add_request_handler(forward_project_request::<proto::ApplyCompletionAdditionalEdits>)
- .add_request_handler(forward_project_request::<proto::ResolveCompletionDocumentation>)
- .add_request_handler(forward_project_request::<proto::GetCodeActions>)
- .add_request_handler(forward_project_request::<proto::ApplyCodeAction>)
- .add_request_handler(forward_project_request::<proto::PrepareRename>)
- .add_request_handler(forward_project_request::<proto::PerformRename>)
- .add_request_handler(forward_project_request::<proto::ReloadBuffers>)
- .add_request_handler(forward_project_request::<proto::SynchronizeBuffers>)
- .add_request_handler(forward_project_request::<proto::FormatBuffers>)
- .add_request_handler(forward_project_request::<proto::CreateProjectEntry>)
- .add_request_handler(forward_project_request::<proto::RenameProjectEntry>)
- .add_request_handler(forward_project_request::<proto::CopyProjectEntry>)
- .add_request_handler(forward_project_request::<proto::DeleteProjectEntry>)
- .add_request_handler(forward_project_request::<proto::ExpandProjectEntry>)
- .add_request_handler(forward_project_request::<proto::OnTypeFormatting>)
- .add_request_handler(forward_project_request::<proto::InlayHints>)
- .add_message_handler(create_buffer_for_peer)
- .add_request_handler(update_buffer)
- .add_message_handler(update_buffer_file)
- .add_message_handler(buffer_reloaded)
- .add_message_handler(buffer_saved)
- .add_request_handler(forward_project_request::<proto::SaveBuffer>)
- .add_request_handler(get_users)
- .add_request_handler(fuzzy_search_users)
- .add_request_handler(request_contact)
- .add_request_handler(remove_contact)
- .add_request_handler(respond_to_contact_request)
- .add_request_handler(create_channel)
- .add_request_handler(delete_channel)
- .add_request_handler(invite_channel_member)
- .add_request_handler(remove_channel_member)
- .add_request_handler(set_channel_member_role)
- .add_request_handler(set_channel_visibility)
- .add_request_handler(rename_channel)
- .add_request_handler(join_channel_buffer)
- .add_request_handler(leave_channel_buffer)
- .add_message_handler(update_channel_buffer)
- .add_request_handler(rejoin_channel_buffers)
- .add_request_handler(get_channel_members)
- .add_request_handler(respond_to_channel_invite)
- .add_request_handler(join_channel)
- .add_request_handler(join_channel_chat)
- .add_message_handler(leave_channel_chat)
- .add_request_handler(send_channel_message)
- .add_request_handler(remove_channel_message)
- .add_request_handler(get_channel_messages)
- .add_request_handler(get_channel_messages_by_id)
- .add_request_handler(get_notifications)
- .add_request_handler(mark_notification_as_read)
- .add_request_handler(move_channel)
- .add_request_handler(follow)
- .add_message_handler(unfollow)
- .add_message_handler(update_followers)
- .add_message_handler(update_diff_base)
- .add_request_handler(get_private_user_info)
- .add_message_handler(acknowledge_channel_message)
- .add_message_handler(acknowledge_buffer_version);
-
- Arc::new(server)
- }
-
- pub async fn start(&self) -> Result<()> {
- let server_id = *self.id.lock();
- let app_state = self.app_state.clone();
- let peer = self.peer.clone();
- let timeout = self.executor.sleep(CLEANUP_TIMEOUT);
- let pool = self.connection_pool.clone();
- let live_kit_client = self.app_state.live_kit_client.clone();
-
- let span = info_span!("start server");
- self.executor.spawn_detached(
- async move {
- tracing::info!("waiting for cleanup timeout");
- timeout.await;
- tracing::info!("cleanup timeout expired, retrieving stale rooms");
- if let Some((room_ids, channel_ids)) = app_state
- .db
- .stale_server_resource_ids(&app_state.config.zed_environment, server_id)
- .await
- .trace_err()
- {
- tracing::info!(stale_room_count = room_ids.len(), "retrieved stale rooms");
- tracing::info!(
- stale_channel_buffer_count = channel_ids.len(),
- "retrieved stale channel buffers"
- );
-
- for channel_id in channel_ids {
- if let Some(refreshed_channel_buffer) = app_state
- .db
- .clear_stale_channel_buffer_collaborators(channel_id, server_id)
- .await
- .trace_err()
- {
- for connection_id in refreshed_channel_buffer.connection_ids {
- peer.send(
- connection_id,
- proto::UpdateChannelBufferCollaborators {
- channel_id: channel_id.to_proto(),
- collaborators: refreshed_channel_buffer
- .collaborators
- .clone(),
- },
- )
- .trace_err();
- }
- }
- }
-
- for room_id in room_ids {
- let mut contacts_to_update = HashSet::default();
- let mut canceled_calls_to_user_ids = Vec::new();
- let mut live_kit_room = String::new();
- let mut delete_live_kit_room = false;
-
- if let Some(mut refreshed_room) = app_state
- .db
- .clear_stale_room_participants(room_id, server_id)
- .await
- .trace_err()
- {
- tracing::info!(
- room_id = room_id.0,
- new_participant_count = refreshed_room.room.participants.len(),
- "refreshed room"
- );
- room_updated(&refreshed_room.room, &peer);
- if let Some(channel_id) = refreshed_room.channel_id {
- channel_updated(
- channel_id,
- &refreshed_room.room,
- &refreshed_room.channel_members,
- &peer,
- &*pool.lock(),
- );
- }
- contacts_to_update
- .extend(refreshed_room.stale_participant_user_ids.iter().copied());
- contacts_to_update
- .extend(refreshed_room.canceled_calls_to_user_ids.iter().copied());
- canceled_calls_to_user_ids =
- mem::take(&mut refreshed_room.canceled_calls_to_user_ids);
- live_kit_room = mem::take(&mut refreshed_room.room.live_kit_room);
- delete_live_kit_room = refreshed_room.room.participants.is_empty();
- }
-
- {
- let pool = pool.lock();
- for canceled_user_id in canceled_calls_to_user_ids {
- for connection_id in pool.user_connection_ids(canceled_user_id) {
- peer.send(
- connection_id,
- proto::CallCanceled {
- room_id: room_id.to_proto(),
- },
- )
- .trace_err();
- }
- }
- }
-
- for user_id in contacts_to_update {
- let busy = app_state.db.is_user_busy(user_id).await.trace_err();
- let contacts = app_state.db.get_contacts(user_id).await.trace_err();
- if let Some((busy, contacts)) = busy.zip(contacts) {
- let pool = pool.lock();
- let updated_contact = contact_for_user(user_id, busy, &pool);
- for contact in contacts {
- if let db::Contact::Accepted {
- user_id: contact_user_id,
- ..
- } = contact
- {
- for contact_conn_id in
- pool.user_connection_ids(contact_user_id)
- {
- peer.send(
- contact_conn_id,
- proto::UpdateContacts {
- contacts: vec![updated_contact.clone()],
- remove_contacts: Default::default(),
- incoming_requests: Default::default(),
- remove_incoming_requests: Default::default(),
- outgoing_requests: Default::default(),
- remove_outgoing_requests: Default::default(),
- },
- )
- .trace_err();
- }
- }
- }
- }
- }
-
- if let Some(live_kit) = live_kit_client.as_ref() {
- if delete_live_kit_room {
- live_kit.delete_room(live_kit_room).await.trace_err();
- }
- }
- }
- }
-
- app_state
- .db
- .delete_stale_servers(&app_state.config.zed_environment, server_id)
- .await
- .trace_err();
- }
- .instrument(span),
- );
- Ok(())
- }
-
- pub fn teardown(&self) {
- self.peer.teardown();
- self.connection_pool.lock().reset();
- let _ = self.teardown.send(());
- }
-
- #[cfg(test)]
- pub fn reset(&self, id: ServerId) {
- self.teardown();
- *self.id.lock() = id;
- self.peer.reset(id.0 as u32);
- }
-
- #[cfg(test)]
- pub fn id(&self) -> ServerId {
- *self.id.lock()
- }
-
- fn add_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
- where
- F: 'static + Send + Sync + Fn(TypedEnvelope<M>, Session) -> Fut,
- Fut: 'static + Send + Future<Output = Result<()>>,
- M: EnvelopedMessage,
- {
- let prev_handler = self.handlers.insert(
- TypeId::of::<M>(),
- Box::new(move |envelope, session| {
- let envelope = envelope.into_any().downcast::<TypedEnvelope<M>>().unwrap();
- let span = info_span!(
- "handle message",
- payload_type = envelope.payload_type_name()
- );
- span.in_scope(|| {
- tracing::info!(
- payload_type = envelope.payload_type_name(),
- "message received"
- );
- });
- let start_time = Instant::now();
- let future = (handler)(*envelope, session);
- async move {
- let result = future.await;
- let duration_ms = start_time.elapsed().as_micros() as f64 / 1000.0;
- match result {
- Err(error) => {
- tracing::error!(%error, ?duration_ms, "error handling message")
- }
- Ok(()) => tracing::info!(?duration_ms, "finished handling message"),
- }
- }
- .instrument(span)
- .boxed()
- }),
- );
- if prev_handler.is_some() {
- panic!("registered a handler for the same message twice");
- }
- self
- }
-
- fn add_message_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
- where
- F: 'static + Send + Sync + Fn(M, Session) -> Fut,
- Fut: 'static + Send + Future<Output = Result<()>>,
- M: EnvelopedMessage,
- {
- self.add_handler(move |envelope, session| handler(envelope.payload, session));
- self
- }
-
- fn add_request_handler<F, Fut, M>(&mut self, handler: F) -> &mut Self
- where
- F: 'static + Send + Sync + Fn(M, Response<M>, Session) -> Fut,
- Fut: Send + Future<Output = Result<()>>,
- M: RequestMessage,
- {
- let handler = Arc::new(handler);
- self.add_handler(move |envelope, session| {
- let receipt = envelope.receipt();
- let handler = handler.clone();
- async move {
- let peer = session.peer.clone();
- let responded = Arc::new(AtomicBool::default());
- let response = Response {
- peer: peer.clone(),
- responded: responded.clone(),
- receipt,
- };
- match (handler)(envelope.payload, response, session).await {
- Ok(()) => {
- if responded.load(std::sync::atomic::Ordering::SeqCst) {
- Ok(())
- } else {
- Err(anyhow!("handler did not send a response"))?
- }
- }
- Err(error) => {
- peer.respond_with_error(
- receipt,
- proto::Error {
- message: error.to_string(),
- },
- )?;
- Err(error)
- }
- }
- }
- })
- }
-
- pub fn handle_connection(
- self: &Arc<Self>,
- connection: Connection,
- address: String,
- user: User,
- mut send_connection_id: Option<oneshot::Sender<ConnectionId>>,
- executor: Executor,
- ) -> impl Future<Output = Result<()>> {
- let this = self.clone();
- let user_id = user.id;
- let login = user.github_login;
- let span = info_span!("handle connection", %user_id, %login, %address);
- let mut teardown = self.teardown.subscribe();
- async move {
- let (connection_id, handle_io, mut incoming_rx) = this
- .peer
- .add_connection(connection, {
- let executor = executor.clone();
- move |duration| executor.sleep(duration)
- });
-
- tracing::info!(%user_id, %login, %connection_id, %address, "connection opened");
- this.peer.send(connection_id, proto::Hello { peer_id: Some(connection_id.into()) })?;
- tracing::info!(%user_id, %login, %connection_id, %address, "sent hello message");
-
- if let Some(send_connection_id) = send_connection_id.take() {
- let _ = send_connection_id.send(connection_id);
- }
-
- if !user.connected_once {
- this.peer.send(connection_id, proto::ShowContacts {})?;
- this.app_state.db.set_user_connected_once(user_id, true).await?;
- }
-
- let (contacts, channels_for_user, channel_invites) = future::try_join3(
- this.app_state.db.get_contacts(user_id),
- this.app_state.db.get_channels_for_user(user_id),
- this.app_state.db.get_channel_invites_for_user(user_id),
- ).await?;
-
- {
- let mut pool = this.connection_pool.lock();
- pool.add_connection(connection_id, user_id, user.admin);
- this.peer.send(connection_id, build_initial_contacts_update(contacts, &pool))?;
- this.peer.send(connection_id, build_channels_update(
- channels_for_user,
- channel_invites
- ))?;
- }
-
- if let Some(incoming_call) = this.app_state.db.incoming_call_for_user(user_id).await? {
- this.peer.send(connection_id, incoming_call)?;
- }
-
- let session = Session {
- user_id,
- connection_id,
- db: Arc::new(tokio::sync::Mutex::new(DbHandle(this.app_state.db.clone()))),
- peer: this.peer.clone(),
- connection_pool: this.connection_pool.clone(),
- live_kit_client: this.app_state.live_kit_client.clone(),
- _executor: executor.clone()
- };
- update_user_contacts(user_id, &session).await?;
-
- let handle_io = handle_io.fuse();
- futures::pin_mut!(handle_io);
-
- // Handlers for foreground messages are pushed into the following `FuturesUnordered`.
- // This prevents deadlocks when e.g., client A performs a request to client B and
- // client B performs a request to client A. If both clients stop processing further
- // messages until their respective request completes, they won't have a chance to
- // respond to the other client's request and cause a deadlock.
- //
- // This arrangement ensures we will attempt to process earlier messages first, but fall
- // back to processing messages arrived later in the spirit of making progress.
- let mut foreground_message_handlers = FuturesUnordered::new();
- let concurrent_handlers = Arc::new(Semaphore::new(256));
- loop {
- let next_message = async {
- let permit = concurrent_handlers.clone().acquire_owned().await.unwrap();
- let message = incoming_rx.next().await;
- (permit, message)
- }.fuse();
- futures::pin_mut!(next_message);
- futures::select_biased! {
- _ = teardown.changed().fuse() => return Ok(()),
- result = handle_io => {
- if let Err(error) = result {
- tracing::error!(?error, %user_id, %login, %connection_id, %address, "error handling I/O");
- }
- break;
- }
- _ = foreground_message_handlers.next() => {}
- next_message = next_message => {
- let (permit, message) = next_message;
- if let Some(message) = message {
- let type_name = message.payload_type_name();
- let span = tracing::info_span!("receive message", %user_id, %login, %connection_id, %address, type_name);
- let span_enter = span.enter();
- if let Some(handler) = this.handlers.get(&message.payload_type_id()) {
- let is_background = message.is_background();
- let handle_message = (handler)(message, session.clone());
- drop(span_enter);
-
- let handle_message = async move {
- handle_message.await;
- drop(permit);
- }.instrument(span);
- if is_background {
- executor.spawn_detached(handle_message);
- } else {
- foreground_message_handlers.push(handle_message);
- }
- } else {
- tracing::error!(%user_id, %login, %connection_id, %address, "no message handler");
- }
- } else {
- tracing::info!(%user_id, %login, %connection_id, %address, "connection closed");
- break;
- }
- }
- }
- }
-
- drop(foreground_message_handlers);
- tracing::info!(%user_id, %login, %connection_id, %address, "signing out");
- if let Err(error) = connection_lost(session, teardown, executor).await {
- tracing::error!(%user_id, %login, %connection_id, %address, ?error, "error signing out");
- }
-
- Ok(())
- }.instrument(span)
- }
-
- pub async fn invite_code_redeemed(
- self: &Arc<Self>,
- inviter_id: UserId,
- invitee_id: UserId,
- ) -> Result<()> {
- if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? {
- if let Some(code) = &user.invite_code {
- let pool = self.connection_pool.lock();
- let invitee_contact = contact_for_user(invitee_id, false, &pool);
- for connection_id in pool.user_connection_ids(inviter_id) {
- self.peer.send(
- connection_id,
- proto::UpdateContacts {
- contacts: vec![invitee_contact.clone()],
- ..Default::default()
- },
- )?;
- self.peer.send(
- connection_id,
- proto::UpdateInviteInfo {
- url: format!("{}{}", self.app_state.config.invite_link_prefix, &code),
- count: user.invite_count as u32,
- },
- )?;
- }
- }
- }
- Ok(())
- }
-
- pub async fn invite_count_updated(self: &Arc<Self>, user_id: UserId) -> Result<()> {
- if let Some(user) = self.app_state.db.get_user_by_id(user_id).await? {
- if let Some(invite_code) = &user.invite_code {
- let pool = self.connection_pool.lock();
- for connection_id in pool.user_connection_ids(user_id) {
- self.peer.send(
- connection_id,
- proto::UpdateInviteInfo {
- url: format!(
- "{}{}",
- self.app_state.config.invite_link_prefix, invite_code
- ),
- count: user.invite_count as u32,
- },
- )?;
- }
- }
- }
- Ok(())
- }
-
- pub async fn snapshot<'a>(self: &'a Arc<Self>) -> ServerSnapshot<'a> {
- ServerSnapshot {
- connection_pool: ConnectionPoolGuard {
- guard: self.connection_pool.lock(),
- _not_send: PhantomData,
- },
- peer: &self.peer,
- }
- }
-}
-
-impl<'a> Deref for ConnectionPoolGuard<'a> {
- type Target = ConnectionPool;
-
- fn deref(&self) -> &Self::Target {
- &*self.guard
- }
-}
-
-impl<'a> DerefMut for ConnectionPoolGuard<'a> {
- fn deref_mut(&mut self) -> &mut Self::Target {
- &mut *self.guard
- }
-}
-
-impl<'a> Drop for ConnectionPoolGuard<'a> {
- fn drop(&mut self) {
- #[cfg(test)]
- self.check_invariants();
- }
-}
-
-fn broadcast<F>(
- sender_id: Option<ConnectionId>,
- receiver_ids: impl IntoIterator<Item = ConnectionId>,
- mut f: F,
-) where
- F: FnMut(ConnectionId) -> anyhow::Result<()>,
-{
- for receiver_id in receiver_ids {
- if Some(receiver_id) != sender_id {
- if let Err(error) = f(receiver_id) {
- tracing::error!("failed to send to {:?} {}", receiver_id, error);
- }
- }
- }
-}
-
-lazy_static! {
- static ref ZED_PROTOCOL_VERSION: HeaderName = HeaderName::from_static("x-zed-protocol-version");
-}
-
-pub struct ProtocolVersion(u32);
-
-impl Header for ProtocolVersion {
- fn name() -> &'static HeaderName {
- &ZED_PROTOCOL_VERSION
- }
-
- fn decode<'i, I>(values: &mut I) -> Result<Self, axum::headers::Error>
- where
- Self: Sized,
- I: Iterator<Item = &'i axum::http::HeaderValue>,
- {
- let version = values
- .next()
- .ok_or_else(axum::headers::Error::invalid)?
- .to_str()
- .map_err(|_| axum::headers::Error::invalid())?
- .parse()
- .map_err(|_| axum::headers::Error::invalid())?;
- Ok(Self(version))
- }
-
- fn encode<E: Extend<axum::http::HeaderValue>>(&self, values: &mut E) {
- values.extend([self.0.to_string().parse().unwrap()]);
- }
-}
-
-pub fn routes(server: Arc<Server>) -> Router<Body> {
- Router::new()
- .route("/rpc", get(handle_websocket_request))
- .layer(
- ServiceBuilder::new()
- .layer(Extension(server.app_state.clone()))
- .layer(middleware::from_fn(auth::validate_header)),
- )
- .route("/metrics", get(handle_metrics))
- .layer(Extension(server))
-}
-
-pub async fn handle_websocket_request(
- TypedHeader(ProtocolVersion(protocol_version)): TypedHeader<ProtocolVersion>,
- ConnectInfo(socket_address): ConnectInfo<SocketAddr>,
- Extension(server): Extension<Arc<Server>>,
- Extension(user): Extension<User>,
- ws: WebSocketUpgrade,
-) -> axum::response::Response {
- if protocol_version != rpc::PROTOCOL_VERSION {
- return (
- StatusCode::UPGRADE_REQUIRED,
- "client must be upgraded".to_string(),
- )
- .into_response();
- }
- let socket_address = socket_address.to_string();
- ws.on_upgrade(move |socket| {
- use util::ResultExt;
- let socket = socket
- .map_ok(to_tungstenite_message)
- .err_into()
- .with(|message| async move { Ok(to_axum_message(message)) });
- let connection = Connection::new(Box::pin(socket));
- async move {
- server
- .handle_connection(connection, socket_address, user, None, Executor::Production)
- .await
- .log_err();
- }
- })
-}
-
-pub async fn handle_metrics(Extension(server): Extension<Arc<Server>>) -> Result<String> {
- let connections = server
- .connection_pool
- .lock()
- .connections()
- .filter(|connection| !connection.admin)
- .count();
-
- METRIC_CONNECTIONS.set(connections as _);
-
- let shared_projects = server.app_state.db.project_count_excluding_admins().await?;
- METRIC_SHARED_PROJECTS.set(shared_projects as _);
-
- let encoder = prometheus::TextEncoder::new();
- let metric_families = prometheus::gather();
- let encoded_metrics = encoder
- .encode_to_string(&metric_families)
- .map_err(|err| anyhow!("{}", err))?;
- Ok(encoded_metrics)
-}
-
-#[instrument(err, skip(executor))]
-async fn connection_lost(
- session: Session,
- mut teardown: watch::Receiver<()>,
- executor: Executor,
-) -> Result<()> {
- session.peer.disconnect(session.connection_id);
- session
- .connection_pool()
- .await
- .remove_connection(session.connection_id)?;
-
- session
- .db()
- .await
- .connection_lost(session.connection_id)
- .await
- .trace_err();
-
- futures::select_biased! {
- _ = executor.sleep(RECONNECT_TIMEOUT).fuse() => {
- log::info!("connection lost, removing all resources for user:{}, connection:{:?}", session.user_id, session.connection_id);
- leave_room_for_session(&session).await.trace_err();
- leave_channel_buffers_for_session(&session)
- .await
- .trace_err();
-
- if !session
- .connection_pool()
- .await
- .is_user_online(session.user_id)
- {
- let db = session.db().await;
- if let Some(room) = db.decline_call(None, session.user_id).await.trace_err().flatten() {
- room_updated(&room, &session.peer);
- }
- }
-
- update_user_contacts(session.user_id, &session).await?;
- }
- _ = teardown.changed().fuse() => {}
- }
-
- Ok(())
-}
-
-async fn ping(_: proto::Ping, response: Response<proto::Ping>, _session: Session) -> Result<()> {
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn create_room(
- _request: proto::CreateRoom,
- response: Response<proto::CreateRoom>,
- session: Session,
-) -> Result<()> {
- let live_kit_room = nanoid::nanoid!(30);
-
- let live_kit_connection_info = {
- let live_kit_room = live_kit_room.clone();
- let live_kit = session.live_kit_client.as_ref();
-
- util::async_maybe!({
- let live_kit = live_kit?;
-
- let token = live_kit
- .room_token(&live_kit_room, &session.user_id.to_string())
- .trace_err()?;
-
- Some(proto::LiveKitConnectionInfo {
- server_url: live_kit.url().into(),
- token,
- can_publish: true,
- })
- })
- }
- .await;
-
- let room = session
- .db()
- .await
- .create_room(
- session.user_id,
- session.connection_id,
- &live_kit_room,
- RELEASE_CHANNEL_NAME.as_str(),
- )
- .await?;
-
- response.send(proto::CreateRoomResponse {
- room: Some(room.clone()),
- live_kit_connection_info,
- })?;
-
- update_user_contacts(session.user_id, &session).await?;
- Ok(())
-}
-
-async fn join_room(
- request: proto::JoinRoom,
- response: Response<proto::JoinRoom>,
- session: Session,
-) -> Result<()> {
- let room_id = RoomId::from_proto(request.id);
-
- let channel_id = session.db().await.channel_id_for_room(room_id).await?;
-
- if let Some(channel_id) = channel_id {
- return join_channel_internal(channel_id, Box::new(response), session).await;
- }
-
- let joined_room = {
- let room = session
- .db()
- .await
- .join_room(
- room_id,
- session.user_id,
- session.connection_id,
- RELEASE_CHANNEL_NAME.as_str(),
- )
- .await?;
- room_updated(&room.room, &session.peer);
- room.into_inner()
- };
-
- for connection_id in session
- .connection_pool()
- .await
- .user_connection_ids(session.user_id)
- {
- session
- .peer
- .send(
- connection_id,
- proto::CallCanceled {
- room_id: room_id.to_proto(),
- },
- )
- .trace_err();
- }
-
- let live_kit_connection_info = if let Some(live_kit) = session.live_kit_client.as_ref() {
- if let Some(token) = live_kit
- .room_token(
- &joined_room.room.live_kit_room,
- &session.user_id.to_string(),
- )
- .trace_err()
- {
- Some(proto::LiveKitConnectionInfo {
- server_url: live_kit.url().into(),
- token,
- can_publish: true,
- })
- } else {
- None
- }
- } else {
- None
- };
-
- response.send(proto::JoinRoomResponse {
- room: Some(joined_room.room),
- channel_id: None,
- live_kit_connection_info,
- })?;
-
- update_user_contacts(session.user_id, &session).await?;
- Ok(())
-}
-
-async fn rejoin_room(
- request: proto::RejoinRoom,
- response: Response<proto::RejoinRoom>,
- session: Session,
-) -> Result<()> {
- let room;
- let channel_id;
- let channel_members;
- {
- let mut rejoined_room = session
- .db()
- .await
- .rejoin_room(request, session.user_id, session.connection_id)
- .await?;
-
- response.send(proto::RejoinRoomResponse {
- room: Some(rejoined_room.room.clone()),
- reshared_projects: rejoined_room
- .reshared_projects
- .iter()
- .map(|project| proto::ResharedProject {
- id: project.id.to_proto(),
- collaborators: project
- .collaborators
- .iter()
- .map(|collaborator| collaborator.to_proto())
- .collect(),
- })
- .collect(),
- rejoined_projects: rejoined_room
- .rejoined_projects
- .iter()
- .map(|rejoined_project| proto::RejoinedProject {
- id: rejoined_project.id.to_proto(),
- worktrees: rejoined_project
- .worktrees
- .iter()
- .map(|worktree| proto::WorktreeMetadata {
- id: worktree.id,
- root_name: worktree.root_name.clone(),
- visible: worktree.visible,
- abs_path: worktree.abs_path.clone(),
- })
- .collect(),
- collaborators: rejoined_project
- .collaborators
- .iter()
- .map(|collaborator| collaborator.to_proto())
- .collect(),
- language_servers: rejoined_project.language_servers.clone(),
- })
- .collect(),
- })?;
- room_updated(&rejoined_room.room, &session.peer);
-
- for project in &rejoined_room.reshared_projects {
- for collaborator in &project.collaborators {
- session
- .peer
- .send(
- collaborator.connection_id,
- proto::UpdateProjectCollaborator {
- project_id: project.id.to_proto(),
- old_peer_id: Some(project.old_connection_id.into()),
- new_peer_id: Some(session.connection_id.into()),
- },
- )
- .trace_err();
- }
-
- broadcast(
- Some(session.connection_id),
- project
- .collaborators
- .iter()
- .map(|collaborator| collaborator.connection_id),
- |connection_id| {
- session.peer.forward_send(
- session.connection_id,
- connection_id,
- proto::UpdateProject {
- project_id: project.id.to_proto(),
- worktrees: project.worktrees.clone(),
- },
- )
- },
- );
- }
-
- for project in &rejoined_room.rejoined_projects {
- for collaborator in &project.collaborators {
- session
- .peer
- .send(
- collaborator.connection_id,
- proto::UpdateProjectCollaborator {
- project_id: project.id.to_proto(),
- old_peer_id: Some(project.old_connection_id.into()),
- new_peer_id: Some(session.connection_id.into()),
- },
- )
- .trace_err();
- }
- }
-
- for project in &mut rejoined_room.rejoined_projects {
- for worktree in mem::take(&mut project.worktrees) {
- #[cfg(any(test, feature = "test-support"))]
- const MAX_CHUNK_SIZE: usize = 2;
- #[cfg(not(any(test, feature = "test-support")))]
- const MAX_CHUNK_SIZE: usize = 256;
-
- // Stream this worktree's entries.
- let message = proto::UpdateWorktree {
- project_id: project.id.to_proto(),
- worktree_id: worktree.id,
- abs_path: worktree.abs_path.clone(),
- root_name: worktree.root_name,
- updated_entries: worktree.updated_entries,
- removed_entries: worktree.removed_entries,
- scan_id: worktree.scan_id,
- is_last_update: worktree.completed_scan_id == worktree.scan_id,
- updated_repositories: worktree.updated_repositories,
- removed_repositories: worktree.removed_repositories,
- };
- for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
- session.peer.send(session.connection_id, update.clone())?;
- }
-
- // Stream this worktree's diagnostics.
- for summary in worktree.diagnostic_summaries {
- session.peer.send(
- session.connection_id,
- proto::UpdateDiagnosticSummary {
- project_id: project.id.to_proto(),
- worktree_id: worktree.id,
- summary: Some(summary),
- },
- )?;
- }
-
- for settings_file in worktree.settings_files {
- session.peer.send(
- session.connection_id,
- proto::UpdateWorktreeSettings {
- project_id: project.id.to_proto(),
- worktree_id: worktree.id,
- path: settings_file.path,
- content: Some(settings_file.content),
- },
- )?;
- }
- }
-
- for language_server in &project.language_servers {
- session.peer.send(
- session.connection_id,
- proto::UpdateLanguageServer {
- project_id: project.id.to_proto(),
- language_server_id: language_server.id,
- variant: Some(
- proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
- proto::LspDiskBasedDiagnosticsUpdated {},
- ),
- ),
- },
- )?;
- }
- }
-
- let rejoined_room = rejoined_room.into_inner();
-
- room = rejoined_room.room;
- channel_id = rejoined_room.channel_id;
- channel_members = rejoined_room.channel_members;
- }
-
- if let Some(channel_id) = channel_id {
- channel_updated(
- channel_id,
- &room,
- &channel_members,
- &session.peer,
- &*session.connection_pool().await,
- );
- }
-
- update_user_contacts(session.user_id, &session).await?;
- Ok(())
-}
-
-async fn leave_room(
- _: proto::LeaveRoom,
- response: Response<proto::LeaveRoom>,
- session: Session,
-) -> Result<()> {
- leave_room_for_session(&session).await?;
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn call(
- request: proto::Call,
- response: Response<proto::Call>,
- session: Session,
-) -> Result<()> {
- let room_id = RoomId::from_proto(request.room_id);
- let calling_user_id = session.user_id;
- let calling_connection_id = session.connection_id;
- let called_user_id = UserId::from_proto(request.called_user_id);
- let initial_project_id = request.initial_project_id.map(ProjectId::from_proto);
- if !session
- .db()
- .await
- .has_contact(calling_user_id, called_user_id)
- .await?
- {
- return Err(anyhow!("cannot call a user who isn't a contact"))?;
- }
-
- let incoming_call = {
- let (room, incoming_call) = &mut *session
- .db()
- .await
- .call(
- room_id,
- calling_user_id,
- calling_connection_id,
- called_user_id,
- initial_project_id,
- )
- .await?;
- room_updated(&room, &session.peer);
- mem::take(incoming_call)
- };
- update_user_contacts(called_user_id, &session).await?;
-
- let mut calls = session
- .connection_pool()
- .await
- .user_connection_ids(called_user_id)
- .map(|connection_id| session.peer.request(connection_id, incoming_call.clone()))
- .collect::<FuturesUnordered<_>>();
-
- while let Some(call_response) = calls.next().await {
- match call_response.as_ref() {
- Ok(_) => {
- response.send(proto::Ack {})?;
- return Ok(());
- }
- Err(_) => {
- call_response.trace_err();
- }
- }
- }
-
- {
- let room = session
- .db()
- .await
- .call_failed(room_id, called_user_id)
- .await?;
- room_updated(&room, &session.peer);
- }
- update_user_contacts(called_user_id, &session).await?;
-
- Err(anyhow!("failed to ring user"))?
-}
-
-async fn cancel_call(
- request: proto::CancelCall,
- response: Response<proto::CancelCall>,
- session: Session,
-) -> Result<()> {
- let called_user_id = UserId::from_proto(request.called_user_id);
- let room_id = RoomId::from_proto(request.room_id);
- {
- let room = session
- .db()
- .await
- .cancel_call(room_id, session.connection_id, called_user_id)
- .await?;
- room_updated(&room, &session.peer);
- }
-
- for connection_id in session
- .connection_pool()
- .await
- .user_connection_ids(called_user_id)
- {
- session
- .peer
- .send(
- connection_id,
- proto::CallCanceled {
- room_id: room_id.to_proto(),
- },
- )
- .trace_err();
- }
- response.send(proto::Ack {})?;
-
- update_user_contacts(called_user_id, &session).await?;
- Ok(())
-}
-
-async fn decline_call(message: proto::DeclineCall, session: Session) -> Result<()> {
- let room_id = RoomId::from_proto(message.room_id);
- {
- let room = session
- .db()
- .await
- .decline_call(Some(room_id), session.user_id)
- .await?
- .ok_or_else(|| anyhow!("failed to decline call"))?;
- room_updated(&room, &session.peer);
- }
-
- for connection_id in session
- .connection_pool()
- .await
- .user_connection_ids(session.user_id)
- {
- session
- .peer
- .send(
- connection_id,
- proto::CallCanceled {
- room_id: room_id.to_proto(),
- },
- )
- .trace_err();
- }
- update_user_contacts(session.user_id, &session).await?;
- Ok(())
-}
-
-async fn update_participant_location(
- request: proto::UpdateParticipantLocation,
- response: Response<proto::UpdateParticipantLocation>,
- session: Session,
-) -> Result<()> {
- let room_id = RoomId::from_proto(request.room_id);
- let location = request
- .location
- .ok_or_else(|| anyhow!("invalid location"))?;
-
- let db = session.db().await;
- let room = db
- .update_room_participant_location(room_id, session.connection_id, location)
- .await?;
-
- room_updated(&room, &session.peer);
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn share_project(
- request: proto::ShareProject,
- response: Response<proto::ShareProject>,
- session: Session,
-) -> Result<()> {
- let (project_id, room) = &*session
- .db()
- .await
- .share_project(
- RoomId::from_proto(request.room_id),
- session.connection_id,
- &request.worktrees,
- )
- .await?;
- response.send(proto::ShareProjectResponse {
- project_id: project_id.to_proto(),
- })?;
- room_updated(&room, &session.peer);
-
- Ok(())
-}
-
-async fn unshare_project(message: proto::UnshareProject, session: Session) -> Result<()> {
- let project_id = ProjectId::from_proto(message.project_id);
-
- let (room, guest_connection_ids) = &*session
- .db()
- .await
- .unshare_project(project_id, session.connection_id)
- .await?;
-
- broadcast(
- Some(session.connection_id),
- guest_connection_ids.iter().copied(),
- |conn_id| session.peer.send(conn_id, message.clone()),
- );
- room_updated(&room, &session.peer);
-
- Ok(())
-}
-
-async fn join_project(
- request: proto::JoinProject,
- response: Response<proto::JoinProject>,
- session: Session,
-) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let guest_user_id = session.user_id;
-
- tracing::info!(%project_id, "join project");
-
- let (project, replica_id) = &mut *session
- .db()
- .await
- .join_project(project_id, session.connection_id)
- .await?;
-
- let collaborators = project
- .collaborators
- .iter()
- .filter(|collaborator| collaborator.connection_id != session.connection_id)
- .map(|collaborator| collaborator.to_proto())
- .collect::<Vec<_>>();
-
- let worktrees = project
- .worktrees
- .iter()
- .map(|(id, worktree)| proto::WorktreeMetadata {
- id: *id,
- root_name: worktree.root_name.clone(),
- visible: worktree.visible,
- abs_path: worktree.abs_path.clone(),
- })
- .collect::<Vec<_>>();
-
- for collaborator in &collaborators {
- session
- .peer
- .send(
- collaborator.peer_id.unwrap().into(),
- proto::AddProjectCollaborator {
- project_id: project_id.to_proto(),
- collaborator: Some(proto::Collaborator {
- peer_id: Some(session.connection_id.into()),
- replica_id: replica_id.0 as u32,
- user_id: guest_user_id.to_proto(),
- }),
- },
- )
- .trace_err();
- }
-
- // First, we send the metadata associated with each worktree.
- response.send(proto::JoinProjectResponse {
- worktrees: worktrees.clone(),
- replica_id: replica_id.0 as u32,
- collaborators: collaborators.clone(),
- language_servers: project.language_servers.clone(),
- })?;
-
- for (worktree_id, worktree) in mem::take(&mut project.worktrees) {
- #[cfg(any(test, feature = "test-support"))]
- const MAX_CHUNK_SIZE: usize = 2;
- #[cfg(not(any(test, feature = "test-support")))]
- const MAX_CHUNK_SIZE: usize = 256;
-
- // Stream this worktree's entries.
- let message = proto::UpdateWorktree {
- project_id: project_id.to_proto(),
- worktree_id,
- abs_path: worktree.abs_path.clone(),
- root_name: worktree.root_name,
- updated_entries: worktree.entries,
- removed_entries: Default::default(),
- scan_id: worktree.scan_id,
- is_last_update: worktree.scan_id == worktree.completed_scan_id,
- updated_repositories: worktree.repository_entries.into_values().collect(),
- removed_repositories: Default::default(),
- };
- for update in proto::split_worktree_update(message, MAX_CHUNK_SIZE) {
- session.peer.send(session.connection_id, update.clone())?;
- }
-
- // Stream this worktree's diagnostics.
- for summary in worktree.diagnostic_summaries {
- session.peer.send(
- session.connection_id,
- proto::UpdateDiagnosticSummary {
- project_id: project_id.to_proto(),
- worktree_id: worktree.id,
- summary: Some(summary),
- },
- )?;
- }
-
- for settings_file in worktree.settings_files {
- session.peer.send(
- session.connection_id,
- proto::UpdateWorktreeSettings {
- project_id: project_id.to_proto(),
- worktree_id: worktree.id,
- path: settings_file.path,
- content: Some(settings_file.content),
- },
- )?;
- }
- }
-
- for language_server in &project.language_servers {
- session.peer.send(
- session.connection_id,
- proto::UpdateLanguageServer {
- project_id: project_id.to_proto(),
- language_server_id: language_server.id,
- variant: Some(
- proto::update_language_server::Variant::DiskBasedDiagnosticsUpdated(
- proto::LspDiskBasedDiagnosticsUpdated {},
- ),
- ),
- },
- )?;
- }
-
- Ok(())
-}
-
-async fn leave_project(request: proto::LeaveProject, session: Session) -> Result<()> {
- let sender_id = session.connection_id;
- let project_id = ProjectId::from_proto(request.project_id);
-
- let (room, project) = &*session
- .db()
- .await
- .leave_project(project_id, sender_id)
- .await?;
- tracing::info!(
- %project_id,
- host_user_id = %project.host_user_id,
- host_connection_id = %project.host_connection_id,
- "leave project"
- );
-
- project_left(&project, &session);
- room_updated(&room, &session.peer);
-
- Ok(())
-}
-
-async fn update_project(
- request: proto::UpdateProject,
- response: Response<proto::UpdateProject>,
- session: Session,
-) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let (room, guest_connection_ids) = &*session
- .db()
- .await
- .update_project(project_id, session.connection_id, &request.worktrees)
- .await?;
- broadcast(
- Some(session.connection_id),
- guest_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- room_updated(&room, &session.peer);
- response.send(proto::Ack {})?;
-
- Ok(())
-}
-
-async fn update_worktree(
- request: proto::UpdateWorktree,
- response: Response<proto::UpdateWorktree>,
- session: Session,
-) -> Result<()> {
- let guest_connection_ids = session
- .db()
- .await
- .update_worktree(&request, session.connection_id)
- .await?;
-
- broadcast(
- Some(session.connection_id),
- guest_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn update_diagnostic_summary(
- message: proto::UpdateDiagnosticSummary,
- session: Session,
-) -> Result<()> {
- let guest_connection_ids = session
- .db()
- .await
- .update_diagnostic_summary(&message, session.connection_id)
- .await?;
-
- broadcast(
- Some(session.connection_id),
- guest_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, message.clone())
- },
- );
-
- Ok(())
-}
-
-async fn update_worktree_settings(
- message: proto::UpdateWorktreeSettings,
- session: Session,
-) -> Result<()> {
- let guest_connection_ids = session
- .db()
- .await
- .update_worktree_settings(&message, session.connection_id)
- .await?;
-
- broadcast(
- Some(session.connection_id),
- guest_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, message.clone())
- },
- );
-
- Ok(())
-}
-
-async fn refresh_inlay_hints(request: proto::RefreshInlayHints, session: Session) -> Result<()> {
- broadcast_project_message(request.project_id, request, session).await
-}
-
-async fn start_language_server(
- request: proto::StartLanguageServer,
- session: Session,
-) -> Result<()> {
- let guest_connection_ids = session
- .db()
- .await
- .start_language_server(&request, session.connection_id)
- .await?;
-
- broadcast(
- Some(session.connection_id),
- guest_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- Ok(())
-}
-
-async fn update_language_server(
- request: proto::UpdateLanguageServer,
- session: Session,
-) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let project_connection_ids = session
- .db()
- .await
- .project_connection_ids(project_id, session.connection_id)
- .await?;
- broadcast(
- Some(session.connection_id),
- project_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- Ok(())
-}
-
-async fn forward_project_request<T>(
- request: T,
- response: Response<T>,
- session: Session,
-) -> Result<()>
-where
- T: EntityMessage + RequestMessage,
-{
- let project_id = ProjectId::from_proto(request.remote_entity_id());
- let host_connection_id = {
- let collaborators = session
- .db()
- .await
- .project_collaborators(project_id, session.connection_id)
- .await?;
- collaborators
- .iter()
- .find(|collaborator| collaborator.is_host)
- .ok_or_else(|| anyhow!("host not found"))?
- .connection_id
- };
-
- let payload = session
- .peer
- .forward_request(session.connection_id, host_connection_id, request)
- .await?;
-
- response.send(payload)?;
- Ok(())
-}
-
-async fn create_buffer_for_peer(
- request: proto::CreateBufferForPeer,
- session: Session,
-) -> Result<()> {
- let peer_id = request.peer_id.ok_or_else(|| anyhow!("invalid peer id"))?;
- session
- .peer
- .forward_send(session.connection_id, peer_id.into(), request)?;
- Ok(())
-}
-
-async fn update_buffer(
- request: proto::UpdateBuffer,
- response: Response<proto::UpdateBuffer>,
- session: Session,
-) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let mut guest_connection_ids;
- let mut host_connection_id = None;
- {
- let collaborators = session
- .db()
- .await
- .project_collaborators(project_id, session.connection_id)
- .await?;
- guest_connection_ids = Vec::with_capacity(collaborators.len() - 1);
- for collaborator in collaborators.iter() {
- if collaborator.is_host {
- host_connection_id = Some(collaborator.connection_id);
- } else {
- guest_connection_ids.push(collaborator.connection_id);
- }
- }
- }
- let host_connection_id = host_connection_id.ok_or_else(|| anyhow!("host not found"))?;
-
- broadcast(
- Some(session.connection_id),
- guest_connection_ids,
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- if host_connection_id != session.connection_id {
- session
- .peer
- .forward_request(session.connection_id, host_connection_id, request.clone())
- .await?;
- }
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn update_buffer_file(request: proto::UpdateBufferFile, session: Session) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let project_connection_ids = session
- .db()
- .await
- .project_connection_ids(project_id, session.connection_id)
- .await?;
-
- broadcast(
- Some(session.connection_id),
- project_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- Ok(())
-}
-
-async fn buffer_reloaded(request: proto::BufferReloaded, session: Session) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let project_connection_ids = session
- .db()
- .await
- .project_connection_ids(project_id, session.connection_id)
- .await?;
- broadcast(
- Some(session.connection_id),
- project_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- Ok(())
-}
-
-async fn buffer_saved(request: proto::BufferSaved, session: Session) -> Result<()> {
- broadcast_project_message(request.project_id, request, session).await
-}
-
-async fn broadcast_project_message<T: EnvelopedMessage>(
- project_id: u64,
- request: T,
- session: Session,
-) -> Result<()> {
- let project_id = ProjectId::from_proto(project_id);
- let project_connection_ids = session
- .db()
- .await
- .project_connection_ids(project_id, session.connection_id)
- .await?;
- broadcast(
- Some(session.connection_id),
- project_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- Ok(())
-}
-
-async fn follow(
- request: proto::Follow,
- response: Response<proto::Follow>,
- session: Session,
-) -> Result<()> {
- let room_id = RoomId::from_proto(request.room_id);
- let project_id = request.project_id.map(ProjectId::from_proto);
- let leader_id = request
- .leader_id
- .ok_or_else(|| anyhow!("invalid leader id"))?
- .into();
- let follower_id = session.connection_id;
-
- session
- .db()
- .await
- .check_room_participants(room_id, leader_id, session.connection_id)
- .await?;
-
- let response_payload = session
- .peer
- .forward_request(session.connection_id, leader_id, request)
- .await?;
- response.send(response_payload)?;
-
- if let Some(project_id) = project_id {
- let room = session
- .db()
- .await
- .follow(room_id, project_id, leader_id, follower_id)
- .await?;
- room_updated(&room, &session.peer);
- }
-
- Ok(())
-}
-
-async fn unfollow(request: proto::Unfollow, session: Session) -> Result<()> {
- let room_id = RoomId::from_proto(request.room_id);
- let project_id = request.project_id.map(ProjectId::from_proto);
- let leader_id = request
- .leader_id
- .ok_or_else(|| anyhow!("invalid leader id"))?
- .into();
- let follower_id = session.connection_id;
-
- session
- .db()
- .await
- .check_room_participants(room_id, leader_id, session.connection_id)
- .await?;
-
- session
- .peer
- .forward_send(session.connection_id, leader_id, request)?;
-
- if let Some(project_id) = project_id {
- let room = session
- .db()
- .await
- .unfollow(room_id, project_id, leader_id, follower_id)
- .await?;
- room_updated(&room, &session.peer);
- }
-
- Ok(())
-}
-
-async fn update_followers(request: proto::UpdateFollowers, session: Session) -> Result<()> {
- let room_id = RoomId::from_proto(request.room_id);
- let database = session.db.lock().await;
-
- let connection_ids = if let Some(project_id) = request.project_id {
- let project_id = ProjectId::from_proto(project_id);
- database
- .project_connection_ids(project_id, session.connection_id)
- .await?
- } else {
- database
- .room_connection_ids(room_id, session.connection_id)
- .await?
- };
-
- // For now, don't send view update messages back to that view's current leader.
- let connection_id_to_omit = request.variant.as_ref().and_then(|variant| match variant {
- proto::update_followers::Variant::UpdateView(payload) => payload.leader_id,
- _ => None,
- });
-
- for follower_peer_id in request.follower_ids.iter().copied() {
- let follower_connection_id = follower_peer_id.into();
- if Some(follower_peer_id) != connection_id_to_omit
- && connection_ids.contains(&follower_connection_id)
- {
- session.peer.forward_send(
- session.connection_id,
- follower_connection_id,
- request.clone(),
- )?;
- }
- }
- Ok(())
-}
-
-async fn get_users(
- request: proto::GetUsers,
- response: Response<proto::GetUsers>,
- session: Session,
-) -> Result<()> {
- let user_ids = request
- .user_ids
- .into_iter()
- .map(UserId::from_proto)
- .collect();
- let users = session
- .db()
- .await
- .get_users_by_ids(user_ids)
- .await?
- .into_iter()
- .map(|user| proto::User {
- id: user.id.to_proto(),
- avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
- github_login: user.github_login,
- })
- .collect();
- response.send(proto::UsersResponse { users })?;
- Ok(())
-}
-
-async fn fuzzy_search_users(
- request: proto::FuzzySearchUsers,
- response: Response<proto::FuzzySearchUsers>,
- session: Session,
-) -> Result<()> {
- let query = request.query;
- let users = match query.len() {
- 0 => vec![],
- 1 | 2 => session
- .db()
- .await
- .get_user_by_github_login(&query)
- .await?
- .into_iter()
- .collect(),
- _ => session.db().await.fuzzy_search_users(&query, 10).await?,
- };
- let users = users
- .into_iter()
- .filter(|user| user.id != session.user_id)
- .map(|user| proto::User {
- id: user.id.to_proto(),
- avatar_url: format!("https://github.com/{}.png?size=128", user.github_login),
- github_login: user.github_login,
- })
- .collect();
- response.send(proto::UsersResponse { users })?;
- Ok(())
-}
-
-async fn request_contact(
- request: proto::RequestContact,
- response: Response<proto::RequestContact>,
- session: Session,
-) -> Result<()> {
- let requester_id = session.user_id;
- let responder_id = UserId::from_proto(request.responder_id);
- if requester_id == responder_id {
- return Err(anyhow!("cannot add yourself as a contact"))?;
- }
-
- let notifications = session
- .db()
- .await
- .send_contact_request(requester_id, responder_id)
- .await?;
-
- // Update outgoing contact requests of requester
- let mut update = proto::UpdateContacts::default();
- update.outgoing_requests.push(responder_id.to_proto());
- for connection_id in session
- .connection_pool()
- .await
- .user_connection_ids(requester_id)
- {
- session.peer.send(connection_id, update.clone())?;
- }
-
- // Update incoming contact requests of responder
- let mut update = proto::UpdateContacts::default();
- update
- .incoming_requests
- .push(proto::IncomingContactRequest {
- requester_id: requester_id.to_proto(),
- });
- let connection_pool = session.connection_pool().await;
- for connection_id in connection_pool.user_connection_ids(responder_id) {
- session.peer.send(connection_id, update.clone())?;
- }
-
- send_notifications(&*connection_pool, &session.peer, notifications);
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn respond_to_contact_request(
- request: proto::RespondToContactRequest,
- response: Response<proto::RespondToContactRequest>,
- session: Session,
-) -> Result<()> {
- let responder_id = session.user_id;
- let requester_id = UserId::from_proto(request.requester_id);
- let db = session.db().await;
- if request.response == proto::ContactRequestResponse::Dismiss as i32 {
- db.dismiss_contact_notification(responder_id, requester_id)
- .await?;
- } else {
- let accept = request.response == proto::ContactRequestResponse::Accept as i32;
-
- let notifications = db
- .respond_to_contact_request(responder_id, requester_id, accept)
- .await?;
- let requester_busy = db.is_user_busy(requester_id).await?;
- let responder_busy = db.is_user_busy(responder_id).await?;
-
- let pool = session.connection_pool().await;
- // Update responder with new contact
- let mut update = proto::UpdateContacts::default();
- if accept {
- update
- .contacts
- .push(contact_for_user(requester_id, requester_busy, &pool));
- }
- update
- .remove_incoming_requests
- .push(requester_id.to_proto());
- for connection_id in pool.user_connection_ids(responder_id) {
- session.peer.send(connection_id, update.clone())?;
- }
-
- // Update requester with new contact
- let mut update = proto::UpdateContacts::default();
- if accept {
- update
- .contacts
- .push(contact_for_user(responder_id, responder_busy, &pool));
- }
- update
- .remove_outgoing_requests
- .push(responder_id.to_proto());
-
- for connection_id in pool.user_connection_ids(requester_id) {
- session.peer.send(connection_id, update.clone())?;
- }
-
- send_notifications(&*pool, &session.peer, notifications);
- }
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn remove_contact(
- request: proto::RemoveContact,
- response: Response<proto::RemoveContact>,
- session: Session,
-) -> Result<()> {
- let requester_id = session.user_id;
- let responder_id = UserId::from_proto(request.user_id);
- let db = session.db().await;
- let (contact_accepted, deleted_notification_id) =
- db.remove_contact(requester_id, responder_id).await?;
-
- let pool = session.connection_pool().await;
- // Update outgoing contact requests of requester
- let mut update = proto::UpdateContacts::default();
- if contact_accepted {
- update.remove_contacts.push(responder_id.to_proto());
- } else {
- update
- .remove_outgoing_requests
- .push(responder_id.to_proto());
- }
- for connection_id in pool.user_connection_ids(requester_id) {
- session.peer.send(connection_id, update.clone())?;
- }
-
- // Update incoming contact requests of responder
- let mut update = proto::UpdateContacts::default();
- if contact_accepted {
- update.remove_contacts.push(requester_id.to_proto());
- } else {
- update
- .remove_incoming_requests
- .push(requester_id.to_proto());
- }
- for connection_id in pool.user_connection_ids(responder_id) {
- session.peer.send(connection_id, update.clone())?;
- if let Some(notification_id) = deleted_notification_id {
- session.peer.send(
- connection_id,
- proto::DeleteNotification {
- notification_id: notification_id.to_proto(),
- },
- )?;
- }
- }
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn create_channel(
- request: proto::CreateChannel,
- response: Response<proto::CreateChannel>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
-
- let parent_id = request.parent_id.map(|id| ChannelId::from_proto(id));
- let CreateChannelResult {
- channel,
- participants_to_update,
- } = db
- .create_channel(&request.name, parent_id, session.user_id)
- .await?;
-
- response.send(proto::CreateChannelResponse {
- channel: Some(channel.to_proto()),
- parent_id: request.parent_id,
- })?;
-
- let connection_pool = session.connection_pool().await;
- for (user_id, channels) in participants_to_update {
- let update = build_channels_update(channels, vec![]);
- for connection_id in connection_pool.user_connection_ids(user_id) {
- if user_id == session.user_id {
- continue;
- }
- session.peer.send(connection_id, update.clone())?;
- }
- }
-
- Ok(())
-}
-
-async fn delete_channel(
- request: proto::DeleteChannel,
- response: Response<proto::DeleteChannel>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
-
- let channel_id = request.channel_id;
- let (removed_channels, member_ids) = db
- .delete_channel(ChannelId::from_proto(channel_id), session.user_id)
- .await?;
- response.send(proto::Ack {})?;
-
- // Notify members of removed channels
- let mut update = proto::UpdateChannels::default();
- update
- .delete_channels
- .extend(removed_channels.into_iter().map(|id| id.to_proto()));
-
- let connection_pool = session.connection_pool().await;
- for member_id in member_ids {
- for connection_id in connection_pool.user_connection_ids(member_id) {
- session.peer.send(connection_id, update.clone())?;
- }
- }
-
- Ok(())
-}
-
-async fn invite_channel_member(
- request: proto::InviteChannelMember,
- response: Response<proto::InviteChannelMember>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let invitee_id = UserId::from_proto(request.user_id);
- let InviteMemberResult {
- channel,
- notifications,
- } = db
- .invite_channel_member(
- channel_id,
- invitee_id,
- session.user_id,
- request.role().into(),
- )
- .await?;
-
- let update = proto::UpdateChannels {
- channel_invitations: vec![channel.to_proto()],
- ..Default::default()
- };
-
- let connection_pool = session.connection_pool().await;
- for connection_id in connection_pool.user_connection_ids(invitee_id) {
- session.peer.send(connection_id, update.clone())?;
- }
-
- send_notifications(&*connection_pool, &session.peer, notifications);
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn remove_channel_member(
- request: proto::RemoveChannelMember,
- response: Response<proto::RemoveChannelMember>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let member_id = UserId::from_proto(request.user_id);
-
- let RemoveChannelMemberResult {
- membership_update,
- notification_id,
- } = db
- .remove_channel_member(channel_id, member_id, session.user_id)
- .await?;
-
- let connection_pool = &session.connection_pool().await;
- notify_membership_updated(
- &connection_pool,
- membership_update,
- member_id,
- &session.peer,
- );
- for connection_id in connection_pool.user_connection_ids(member_id) {
- if let Some(notification_id) = notification_id {
- session
- .peer
- .send(
- connection_id,
- proto::DeleteNotification {
- notification_id: notification_id.to_proto(),
- },
- )
- .trace_err();
- }
- }
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn set_channel_visibility(
- request: proto::SetChannelVisibility,
- response: Response<proto::SetChannelVisibility>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let visibility = request.visibility().into();
-
- let SetChannelVisibilityResult {
- participants_to_update,
- participants_to_remove,
- channels_to_remove,
- } = db
- .set_channel_visibility(channel_id, visibility, session.user_id)
- .await?;
-
- let connection_pool = session.connection_pool().await;
- for (user_id, channels) in participants_to_update {
- let update = build_channels_update(channels, vec![]);
- for connection_id in connection_pool.user_connection_ids(user_id) {
- session.peer.send(connection_id, update.clone())?;
- }
- }
- for user_id in participants_to_remove {
- let update = proto::UpdateChannels {
- delete_channels: channels_to_remove.iter().map(|id| id.to_proto()).collect(),
- ..Default::default()
- };
- for connection_id in connection_pool.user_connection_ids(user_id) {
- session.peer.send(connection_id, update.clone())?;
- }
- }
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn set_channel_member_role(
- request: proto::SetChannelMemberRole,
- response: Response<proto::SetChannelMemberRole>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let member_id = UserId::from_proto(request.user_id);
- let result = db
- .set_channel_member_role(
- channel_id,
- session.user_id,
- member_id,
- request.role().into(),
- )
- .await?;
-
- match result {
- db::SetMemberRoleResult::MembershipUpdated(membership_update) => {
- let connection_pool = session.connection_pool().await;
- notify_membership_updated(
- &connection_pool,
- membership_update,
- member_id,
- &session.peer,
- )
- }
- db::SetMemberRoleResult::InviteUpdated(channel) => {
- let update = proto::UpdateChannels {
- channel_invitations: vec![channel.to_proto()],
- ..Default::default()
- };
-
- for connection_id in session
- .connection_pool()
- .await
- .user_connection_ids(member_id)
- {
- session.peer.send(connection_id, update.clone())?;
- }
- }
- }
-
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn rename_channel(
- request: proto::RenameChannel,
- response: Response<proto::RenameChannel>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let RenameChannelResult {
- channel,
- participants_to_update,
- } = db
- .rename_channel(channel_id, session.user_id, &request.name)
- .await?;
-
- response.send(proto::RenameChannelResponse {
- channel: Some(channel.to_proto()),
- })?;
-
- let connection_pool = session.connection_pool().await;
- for (user_id, channel) in participants_to_update {
- for connection_id in connection_pool.user_connection_ids(user_id) {
- let update = proto::UpdateChannels {
- channels: vec![channel.to_proto()],
- ..Default::default()
- };
-
- session.peer.send(connection_id, update.clone())?;
- }
- }
-
- Ok(())
-}
-
-async fn move_channel(
- request: proto::MoveChannel,
- response: Response<proto::MoveChannel>,
- session: Session,
-) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
- let to = request.to.map(ChannelId::from_proto);
-
- let result = session
- .db()
- .await
- .move_channel(channel_id, to, session.user_id)
- .await?;
-
- notify_channel_moved(result, session).await?;
-
- response.send(Ack {})?;
- Ok(())
-}
-
-async fn notify_channel_moved(result: Option<MoveChannelResult>, session: Session) -> Result<()> {
- let Some(MoveChannelResult {
- participants_to_remove,
- participants_to_update,
- moved_channels,
- }) = result
- else {
- return Ok(());
- };
- let moved_channels: Vec<u64> = moved_channels.iter().map(|id| id.to_proto()).collect();
-
- let connection_pool = session.connection_pool().await;
- for (user_id, channels) in participants_to_update {
- let mut update = build_channels_update(channels, vec![]);
- update.delete_channels = moved_channels.clone();
- for connection_id in connection_pool.user_connection_ids(user_id) {
- session.peer.send(connection_id, update.clone())?;
- }
- }
-
- for user_id in participants_to_remove {
- let update = proto::UpdateChannels {
- delete_channels: moved_channels.clone(),
- ..Default::default()
- };
- for connection_id in connection_pool.user_connection_ids(user_id) {
- session.peer.send(connection_id, update.clone())?;
- }
- }
- Ok(())
-}
-
-async fn get_channel_members(
- request: proto::GetChannelMembers,
- response: Response<proto::GetChannelMembers>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let members = db
- .get_channel_participant_details(channel_id, session.user_id)
- .await?;
- response.send(proto::GetChannelMembersResponse { members })?;
- Ok(())
-}
-
-async fn respond_to_channel_invite(
- request: proto::RespondToChannelInvite,
- response: Response<proto::RespondToChannelInvite>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
- let RespondToChannelInvite {
- membership_update,
- notifications,
- } = db
- .respond_to_channel_invite(channel_id, session.user_id, request.accept)
- .await?;
-
- let connection_pool = session.connection_pool().await;
- if let Some(membership_update) = membership_update {
- notify_membership_updated(
- &connection_pool,
- membership_update,
- session.user_id,
- &session.peer,
- );
- } else {
- let update = proto::UpdateChannels {
- remove_channel_invitations: vec![channel_id.to_proto()],
- ..Default::default()
- };
-
- for connection_id in connection_pool.user_connection_ids(session.user_id) {
- session.peer.send(connection_id, update.clone())?;
- }
- };
-
- send_notifications(&*connection_pool, &session.peer, notifications);
-
- response.send(proto::Ack {})?;
-
- Ok(())
-}
-
-async fn join_channel(
- request: proto::JoinChannel,
- response: Response<proto::JoinChannel>,
- session: Session,
-) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
- join_channel_internal(channel_id, Box::new(response), session).await
-}
-
-trait JoinChannelInternalResponse {
- fn send(self, result: proto::JoinRoomResponse) -> Result<()>;
-}
-impl JoinChannelInternalResponse for Response<proto::JoinChannel> {
- fn send(self, result: proto::JoinRoomResponse) -> Result<()> {
- Response::<proto::JoinChannel>::send(self, result)
- }
-}
-impl JoinChannelInternalResponse for Response<proto::JoinRoom> {
- fn send(self, result: proto::JoinRoomResponse) -> Result<()> {
- Response::<proto::JoinRoom>::send(self, result)
- }
-}
-
-async fn join_channel_internal(
- channel_id: ChannelId,
- response: Box<impl JoinChannelInternalResponse>,
- session: Session,
-) -> Result<()> {
- let joined_room = {
- leave_room_for_session(&session).await?;
- let db = session.db().await;
-
- let (joined_room, membership_updated, role) = db
- .join_channel(
- channel_id,
- session.user_id,
- session.connection_id,
- RELEASE_CHANNEL_NAME.as_str(),
- )
- .await?;
-
- let live_kit_connection_info = session.live_kit_client.as_ref().and_then(|live_kit| {
- let (can_publish, token) = if role == ChannelRole::Guest {
- (
- false,
- live_kit
- .guest_token(
- &joined_room.room.live_kit_room,
- &session.user_id.to_string(),
- )
- .trace_err()?,
- )
- } else {
- (
- true,
- live_kit
- .room_token(
- &joined_room.room.live_kit_room,
- &session.user_id.to_string(),
- )
- .trace_err()?,
- )
- };
-
- Some(LiveKitConnectionInfo {
- server_url: live_kit.url().into(),
- token,
- can_publish,
- })
- });
-
- response.send(proto::JoinRoomResponse {
- room: Some(joined_room.room.clone()),
- channel_id: joined_room.channel_id.map(|id| id.to_proto()),
- live_kit_connection_info,
- })?;
-
- let connection_pool = session.connection_pool().await;
- if let Some(membership_updated) = membership_updated {
- notify_membership_updated(
- &connection_pool,
- membership_updated,
- session.user_id,
- &session.peer,
- );
- }
-
- room_updated(&joined_room.room, &session.peer);
-
- joined_room
- };
-
- channel_updated(
- channel_id,
- &joined_room.room,
- &joined_room.channel_members,
- &session.peer,
- &*session.connection_pool().await,
- );
-
- update_user_contacts(session.user_id, &session).await?;
- Ok(())
-}
-
-async fn join_channel_buffer(
- request: proto::JoinChannelBuffer,
- response: Response<proto::JoinChannelBuffer>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
-
- let open_response = db
- .join_channel_buffer(channel_id, session.user_id, session.connection_id)
- .await?;
-
- let collaborators = open_response.collaborators.clone();
- response.send(open_response)?;
-
- let update = UpdateChannelBufferCollaborators {
- channel_id: channel_id.to_proto(),
- collaborators: collaborators.clone(),
- };
- channel_buffer_updated(
- session.connection_id,
- collaborators
- .iter()
- .filter_map(|collaborator| Some(collaborator.peer_id?.into())),
- &update,
- &session.peer,
- );
-
- Ok(())
-}
-
-async fn update_channel_buffer(
- request: proto::UpdateChannelBuffer,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
-
- let (collaborators, non_collaborators, epoch, version) = db
- .update_channel_buffer(channel_id, session.user_id, &request.operations)
- .await?;
-
- channel_buffer_updated(
- session.connection_id,
- collaborators,
- &proto::UpdateChannelBuffer {
- channel_id: channel_id.to_proto(),
- operations: request.operations,
- },
- &session.peer,
- );
-
- let pool = &*session.connection_pool().await;
-
- broadcast(
- None,
- non_collaborators
- .iter()
- .flat_map(|user_id| pool.user_connection_ids(*user_id)),
- |peer_id| {
- session.peer.send(
- peer_id.into(),
- proto::UpdateChannels {
- unseen_channel_buffer_changes: vec![proto::UnseenChannelBufferChange {
- channel_id: channel_id.to_proto(),
- epoch: epoch as u64,
- version: version.clone(),
- }],
- ..Default::default()
- },
- )
- },
- );
-
- Ok(())
-}
-
-async fn rejoin_channel_buffers(
- request: proto::RejoinChannelBuffers,
- response: Response<proto::RejoinChannelBuffers>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let buffers = db
- .rejoin_channel_buffers(&request.buffers, session.user_id, session.connection_id)
- .await?;
-
- for rejoined_buffer in &buffers {
- let collaborators_to_notify = rejoined_buffer
- .buffer
- .collaborators
- .iter()
- .filter_map(|c| Some(c.peer_id?.into()));
- channel_buffer_updated(
- session.connection_id,
- collaborators_to_notify,
- &proto::UpdateChannelBufferCollaborators {
- channel_id: rejoined_buffer.buffer.channel_id,
- collaborators: rejoined_buffer.buffer.collaborators.clone(),
- },
- &session.peer,
- );
- }
-
- response.send(proto::RejoinChannelBuffersResponse {
- buffers: buffers.into_iter().map(|b| b.buffer).collect(),
- })?;
-
- Ok(())
-}
-
-async fn leave_channel_buffer(
- request: proto::LeaveChannelBuffer,
- response: Response<proto::LeaveChannelBuffer>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
- let channel_id = ChannelId::from_proto(request.channel_id);
-
- let left_buffer = db
- .leave_channel_buffer(channel_id, session.connection_id)
- .await?;
-
- response.send(Ack {})?;
-
- channel_buffer_updated(
- session.connection_id,
- left_buffer.connections,
- &proto::UpdateChannelBufferCollaborators {
- channel_id: channel_id.to_proto(),
- collaborators: left_buffer.collaborators,
- },
- &session.peer,
- );
-
- Ok(())
-}
-
-fn channel_buffer_updated<T: EnvelopedMessage>(
- sender_id: ConnectionId,
- collaborators: impl IntoIterator<Item = ConnectionId>,
- message: &T,
- peer: &Peer,
-) {
- broadcast(Some(sender_id), collaborators.into_iter(), |peer_id| {
- peer.send(peer_id.into(), message.clone())
- });
-}
-
-fn send_notifications(
- connection_pool: &ConnectionPool,
- peer: &Peer,
- notifications: db::NotificationBatch,
-) {
- for (user_id, notification) in notifications {
- for connection_id in connection_pool.user_connection_ids(user_id) {
- if let Err(error) = peer.send(
- connection_id,
- proto::AddNotification {
- notification: Some(notification.clone()),
- },
- ) {
- tracing::error!(
- "failed to send notification to {:?} {}",
- connection_id,
- error
- );
- }
- }
- }
-}
-
-async fn send_channel_message(
- request: proto::SendChannelMessage,
- response: Response<proto::SendChannelMessage>,
- session: Session,
-) -> Result<()> {
- // Validate the message body.
- let body = request.body.trim().to_string();
- if body.len() > MAX_MESSAGE_LEN {
- return Err(anyhow!("message is too long"))?;
- }
- if body.is_empty() {
- return Err(anyhow!("message can't be blank"))?;
- }
-
- // TODO: adjust mentions if body is trimmed
-
- let timestamp = OffsetDateTime::now_utc();
- let nonce = request
- .nonce
- .ok_or_else(|| anyhow!("nonce can't be blank"))?;
-
- let channel_id = ChannelId::from_proto(request.channel_id);
- let CreatedChannelMessage {
- message_id,
- participant_connection_ids,
- channel_members,
- notifications,
- } = session
- .db()
- .await
- .create_channel_message(
- channel_id,
- session.user_id,
- &body,
- &request.mentions,
- timestamp,
- nonce.clone().into(),
- )
- .await?;
- let message = proto::ChannelMessage {
- sender_id: session.user_id.to_proto(),
- id: message_id.to_proto(),
- body,
- mentions: request.mentions,
- timestamp: timestamp.unix_timestamp() as u64,
- nonce: Some(nonce),
- };
- broadcast(
- Some(session.connection_id),
- participant_connection_ids,
- |connection| {
- session.peer.send(
- connection,
- proto::ChannelMessageSent {
- channel_id: channel_id.to_proto(),
- message: Some(message.clone()),
- },
- )
- },
- );
- response.send(proto::SendChannelMessageResponse {
- message: Some(message),
- })?;
-
- let pool = &*session.connection_pool().await;
- broadcast(
- None,
- channel_members
- .iter()
- .flat_map(|user_id| pool.user_connection_ids(*user_id)),
- |peer_id| {
- session.peer.send(
- peer_id.into(),
- proto::UpdateChannels {
- unseen_channel_messages: vec![proto::UnseenChannelMessage {
- channel_id: channel_id.to_proto(),
- message_id: message_id.to_proto(),
- }],
- ..Default::default()
- },
- )
- },
- );
- send_notifications(pool, &session.peer, notifications);
-
- Ok(())
-}
-
-async fn remove_channel_message(
- request: proto::RemoveChannelMessage,
- response: Response<proto::RemoveChannelMessage>,
- session: Session,
-) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
- let message_id = MessageId::from_proto(request.message_id);
- let connection_ids = session
- .db()
- .await
- .remove_channel_message(channel_id, message_id, session.user_id)
- .await?;
- broadcast(Some(session.connection_id), connection_ids, |connection| {
- session.peer.send(connection, request.clone())
- });
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn acknowledge_channel_message(
- request: proto::AckChannelMessage,
- session: Session,
-) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
- let message_id = MessageId::from_proto(request.message_id);
- let notifications = session
- .db()
- .await
- .observe_channel_message(channel_id, session.user_id, message_id)
- .await?;
- send_notifications(
- &*session.connection_pool().await,
- &session.peer,
- notifications,
- );
- Ok(())
-}
-
-async fn acknowledge_buffer_version(
- request: proto::AckBufferOperation,
- session: Session,
-) -> Result<()> {
- let buffer_id = BufferId::from_proto(request.buffer_id);
- session
- .db()
- .await
- .observe_buffer_version(
- buffer_id,
- session.user_id,
- request.epoch as i32,
- &request.version,
- )
- .await?;
- Ok(())
-}
-
-async fn join_channel_chat(
- request: proto::JoinChannelChat,
- response: Response<proto::JoinChannelChat>,
- session: Session,
-) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
-
- let db = session.db().await;
- db.join_channel_chat(channel_id, session.connection_id, session.user_id)
- .await?;
- let messages = db
- .get_channel_messages(channel_id, session.user_id, MESSAGE_COUNT_PER_PAGE, None)
- .await?;
- response.send(proto::JoinChannelChatResponse {
- done: messages.len() < MESSAGE_COUNT_PER_PAGE,
- messages,
- })?;
- Ok(())
-}
-
-async fn leave_channel_chat(request: proto::LeaveChannelChat, session: Session) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
- session
- .db()
- .await
- .leave_channel_chat(channel_id, session.connection_id, session.user_id)
- .await?;
- Ok(())
-}
-
-async fn get_channel_messages(
- request: proto::GetChannelMessages,
- response: Response<proto::GetChannelMessages>,
- session: Session,
-) -> Result<()> {
- let channel_id = ChannelId::from_proto(request.channel_id);
- let messages = session
- .db()
- .await
- .get_channel_messages(
- channel_id,
- session.user_id,
- MESSAGE_COUNT_PER_PAGE,
- Some(MessageId::from_proto(request.before_message_id)),
- )
- .await?;
- response.send(proto::GetChannelMessagesResponse {
- done: messages.len() < MESSAGE_COUNT_PER_PAGE,
- messages,
- })?;
- Ok(())
-}
-
-async fn get_channel_messages_by_id(
- request: proto::GetChannelMessagesById,
- response: Response<proto::GetChannelMessagesById>,
- session: Session,
-) -> Result<()> {
- let message_ids = request
- .message_ids
- .iter()
- .map(|id| MessageId::from_proto(*id))
- .collect::<Vec<_>>();
- let messages = session
- .db()
- .await
- .get_channel_messages_by_id(session.user_id, &message_ids)
- .await?;
- response.send(proto::GetChannelMessagesResponse {
- done: messages.len() < MESSAGE_COUNT_PER_PAGE,
- messages,
- })?;
- Ok(())
-}
-
-async fn get_notifications(
- request: proto::GetNotifications,
- response: Response<proto::GetNotifications>,
- session: Session,
-) -> Result<()> {
- let notifications = session
- .db()
- .await
- .get_notifications(
- session.user_id,
- NOTIFICATION_COUNT_PER_PAGE,
- request
- .before_id
- .map(|id| db::NotificationId::from_proto(id)),
- )
- .await?;
- response.send(proto::GetNotificationsResponse {
- done: notifications.len() < NOTIFICATION_COUNT_PER_PAGE,
- notifications,
- })?;
- Ok(())
-}
-
-async fn mark_notification_as_read(
- request: proto::MarkNotificationRead,
- response: Response<proto::MarkNotificationRead>,
- session: Session,
-) -> Result<()> {
- let database = &session.db().await;
- let notifications = database
- .mark_notification_as_read_by_id(
- session.user_id,
- NotificationId::from_proto(request.notification_id),
- )
- .await?;
- send_notifications(
- &*session.connection_pool().await,
- &session.peer,
- notifications,
- );
- response.send(proto::Ack {})?;
- Ok(())
-}
-
-async fn update_diff_base(request: proto::UpdateDiffBase, session: Session) -> Result<()> {
- let project_id = ProjectId::from_proto(request.project_id);
- let project_connection_ids = session
- .db()
- .await
- .project_connection_ids(project_id, session.connection_id)
- .await?;
- broadcast(
- Some(session.connection_id),
- project_connection_ids.iter().copied(),
- |connection_id| {
- session
- .peer
- .forward_send(session.connection_id, connection_id, request.clone())
- },
- );
- Ok(())
-}
-
-async fn get_private_user_info(
- _request: proto::GetPrivateUserInfo,
- response: Response<proto::GetPrivateUserInfo>,
- session: Session,
-) -> Result<()> {
- let db = session.db().await;
-
- let metrics_id = db.get_user_metrics_id(session.user_id).await?;
- let user = db
- .get_user_by_id(session.user_id)
- .await?
- .ok_or_else(|| anyhow!("user not found"))?;
- let flags = db.get_user_flags(session.user_id).await?;
-
- response.send(proto::GetPrivateUserInfoResponse {
- metrics_id,
- staff: user.admin,
- flags,
- })?;
- Ok(())
-}
-
-fn to_axum_message(message: TungsteniteMessage) -> AxumMessage {
- match message {
- TungsteniteMessage::Text(payload) => AxumMessage::Text(payload),
- TungsteniteMessage::Binary(payload) => AxumMessage::Binary(payload),
- TungsteniteMessage::Ping(payload) => AxumMessage::Ping(payload),
- TungsteniteMessage::Pong(payload) => AxumMessage::Pong(payload),
- TungsteniteMessage::Close(frame) => AxumMessage::Close(frame.map(|frame| AxumCloseFrame {
- code: frame.code.into(),
- reason: frame.reason,
- })),
- }
-}
-
-fn to_tungstenite_message(message: AxumMessage) -> TungsteniteMessage {
- match message {
- AxumMessage::Text(payload) => TungsteniteMessage::Text(payload),
- AxumMessage::Binary(payload) => TungsteniteMessage::Binary(payload),
- AxumMessage::Ping(payload) => TungsteniteMessage::Ping(payload),
- AxumMessage::Pong(payload) => TungsteniteMessage::Pong(payload),
- AxumMessage::Close(frame) => {
- TungsteniteMessage::Close(frame.map(|frame| TungsteniteCloseFrame {
- code: frame.code.into(),
- reason: frame.reason,
- }))
- }
- }
-}
-
-fn notify_membership_updated(
- connection_pool: &ConnectionPool,
- result: MembershipUpdated,
- user_id: UserId,
- peer: &Peer,
-) {
- let mut update = build_channels_update(result.new_channels, vec![]);
- update.delete_channels = result
- .removed_channels
- .into_iter()
- .map(|id| id.to_proto())
- .collect();
- update.remove_channel_invitations = vec![result.channel_id.to_proto()];
-
- for connection_id in connection_pool.user_connection_ids(user_id) {
- peer.send(connection_id, update.clone()).trace_err();
- }
-}
-
-fn build_channels_update(
- channels: ChannelsForUser,
- channel_invites: Vec<db::Channel>,
-) -> proto::UpdateChannels {
- let mut update = proto::UpdateChannels::default();
-
- for channel in channels.channels {
- update.channels.push(channel.to_proto());
- }
-
- update.unseen_channel_buffer_changes = channels.unseen_buffer_changes;
- update.unseen_channel_messages = channels.channel_messages;
-
- for (channel_id, participants) in channels.channel_participants {
- update
- .channel_participants
- .push(proto::ChannelParticipants {
- channel_id: channel_id.to_proto(),
- participant_user_ids: participants.into_iter().map(|id| id.to_proto()).collect(),
- });
- }
-
- for channel in channel_invites {
- update.channel_invitations.push(channel.to_proto());
- }
-
- update
-}
-
-fn build_initial_contacts_update(
- contacts: Vec<db::Contact>,
- pool: &ConnectionPool,
-) -> proto::UpdateContacts {
- let mut update = proto::UpdateContacts::default();
-
- for contact in contacts {
- match contact {
- db::Contact::Accepted { user_id, busy } => {
- update.contacts.push(contact_for_user(user_id, busy, &pool));
- }
- db::Contact::Outgoing { user_id } => update.outgoing_requests.push(user_id.to_proto()),
- db::Contact::Incoming { user_id } => {
- update
- .incoming_requests
- .push(proto::IncomingContactRequest {
- requester_id: user_id.to_proto(),
- })
- }
- }
- }
-
- update
-}
-
-fn contact_for_user(user_id: UserId, busy: bool, pool: &ConnectionPool) -> proto::Contact {
- proto::Contact {
- user_id: user_id.to_proto(),
- online: pool.is_user_online(user_id),
- busy,
- }
-}
-
-fn room_updated(room: &proto::Room, peer: &Peer) {
- broadcast(
- None,
- room.participants
- .iter()
- .filter_map(|participant| Some(participant.peer_id?.into())),
- |peer_id| {
- peer.send(
- peer_id.into(),
- proto::RoomUpdated {
- room: Some(room.clone()),
- },
- )
- },
- );
-}
-
-fn channel_updated(
- channel_id: ChannelId,
- room: &proto::Room,
- channel_members: &[UserId],
- peer: &Peer,
- pool: &ConnectionPool,
-) {
- let participants = room
- .participants
- .iter()
- .map(|p| p.user_id)
- .collect::<Vec<_>>();
-
- broadcast(
- None,
- channel_members
- .iter()
- .flat_map(|user_id| pool.user_connection_ids(*user_id)),
- |peer_id| {
- peer.send(
- peer_id.into(),
- proto::UpdateChannels {
- channel_participants: vec![proto::ChannelParticipants {
- channel_id: channel_id.to_proto(),
- participant_user_ids: participants.clone(),
- }],
- ..Default::default()
- },
- )
- },
- );
-}
-
-async fn update_user_contacts(user_id: UserId, session: &Session) -> Result<()> {
- let db = session.db().await;
-
- let contacts = db.get_contacts(user_id).await?;
- let busy = db.is_user_busy(user_id).await?;
-
- let pool = session.connection_pool().await;
- let updated_contact = contact_for_user(user_id, busy, &pool);
- for contact in contacts {
- if let db::Contact::Accepted {
- user_id: contact_user_id,
- ..
- } = contact
- {
- for contact_conn_id in pool.user_connection_ids(contact_user_id) {
- session
- .peer
- .send(
- contact_conn_id,
- proto::UpdateContacts {
- contacts: vec![updated_contact.clone()],
- remove_contacts: Default::default(),
- incoming_requests: Default::default(),
- remove_incoming_requests: Default::default(),
- outgoing_requests: Default::default(),
- remove_outgoing_requests: Default::default(),
- },
- )
- .trace_err();
- }
- }
- }
- Ok(())
-}
-
-async fn leave_room_for_session(session: &Session) -> Result<()> {
- let mut contacts_to_update = HashSet::default();
-
- let room_id;
- let canceled_calls_to_user_ids;
- let live_kit_room;
- let delete_live_kit_room;
- let room;
- let channel_members;
- let channel_id;
-
- if let Some(mut left_room) = session.db().await.leave_room(session.connection_id).await? {
- contacts_to_update.insert(session.user_id);
-
- for project in left_room.left_projects.values() {
- project_left(project, session);
- }
-
- room_id = RoomId::from_proto(left_room.room.id);
- canceled_calls_to_user_ids = mem::take(&mut left_room.canceled_calls_to_user_ids);
- live_kit_room = mem::take(&mut left_room.room.live_kit_room);
- delete_live_kit_room = left_room.deleted;
- room = mem::take(&mut left_room.room);
- channel_members = mem::take(&mut left_room.channel_members);
- channel_id = left_room.channel_id;
-
- room_updated(&room, &session.peer);
- } else {
- return Ok(());
- }
-
- if let Some(channel_id) = channel_id {
- channel_updated(
- channel_id,
- &room,
- &channel_members,
- &session.peer,
- &*session.connection_pool().await,
- );
- }
-
- {
- let pool = session.connection_pool().await;
- for canceled_user_id in canceled_calls_to_user_ids {
- for connection_id in pool.user_connection_ids(canceled_user_id) {
- session
- .peer
- .send(
- connection_id,
- proto::CallCanceled {
- room_id: room_id.to_proto(),
- },
- )
- .trace_err();
- }
- contacts_to_update.insert(canceled_user_id);
- }
- }
-
- for contact_user_id in contacts_to_update {
- update_user_contacts(contact_user_id, &session).await?;
- }
-
- if let Some(live_kit) = session.live_kit_client.as_ref() {
- live_kit
- .remove_participant(live_kit_room.clone(), session.user_id.to_string())
- .await
- .trace_err();
-
- if delete_live_kit_room {
- live_kit.delete_room(live_kit_room).await.trace_err();
- }
- }
-
- Ok(())
-}
-
-async fn leave_channel_buffers_for_session(session: &Session) -> Result<()> {
- let left_channel_buffers = session
- .db()
- .await
- .leave_channel_buffers(session.connection_id)
- .await?;
-
- for left_buffer in left_channel_buffers {
- channel_buffer_updated(
- session.connection_id,
- left_buffer.connections,
- &proto::UpdateChannelBufferCollaborators {
- channel_id: left_buffer.channel_id.to_proto(),
- collaborators: left_buffer.collaborators,
- },
- &session.peer,
- );
- }
-
- Ok(())
-}
-
-fn project_left(project: &db::LeftProject, session: &Session) {
- for connection_id in &project.connection_ids {
- if project.host_user_id == session.user_id {
- session
- .peer
- .send(
- *connection_id,
- proto::UnshareProject {
- project_id: project.id.to_proto(),
- },
- )
- .trace_err();
- } else {
- session
- .peer
- .send(
- *connection_id,
- proto::RemoveProjectCollaborator {
- project_id: project.id.to_proto(),
- peer_id: Some(session.connection_id.into()),
- },
- )
- .trace_err();
- }
- }
-}
-
-pub trait ResultExt {
- type Ok;
-
- fn trace_err(self) -> Option<Self::Ok>;
-}
-
-impl<T, E> ResultExt for Result<T, E>
-where
- E: std::fmt::Debug,
-{
- type Ok = T;
-
- fn trace_err(self) -> Option<T> {
- match self {
- Ok(value) => Some(value),
- Err(error) => {
- tracing::error!("{:?}", error);
- None
- }
- }
- }
-}
@@ -1,98 +0,0 @@
-use crate::db::UserId;
-use anyhow::{anyhow, Result};
-use collections::{BTreeMap, HashSet};
-use rpc::ConnectionId;
-use serde::Serialize;
-use tracing::instrument;
-
-#[derive(Default, Serialize)]
-pub struct ConnectionPool {
- connections: BTreeMap<ConnectionId, Connection>,
- connected_users: BTreeMap<UserId, ConnectedUser>,
-}
-
-#[derive(Default, Serialize)]
-struct ConnectedUser {
- connection_ids: HashSet<ConnectionId>,
-}
-
-#[derive(Serialize)]
-pub struct Connection {
- pub user_id: UserId,
- pub admin: bool,
-}
-
-impl ConnectionPool {
- pub fn reset(&mut self) {
- self.connections.clear();
- self.connected_users.clear();
- }
-
- #[instrument(skip(self))]
- pub fn add_connection(&mut self, connection_id: ConnectionId, user_id: UserId, admin: bool) {
- self.connections
- .insert(connection_id, Connection { user_id, admin });
- let connected_user = self.connected_users.entry(user_id).or_default();
- connected_user.connection_ids.insert(connection_id);
- }
-
- #[instrument(skip(self))]
- pub fn remove_connection(&mut self, connection_id: ConnectionId) -> Result<()> {
- let connection = self
- .connections
- .get_mut(&connection_id)
- .ok_or_else(|| anyhow!("no such connection"))?;
-
- let user_id = connection.user_id;
- let connected_user = self.connected_users.get_mut(&user_id).unwrap();
- connected_user.connection_ids.remove(&connection_id);
- if connected_user.connection_ids.is_empty() {
- self.connected_users.remove(&user_id);
- }
- self.connections.remove(&connection_id).unwrap();
- Ok(())
- }
-
- pub fn connections(&self) -> impl Iterator<Item = &Connection> {
- self.connections.values()
- }
-
- pub fn user_connection_ids(&self, user_id: UserId) -> impl Iterator<Item = ConnectionId> + '_ {
- self.connected_users
- .get(&user_id)
- .into_iter()
- .map(|state| &state.connection_ids)
- .flatten()
- .copied()
- }
-
- pub fn is_user_online(&self, user_id: UserId) -> bool {
- !self
- .connected_users
- .get(&user_id)
- .unwrap_or(&Default::default())
- .connection_ids
- .is_empty()
- }
-
- #[cfg(test)]
- pub fn check_invariants(&self) {
- for (connection_id, connection) in &self.connections {
- assert!(self
- .connected_users
- .get(&connection.user_id)
- .unwrap()
- .connection_ids
- .contains(connection_id));
- }
-
- for (user_id, state) in &self.connected_users {
- for connection_id in &state.connection_ids {
- assert_eq!(
- self.connections.get(connection_id).unwrap().user_id,
- *user_id
- );
- }
- }
- }
-}
@@ -1,47 +0,0 @@
-use call::Room;
-use gpui::{Model, TestAppContext};
-
-mod channel_buffer_tests;
-mod channel_message_tests;
-mod channel_tests;
-mod editor_tests;
-mod following_tests;
-mod integration_tests;
-mod notification_tests;
-mod random_channel_buffer_tests;
-mod random_project_collaboration_tests;
-mod randomized_test_helpers;
-mod test_server;
-
-pub use randomized_test_helpers::{
- run_randomized_test, save_randomized_test_plan, RandomizedTest, TestError, UserTestPlan,
-};
-pub use test_server::{TestClient, TestServer};
-
-#[derive(Debug, Eq, PartialEq)]
-struct RoomParticipants {
- remote: Vec<String>,
- pending: Vec<String>,
-}
-
-fn room_participants(room: &Model<Room>, cx: &mut TestAppContext) -> RoomParticipants {
- room.read_with(cx, |room, _| {
- let mut remote = room
- .remote_participants()
- .iter()
- .map(|(_, participant)| participant.user.github_login.clone())
- .collect::<Vec<_>>();
- let mut pending = room
- .pending_participants()
- .iter()
- .map(|user| user.github_login.clone())
- .collect::<Vec<_>>();
- remote.sort();
- pending.sort();
- RoomParticipants { remote, pending }
- })
-}
-
-fn channel_id(room: &Model<Room>, cx: &mut TestAppContext) -> Option<u64> {
- cx.read(|cx| room.read(cx).channel_id())
-}
@@ -1,883 +0,0 @@
-use crate::{
- rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
- tests::TestServer,
-};
-use call::ActiveCall;
-use channel::ACKNOWLEDGE_DEBOUNCE_INTERVAL;
-use client::{Collaborator, ParticipantIndex, UserId};
-use collab_ui::channel_view::ChannelView;
-use collections::HashMap;
-use editor::{Anchor, Editor, ToOffset};
-use futures::future;
-use gpui::{BackgroundExecutor, Model, TestAppContext, ViewContext};
-use rpc::{proto::PeerId, RECEIVE_TIMEOUT};
-use serde_json::json;
-use std::ops::Range;
-
-#[gpui::test]
-async fn test_core_channel_buffers(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_id = server
- .make_channel("zed", None, (&client_a, cx_a), &mut [(&client_b, cx_b)])
- .await;
-
- // Client A joins the channel buffer
- let channel_buffer_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
-
- // Client A edits the buffer
- let buffer_a = channel_buffer_a.read_with(cx_a, |buffer, _| buffer.buffer());
- buffer_a.update(cx_a, |buffer, cx| {
- buffer.edit([(0..0, "hello world")], None, cx)
- });
- buffer_a.update(cx_a, |buffer, cx| {
- buffer.edit([(5..5, ", cruel")], None, cx)
- });
- buffer_a.update(cx_a, |buffer, cx| {
- buffer.edit([(0..5, "goodbye")], None, cx)
- });
- buffer_a.update(cx_a, |buffer, cx| buffer.undo(cx));
- assert_eq!(buffer_text(&buffer_a, cx_a), "hello, cruel world");
- executor.run_until_parked();
-
- // Client B joins the channel buffer
- let channel_buffer_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
- channel_buffer_b.read_with(cx_b, |buffer, _| {
- assert_collaborators(
- buffer.collaborators(),
- &[client_a.user_id(), client_b.user_id()],
- );
- });
-
- // Client B sees the correct text, and then edits it
- let buffer_b = channel_buffer_b.read_with(cx_b, |buffer, _| buffer.buffer());
- assert_eq!(
- buffer_b.read_with(cx_b, |buffer, _| buffer.remote_id()),
- buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id())
- );
- assert_eq!(buffer_text(&buffer_b, cx_b), "hello, cruel world");
- buffer_b.update(cx_b, |buffer, cx| {
- buffer.edit([(7..12, "beautiful")], None, cx)
- });
-
- // Both A and B see the new edit
- executor.run_until_parked();
- assert_eq!(buffer_text(&buffer_a, cx_a), "hello, beautiful world");
- assert_eq!(buffer_text(&buffer_b, cx_b), "hello, beautiful world");
-
- // Client A closes the channel buffer.
- cx_a.update(|_| drop(channel_buffer_a));
- executor.run_until_parked();
-
- // Client B sees that client A is gone from the channel buffer.
- channel_buffer_b.read_with(cx_b, |buffer, _| {
- assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
- });
-
- // Client A rejoins the channel buffer
- let _channel_buffer_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
- executor.run_until_parked();
-
- // Sanity test, make sure we saw A rejoining
- channel_buffer_b.read_with(cx_b, |buffer, _| {
- assert_collaborators(
- &buffer.collaborators(),
- &[client_a.user_id(), client_b.user_id()],
- );
- });
-
- // Client A loses connection.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
-
- // Client B observes A disconnect
- channel_buffer_b.read_with(cx_b, |buffer, _| {
- assert_collaborators(&buffer.collaborators(), &[client_b.user_id()]);
- });
-
- // TODO:
- // - Test synchronizing offline updates, what happens to A's channel buffer when A disconnects
- // - Test interaction with channel deletion while buffer is open
-}
-
-#[gpui::test]
-async fn test_channel_notes_participant_indices(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
- cx_c.update(editor::init);
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- client_a
- .fs()
- .insert_tree("/root", json!({"file.txt": "123"}))
- .await;
- let (project_a, worktree_id_a) = client_a.build_local_project("/root", cx_a).await;
- let project_b = client_b.build_empty_local_project(cx_b);
- let project_c = client_c.build_empty_local_project(cx_c);
-
- let (workspace_a, mut cx_a) = client_a.build_workspace(&project_a, cx_a);
- let (workspace_b, mut cx_b) = client_b.build_workspace(&project_b, cx_b);
- let (workspace_c, cx_c) = client_c.build_workspace(&project_c, cx_c);
-
- // Clients A, B, and C open the channel notes
- let channel_view_a = cx_a
- .update(|cx| ChannelView::open(channel_id, workspace_a.clone(), cx))
- .await
- .unwrap();
- let channel_view_b = cx_b
- .update(|cx| ChannelView::open(channel_id, workspace_b.clone(), cx))
- .await
- .unwrap();
- let channel_view_c = cx_c
- .update(|cx| ChannelView::open(channel_id, workspace_c.clone(), cx))
- .await
- .unwrap();
-
- // Clients A, B, and C all insert and select some text
- channel_view_a.update(cx_a, |notes, cx| {
- notes.editor.update(cx, |editor, cx| {
- editor.insert("a", cx);
- editor.change_selections(None, cx, |selections| {
- selections.select_ranges(vec![0..1]);
- });
- });
- });
- executor.run_until_parked();
- channel_view_b.update(cx_b, |notes, cx| {
- notes.editor.update(cx, |editor, cx| {
- editor.move_down(&Default::default(), cx);
- editor.insert("b", cx);
- editor.change_selections(None, cx, |selections| {
- selections.select_ranges(vec![1..2]);
- });
- });
- });
- executor.run_until_parked();
- channel_view_c.update(cx_c, |notes, cx| {
- notes.editor.update(cx, |editor, cx| {
- editor.move_down(&Default::default(), cx);
- editor.insert("c", cx);
- editor.change_selections(None, cx, |selections| {
- selections.select_ranges(vec![2..3]);
- });
- });
- });
-
- // Client A sees clients B and C without assigned colors, because they aren't
- // in a call together.
- executor.run_until_parked();
- channel_view_a.update(cx_a, |notes, cx| {
- notes.editor.update(cx, |editor, cx| {
- assert_remote_selections(editor, &[(None, 1..2), (None, 2..3)], cx);
- });
- });
-
- // Clients A and B join the same call.
- for (call, cx) in [(&active_call_a, &mut cx_a), (&active_call_b, &mut cx_b)] {
- call.update(*cx, |call, cx| call.join_channel(channel_id, cx))
- .await
- .unwrap();
- }
-
- // Clients A and B see each other with two different assigned colors. Client C
- // still doesn't have a color.
- executor.run_until_parked();
- channel_view_a.update(cx_a, |notes, cx| {
- notes.editor.update(cx, |editor, cx| {
- assert_remote_selections(
- editor,
- &[(Some(ParticipantIndex(1)), 1..2), (None, 2..3)],
- cx,
- );
- });
- });
- channel_view_b.update(cx_b, |notes, cx| {
- notes.editor.update(cx, |editor, cx| {
- assert_remote_selections(
- editor,
- &[(Some(ParticipantIndex(0)), 0..1), (None, 2..3)],
- cx,
- );
- });
- });
-
- // Client A shares a project, and client B joins.
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
-
- // Clients A and B open the same file.
- let editor_a = workspace_a
- .update(cx_a, |workspace, cx| {
- workspace.open_path((worktree_id_a, "file.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
- let editor_b = workspace_b
- .update(cx_b, |workspace, cx| {
- workspace.open_path((worktree_id_a, "file.txt"), None, true, cx)
- })
- .await
- .unwrap()
- .downcast::<Editor>()
- .unwrap();
-
- editor_a.update(cx_a, |editor, cx| {
- editor.change_selections(None, cx, |selections| {
- selections.select_ranges(vec![0..1]);
- });
- });
- editor_b.update(cx_b, |editor, cx| {
- editor.change_selections(None, cx, |selections| {
- selections.select_ranges(vec![2..3]);
- });
- });
- executor.run_until_parked();
-
- // Clients A and B see each other with the same colors as in the channel notes.
- editor_a.update(cx_a, |editor, cx| {
- assert_remote_selections(editor, &[(Some(ParticipantIndex(1)), 2..3)], cx);
- });
- editor_b.update(cx_b, |editor, cx| {
- assert_remote_selections(editor, &[(Some(ParticipantIndex(0)), 0..1)], cx);
- });
-}
-
-#[track_caller]
-fn assert_remote_selections(
- editor: &mut Editor,
- expected_selections: &[(Option<ParticipantIndex>, Range<usize>)],
- cx: &mut ViewContext<Editor>,
-) {
- let snapshot = editor.snapshot(cx);
- let range = Anchor::min()..Anchor::max();
- let remote_selections = snapshot
- .remote_selections_in_range(&range, editor.collaboration_hub().unwrap(), cx)
- .map(|s| {
- let start = s.selection.start.to_offset(&snapshot.buffer_snapshot);
- let end = s.selection.end.to_offset(&snapshot.buffer_snapshot);
- (s.participant_index, start..end)
- })
- .collect::<Vec<_>>();
- assert_eq!(
- remote_selections, expected_selections,
- "incorrect remote selections"
- );
-}
-
-#[gpui::test]
-async fn test_multiple_handles_to_channel_buffer(
- deterministic: BackgroundExecutor,
- cx_a: &mut TestAppContext,
-) {
- let mut server = TestServer::start(deterministic.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
-
- let channel_id = server
- .make_channel("the-channel", None, (&client_a, cx_a), &mut [])
- .await;
-
- let channel_buffer_1 = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
- let channel_buffer_2 = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
- let channel_buffer_3 = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx));
-
- // All concurrent tasks for opening a channel buffer return the same model handle.
- let (channel_buffer, channel_buffer_2, channel_buffer_3) =
- future::try_join3(channel_buffer_1, channel_buffer_2, channel_buffer_3)
- .await
- .unwrap();
- let channel_buffer_model_id = channel_buffer.entity_id();
- assert_eq!(channel_buffer, channel_buffer_2);
- assert_eq!(channel_buffer, channel_buffer_3);
-
- channel_buffer.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "hello")], None, cx);
- })
- });
- deterministic.run_until_parked();
-
- cx_a.update(|_| {
- drop(channel_buffer);
- drop(channel_buffer_2);
- drop(channel_buffer_3);
- });
- deterministic.run_until_parked();
-
- // The channel buffer can be reopened after dropping it.
- let channel_buffer = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
- assert_ne!(channel_buffer.entity_id(), channel_buffer_model_id);
- channel_buffer.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, _| {
- assert_eq!(buffer.text(), "hello");
- })
- });
-}
-
-#[gpui::test]
-async fn test_channel_buffer_disconnect(
- deterministic: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(deterministic.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b)],
- )
- .await;
-
- let channel_buffer_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
-
- let channel_buffer_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
-
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
-
- channel_buffer_a.update(cx_a, |buffer, cx| {
- assert_eq!(buffer.channel(cx).unwrap().name, "the-channel");
- assert!(!buffer.is_connected());
- });
-
- deterministic.run_until_parked();
-
- server.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
-
- deterministic.run_until_parked();
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, _| {
- channel_store.remove_channel(channel_id)
- })
- .await
- .unwrap();
- deterministic.run_until_parked();
-
- // Channel buffer observed the deletion
- channel_buffer_b.update(cx_b, |buffer, cx| {
- assert!(buffer.channel(cx).is_none());
- assert!(!buffer.is_connected());
- });
-}
-
-#[gpui::test]
-async fn test_rejoin_channel_buffer(
- deterministic: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(deterministic.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b)],
- )
- .await;
-
- let channel_buffer_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
- let channel_buffer_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
-
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "1")], None, cx);
- })
- });
- deterministic.run_until_parked();
-
- // Client A disconnects.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
-
- // Both clients make an edit.
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(1..1, "2")], None, cx);
- })
- });
- channel_buffer_b.update(cx_b, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "0")], None, cx);
- })
- });
-
- // Both clients see their own edit.
- deterministic.run_until_parked();
- channel_buffer_a.read_with(cx_a, |buffer, cx| {
- assert_eq!(buffer.buffer().read(cx).text(), "12");
- });
- channel_buffer_b.read_with(cx_b, |buffer, cx| {
- assert_eq!(buffer.buffer().read(cx).text(), "01");
- });
-
- // Client A reconnects. Both clients see each other's edits, and see
- // the same collaborators.
- server.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT);
- channel_buffer_a.read_with(cx_a, |buffer, cx| {
- assert_eq!(buffer.buffer().read(cx).text(), "012");
- });
- channel_buffer_b.read_with(cx_b, |buffer, cx| {
- assert_eq!(buffer.buffer().read(cx).text(), "012");
- });
-
- channel_buffer_a.read_with(cx_a, |buffer_a, _| {
- channel_buffer_b.read_with(cx_b, |buffer_b, _| {
- assert_eq!(buffer_a.collaborators(), buffer_b.collaborators());
- });
- });
-}
-
-#[gpui::test]
-async fn test_channel_buffers_and_server_restarts(
- deterministic: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(deterministic.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let channel_buffer_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
- let channel_buffer_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
- let _channel_buffer_c = client_c
- .channel_store()
- .update(cx_c, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
-
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "1")], None, cx);
- })
- });
- deterministic.run_until_parked();
-
- // Client C can't reconnect.
- client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
-
- // Server stops.
- server.reset().await;
- deterministic.advance_clock(RECEIVE_TIMEOUT);
-
- // While the server is down, both clients make an edit.
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(1..1, "2")], None, cx);
- })
- });
- channel_buffer_b.update(cx_b, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "0")], None, cx);
- })
- });
-
- // Server restarts.
- server.start().await.unwrap();
- deterministic.advance_clock(CLEANUP_TIMEOUT);
-
- // Clients reconnects. Clients A and B see each other's edits, and see
- // that client C has disconnected.
- channel_buffer_a.read_with(cx_a, |buffer, cx| {
- assert_eq!(buffer.buffer().read(cx).text(), "012");
- });
- channel_buffer_b.read_with(cx_b, |buffer, cx| {
- assert_eq!(buffer.buffer().read(cx).text(), "012");
- });
-
- channel_buffer_a.read_with(cx_a, |buffer_a, _| {
- channel_buffer_b.read_with(cx_b, |buffer_b, _| {
- assert_collaborators(
- buffer_a.collaborators(),
- &[client_a.user_id(), client_b.user_id()],
- );
- assert_eq!(buffer_a.collaborators(), buffer_b.collaborators());
- });
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_following_to_channel_notes_without_a_shared_project(
- deterministic: BackgroundExecutor,
- mut cx_a: &mut TestAppContext,
- mut cx_b: &mut TestAppContext,
- mut cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(deterministic.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let client_c = server.create_client(cx_c, "user_c").await;
-
- cx_a.update(editor::init);
- cx_b.update(editor::init);
- cx_c.update(editor::init);
- cx_a.update(collab_ui::channel_view::init);
- cx_b.update(collab_ui::channel_view::init);
- cx_c.update(collab_ui::channel_view::init);
-
- let channel_1_id = server
- .make_channel(
- "channel-1",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
- let channel_2_id = server
- .make_channel(
- "channel-2",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- // Clients A, B, and C join a channel.
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
- for (call, cx) in [
- (&active_call_a, &mut cx_a),
- (&active_call_b, &mut cx_b),
- (&active_call_c, &mut cx_c),
- ] {
- call.update(*cx, |call, cx| call.join_channel(channel_1_id, cx))
- .await
- .unwrap();
- }
- deterministic.run_until_parked();
-
- // Clients A, B, and C all open their own unshared projects.
- client_a.fs().insert_tree("/a", json!({})).await;
- client_b.fs().insert_tree("/b", json!({})).await;
- client_c.fs().insert_tree("/c", json!({})).await;
- let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
- let (project_b, _) = client_b.build_local_project("/b", cx_b).await;
- let (project_c, _) = client_b.build_local_project("/c", cx_c).await;
- let (workspace_a, cx_a) = client_a.build_workspace(&project_a, cx_a);
- let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
- let (_workspace_c, _cx_c) = client_c.build_workspace(&project_c, cx_c);
-
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
-
- // Client A opens the notes for channel 1.
- let channel_view_1_a = cx_a
- .update(|cx| ChannelView::open(channel_1_id, workspace_a.clone(), cx))
- .await
- .unwrap();
- channel_view_1_a.update(cx_a, |notes, cx| {
- assert_eq!(notes.channel(cx).unwrap().name, "channel-1");
- notes.editor.update(cx, |editor, cx| {
- editor.insert("Hello from A.", cx);
- editor.change_selections(None, cx, |selections| {
- selections.select_ranges(vec![3..4]);
- });
- });
- });
-
- // Client B follows client A.
- workspace_b
- .update(cx_b, |workspace, cx| {
- workspace
- .start_following(client_a.peer_id().unwrap(), cx)
- .unwrap()
- })
- .await
- .unwrap();
-
- // Client B is taken to the notes for channel 1, with the same
- // text selected as client A.
- deterministic.run_until_parked();
- let channel_view_1_b = workspace_b.update(cx_b, |workspace, cx| {
- assert_eq!(
- workspace.leader_for_pane(workspace.active_pane()),
- Some(client_a.peer_id().unwrap())
- );
- workspace
- .active_item(cx)
- .expect("no active item")
- .downcast::<ChannelView>()
- .expect("active item is not a channel view")
- });
- channel_view_1_b.update(cx_b, |notes, cx| {
- assert_eq!(notes.channel(cx).unwrap().name, "channel-1");
- let editor = notes.editor.read(cx);
- assert_eq!(editor.text(cx), "Hello from A.");
- assert_eq!(editor.selections.ranges::<usize>(cx), &[3..4]);
- });
-
- // Client A opens the notes for channel 2.
- eprintln!("opening -------------------->");
-
- let channel_view_2_a = cx_a
- .update(|cx| ChannelView::open(channel_2_id, workspace_a.clone(), cx))
- .await
- .unwrap();
- channel_view_2_a.update(cx_a, |notes, cx| {
- assert_eq!(notes.channel(cx).unwrap().name, "channel-2");
- });
-
- // Client B is taken to the notes for channel 2.
- deterministic.run_until_parked();
-
- eprintln!("opening <--------------------");
-
- let channel_view_2_b = workspace_b.update(cx_b, |workspace, cx| {
- assert_eq!(
- workspace.leader_for_pane(workspace.active_pane()),
- Some(client_a.peer_id().unwrap())
- );
- workspace
- .active_item(cx)
- .expect("no active item")
- .downcast::<ChannelView>()
- .expect("active item is not a channel view")
- });
- channel_view_2_b.update(cx_b, |notes, cx| {
- assert_eq!(notes.channel(cx).unwrap().name, "channel-2");
- });
-}
-
-#[gpui::test]
-async fn test_channel_buffer_changes(
- deterministic: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(deterministic.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b)],
- )
- .await;
-
- let channel_buffer_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_buffer(channel_id, cx))
- .await
- .unwrap();
-
- // Client A makes an edit, and client B should see that the note has changed.
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "1")], None, cx);
- })
- });
- deterministic.run_until_parked();
-
- let has_buffer_changed = cx_b.update(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_channel_buffer_changed(channel_id)
- .unwrap()
- });
- assert!(has_buffer_changed);
-
- // Opening the buffer should clear the changed flag.
- let project_b = client_b.build_empty_local_project(cx_b);
- let (workspace_b, cx_b) = client_b.build_workspace(&project_b, cx_b);
- let channel_view_b = cx_b
- .update(|cx| ChannelView::open(channel_id, workspace_b.clone(), cx))
- .await
- .unwrap();
- deterministic.run_until_parked();
-
- let has_buffer_changed = cx_b.update(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_channel_buffer_changed(channel_id)
- .unwrap()
- });
- assert!(!has_buffer_changed);
-
- // Editing the channel while the buffer is open should not show that the buffer has changed.
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "2")], None, cx);
- })
- });
- deterministic.run_until_parked();
-
- let has_buffer_changed = cx_b.read(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_channel_buffer_changed(channel_id)
- .unwrap()
- });
- assert!(!has_buffer_changed);
-
- deterministic.advance_clock(ACKNOWLEDGE_DEBOUNCE_INTERVAL);
-
- // Test that the server is tracking things correctly, and we retain our 'not changed'
- // state across a disconnect
- server
- .simulate_long_connection_interruption(client_b.peer_id().unwrap(), deterministic.clone());
- let has_buffer_changed = cx_b.read(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_channel_buffer_changed(channel_id)
- .unwrap()
- });
- assert!(!has_buffer_changed);
-
- // Closing the buffer should re-enable change tracking
- cx_b.update(|cx| {
- workspace_b.update(cx, |workspace, cx| {
- workspace.close_all_items_and_panes(&Default::default(), cx)
- });
-
- drop(channel_view_b)
- });
-
- deterministic.run_until_parked();
-
- channel_buffer_a.update(cx_a, |buffer, cx| {
- buffer.buffer().update(cx, |buffer, cx| {
- buffer.edit([(0..0, "3")], None, cx);
- })
- });
- deterministic.run_until_parked();
-
- let has_buffer_changed = cx_b.read(|cx| {
- client_b
- .channel_store()
- .read(cx)
- .has_channel_buffer_changed(channel_id)
- .unwrap()
- });
- assert!(has_buffer_changed);
-}
-
-#[track_caller]
-fn assert_collaborators(collaborators: &HashMap<PeerId, Collaborator>, ids: &[Option<UserId>]) {
- let mut user_ids = collaborators
- .values()
- .map(|collaborator| collaborator.user_id)
- .collect::<Vec<_>>();
- user_ids.sort();
- assert_eq!(
- user_ids,
- ids.into_iter().map(|id| id.unwrap()).collect::<Vec<_>>()
- );
-}
-
-fn buffer_text(channel_buffer: &Model<language::Buffer>, cx: &mut TestAppContext) -> String {
- channel_buffer.read_with(cx, |buffer, _| buffer.text())
-}
@@ -1,408 +0,0 @@
-use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
-use channel::{ChannelChat, ChannelMessageId, MessageParams};
-use gpui::{BackgroundExecutor, Model, TestAppContext};
-use rpc::Notification;
-
-#[gpui::test]
-async fn test_basic_channel_messages(
- executor: BackgroundExecutor,
- mut cx_a: &mut TestAppContext,
- mut cx_b: &mut TestAppContext,
- mut cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let channel_chat_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
- let channel_chat_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
-
- let message_id = channel_chat_a
- .update(cx_a, |c, cx| {
- c.send_message(
- MessageParams {
- text: "hi @user_c!".into(),
- mentions: vec![(3..10, client_c.id())],
- },
- cx,
- )
- .unwrap()
- })
- .await
- .unwrap();
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
- .await
- .unwrap();
-
- executor.run_until_parked();
- channel_chat_b
- .update(cx_b, |c, cx| c.send_message("three".into(), cx).unwrap())
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- let channel_chat_c = client_c
- .channel_store()
- .update(cx_c, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
-
- for (chat, cx) in [
- (&channel_chat_a, &mut cx_a),
- (&channel_chat_b, &mut cx_b),
- (&channel_chat_c, &mut cx_c),
- ] {
- chat.update(*cx, |c, _| {
- assert_eq!(
- c.messages()
- .iter()
- .map(|m| (m.body.as_str(), m.mentions.as_slice()))
- .collect::<Vec<_>>(),
- vec![
- ("hi @user_c!", [(3..10, client_c.id())].as_slice()),
- ("two", &[]),
- ("three", &[])
- ],
- "results for user {}",
- c.client().id(),
- );
- });
- }
-
- client_c.notification_store().update(cx_c, |store, _| {
- assert_eq!(store.notification_count(), 2);
- assert_eq!(store.unread_notification_count(), 1);
- assert_eq!(
- store.notification_at(0).unwrap().notification,
- Notification::ChannelMessageMention {
- message_id,
- sender_id: client_a.id(),
- channel_id,
- }
- );
- assert_eq!(
- store.notification_at(1).unwrap().notification,
- Notification::ChannelInvitation {
- channel_id,
- channel_name: "the-channel".to_string(),
- inviter_id: client_a.id()
- }
- );
- });
-}
-
-#[gpui::test]
-async fn test_rejoin_channel_chat(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b)],
- )
- .await;
-
- let channel_chat_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
- let channel_chat_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
-
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
- .await
- .unwrap();
- channel_chat_b
- .update(cx_b, |c, cx| c.send_message("two".into(), cx).unwrap())
- .await
- .unwrap();
-
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
-
- // While client A is disconnected, clients A and B both send new messages.
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap())
- .await
- .unwrap_err();
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("four".into(), cx).unwrap())
- .await
- .unwrap_err();
- channel_chat_b
- .update(cx_b, |c, cx| c.send_message("five".into(), cx).unwrap())
- .await
- .unwrap();
- channel_chat_b
- .update(cx_b, |c, cx| c.send_message("six".into(), cx).unwrap())
- .await
- .unwrap();
-
- // Client A reconnects.
- server.allow_connections();
- executor.advance_clock(RECONNECT_TIMEOUT);
-
- // Client A fetches the messages that were sent while they were disconnected
- // and resends their own messages which failed to send.
- let expected_messages = &["one", "two", "five", "six", "three", "four"];
- assert_messages(&channel_chat_a, expected_messages, cx_a);
- assert_messages(&channel_chat_b, expected_messages, cx_b);
-}
-
-#[gpui::test]
-async fn test_remove_channel_message(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- let channel_id = server
- .make_channel(
- "the-channel",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let channel_chat_a = client_a
- .channel_store()
- .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
- let channel_chat_b = client_b
- .channel_store()
- .update(cx_b, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
-
- // Client A sends some messages.
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
- .await
- .unwrap();
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
- .await
- .unwrap();
- channel_chat_a
- .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap())
- .await
- .unwrap();
-
- // Clients A and B see all of the messages.
- executor.run_until_parked();
- let expected_messages = &["one", "two", "three"];
- assert_messages(&channel_chat_a, expected_messages, cx_a);
- assert_messages(&channel_chat_b, expected_messages, cx_b);
-
- // Client A deletes one of their messages.
- channel_chat_a
- .update(cx_a, |c, cx| {
- let ChannelMessageId::Saved(id) = c.message(1).id else {
- panic!("message not saved")
- };
- c.remove_message(id, cx)
- })
- .await
- .unwrap();
-
- // Client B sees that the message is gone.
- executor.run_until_parked();
- let expected_messages = &["one", "three"];
- assert_messages(&channel_chat_a, expected_messages, cx_a);
- assert_messages(&channel_chat_b, expected_messages, cx_b);
-
- // Client C joins the channel chat, and does not see the deleted message.
- let channel_chat_c = client_c
- .channel_store()
- .update(cx_c, |store, cx| store.open_channel_chat(channel_id, cx))
- .await
- .unwrap();
- assert_messages(&channel_chat_c, expected_messages, cx_c);
-}
-
-#[track_caller]
-fn assert_messages(chat: &Model<ChannelChat>, messages: &[&str], cx: &mut TestAppContext) {
- // todo!(don't directly borrow here)
- assert_eq!(
- chat.read_with(cx, |chat, _| {
- chat.messages()
- .iter()
- .map(|m| m.body.clone())
- .collect::<Vec<_>>()
- }),
- messages
- );
-}
-
-//todo!(collab_ui)
-// #[gpui::test]
-// async fn test_channel_message_changes(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// ) {
-// let mut server = TestServer::start(&executor).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-
-// let channel_id = server
-// .make_channel(
-// "the-channel",
-// None,
-// (&client_a, cx_a),
-// &mut [(&client_b, cx_b)],
-// )
-// .await;
-
-// // Client A sends a message, client B should see that there is a new message.
-// let channel_chat_a = client_a
-// .channel_store()
-// .update(cx_a, |store, cx| store.open_channel_chat(channel_id, cx))
-// .await
-// .unwrap();
-
-// channel_chat_a
-// .update(cx_a, |c, cx| c.send_message("one".into(), cx).unwrap())
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-
-// let b_has_messages = cx_b.read_with(|cx| {
-// client_b
-// .channel_store()
-// .read(cx)
-// .has_new_messages(channel_id)
-// .unwrap()
-// });
-
-// assert!(b_has_messages);
-
-// // Opening the chat should clear the changed flag.
-// cx_b.update(|cx| {
-// collab_ui::init(&client_b.app_state, cx);
-// });
-// let project_b = client_b.build_empty_local_project(cx_b);
-// let workspace_b = client_b.build_workspace(&project_b, cx_b).root(cx_b);
-// let chat_panel_b = workspace_b.update(cx_b, |workspace, cx| ChatPanel::new(workspace, cx));
-// chat_panel_b
-// .update(cx_b, |chat_panel, cx| {
-// chat_panel.set_active(true, cx);
-// chat_panel.select_channel(channel_id, None, cx)
-// })
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-
-// let b_has_messages = cx_b.read_with(|cx| {
-// client_b
-// .channel_store()
-// .read(cx)
-// .has_new_messages(channel_id)
-// .unwrap()
-// });
-
-// assert!(!b_has_messages);
-
-// // Sending a message while the chat is open should not change the flag.
-// channel_chat_a
-// .update(cx_a, |c, cx| c.send_message("two".into(), cx).unwrap())
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-
-// let b_has_messages = cx_b.read_with(|cx| {
-// client_b
-// .channel_store()
-// .read(cx)
-// .has_new_messages(channel_id)
-// .unwrap()
-// });
-
-// assert!(!b_has_messages);
-
-// // Sending a message while the chat is closed should change the flag.
-// chat_panel_b.update(cx_b, |chat_panel, cx| {
-// chat_panel.set_active(false, cx);
-// });
-
-// // Sending a message while the chat is open should not change the flag.
-// channel_chat_a
-// .update(cx_a, |c, cx| c.send_message("three".into(), cx).unwrap())
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-
-// let b_has_messages = cx_b.read_with(|cx| {
-// client_b
-// .channel_store()
-// .read(cx)
-// .has_new_messages(channel_id)
-// .unwrap()
-// });
-
-// assert!(b_has_messages);
-
-// // Closing the chat should re-enable change tracking
-// cx_b.update(|_| drop(chat_panel_b));
-
-// channel_chat_a
-// .update(cx_a, |c, cx| c.send_message("four".into(), cx).unwrap())
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-
-// let b_has_messages = cx_b.read_with(|cx| {
-// client_b
-// .channel_store()
-// .read(cx)
-// .has_new_messages(channel_id)
-// .unwrap()
-// });
-
-// assert!(b_has_messages);
-// }
@@ -1,1543 +0,0 @@
-use crate::{
- db::{self, UserId},
- rpc::RECONNECT_TIMEOUT,
- tests::{room_participants, RoomParticipants, TestServer},
-};
-use call::ActiveCall;
-use channel::{ChannelId, ChannelMembership, ChannelStore};
-use client::User;
-use futures::future::try_join_all;
-use gpui::{BackgroundExecutor, Model, SharedString, TestAppContext};
-use rpc::{
- proto::{self, ChannelRole},
- RECEIVE_TIMEOUT,
-};
-use std::sync::Arc;
-
-#[gpui::test]
-async fn test_core_channels(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channel_a_id = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("channel-a", None, cx)
- })
- .await
- .unwrap();
- let channel_b_id = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("channel-b", Some(channel_a_id), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_channels(
- client_a.channel_store(),
- cx_a,
- &[
- ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- depth: 0,
- role: ChannelRole::Admin,
- },
- ExpectedChannel {
- id: channel_b_id,
- name: "channel-b".into(),
- depth: 1,
- role: ChannelRole::Admin,
- },
- ],
- );
-
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channels, _| {
- assert!(channels.ordered_channels().collect::<Vec<_>>().is_empty())
- })
- });
-
- // Invite client B to channel A as client A.
- client_a
- .channel_store()
- .update(cx_a, |store, cx| {
- assert!(!store.has_pending_channel_invite(channel_a_id, client_b.user_id().unwrap()));
-
- let invite = store.invite_member(
- channel_a_id,
- client_b.user_id().unwrap(),
- proto::ChannelRole::Member,
- cx,
- );
-
- // Make sure we're synchronously storing the pending invite
- assert!(store.has_pending_channel_invite(channel_a_id, client_b.user_id().unwrap()));
- invite
- })
- .await
- .unwrap();
-
- // Client A sees that B has been invited.
- executor.run_until_parked();
- assert_channel_invitations(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- depth: 0,
- role: ChannelRole::Member,
- }],
- );
-
- let members = client_a
- .channel_store()
- .update(cx_a, |store, cx| {
- assert!(!store.has_pending_channel_invite(channel_a_id, client_b.user_id().unwrap()));
- store.get_channel_member_details(channel_a_id, cx)
- })
- .await
- .unwrap();
- assert_members_eq(
- &members,
- &[
- (
- client_a.user_id().unwrap(),
- proto::ChannelRole::Admin,
- proto::channel_member::Kind::Member,
- ),
- (
- client_b.user_id().unwrap(),
- proto::ChannelRole::Member,
- proto::channel_member::Kind::Invitee,
- ),
- ],
- );
-
- // Client B accepts the invitation.
- client_b
- .channel_store()
- .update(cx_b, |channels, cx| {
- channels.respond_to_channel_invite(channel_a_id, true, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
-
- // Client B now sees that they are a member of channel A and its existing subchannels.
- assert_channel_invitations(client_b.channel_store(), cx_b, &[]);
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[
- ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- role: ChannelRole::Member,
- depth: 0,
- },
- ExpectedChannel {
- id: channel_b_id,
- name: "channel-b".into(),
- role: ChannelRole::Member,
- depth: 1,
- },
- ],
- );
-
- let channel_c_id = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("channel-c", Some(channel_b_id), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[
- ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- role: ChannelRole::Member,
- depth: 0,
- },
- ExpectedChannel {
- id: channel_b_id,
- name: "channel-b".into(),
- role: ChannelRole::Member,
- depth: 1,
- },
- ExpectedChannel {
- id: channel_c_id,
- name: "channel-c".into(),
- role: ChannelRole::Member,
- depth: 2,
- },
- ],
- );
-
- // Update client B's membership to channel A to be an admin.
- client_a
- .channel_store()
- .update(cx_a, |store, cx| {
- store.set_member_role(
- channel_a_id,
- client_b.user_id().unwrap(),
- proto::ChannelRole::Admin,
- cx,
- )
- })
- .await
- .unwrap();
- executor.run_until_parked();
-
- // Observe that client B is now an admin of channel A, and that
- // their admin priveleges extend to subchannels of channel A.
- assert_channel_invitations(client_b.channel_store(), cx_b, &[]);
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[
- ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- depth: 0,
- role: ChannelRole::Admin,
- },
- ExpectedChannel {
- id: channel_b_id,
- name: "channel-b".into(),
- depth: 1,
- role: ChannelRole::Admin,
- },
- ExpectedChannel {
- id: channel_c_id,
- name: "channel-c".into(),
- depth: 2,
- role: ChannelRole::Admin,
- },
- ],
- );
-
- // Client A deletes the channel, deletion also deletes subchannels.
- client_a
- .channel_store()
- .update(cx_a, |channel_store, _| {
- channel_store.remove_channel(channel_b_id)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_channels(
- client_a.channel_store(),
- cx_a,
- &[ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- depth: 0,
- role: ChannelRole::Admin,
- }],
- );
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- depth: 0,
- role: ChannelRole::Admin,
- }],
- );
-
- // Remove client B
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.remove_member(channel_a_id, client_b.user_id().unwrap(), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // Client A still has their channel
- assert_channels(
- client_a.channel_store(),
- cx_a,
- &[ExpectedChannel {
- id: channel_a_id,
- name: "channel-a".into(),
- depth: 0,
- role: ChannelRole::Admin,
- }],
- );
-
- // Client B no longer has access to the channel
- assert_channels(client_b.channel_store(), cx_b, &[]);
-
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
-
- server
- .app_state
- .db
- .rename_channel(
- db::ChannelId::from_proto(channel_a_id),
- UserId::from_proto(client_a.id()),
- "channel-a-renamed",
- )
- .await
- .unwrap();
-
- server.allow_connections();
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- assert_channels(
- client_a.channel_store(),
- cx_a,
- &[ExpectedChannel {
- id: channel_a_id,
- name: "channel-a-renamed".into(),
- depth: 0,
- role: ChannelRole::Admin,
- }],
- );
-}
-
-#[track_caller]
-fn assert_participants_eq(participants: &[Arc<User>], expected_partitipants: &[u64]) {
- assert_eq!(
- participants.iter().map(|p| p.id).collect::<Vec<_>>(),
- expected_partitipants
- );
-}
-
-#[track_caller]
-fn assert_members_eq(
- members: &[ChannelMembership],
- expected_members: &[(u64, proto::ChannelRole, proto::channel_member::Kind)],
-) {
- assert_eq!(
- members
- .iter()
- .map(|member| (member.user.id, member.role, member.kind))
- .collect::<Vec<_>>(),
- expected_members
- );
-}
-
-#[gpui::test]
-async fn test_joining_channel_ancestor_member(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
-
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let parent_id = server
- .make_channel("parent", None, (&client_a, cx_a), &mut [(&client_b, cx_b)])
- .await;
-
- let sub_id = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("sub_channel", Some(parent_id), cx)
- })
- .await
- .unwrap();
-
- let active_call_b = cx_b.read(ActiveCall::global);
-
- assert!(active_call_b
- .update(cx_b, |active_call, cx| active_call.join_channel(sub_id, cx))
- .await
- .is_ok());
-}
-
-#[gpui::test]
-async fn test_channel_room(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- let zed_id = server
- .make_channel(
- "zed",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- active_call_a
- .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
- .await
- .unwrap();
-
- // Give everyone a chance to observe user A joining
- executor.run_until_parked();
- let room_a =
- cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone()));
- cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected())));
-
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
- })
- });
-
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- id: zed_id,
- name: "zed".into(),
- depth: 0,
- role: ChannelRole::Member,
- }],
- );
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
- })
- });
-
- cx_c.read(|cx| {
- client_c.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
- })
- });
-
- active_call_b
- .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
- })
- });
-
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
- })
- });
-
- cx_c.read(|cx| {
- client_c.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
- })
- });
-
- let room_a =
- cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone()));
- cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected())));
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec![]
- }
- );
-
- let room_b =
- cx_b.read(|cx| active_call_b.read_with(cx, |call, _| call.room().unwrap().clone()));
- cx_b.read(|cx| room_b.read_with(cx, |room, _| assert!(room.is_connected())));
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec![]
- }
- );
-
- // Make sure that leaving and rejoining works
-
- active_call_a
- .update(cx_a, |active_call, cx| active_call.hang_up(cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_b.user_id().unwrap()],
- );
- })
- });
-
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_b.user_id().unwrap()],
- );
- })
- });
-
- cx_c.read(|cx| {
- client_c.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_b.user_id().unwrap()],
- );
- })
- });
-
- active_call_b
- .update(cx_b, |active_call, cx| active_call.hang_up(cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
- })
- });
-
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
- })
- });
-
- cx_c.read(|cx| {
- client_c.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
- })
- });
-
- active_call_a
- .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
- .await
- .unwrap();
-
- active_call_b
- .update(cx_b, |active_call, cx| active_call.join_channel(zed_id, cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- let room_a =
- cx_a.read(|cx| active_call_a.read_with(cx, |call, _| call.room().unwrap().clone()));
- cx_a.read(|cx| room_a.read_with(cx, |room, _| assert!(room.is_connected())));
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec![]
- }
- );
-
- let room_b =
- cx_b.read(|cx| active_call_b.read_with(cx, |call, _| call.room().unwrap().clone()));
- cx_b.read(|cx| room_b.read_with(cx, |room, _| assert!(room.is_connected())));
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec![]
- }
- );
-}
-
-#[gpui::test]
-async fn test_channel_jumping(executor: BackgroundExecutor, cx_a: &mut TestAppContext) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
-
- let zed_id = server
- .make_channel("zed", None, (&client_a, cx_a), &mut [])
- .await;
- let rust_id = server
- .make_channel("rust", None, (&client_a, cx_a), &mut [])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
-
- active_call_a
- .update(cx_a, |active_call, cx| active_call.join_channel(zed_id, cx))
- .await
- .unwrap();
-
- // Give everything a chance to observe user A joining
- executor.run_until_parked();
-
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(zed_id),
- &[client_a.user_id().unwrap()],
- );
- assert_participants_eq(channels.channel_participants(rust_id), &[]);
- })
- });
-
- active_call_a
- .update(cx_a, |active_call, cx| {
- active_call.join_channel(rust_id, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(channels.channel_participants(zed_id), &[]);
- assert_participants_eq(
- channels.channel_participants(rust_id),
- &[client_a.user_id().unwrap()],
- );
- })
- });
-}
-
-#[gpui::test]
-async fn test_permissions_update_while_invited(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let rust_id = server
- .make_channel("rust", None, (&client_a, cx_a), &mut [])
- .await;
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.invite_member(
- rust_id,
- client_b.user_id().unwrap(),
- proto::ChannelRole::Member,
- cx,
- )
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- assert_channel_invitations(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- depth: 0,
- id: rust_id,
- name: "rust".into(),
- role: ChannelRole::Member,
- }],
- );
- assert_channels(client_b.channel_store(), cx_b, &[]);
-
- // Update B's invite before they've accepted it
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_member_role(
- rust_id,
- client_b.user_id().unwrap(),
- proto::ChannelRole::Admin,
- cx,
- )
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- assert_channel_invitations(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- depth: 0,
- id: rust_id,
- name: "rust".into(),
- role: ChannelRole::Member,
- }],
- );
- assert_channels(client_b.channel_store(), cx_b, &[]);
-}
-
-#[gpui::test]
-async fn test_channel_rename(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let rust_id = server
- .make_channel("rust", None, (&client_a, cx_a), &mut [(&client_b, cx_b)])
- .await;
-
- // Rename the channel
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.rename(rust_id, "#rust-archive", cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // Client A sees the channel with its new name.
- assert_channels(
- client_a.channel_store(),
- cx_a,
- &[ExpectedChannel {
- depth: 0,
- id: rust_id,
- name: "rust-archive".into(),
- role: ChannelRole::Admin,
- }],
- );
-
- // Client B sees the channel with its new name.
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- depth: 0,
- id: rust_id,
- name: "rust-archive".into(),
- role: ChannelRole::Member,
- }],
- );
-}
-
-#[gpui::test]
-async fn test_call_from_channel(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- let channel_id = server
- .make_channel(
- "x",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- active_call_a
- .update(cx_a, |call, cx| call.join_channel(channel_id, cx))
- .await
- .unwrap();
-
- // Client A calls client B while in the channel.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- // Client B accepts the call.
- executor.run_until_parked();
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- // Client B sees that they are now in the channel
- executor.run_until_parked();
- cx_b.read(|cx| {
- active_call_b.read_with(cx, |call, cx| {
- assert_eq!(call.channel_id(cx), Some(channel_id));
- })
- });
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(channel_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
- })
- });
-
- // Clients A and C also see that client B is in the channel.
- cx_a.read(|cx| {
- client_a.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(channel_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
- })
- });
- cx_c.read(|cx| {
- client_c.channel_store().read_with(cx, |channels, _| {
- assert_participants_eq(
- channels.channel_participants(channel_id),
- &[client_a.user_id().unwrap(), client_b.user_id().unwrap()],
- );
- })
- });
-}
-
-#[gpui::test]
-async fn test_lost_channel_creation(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- let channel_id = server
- .make_channel("x", None, (&client_a, cx_a), &mut [])
- .await;
-
- // Invite a member
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.invite_member(
- channel_id,
- client_b.user_id().unwrap(),
- proto::ChannelRole::Member,
- cx,
- )
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // Sanity check, B has the invitation
- assert_channel_invitations(
- client_b.channel_store(),
- cx_b,
- &[ExpectedChannel {
- depth: 0,
- id: channel_id,
- name: "x".into(),
- role: ChannelRole::Member,
- }],
- );
-
- // A creates a subchannel while the invite is still pending.
- let subchannel_id = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("subchannel", Some(channel_id), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // Make sure A sees their new channel
- assert_channels(
- client_a.channel_store(),
- cx_a,
- &[
- ExpectedChannel {
- depth: 0,
- id: channel_id,
- name: "x".into(),
- role: ChannelRole::Admin,
- },
- ExpectedChannel {
- depth: 1,
- id: subchannel_id,
- name: "subchannel".into(),
- role: ChannelRole::Admin,
- },
- ],
- );
-
- // Client B accepts the invite
- client_b
- .channel_store()
- .update(cx_b, |channel_store, cx| {
- channel_store.respond_to_channel_invite(channel_id, true, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // Client B should now see the channel
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[
- ExpectedChannel {
- depth: 0,
- id: channel_id,
- name: "x".into(),
- role: ChannelRole::Member,
- },
- ExpectedChannel {
- depth: 1,
- id: subchannel_id,
- name: "subchannel".into(),
- role: ChannelRole::Member,
- },
- ],
- );
-}
-
-#[gpui::test]
-async fn test_channel_link_notifications(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- let user_b = client_b.user_id().unwrap();
- let user_c = client_c.user_id().unwrap();
-
- let channels = server
- .make_channel_tree(&[("zed", None)], (&client_a, cx_a))
- .await;
- let zed_channel = channels[0];
-
- try_join_all(client_a.channel_store().update(cx_a, |channel_store, cx| {
- [
- channel_store.set_channel_visibility(zed_channel, proto::ChannelVisibility::Public, cx),
- channel_store.invite_member(zed_channel, user_b, proto::ChannelRole::Member, cx),
- channel_store.invite_member(zed_channel, user_c, proto::ChannelRole::Guest, cx),
- ]
- }))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- client_b
- .channel_store()
- .update(cx_b, |channel_store, cx| {
- channel_store.respond_to_channel_invite(zed_channel, true, cx)
- })
- .await
- .unwrap();
-
- client_c
- .channel_store()
- .update(cx_c, |channel_store, cx| {
- channel_store.respond_to_channel_invite(zed_channel, true, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // we have an admin (a), member (b) and guest (c) all part of the zed channel.
-
- // create a new private channel, make it public, and move it under the previous one, and verify it shows for b and not c
- let active_channel = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("active", Some(zed_channel), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // the new channel shows for b and not c
- assert_channels_list_shape(
- client_a.channel_store(),
- cx_a,
- &[(zed_channel, 0), (active_channel, 1)],
- );
- assert_channels_list_shape(
- client_b.channel_store(),
- cx_b,
- &[(zed_channel, 0), (active_channel, 1)],
- );
- assert_channels_list_shape(client_c.channel_store(), cx_c, &[(zed_channel, 0)]);
-
- let vim_channel = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("vim", None, cx)
- })
- .await
- .unwrap();
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_channel_visibility(vim_channel, proto::ChannelVisibility::Public, cx)
- })
- .await
- .unwrap();
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.move_channel(vim_channel, Some(active_channel), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // the new channel shows for b and c
- assert_channels_list_shape(
- client_a.channel_store(),
- cx_a,
- &[(zed_channel, 0), (active_channel, 1), (vim_channel, 2)],
- );
- assert_channels_list_shape(
- client_b.channel_store(),
- cx_b,
- &[(zed_channel, 0), (active_channel, 1), (vim_channel, 2)],
- );
- assert_channels_list_shape(
- client_c.channel_store(),
- cx_c,
- &[(zed_channel, 0), (vim_channel, 1)],
- );
-
- let helix_channel = client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.create_channel("helix", None, cx)
- })
- .await
- .unwrap();
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.move_channel(helix_channel, Some(vim_channel), cx)
- })
- .await
- .unwrap();
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_channel_visibility(
- helix_channel,
- proto::ChannelVisibility::Public,
- cx,
- )
- })
- .await
- .unwrap();
-
- // the new channel shows for b and c
- assert_channels_list_shape(
- client_b.channel_store(),
- cx_b,
- &[
- (zed_channel, 0),
- (active_channel, 1),
- (vim_channel, 2),
- (helix_channel, 3),
- ],
- );
- assert_channels_list_shape(
- client_c.channel_store(),
- cx_c,
- &[(zed_channel, 0), (vim_channel, 1), (helix_channel, 2)],
- );
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_channel_visibility(vim_channel, proto::ChannelVisibility::Members, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // the members-only channel is still shown for c, but hidden for b
- assert_channels_list_shape(
- client_b.channel_store(),
- cx_b,
- &[
- (zed_channel, 0),
- (active_channel, 1),
- (vim_channel, 2),
- (helix_channel, 3),
- ],
- );
- cx_b.read(|cx| {
- client_b.channel_store().read_with(cx, |channel_store, _| {
- assert_eq!(
- channel_store
- .channel_for_id(vim_channel)
- .unwrap()
- .visibility,
- proto::ChannelVisibility::Members
- )
- })
- });
-
- assert_channels_list_shape(client_c.channel_store(), cx_c, &[(zed_channel, 0)]);
-}
-
-#[gpui::test]
-async fn test_channel_membership_notifications(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_c").await;
-
- let user_b = client_b.user_id().unwrap();
-
- let channels = server
- .make_channel_tree(
- &[
- ("zed", None),
- ("active", Some("zed")),
- ("vim", Some("active")),
- ],
- (&client_a, cx_a),
- )
- .await;
- let zed_channel = channels[0];
- let _active_channel = channels[1];
- let vim_channel = channels[2];
-
- try_join_all(client_a.channel_store().update(cx_a, |channel_store, cx| {
- [
- channel_store.set_channel_visibility(zed_channel, proto::ChannelVisibility::Public, cx),
- channel_store.set_channel_visibility(vim_channel, proto::ChannelVisibility::Public, cx),
- channel_store.invite_member(vim_channel, user_b, proto::ChannelRole::Member, cx),
- channel_store.invite_member(zed_channel, user_b, proto::ChannelRole::Guest, cx),
- ]
- }))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- client_b
- .channel_store()
- .update(cx_b, |channel_store, cx| {
- channel_store.respond_to_channel_invite(zed_channel, true, cx)
- })
- .await
- .unwrap();
-
- client_b
- .channel_store()
- .update(cx_b, |channel_store, cx| {
- channel_store.respond_to_channel_invite(vim_channel, true, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // we have an admin (a), and a guest (b) with access to all of zed, and membership in vim.
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[
- ExpectedChannel {
- depth: 0,
- id: zed_channel,
- name: "zed".into(),
- role: ChannelRole::Guest,
- },
- ExpectedChannel {
- depth: 1,
- id: vim_channel,
- name: "vim".into(),
- role: ChannelRole::Member,
- },
- ],
- );
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.remove_member(vim_channel, user_b, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- assert_channels(
- client_b.channel_store(),
- cx_b,
- &[
- ExpectedChannel {
- depth: 0,
- id: zed_channel,
- name: "zed".into(),
- role: ChannelRole::Guest,
- },
- ExpectedChannel {
- depth: 1,
- id: vim_channel,
- name: "vim".into(),
- role: ChannelRole::Guest,
- },
- ],
- )
-}
-
-#[gpui::test]
-async fn test_guest_access(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channels = server
- .make_channel_tree(
- &[("channel-a", None), ("channel-b", Some("channel-a"))],
- (&client_a, cx_a),
- )
- .await;
- let channel_a = channels[0];
- let channel_b = channels[1];
-
- let active_call_b = cx_b.read(ActiveCall::global);
-
- // Non-members should not be allowed to join
- assert!(active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_a, cx))
- .await
- .is_err());
-
- // Make channels A and B public
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_channel_visibility(channel_a, proto::ChannelVisibility::Public, cx)
- })
- .await
- .unwrap();
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_channel_visibility(channel_b, proto::ChannelVisibility::Public, cx)
- })
- .await
- .unwrap();
-
- // Client B joins channel A as a guest
- active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_a, cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_channels_list_shape(
- client_a.channel_store(),
- cx_a,
- &[(channel_a, 0), (channel_b, 1)],
- );
- assert_channels_list_shape(
- client_b.channel_store(),
- cx_b,
- &[(channel_a, 0), (channel_b, 1)],
- );
-
- client_a.channel_store().update(cx_a, |channel_store, _| {
- let participants = channel_store.channel_participants(channel_a);
- assert_eq!(participants.len(), 1);
- assert_eq!(participants[0].id, client_b.user_id().unwrap());
- });
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.set_channel_visibility(channel_a, proto::ChannelVisibility::Members, cx)
- })
- .await
- .unwrap();
-
- assert_channels_list_shape(client_b.channel_store(), cx_b, &[]);
-
- active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_b, cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_channels_list_shape(client_b.channel_store(), cx_b, &[(channel_b, 0)]);
-}
-
-#[gpui::test]
-async fn test_invite_access(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let channels = server
- .make_channel_tree(
- &[("channel-a", None), ("channel-b", Some("channel-a"))],
- (&client_a, cx_a),
- )
- .await;
- let channel_a_id = channels[0];
- let channel_b_id = channels[0];
-
- let active_call_b = cx_b.read(ActiveCall::global);
-
- // should not be allowed to join
- assert!(active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
- .await
- .is_err());
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.invite_member(
- channel_a_id,
- client_b.user_id().unwrap(),
- ChannelRole::Member,
- cx,
- )
- })
- .await
- .unwrap();
-
- active_call_b
- .update(cx_b, |call, cx| call.join_channel(channel_b_id, cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- client_b.channel_store().update(cx_b, |channel_store, _| {
- assert!(channel_store.channel_for_id(channel_b_id).is_some());
- assert!(channel_store.channel_for_id(channel_a_id).is_some());
- });
-
- client_a.channel_store().update(cx_a, |channel_store, _| {
- let participants = channel_store.channel_participants(channel_b_id);
- assert_eq!(participants.len(), 1);
- assert_eq!(participants[0].id, client_b.user_id().unwrap());
- })
-}
-
-#[gpui::test]
-async fn test_channel_moving(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- _cx_b: &mut TestAppContext,
- _cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- // let client_b = server.create_client(cx_b, "user_b").await;
- // let client_c = server.create_client(cx_c, "user_c").await;
-
- let channels = server
- .make_channel_tree(
- &[
- ("channel-a", None),
- ("channel-b", Some("channel-a")),
- ("channel-c", Some("channel-b")),
- ("channel-d", Some("channel-c")),
- ],
- (&client_a, cx_a),
- )
- .await;
- let channel_a_id = channels[0];
- let channel_b_id = channels[1];
- let channel_c_id = channels[2];
- let channel_d_id = channels[3];
-
- // Current shape:
- // a - b - c - d
- assert_channels_list_shape(
- client_a.channel_store(),
- cx_a,
- &[
- (channel_a_id, 0),
- (channel_b_id, 1),
- (channel_c_id, 2),
- (channel_d_id, 3),
- ],
- );
-
- client_a
- .channel_store()
- .update(cx_a, |channel_store, cx| {
- channel_store.move_channel(channel_d_id, Some(channel_b_id), cx)
- })
- .await
- .unwrap();
-
- // Current shape:
- // /- d
- // a - b -- c
- assert_channels_list_shape(
- client_a.channel_store(),
- cx_a,
- &[
- (channel_a_id, 0),
- (channel_b_id, 1),
- (channel_c_id, 2),
- (channel_d_id, 2),
- ],
- );
-}
-
-#[derive(Debug, PartialEq)]
-struct ExpectedChannel {
- depth: usize,
- id: ChannelId,
- name: SharedString,
- role: ChannelRole,
-}
-
-#[track_caller]
-fn assert_channel_invitations(
- channel_store: &Model<ChannelStore>,
- cx: &TestAppContext,
- expected_channels: &[ExpectedChannel],
-) {
- let actual = cx.read(|cx| {
- channel_store.read_with(cx, |store, _| {
- store
- .channel_invitations()
- .iter()
- .map(|channel| ExpectedChannel {
- depth: 0,
- name: channel.name.clone(),
- id: channel.id,
- role: channel.role,
- })
- .collect::<Vec<_>>()
- })
- });
- assert_eq!(actual, expected_channels);
-}
-
-#[track_caller]
-fn assert_channels(
- channel_store: &Model<ChannelStore>,
- cx: &TestAppContext,
- expected_channels: &[ExpectedChannel],
-) {
- let actual = cx.read(|cx| {
- channel_store.read_with(cx, |store, _| {
- store
- .ordered_channels()
- .map(|(depth, channel)| ExpectedChannel {
- depth,
- name: channel.name.clone().into(),
- id: channel.id,
- role: channel.role,
- })
- .collect::<Vec<_>>()
- })
- });
- pretty_assertions::assert_eq!(actual, expected_channels);
-}
-
-#[track_caller]
-fn assert_channels_list_shape(
- channel_store: &Model<ChannelStore>,
- cx: &TestAppContext,
- expected_channels: &[(u64, usize)],
-) {
- let actual = cx.read(|cx| {
- channel_store.read_with(cx, |store, _| {
- store
- .ordered_channels()
- .map(|(depth, channel)| (channel.id, depth))
- .collect::<Vec<_>>()
- })
- });
- pretty_assertions::assert_eq!(actual, expected_channels);
-}
@@ -1,1890 +0,0 @@
-//todo!(workspace)
-
-// use crate::{rpc::RECONNECT_TIMEOUT, tests::TestServer};
-// use call::ActiveCall;
-// use collab_ui::notifications::project_shared_notification::ProjectSharedNotification;
-// use editor::{Editor, ExcerptRange, MultiBuffer};
-// use gpui::{point, BackgroundExecutor, TestAppContext, View, VisualTestContext, WindowContext};
-// use live_kit_client::MacOSDisplay;
-// use project::project_settings::ProjectSettings;
-// use rpc::proto::PeerId;
-// use serde_json::json;
-// use settings::SettingsStore;
-// use std::borrow::Cow;
-// use workspace::{
-// dock::{test::TestPanel, DockPosition},
-// item::{test::TestItem, ItemHandle as _},
-// shared_screen::SharedScreen,
-// SplitDirection, Workspace,
-// };
-
-// #[gpui::test(iterations = 10)]
-// async fn test_basic_following(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// cx_c: &mut TestAppContext,
-// cx_d: &mut TestAppContext,
-// ) {
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// let client_c = server.create_client(cx_c, "user_c").await;
-// let client_d = server.create_client(cx_d, "user_d").await;
-// server
-// .create_room(&mut [
-// (&client_a, cx_a),
-// (&client_b, cx_b),
-// (&client_c, cx_c),
-// (&client_d, cx_d),
-// ])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-// let active_call_b = cx_b.read(ActiveCall::global);
-
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// client_a
-// .fs()
-// .insert_tree(
-// "/a",
-// json!({
-// "1.txt": "one\none\none",
-// "2.txt": "two\ntwo\ntwo",
-// "3.txt": "three\nthree\nthree",
-// }),
-// )
-// .await;
-// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-// active_call_a
-// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-// .await
-// .unwrap();
-
-// let project_id = active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-
-// let window_a = client_a.build_workspace(&project_a, cx_a);
-// let workspace_a = window_a.root(cx_a).unwrap();
-// let window_b = client_b.build_workspace(&project_b, cx_b);
-// let workspace_b = window_b.root(cx_b).unwrap();
-
-// todo!("could be wrong")
-// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
-// let cx_a = &mut cx_a;
-// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
-// let cx_b = &mut cx_b;
-// let mut cx_c = VisualTestContext::from_window(*window_c, cx_c);
-// let cx_c = &mut cx_c;
-// let mut cx_d = VisualTestContext::from_window(*window_d, cx_d);
-// let cx_d = &mut cx_d;
-
-// // Client A opens some editors.
-// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
-// let editor_a1 = workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-// let editor_a2 = workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-
-// // Client B opens an editor.
-// let editor_b1 = workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-
-// let peer_id_a = client_a.peer_id().unwrap();
-// let peer_id_b = client_b.peer_id().unwrap();
-// let peer_id_c = client_c.peer_id().unwrap();
-// let peer_id_d = client_d.peer_id().unwrap();
-
-// // Client A updates their selections in those editors
-// editor_a1.update(cx_a, |editor, cx| {
-// editor.handle_input("a", cx);
-// editor.handle_input("b", cx);
-// editor.handle_input("c", cx);
-// editor.select_left(&Default::default(), cx);
-// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
-// });
-// editor_a2.update(cx_a, |editor, cx| {
-// editor.handle_input("d", cx);
-// editor.handle_input("e", cx);
-// editor.select_left(&Default::default(), cx);
-// assert_eq!(editor.selections.ranges(cx), vec![2..1]);
-// });
-
-// // When client B starts following client A, all visible view states are replicated to client B.
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(peer_id_a, cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// cx_c.executor().run_until_parked();
-// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
-// workspace
-// .active_item(cx)
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap()
-// });
-// assert_eq!(
-// cx_b.read(|cx| editor_b2.project_path(cx)),
-// Some((worktree_id, "2.txt").into())
-// );
-// assert_eq!(
-// editor_b2.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
-// vec![2..1]
-// );
-// assert_eq!(
-// editor_b1.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
-// vec![3..2]
-// );
-
-// cx_c.executor().run_until_parked();
-// let active_call_c = cx_c.read(ActiveCall::global);
-// let project_c = client_c.build_remote_project(project_id, cx_c).await;
-// let window_c = client_c.build_workspace(&project_c, cx_c);
-// let workspace_c = window_c.root(cx_c).unwrap();
-// active_call_c
-// .update(cx_c, |call, cx| call.set_location(Some(&project_c), cx))
-// .await
-// .unwrap();
-// drop(project_c);
-
-// // Client C also follows client A.
-// workspace_c
-// .update(cx_c, |workspace, cx| {
-// workspace.follow(peer_id_a, cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// cx_d.executor().run_until_parked();
-// let active_call_d = cx_d.read(ActiveCall::global);
-// let project_d = client_d.build_remote_project(project_id, cx_d).await;
-// let workspace_d = client_d
-// .build_workspace(&project_d, cx_d)
-// .root(cx_d)
-// .unwrap();
-// active_call_d
-// .update(cx_d, |call, cx| call.set_location(Some(&project_d), cx))
-// .await
-// .unwrap();
-// drop(project_d);
-
-// // All clients see that clients B and C are following client A.
-// cx_c.executor().run_until_parked();
-// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
-// assert_eq!(
-// followers_by_leader(project_id, cx),
-// &[(peer_id_a, vec![peer_id_b, peer_id_c])],
-// "followers seen by {name}"
-// );
-// }
-
-// // Client C unfollows client A.
-// workspace_c.update(cx_c, |workspace, cx| {
-// workspace.unfollow(&workspace.active_pane().clone(), cx);
-// });
-
-// // All clients see that clients B is following client A.
-// cx_c.executor().run_until_parked();
-// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
-// assert_eq!(
-// followers_by_leader(project_id, cx),
-// &[(peer_id_a, vec![peer_id_b])],
-// "followers seen by {name}"
-// );
-// }
-
-// // Client C re-follows client A.
-// workspace_c
-// .update(cx_c, |workspace, cx| {
-// workspace.follow(peer_id_a, cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// // All clients see that clients B and C are following client A.
-// cx_c.executor().run_until_parked();
-// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
-// assert_eq!(
-// followers_by_leader(project_id, cx),
-// &[(peer_id_a, vec![peer_id_b, peer_id_c])],
-// "followers seen by {name}"
-// );
-// }
-
-// // Client D follows client B, then switches to following client C.
-// workspace_d
-// .update(cx_d, |workspace, cx| {
-// workspace.follow(peer_id_b, cx).unwrap()
-// })
-// .await
-// .unwrap();
-// workspace_d
-// .update(cx_d, |workspace, cx| {
-// workspace.follow(peer_id_c, cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// // All clients see that D is following C
-// cx_d.executor().run_until_parked();
-// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
-// assert_eq!(
-// followers_by_leader(project_id, cx),
-// &[
-// (peer_id_a, vec![peer_id_b, peer_id_c]),
-// (peer_id_c, vec![peer_id_d])
-// ],
-// "followers seen by {name}"
-// );
-// }
-
-// // Client C closes the project.
-// window_c.remove(cx_c);
-// cx_c.drop_last(workspace_c);
-
-// // Clients A and B see that client B is following A, and client C is not present in the followers.
-// cx_c.executor().run_until_parked();
-// for (name, cx) in [("A", &cx_a), ("B", &cx_b), ("C", &cx_c), ("D", &cx_d)] {
-// assert_eq!(
-// followers_by_leader(project_id, cx),
-// &[(peer_id_a, vec![peer_id_b]),],
-// "followers seen by {name}"
-// );
-// }
-
-// // When client A activates a different editor, client B does so as well.
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace.activate_item(&editor_a1, cx)
-// });
-// executor.run_until_parked();
-// workspace_b.update(cx_b, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// editor_b1.item_id()
-// );
-// });
-
-// // When client A opens a multibuffer, client B does so as well.
-// let multibuffer_a = cx_a.build_model(|cx| {
-// let buffer_a1 = project_a.update(cx, |project, cx| {
-// project
-// .get_open_buffer(&(worktree_id, "1.txt").into(), cx)
-// .unwrap()
-// });
-// let buffer_a2 = project_a.update(cx, |project, cx| {
-// project
-// .get_open_buffer(&(worktree_id, "2.txt").into(), cx)
-// .unwrap()
-// });
-// let mut result = MultiBuffer::new(0);
-// result.push_excerpts(
-// buffer_a1,
-// [ExcerptRange {
-// context: 0..3,
-// primary: None,
-// }],
-// cx,
-// );
-// result.push_excerpts(
-// buffer_a2,
-// [ExcerptRange {
-// context: 4..7,
-// primary: None,
-// }],
-// cx,
-// );
-// result
-// });
-// let multibuffer_editor_a = workspace_a.update(cx_a, |workspace, cx| {
-// let editor =
-// cx.build_view(|cx| Editor::for_multibuffer(multibuffer_a, Some(project_a.clone()), cx));
-// workspace.add_item(Box::new(editor.clone()), cx);
-// editor
-// });
-// executor.run_until_parked();
-// let multibuffer_editor_b = workspace_b.update(cx_b, |workspace, cx| {
-// workspace
-// .active_item(cx)
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap()
-// });
-// assert_eq!(
-// multibuffer_editor_a.update(cx_a, |editor, cx| editor.text(cx)),
-// multibuffer_editor_b.update(cx_b, |editor, cx| editor.text(cx)),
-// );
-
-// // When client A navigates back and forth, client B does so as well.
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.go_back(workspace.active_pane().downgrade(), cx)
-// })
-// .await
-// .unwrap();
-// executor.run_until_parked();
-// workspace_b.update(cx_b, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// editor_b1.item_id()
-// );
-// });
-
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.go_back(workspace.active_pane().downgrade(), cx)
-// })
-// .await
-// .unwrap();
-// executor.run_until_parked();
-// workspace_b.update(cx_b, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// editor_b2.item_id()
-// );
-// });
-
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.go_forward(workspace.active_pane().downgrade(), cx)
-// })
-// .await
-// .unwrap();
-// executor.run_until_parked();
-// workspace_b.update(cx_b, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// editor_b1.item_id()
-// );
-// });
-
-// // Changes to client A's editor are reflected on client B.
-// editor_a1.update(cx_a, |editor, cx| {
-// editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2]));
-// });
-// executor.run_until_parked();
-// editor_b1.update(cx_b, |editor, cx| {
-// assert_eq!(editor.selections.ranges(cx), &[1..1, 2..2]);
-// });
-
-// editor_a1.update(cx_a, |editor, cx| editor.set_text("TWO", cx));
-// executor.run_until_parked();
-// editor_b1.update(cx_b, |editor, cx| assert_eq!(editor.text(cx), "TWO"));
-
-// editor_a1.update(cx_a, |editor, cx| {
-// editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
-// editor.set_scroll_position(point(0., 100.), cx);
-// });
-// executor.run_until_parked();
-// editor_b1.update(cx_b, |editor, cx| {
-// assert_eq!(editor.selections.ranges(cx), &[3..3]);
-// });
-
-// // After unfollowing, client B stops receiving updates from client A.
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.unfollow(&workspace.active_pane().clone(), cx)
-// });
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace.activate_item(&editor_a2, cx)
-// });
-// executor.run_until_parked();
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, cx| workspace
-// .active_item(cx)
-// .unwrap()
-// .item_id()),
-// editor_b1.item_id()
-// );
-
-// // Client A starts following client B.
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.follow(peer_id_b, cx).unwrap()
-// })
-// .await
-// .unwrap();
-// assert_eq!(
-// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
-// Some(peer_id_b)
-// );
-// assert_eq!(
-// workspace_a.update(cx_a, |workspace, cx| workspace
-// .active_item(cx)
-// .unwrap()
-// .item_id()),
-// editor_a1.item_id()
-// );
-
-// // Client B activates an external window, which causes a new screen-sharing item to be added to the pane.
-// let display = MacOSDisplay::new();
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(None, cx))
-// .await
-// .unwrap();
-// active_call_b
-// .update(cx_b, |call, cx| {
-// call.room().unwrap().update(cx, |room, cx| {
-// room.set_display_sources(vec![display.clone()]);
-// room.share_screen(cx)
-// })
-// })
-// .await
-// .unwrap();
-// executor.run_until_parked();
-// let shared_screen = workspace_a.update(cx_a, |workspace, cx| {
-// workspace
-// .active_item(cx)
-// .expect("no active item")
-// .downcast::<SharedScreen>()
-// .expect("active item isn't a shared screen")
-// });
-
-// // Client B activates Zed again, which causes the previous editor to become focused again.
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-// executor.run_until_parked();
-// workspace_a.update(cx_a, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// editor_a1.item_id()
-// )
-// });
-
-// // Client B activates a multibuffer that was created by following client A. Client A returns to that multibuffer.
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.activate_item(&multibuffer_editor_b, cx)
-// });
-// executor.run_until_parked();
-// workspace_a.update(cx_a, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// multibuffer_editor_a.item_id()
-// )
-// });
-
-// // Client B activates a panel, and the previously-opened screen-sharing item gets activated.
-// let panel = window_b.build_view(cx_b, |_| TestPanel::new(DockPosition::Left));
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.add_panel(panel, cx);
-// workspace.toggle_panel_focus::<TestPanel>(cx);
-// });
-// executor.run_until_parked();
-// assert_eq!(
-// workspace_a.update(cx_a, |workspace, cx| workspace
-// .active_item(cx)
-// .unwrap()
-// .item_id()),
-// shared_screen.item_id()
-// );
-
-// // Toggling the focus back to the pane causes client A to return to the multibuffer.
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.toggle_panel_focus::<TestPanel>(cx);
-// });
-// executor.run_until_parked();
-// workspace_a.update(cx_a, |workspace, cx| {
-// assert_eq!(
-// workspace.active_item(cx).unwrap().item_id(),
-// multibuffer_editor_a.item_id()
-// )
-// });
-
-// // Client B activates an item that doesn't implement following,
-// // so the previously-opened screen-sharing item gets activated.
-// let unfollowable_item = window_b.build_view(cx_b, |_| TestItem::new());
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.active_pane().update(cx, |pane, cx| {
-// pane.add_item(Box::new(unfollowable_item), true, true, None, cx)
-// })
-// });
-// executor.run_until_parked();
-// assert_eq!(
-// workspace_a.update(cx_a, |workspace, cx| workspace
-// .active_item(cx)
-// .unwrap()
-// .item_id()),
-// shared_screen.item_id()
-// );
-
-// // Following interrupts when client B disconnects.
-// client_b.disconnect(&cx_b.to_async());
-// executor.advance_clock(RECONNECT_TIMEOUT);
-// assert_eq!(
-// workspace_a.update(cx_a, |workspace, _| workspace.leader_for_pane(&pane_a)),
-// None
-// );
-// }
-
-// #[gpui::test]
-// async fn test_following_tab_order(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// ) {
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// server
-// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-// let active_call_b = cx_b.read(ActiveCall::global);
-
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// client_a
-// .fs()
-// .insert_tree(
-// "/a",
-// json!({
-// "1.txt": "one",
-// "2.txt": "two",
-// "3.txt": "three",
-// }),
-// )
-// .await;
-// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-// active_call_a
-// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-// .await
-// .unwrap();
-
-// let project_id = active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-
-// let workspace_a = client_a
-// .build_workspace(&project_a, cx_a)
-// .root(cx_a)
-// .unwrap();
-// let pane_a = workspace_a.update(cx_a, |workspace, _| workspace.active_pane().clone());
-
-// let workspace_b = client_b
-// .build_workspace(&project_b, cx_b)
-// .root(cx_b)
-// .unwrap();
-// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
-
-// let client_b_id = project_a.update(cx_a, |project, _| {
-// project.collaborators().values().next().unwrap().peer_id
-// });
-
-// //Open 1, 3 in that order on client A
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "3.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-
-// let pane_paths = |pane: &View<workspace::Pane>, cx: &mut TestAppContext| {
-// pane.update(cx, |pane, cx| {
-// pane.items()
-// .map(|item| {
-// item.project_path(cx)
-// .unwrap()
-// .path
-// .to_str()
-// .unwrap()
-// .to_owned()
-// })
-// .collect::<Vec<_>>()
-// })
-// };
-
-// //Verify that the tabs opened in the order we expect
-// assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt"]);
-
-// //Follow client B as client A
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.follow(client_b_id, cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// //Open just 2 on client B
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-// executor.run_until_parked();
-
-// // Verify that newly opened followed file is at the end
-// assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
-
-// //Open just 1 on client B
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-// assert_eq!(&pane_paths(&pane_b, cx_b), &["2.txt", "1.txt"]);
-// executor.run_until_parked();
-
-// // Verify that following into 1 did not reorder
-// assert_eq!(&pane_paths(&pane_a, cx_a), &["1.txt", "3.txt", "2.txt"]);
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_peers_following_each_other(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// ) {
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// server
-// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-// let active_call_b = cx_b.read(ActiveCall::global);
-
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// // Client A shares a project.
-// client_a
-// .fs()
-// .insert_tree(
-// "/a",
-// json!({
-// "1.txt": "one",
-// "2.txt": "two",
-// "3.txt": "three",
-// "4.txt": "four",
-// }),
-// )
-// .await;
-// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-// active_call_a
-// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-// .await
-// .unwrap();
-// let project_id = active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-
-// // Client B joins the project.
-// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-
-// // Client A opens a file.
-// let workspace_a = client_a
-// .build_workspace(&project_a, cx_a)
-// .root(cx_a)
-// .unwrap();
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-
-// // Client B opens a different file.
-// let workspace_b = client_b
-// .build_workspace(&project_b, cx_b)
-// .root(cx_b)
-// .unwrap();
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-
-// // Clients A and B follow each other in split panes
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
-// });
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.follow(client_b.peer_id().unwrap(), cx).unwrap()
-// })
-// .await
-// .unwrap();
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.split_and_clone(workspace.active_pane().clone(), SplitDirection::Right, cx);
-// });
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// // Clients A and B return focus to the original files they had open
-// workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx));
-// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
-// executor.run_until_parked();
-
-// // Both clients see the other client's focused file in their right pane.
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![(true, "1.txt".into())]
-// },
-// PaneSummary {
-// active: false,
-// leader: client_b.peer_id(),
-// items: vec![(false, "1.txt".into()), (true, "2.txt".into())]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![(true, "2.txt".into())]
-// },
-// PaneSummary {
-// active: false,
-// leader: client_a.peer_id(),
-// items: vec![(false, "2.txt".into()), (true, "1.txt".into())]
-// },
-// ]
-// );
-
-// // Clients A and B each open a new file.
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "3.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id, "4.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-// executor.run_until_parked();
-
-// // Both client's see the other client open the new file, but keep their
-// // focus on their own active pane.
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: false,
-// leader: client_b.peer_id(),
-// items: vec![
-// (false, "1.txt".into()),
-// (false, "2.txt".into()),
-// (true, "4.txt".into())
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: false,
-// leader: client_a.peer_id(),
-// items: vec![
-// (false, "2.txt".into()),
-// (false, "1.txt".into()),
-// (true, "3.txt".into())
-// ]
-// },
-// ]
-// );
-
-// // Client A focuses their right pane, in which they're following client B.
-// workspace_a.update(cx_a, |workspace, cx| workspace.activate_next_pane(cx));
-// executor.run_until_parked();
-
-// // Client B sees that client A is now looking at the same file as them.
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_b.peer_id(),
-// items: vec![
-// (false, "1.txt".into()),
-// (false, "2.txt".into()),
-// (true, "4.txt".into())
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: false,
-// leader: client_a.peer_id(),
-// items: vec![
-// (false, "2.txt".into()),
-// (false, "1.txt".into()),
-// (false, "3.txt".into()),
-// (true, "4.txt".into())
-// ]
-// },
-// ]
-// );
-
-// // Client B focuses their right pane, in which they're following client A,
-// // who is following them.
-// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
-// executor.run_until_parked();
-
-// // Client A sees that client B is now looking at the same file as them.
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_a.peer_id(),
-// items: vec![
-// (false, "2.txt".into()),
-// (false, "1.txt".into()),
-// (false, "3.txt".into()),
-// (true, "4.txt".into())
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_b.peer_id(),
-// items: vec![
-// (false, "1.txt".into()),
-// (false, "2.txt".into()),
-// (true, "4.txt".into())
-// ]
-// },
-// ]
-// );
-
-// // Client B focuses a file that they previously followed A to, breaking
-// // the follow.
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.active_pane().update(cx, |pane, cx| {
-// pane.activate_prev_item(true, cx);
-// });
-// });
-// executor.run_until_parked();
-
-// // Both clients see that client B is looking at that previous file.
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![
-// (false, "2.txt".into()),
-// (false, "1.txt".into()),
-// (true, "3.txt".into()),
-// (false, "4.txt".into())
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_b.peer_id(),
-// items: vec![
-// (false, "1.txt".into()),
-// (false, "2.txt".into()),
-// (false, "4.txt".into()),
-// (true, "3.txt".into()),
-// ]
-// },
-// ]
-// );
-
-// // Client B closes tabs, some of which were originally opened by client A,
-// // and some of which were originally opened by client B.
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.active_pane().update(cx, |pane, cx| {
-// pane.close_inactive_items(&Default::default(), cx)
-// .unwrap()
-// .detach();
-// });
-// });
-
-// executor.run_until_parked();
-
-// // Both clients see that Client B is looking at the previous tab.
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![(true, "3.txt".into()),]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_b.peer_id(),
-// items: vec![
-// (false, "1.txt".into()),
-// (false, "2.txt".into()),
-// (false, "4.txt".into()),
-// (true, "3.txt".into()),
-// ]
-// },
-// ]
-// );
-
-// // Client B follows client A again.
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(client_a.peer_id().unwrap(), cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// // Client A cycles through some tabs.
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace.active_pane().update(cx, |pane, cx| {
-// pane.activate_prev_item(true, cx);
-// });
-// });
-// executor.run_until_parked();
-
-// // Client B follows client A into those tabs.
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![
-// (false, "1.txt".into()),
-// (false, "2.txt".into()),
-// (true, "4.txt".into()),
-// (false, "3.txt".into()),
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_a.peer_id(),
-// items: vec![(false, "3.txt".into()), (true, "4.txt".into())]
-// },
-// ]
-// );
-
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace.active_pane().update(cx, |pane, cx| {
-// pane.activate_prev_item(true, cx);
-// });
-// });
-// executor.run_until_parked();
-
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![
-// (false, "1.txt".into()),
-// (true, "2.txt".into()),
-// (false, "4.txt".into()),
-// (false, "3.txt".into()),
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_a.peer_id(),
-// items: vec![
-// (false, "3.txt".into()),
-// (false, "4.txt".into()),
-// (true, "2.txt".into())
-// ]
-// },
-// ]
-// );
-
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace.active_pane().update(cx, |pane, cx| {
-// pane.activate_prev_item(true, cx);
-// });
-// });
-// executor.run_until_parked();
-
-// assert_eq!(
-// pane_summaries(&workspace_a, cx_a),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "1.txt".into()), (true, "3.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: None,
-// items: vec![
-// (true, "1.txt".into()),
-// (false, "2.txt".into()),
-// (false, "4.txt".into()),
-// (false, "3.txt".into()),
-// ]
-// },
-// ]
-// );
-// assert_eq!(
-// pane_summaries(&workspace_b, cx_b),
-// &[
-// PaneSummary {
-// active: false,
-// leader: None,
-// items: vec![(false, "2.txt".into()), (true, "4.txt".into())]
-// },
-// PaneSummary {
-// active: true,
-// leader: client_a.peer_id(),
-// items: vec![
-// (false, "3.txt".into()),
-// (false, "4.txt".into()),
-// (false, "2.txt".into()),
-// (true, "1.txt".into()),
-// ]
-// },
-// ]
-// );
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_auto_unfollowing(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// ) {
-// // 2 clients connect to a server.
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// server
-// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-// let active_call_b = cx_b.read(ActiveCall::global);
-
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// // Client A shares a project.
-// client_a
-// .fs()
-// .insert_tree(
-// "/a",
-// json!({
-// "1.txt": "one",
-// "2.txt": "two",
-// "3.txt": "three",
-// }),
-// )
-// .await;
-// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-// active_call_a
-// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-// .await
-// .unwrap();
-
-// let project_id = active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-
-// todo!("could be wrong")
-// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
-// let cx_a = &mut cx_a;
-// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
-// let cx_b = &mut cx_b;
-
-// // Client A opens some editors.
-// let workspace_a = client_a
-// .build_workspace(&project_a, cx_a)
-// .root(cx_a)
-// .unwrap();
-// let _editor_a1 = workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-
-// // Client B starts following client A.
-// let workspace_b = client_b
-// .build_workspace(&project_b, cx_b)
-// .root(cx_b)
-// .unwrap();
-// let pane_b = workspace_b.update(cx_b, |workspace, _| workspace.active_pane().clone());
-// let leader_id = project_b.update(cx_b, |project, _| {
-// project.collaborators().values().next().unwrap().peer_id
-// });
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(leader_id, cx).unwrap()
-// })
-// .await
-// .unwrap();
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// Some(leader_id)
-// );
-// let editor_b2 = workspace_b.update(cx_b, |workspace, cx| {
-// workspace
-// .active_item(cx)
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap()
-// });
-
-// // When client B moves, it automatically stops following client A.
-// editor_b2.update(cx_b, |editor, cx| editor.move_right(&editor::MoveRight, cx));
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// None
-// );
-
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(leader_id, cx).unwrap()
-// })
-// .await
-// .unwrap();
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// Some(leader_id)
-// );
-
-// // When client B edits, it automatically stops following client A.
-// editor_b2.update(cx_b, |editor, cx| editor.insert("X", cx));
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// None
-// );
-
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(leader_id, cx).unwrap()
-// })
-// .await
-// .unwrap();
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// Some(leader_id)
-// );
-
-// // When client B scrolls, it automatically stops following client A.
-// editor_b2.update(cx_b, |editor, cx| {
-// editor.set_scroll_position(point(0., 3.), cx)
-// });
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// None
-// );
-
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(leader_id, cx).unwrap()
-// })
-// .await
-// .unwrap();
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// Some(leader_id)
-// );
-
-// // When client B activates a different pane, it continues following client A in the original pane.
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace.split_and_clone(pane_b.clone(), SplitDirection::Right, cx)
-// });
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// Some(leader_id)
-// );
-
-// workspace_b.update(cx_b, |workspace, cx| workspace.activate_next_pane(cx));
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// Some(leader_id)
-// );
-
-// // When client B activates a different item in the original pane, it automatically stops following client A.
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id, "2.txt"), None, true, cx)
-// })
-// .await
-// .unwrap();
-// assert_eq!(
-// workspace_b.update(cx_b, |workspace, _| workspace.leader_for_pane(&pane_b)),
-// None
-// );
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_peers_simultaneously_following_each_other(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// ) {
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// server
-// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// client_a.fs().insert_tree("/a", json!({})).await;
-// let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
-// let workspace_a = client_a
-// .build_workspace(&project_a, cx_a)
-// .root(cx_a)
-// .unwrap();
-// let project_id = active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-
-// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// let workspace_b = client_b
-// .build_workspace(&project_b, cx_b)
-// .root(cx_b)
-// .unwrap();
-
-// executor.run_until_parked();
-// let client_a_id = project_b.update(cx_b, |project, _| {
-// project.collaborators().values().next().unwrap().peer_id
-// });
-// let client_b_id = project_a.update(cx_a, |project, _| {
-// project.collaborators().values().next().unwrap().peer_id
-// });
-
-// let a_follow_b = workspace_a.update(cx_a, |workspace, cx| {
-// workspace.follow(client_b_id, cx).unwrap()
-// });
-// let b_follow_a = workspace_b.update(cx_b, |workspace, cx| {
-// workspace.follow(client_a_id, cx).unwrap()
-// });
-
-// futures::try_join!(a_follow_b, b_follow_a).unwrap();
-// workspace_a.update(cx_a, |workspace, _| {
-// assert_eq!(
-// workspace.leader_for_pane(workspace.active_pane()),
-// Some(client_b_id)
-// );
-// });
-// workspace_b.update(cx_b, |workspace, _| {
-// assert_eq!(
-// workspace.leader_for_pane(workspace.active_pane()),
-// Some(client_a_id)
-// );
-// });
-// }
-
-// #[gpui::test(iterations = 10)]
-// async fn test_following_across_workspaces(
-// executor: BackgroundExecutor,
-// cx_a: &mut TestAppContext,
-// cx_b: &mut TestAppContext,
-// ) {
-// // a and b join a channel/call
-// // a shares project 1
-// // b shares project 2
-// //
-// // b follows a: causes project 2 to be joined, and b to follow a.
-// // b opens a different file in project 2, a follows b
-// // b opens a different file in project 1, a cannot follow b
-// // b shares the project, a joins the project and follows b
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// client_a
-// .fs()
-// .insert_tree(
-// "/a",
-// json!({
-// "w.rs": "",
-// "x.rs": "",
-// }),
-// )
-// .await;
-
-// client_b
-// .fs()
-// .insert_tree(
-// "/b",
-// json!({
-// "y.rs": "",
-// "z.rs": "",
-// }),
-// )
-// .await;
-
-// server
-// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-// let active_call_b = cx_b.read(ActiveCall::global);
-
-// let (project_a, worktree_id_a) = client_a.build_local_project("/a", cx_a).await;
-// let (project_b, worktree_id_b) = client_b.build_local_project("/b", cx_b).await;
-
-// let workspace_a = client_a
-// .build_workspace(&project_a, cx_a)
-// .root(cx_a)
-// .unwrap();
-// let workspace_b = client_b
-// .build_workspace(&project_b, cx_b)
-// .root(cx_b)
-// .unwrap();
-
-// cx_a.update(|cx| collab_ui::init(&client_a.app_state, cx));
-// cx_b.update(|cx| collab_ui::init(&client_b.app_state, cx));
-
-// active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-
-// active_call_a
-// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-// .await
-// .unwrap();
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-
-// todo!("could be wrong")
-// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
-// let cx_a = &mut cx_a;
-// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
-// let cx_b = &mut cx_b;
-
-// workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id_a, "w.rs"), None, true, cx)
-// })
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-// assert_eq!(visible_push_notifications(cx_b).len(), 1);
-
-// workspace_b.update(cx_b, |workspace, cx| {
-// workspace
-// .follow(client_a.peer_id().unwrap(), cx)
-// .unwrap()
-// .detach()
-// });
-
-// executor.run_until_parked();
-// let workspace_b_project_a = cx_b
-// .windows()
-// .iter()
-// .max_by_key(|window| window.item_id())
-// .unwrap()
-// .downcast::<Workspace>()
-// .unwrap()
-// .root(cx_b)
-// .unwrap();
-
-// // assert that b is following a in project a in w.rs
-// workspace_b_project_a.update(cx_b, |workspace, cx| {
-// assert!(workspace.is_being_followed(client_a.peer_id().unwrap()));
-// assert_eq!(
-// client_a.peer_id(),
-// workspace.leader_for_pane(workspace.active_pane())
-// );
-// let item = workspace.active_item(cx).unwrap();
-// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("w.rs"));
-// });
-
-// // TODO: in app code, this would be done by the collab_ui.
-// active_call_b
-// .update(cx_b, |call, cx| {
-// let project = workspace_b_project_a.read(cx).project().clone();
-// call.set_location(Some(&project), cx)
-// })
-// .await
-// .unwrap();
-
-// // assert that there are no share notifications open
-// assert_eq!(visible_push_notifications(cx_b).len(), 0);
-
-// // b moves to x.rs in a's project, and a follows
-// workspace_b_project_a
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id_a, "x.rs"), None, true, cx)
-// })
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-// workspace_b_project_a.update(cx_b, |workspace, cx| {
-// let item = workspace.active_item(cx).unwrap();
-// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("x.rs"));
-// });
-
-// workspace_a.update(cx_a, |workspace, cx| {
-// workspace
-// .follow(client_b.peer_id().unwrap(), cx)
-// .unwrap()
-// .detach()
-// });
-
-// executor.run_until_parked();
-// workspace_a.update(cx_a, |workspace, cx| {
-// assert!(workspace.is_being_followed(client_b.peer_id().unwrap()));
-// assert_eq!(
-// client_b.peer_id(),
-// workspace.leader_for_pane(workspace.active_pane())
-// );
-// let item = workspace.active_pane().read(cx).active_item().unwrap();
-// assert_eq!(item.tab_description(0, cx).unwrap(), "x.rs".into());
-// });
-
-// // b moves to y.rs in b's project, a is still following but can't yet see
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.open_path((worktree_id_b, "y.rs"), None, true, cx)
-// })
-// .await
-// .unwrap();
-
-// // TODO: in app code, this would be done by the collab_ui.
-// active_call_b
-// .update(cx_b, |call, cx| {
-// let project = workspace_b.read(cx).project().clone();
-// call.set_location(Some(&project), cx)
-// })
-// .await
-// .unwrap();
-
-// let project_b_id = active_call_b
-// .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-// assert_eq!(visible_push_notifications(cx_a).len(), 1);
-// cx_a.update(|cx| {
-// workspace::join_remote_project(
-// project_b_id,
-// client_b.user_id().unwrap(),
-// client_a.app_state.clone(),
-// cx,
-// )
-// })
-// .await
-// .unwrap();
-
-// executor.run_until_parked();
-
-// assert_eq!(visible_push_notifications(cx_a).len(), 0);
-// let workspace_a_project_b = cx_a
-// .windows()
-// .iter()
-// .max_by_key(|window| window.item_id())
-// .unwrap()
-// .downcast::<Workspace>()
-// .unwrap()
-// .root(cx_a)
-// .unwrap();
-
-// workspace_a_project_b.update(cx_a, |workspace, cx| {
-// assert_eq!(workspace.project().read(cx).remote_id(), Some(project_b_id));
-// assert!(workspace.is_being_followed(client_b.peer_id().unwrap()));
-// assert_eq!(
-// client_b.peer_id(),
-// workspace.leader_for_pane(workspace.active_pane())
-// );
-// let item = workspace.active_item(cx).unwrap();
-// assert_eq!(item.tab_description(0, cx).unwrap(), Cow::Borrowed("y.rs"));
-// });
-// }
-
-// #[gpui::test]
-// async fn test_following_into_excluded_file(
-// executor: BackgroundExecutor,
-// mut cx_a: &mut TestAppContext,
-// mut cx_b: &mut TestAppContext,
-// ) {
-// let mut server = TestServer::start(executor.clone()).await;
-// let client_a = server.create_client(cx_a, "user_a").await;
-// let client_b = server.create_client(cx_b, "user_b").await;
-// for cx in [&mut cx_a, &mut cx_b] {
-// cx.update(|cx| {
-// cx.update_global::<SettingsStore, _>(|store, cx| {
-// store.update_user_settings::<ProjectSettings>(cx, |project_settings| {
-// project_settings.file_scan_exclusions = Some(vec!["**/.git".to_string()]);
-// });
-// });
-// });
-// }
-// server
-// .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
-// .await;
-// let active_call_a = cx_a.read(ActiveCall::global);
-// let active_call_b = cx_b.read(ActiveCall::global);
-
-// cx_a.update(editor::init);
-// cx_b.update(editor::init);
-
-// client_a
-// .fs()
-// .insert_tree(
-// "/a",
-// json!({
-// ".git": {
-// "COMMIT_EDITMSG": "write your commit message here",
-// },
-// "1.txt": "one\none\none",
-// "2.txt": "two\ntwo\ntwo",
-// "3.txt": "three\nthree\nthree",
-// }),
-// )
-// .await;
-// let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-// active_call_a
-// .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
-// .await
-// .unwrap();
-
-// let project_id = active_call_a
-// .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
-// .await
-// .unwrap();
-// let project_b = client_b.build_remote_project(project_id, cx_b).await;
-// active_call_b
-// .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
-// .await
-// .unwrap();
-
-// let window_a = client_a.build_workspace(&project_a, cx_a);
-// let workspace_a = window_a.root(cx_a).unwrap();
-// let peer_id_a = client_a.peer_id().unwrap();
-// let window_b = client_b.build_workspace(&project_b, cx_b);
-// let workspace_b = window_b.root(cx_b).unwrap();
-
-// todo!("could be wrong")
-// let mut cx_a = VisualTestContext::from_window(*window_a, cx_a);
-// let cx_a = &mut cx_a;
-// let mut cx_b = VisualTestContext::from_window(*window_b, cx_b);
-// let cx_b = &mut cx_b;
-
-// // Client A opens editors for a regular file and an excluded file.
-// let editor_for_regular = workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, "1.txt"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-// let editor_for_excluded_a = workspace_a
-// .update(cx_a, |workspace, cx| {
-// workspace.open_path((worktree_id, ".git/COMMIT_EDITMSG"), None, true, cx)
-// })
-// .await
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap();
-
-// // Client A updates their selections in those editors
-// editor_for_regular.update(cx_a, |editor, cx| {
-// editor.handle_input("a", cx);
-// editor.handle_input("b", cx);
-// editor.handle_input("c", cx);
-// editor.select_left(&Default::default(), cx);
-// assert_eq!(editor.selections.ranges(cx), vec![3..2]);
-// });
-// editor_for_excluded_a.update(cx_a, |editor, cx| {
-// editor.select_all(&Default::default(), cx);
-// editor.handle_input("new commit message", cx);
-// editor.select_left(&Default::default(), cx);
-// assert_eq!(editor.selections.ranges(cx), vec![18..17]);
-// });
-
-// // When client B starts following client A, currently visible file is replicated
-// workspace_b
-// .update(cx_b, |workspace, cx| {
-// workspace.follow(peer_id_a, cx).unwrap()
-// })
-// .await
-// .unwrap();
-
-// let editor_for_excluded_b = workspace_b.update(cx_b, |workspace, cx| {
-// workspace
-// .active_item(cx)
-// .unwrap()
-// .downcast::<Editor>()
-// .unwrap()
-// });
-// assert_eq!(
-// cx_b.read(|cx| editor_for_excluded_b.project_path(cx)),
-// Some((worktree_id, ".git/COMMIT_EDITMSG").into())
-// );
-// assert_eq!(
-// editor_for_excluded_b.update(cx_b, |editor, cx| editor.selections.ranges(cx)),
-// vec![18..17]
-// );
-
-// // Changes from B to the excluded file are replicated in A's editor
-// editor_for_excluded_b.update(cx_b, |editor, cx| {
-// editor.handle_input("\nCo-Authored-By: B <b@b.b>", cx);
-// });
-// executor.run_until_parked();
-// editor_for_excluded_a.update(cx_a, |editor, cx| {
-// assert_eq!(
-// editor.text(cx),
-// "new commit messag\nCo-Authored-By: B <b@b.b>"
-// );
-// });
-// }
-
-// fn visible_push_notifications(
-// cx: &mut TestAppContext,
-// ) -> Vec<gpui::View<ProjectSharedNotification>> {
-// let mut ret = Vec::new();
-// for window in cx.windows() {
-// window.update(cx, |window| {
-// if let Some(handle) = window
-// .root_view()
-// .clone()
-// .downcast::<ProjectSharedNotification>()
-// {
-// ret.push(handle)
-// }
-// });
-// }
-// ret
-// }
-
-// #[derive(Debug, PartialEq, Eq)]
-// struct PaneSummary {
-// active: bool,
-// leader: Option<PeerId>,
-// items: Vec<(bool, String)>,
-// }
-
-// fn followers_by_leader(project_id: u64, cx: &TestAppContext) -> Vec<(PeerId, Vec<PeerId>)> {
-// cx.read(|cx| {
-// let active_call = ActiveCall::global(cx).read(cx);
-// let peer_id = active_call.client().peer_id();
-// let room = active_call.room().unwrap().read(cx);
-// let mut result = room
-// .remote_participants()
-// .values()
-// .map(|participant| participant.peer_id)
-// .chain(peer_id)
-// .filter_map(|peer_id| {
-// let followers = room.followers_for(peer_id, project_id);
-// if followers.is_empty() {
-// None
-// } else {
-// Some((peer_id, followers.to_vec()))
-// }
-// })
-// .collect::<Vec<_>>();
-// result.sort_by_key(|e| e.0);
-// result
-// })
-// }
-
-// fn pane_summaries(workspace: &View<Workspace>, cx: &mut WindowContext<'_>) -> Vec<PaneSummary> {
-// workspace.update(cx, |workspace, cx| {
-// let active_pane = workspace.active_pane();
-// workspace
-// .panes()
-// .iter()
-// .map(|pane| {
-// let leader = workspace.leader_for_pane(pane);
-// let active = pane == active_pane;
-// let pane = pane.read(cx);
-// let active_ix = pane.active_item_index();
-// PaneSummary {
-// active,
-// leader,
-// items: pane
-// .items()
-// .enumerate()
-// .map(|(ix, item)| {
-// (
-// ix == active_ix,
-// item.tab_description(0, cx)
-// .map_or(String::new(), |s| s.to_string()),
-// )
-// })
-// .collect(),
-// }
-// })
-// .collect()
-// })
-// }
@@ -1,5722 +0,0 @@
-use crate::{
- rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
- tests::{channel_id, room_participants, RoomParticipants, TestClient, TestServer},
-};
-use call::{room, ActiveCall, ParticipantLocation, Room};
-use client::{User, RECEIVE_TIMEOUT};
-use collections::{HashMap, HashSet};
-use fs::{repository::GitFileStatus, FakeFs, Fs as _, RemoveOptions};
-use futures::StreamExt as _;
-use gpui::{AppContext, BackgroundExecutor, Model, TestAppContext};
-use language::{
- language_settings::{AllLanguageSettings, Formatter},
- tree_sitter_rust, Diagnostic, DiagnosticEntry, FakeLspAdapter, Language, LanguageConfig,
- LineEnding, OffsetRangeExt, Point, Rope,
-};
-use live_kit_client::MacOSDisplay;
-use lsp::LanguageServerId;
-use project::{
- search::SearchQuery, DiagnosticSummary, FormatTrigger, HoverBlockKind, Project, ProjectPath,
-};
-use rand::prelude::*;
-use serde_json::json;
-use settings::SettingsStore;
-use std::{
- cell::{Cell, RefCell},
- env, future, mem,
- path::{Path, PathBuf},
- rc::Rc,
- sync::{
- atomic::{AtomicBool, Ordering::SeqCst},
- Arc,
- },
-};
-use unindent::Unindent as _;
-
-#[ctor::ctor]
-fn init_logger() {
- if std::env::var("RUST_LOG").is_ok() {
- env_logger::init();
- }
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_basic_calls(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_b2: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
-
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
-
- // Call user B from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: vec!["user_b".to_string()]
- }
- );
-
- // User B receives the call.
-
- let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- let call_b = incoming_call_b.next().await.unwrap().unwrap();
- assert_eq!(call_b.calling_user.github_login, "user_a");
-
- // User B connects via another client and also receives a ring on the newly-connected client.
- let _client_b2 = server.create_client(cx_b2, "user_b").await;
- let active_call_b2 = cx_b2.read(ActiveCall::global);
-
- let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
- executor.run_until_parked();
- let call_b2 = incoming_call_b2.next().await.unwrap().unwrap();
- assert_eq!(call_b2.calling_user.github_login, "user_a");
-
- // User B joins the room using the first client.
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- assert!(incoming_call_b.next().await.unwrap().is_none());
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: Default::default()
- }
- );
-
- // Call user C from client B.
-
- let mut incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
- active_call_b
- .update(cx_b, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec!["user_c".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec!["user_c".to_string()]
- }
- );
-
- // User C receives the call, but declines it.
- let call_c = incoming_call_c.next().await.unwrap().unwrap();
- assert_eq!(call_c.calling_user.github_login, "user_b");
- active_call_c.update(cx_c, |call, cx| call.decline_incoming(cx).unwrap());
- assert!(incoming_call_c.next().await.unwrap().is_none());
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: Default::default()
- }
- );
-
- // Call user C again from user A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec!["user_c".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec!["user_c".to_string()]
- }
- );
-
- // User C accepts the call.
- let call_c = incoming_call_c.next().await.unwrap().unwrap();
- assert_eq!(call_c.calling_user.github_login, "user_a");
- active_call_c
- .update(cx_c, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- assert!(incoming_call_c.next().await.unwrap().is_none());
-
- let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string(), "user_c".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_c".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_b".to_string()],
- pending: Default::default()
- }
- );
-
- // User A shares their screen
- let display = MacOSDisplay::new();
- let events_b = active_call_events(cx_b);
- let events_c = active_call_events(cx_c);
- active_call_a
- .update(cx_a, |call, cx| {
- call.room().unwrap().update(cx, |room, cx| {
- room.set_display_sources(vec![display.clone()]);
- room.share_screen(cx)
- })
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // User B observes the remote screen sharing track.
- assert_eq!(events_b.borrow().len(), 1);
- let event_b = events_b.borrow().first().unwrap().clone();
- if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_b {
- assert_eq!(participant_id, client_a.peer_id().unwrap());
-
- room_b.read_with(cx_b, |room, _| {
- assert_eq!(
- room.remote_participants()[&client_a.user_id().unwrap()]
- .video_tracks
- .len(),
- 1
- );
- });
- } else {
- panic!("unexpected event")
- }
-
- // User C observes the remote screen sharing track.
- assert_eq!(events_c.borrow().len(), 1);
- let event_c = events_c.borrow().first().unwrap().clone();
- if let call::room::Event::RemoteVideoTracksChanged { participant_id } = event_c {
- assert_eq!(participant_id, client_a.peer_id().unwrap());
-
- room_c.read_with(cx_c, |room, _| {
- assert_eq!(
- room.remote_participants()[&client_a.user_id().unwrap()]
- .video_tracks
- .len(),
- 1
- );
- });
- } else {
- panic!("unexpected event")
- }
-
- // User A leaves the room.
- active_call_a
- .update(cx_a, |call, cx| {
- let hang_up = call.hang_up(cx);
- assert!(call.room().is_none());
- hang_up
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_c".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: Default::default()
- }
- );
-
- // User B gets disconnected from the LiveKit server, which causes them
- // to automatically leave the room. User C leaves the room as well because
- // nobody else is in there.
- server
- .test_live_kit_server
- .disconnect_client(client_b.user_id().unwrap().to_string())
- .await;
- executor.run_until_parked();
-
- active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
-
- active_call_c.read_with(cx_c, |call, _| assert!(call.room().is_none()));
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_calling_multiple_users_simultaneously(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
- cx_d: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
-
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- let client_d = server.create_client(cx_d, "user_d").await;
- server
- .make_contacts(&mut [
- (&client_a, cx_a),
- (&client_b, cx_b),
- (&client_c, cx_c),
- (&client_d, cx_d),
- ])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
- let active_call_d = cx_d.read(ActiveCall::global);
-
- // Simultaneously call user B and user C from client A.
- let b_invite = active_call_a.update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- });
- let c_invite = active_call_a.update(cx_a, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- });
- b_invite.await.unwrap();
- c_invite.await.unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: vec!["user_b".to_string(), "user_c".to_string()]
- }
- );
-
- // Call client D from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_d.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: vec![
- "user_b".to_string(),
- "user_c".to_string(),
- "user_d".to_string()
- ]
- }
- );
-
- // Accept the call on all clients simultaneously.
- let accept_b = active_call_b.update(cx_b, |call, cx| call.accept_incoming(cx));
- let accept_c = active_call_c.update(cx_c, |call, cx| call.accept_incoming(cx));
- let accept_d = active_call_d.update(cx_d, |call, cx| call.accept_incoming(cx));
- accept_b.await.unwrap();
- accept_c.await.unwrap();
- accept_d.await.unwrap();
-
- executor.run_until_parked();
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
-
- let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
-
- let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone());
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec![
- "user_b".to_string(),
- "user_c".to_string(),
- "user_d".to_string(),
- ],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec![
- "user_a".to_string(),
- "user_c".to_string(),
- "user_d".to_string(),
- ],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec![
- "user_a".to_string(),
- "user_b".to_string(),
- "user_d".to_string(),
- ],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_d, cx_d),
- RoomParticipants {
- remote: vec![
- "user_a".to_string(),
- "user_b".to_string(),
- "user_c".to_string(),
- ],
- pending: Default::default()
- }
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_joining_channels_and_calling_multiple_users_simultaneously(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
-
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
-
- let channel_1 = server
- .make_channel(
- "channel1",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let channel_2 = server
- .make_channel(
- "channel2",
- None,
- (&client_a, cx_a),
- &mut [(&client_b, cx_b), (&client_c, cx_c)],
- )
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Simultaneously join channel 1 and then channel 2
- active_call_a
- .update(cx_a, |call, cx| call.join_channel(channel_1, cx))
- .detach();
- let join_channel_2 = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_2, cx));
-
- join_channel_2.await.unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
-
- assert_eq!(channel_id(&room_a, cx_a), Some(channel_2));
-
- // Leave the room
- active_call_a
- .update(cx_a, |call, cx| {
- let hang_up = call.hang_up(cx);
- hang_up
- })
- .await
- .unwrap();
-
- // Initiating invites and then joining a channel should fail gracefully
- let b_invite = active_call_a.update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- });
- let c_invite = active_call_a.update(cx_a, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- });
-
- let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));
-
- b_invite.await.unwrap();
- c_invite.await.unwrap();
- join_channel.await.unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
-
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: vec!["user_b".to_string(), "user_c".to_string()]
- }
- );
-
- assert_eq!(channel_id(&room_a, cx_a), None);
-
- // Leave the room
- active_call_a
- .update(cx_a, |call, cx| {
- let hang_up = call.hang_up(cx);
- hang_up
- })
- .await
- .unwrap();
-
- // Simultaneously join channel 1 and call user B and user C from client A.
- let join_channel = active_call_a.update(cx_a, |call, cx| call.join_channel(channel_1, cx));
-
- let b_invite = active_call_a.update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- });
- let c_invite = active_call_a.update(cx_a, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- });
-
- join_channel.await.unwrap();
- b_invite.await.unwrap();
- c_invite.await.unwrap();
-
- active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_room_uniqueness(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_a2: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_b2: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let _client_a2 = server.create_client(cx_a2, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let _client_b2 = server.create_client(cx_b2, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_a2 = cx_a2.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_b2 = cx_b2.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
-
- // Call user B from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- // Ensure a new room can't be created given user A just created one.
- active_call_a2
- .update(cx_a2, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap_err();
-
- active_call_a2.read_with(cx_a2, |call, _| assert!(call.room().is_none()));
-
- // User B receives the call from user A.
-
- let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- let call_b1 = incoming_call_b.next().await.unwrap().unwrap();
- assert_eq!(call_b1.calling_user.github_login, "user_a");
-
- // Ensure calling users A and B from client C fails.
- active_call_c
- .update(cx_c, |call, cx| {
- call.invite(client_a.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap_err();
- active_call_c
- .update(cx_c, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap_err();
-
- // Ensure User B can't create a room while they still have an incoming call.
- active_call_b2
- .update(cx_b2, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap_err();
-
- active_call_b2.read_with(cx_b2, |call, _| assert!(call.room().is_none()));
-
- // User B joins the room and calling them after they've joined still fails.
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- active_call_c
- .update(cx_c, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap_err();
-
- // Ensure User B can't create a room while they belong to another room.
- active_call_b2
- .update(cx_b2, |call, cx| {
- call.invite(client_c.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap_err();
-
- active_call_b2.read_with(cx_b2, |call, _| assert!(call.room().is_none()));
-
- // Client C can successfully call client B after client B leaves the room.
- active_call_b
- .update(cx_b, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- active_call_c
- .update(cx_c, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- let call_b2 = incoming_call_b.next().await.unwrap().unwrap();
- assert_eq!(call_b2.calling_user.github_login, "user_c");
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_client_disconnecting_from_room(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- // Call user B from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
-
- // User B receives the call and joins the room.
-
- let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- incoming_call_b.next().await.unwrap().unwrap();
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: Default::default()
- }
- );
-
- // User A automatically reconnects to the room upon disconnection.
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT);
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: Default::default()
- }
- );
-
- // When user A disconnects, both client A and B clear their room on the active call.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
-
- active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
-
- active_call_b.read_with(cx_b, |call, _| assert!(call.room().is_none()));
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
-
- // Allow user A to reconnect to the server.
- server.allow_connections();
- executor.advance_clock(RECEIVE_TIMEOUT);
-
- // Call user B again from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
-
- // User B receives the call and joins the room.
-
- let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- incoming_call_b.next().await.unwrap().unwrap();
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: Default::default()
- }
- );
-
- // User B gets disconnected from the LiveKit server, which causes it
- // to automatically leave the room.
- server
- .test_live_kit_server
- .disconnect_client(client_b.user_id().unwrap().to_string())
- .await;
- executor.run_until_parked();
- active_call_a.update(cx_a, |call, _| assert!(call.room().is_none()));
- active_call_b.update(cx_b, |call, _| assert!(call.room().is_none()));
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: Default::default(),
- pending: Default::default()
- }
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_server_restarts(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
- cx_d: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- client_a
- .fs()
- .insert_tree("/a", json!({ "a.txt": "a-contents" }))
- .await;
-
- // Invite client B to collaborate on a project
- let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
-
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- let client_d = server.create_client(cx_d, "user_d").await;
- server
- .make_contacts(&mut [
- (&client_a, cx_a),
- (&client_b, cx_b),
- (&client_c, cx_c),
- (&client_d, cx_d),
- ])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
- let active_call_d = cx_d.read(ActiveCall::global);
-
- // User A calls users B, C, and D.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), Some(project_a.clone()), cx)
- })
- .await
- .unwrap();
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_c.user_id().unwrap(), Some(project_a.clone()), cx)
- })
- .await
- .unwrap();
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_d.user_id().unwrap(), Some(project_a.clone()), cx)
- })
- .await
- .unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
-
- // User B receives the call and joins the room.
-
- let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- assert!(incoming_call_b.next().await.unwrap().is_some());
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
-
- // User C receives the call and joins the room.
-
- let mut incoming_call_c = active_call_c.read_with(cx_c, |call, _| call.incoming());
- assert!(incoming_call_c.next().await.unwrap().is_some());
- active_call_c
- .update(cx_c, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- let room_c = active_call_c.read_with(cx_c, |call, _| call.room().unwrap().clone());
-
- // User D receives the call but doesn't join the room yet.
-
- let mut incoming_call_d = active_call_d.read_with(cx_d, |call, _| call.incoming());
- assert!(incoming_call_d.next().await.unwrap().is_some());
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string(), "user_c".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_c".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_b".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
-
- // The server is torn down.
- server.reset().await;
-
- // Users A and B reconnect to the call. User C has troubles reconnecting, so it leaves the room.
- client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
- executor.advance_clock(RECONNECT_TIMEOUT);
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string(), "user_c".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_c".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
-
- // User D is notified again of the incoming call and accepts it.
- assert!(incoming_call_d.next().await.unwrap().is_some());
- active_call_d
- .update(cx_d, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- executor.run_until_parked();
-
- let room_d = active_call_d.read_with(cx_d, |call, _| call.room().unwrap().clone());
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec![
- "user_b".to_string(),
- "user_c".to_string(),
- "user_d".to_string(),
- ],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec![
- "user_a".to_string(),
- "user_c".to_string(),
- "user_d".to_string(),
- ],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_d, cx_d),
- RoomParticipants {
- remote: vec![
- "user_a".to_string(),
- "user_b".to_string(),
- "user_c".to_string(),
- ],
- pending: vec![]
- }
- );
-
- // The server finishes restarting, cleaning up stale connections.
- server.start().await.unwrap();
- executor.advance_clock(CLEANUP_TIMEOUT);
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string(), "user_d".to_string()],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_d".to_string()],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_d, cx_d),
- RoomParticipants {
- remote: vec!["user_a".to_string(), "user_b".to_string()],
- pending: vec![]
- }
- );
-
- // User D hangs up.
- active_call_d
- .update(cx_d, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_c, cx_c),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_d, cx_d),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
-
- // User B calls user D again.
- active_call_b
- .update(cx_b, |call, cx| {
- call.invite(client_d.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- // User D receives the call but doesn't join the room yet.
-
- let mut incoming_call_d = active_call_d.read_with(cx_d, |call, _| call.incoming());
- assert!(incoming_call_d.next().await.unwrap().is_some());
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec!["user_d".to_string()]
- }
- );
-
- // The server is torn down.
- server.reset().await;
-
- // Users A and B have troubles reconnecting, so they leave the room.
- client_a.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
- client_b.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
- client_c.override_establish_connection(|_, cx| cx.spawn(|_| future::pending()));
- executor.advance_clock(RECONNECT_TIMEOUT);
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec![],
- pending: vec![]
- }
- );
-
- // User D is notified again of the incoming call but doesn't accept it.
- assert!(incoming_call_d.next().await.unwrap().is_some());
-
- // The server finishes restarting, cleaning up stale connections and canceling the
- // call to user D because the room has become empty.
- server.start().await.unwrap();
- executor.advance_clock(CLEANUP_TIMEOUT);
- assert!(incoming_call_d.next().await.unwrap().is_none());
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_calls_on_multiple_connections(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b1: &mut TestAppContext,
- cx_b2: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b1 = server.create_client(cx_b1, "user_b").await;
- let client_b2 = server.create_client(cx_b2, "user_b").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b1, cx_b1)])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b1 = cx_b1.read(ActiveCall::global);
- let active_call_b2 = cx_b2.read(ActiveCall::global);
-
- let mut incoming_call_b1 = active_call_b1.read_with(cx_b1, |call, _| call.incoming());
-
- let mut incoming_call_b2 = active_call_b2.read_with(cx_b2, |call, _| call.incoming());
- assert!(incoming_call_b1.next().await.unwrap().is_none());
- assert!(incoming_call_b2.next().await.unwrap().is_none());
-
- // Call user B from client A, ensuring both clients for user B ring.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b1.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_some());
- assert!(incoming_call_b2.next().await.unwrap().is_some());
-
- // User B declines the call on one of the two connections, causing both connections
- // to stop ringing.
- active_call_b2.update(cx_b2, |call, cx| call.decline_incoming(cx).unwrap());
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_none());
- assert!(incoming_call_b2.next().await.unwrap().is_none());
-
- // Call user B again from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b1.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_some());
- assert!(incoming_call_b2.next().await.unwrap().is_some());
-
- // User B accepts the call on one of the two connections, causing both connections
- // to stop ringing.
- active_call_b2
- .update(cx_b2, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_none());
- assert!(incoming_call_b2.next().await.unwrap().is_none());
-
- // User B disconnects the client that is not on the call. Everything should be fine.
- client_b1.disconnect(&cx_b1.to_async());
- executor.advance_clock(RECEIVE_TIMEOUT);
- client_b1
- .authenticate_and_connect(false, &cx_b1.to_async())
- .await
- .unwrap();
-
- // User B hangs up, and user A calls them again.
- active_call_b2
- .update(cx_b2, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b1.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_some());
- assert!(incoming_call_b2.next().await.unwrap().is_some());
-
- // User A cancels the call, causing both connections to stop ringing.
- active_call_a
- .update(cx_a, |call, cx| {
- call.cancel_invite(client_b1.user_id().unwrap(), cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_none());
- assert!(incoming_call_b2.next().await.unwrap().is_none());
-
- // User A calls user B again.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b1.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_some());
- assert!(incoming_call_b2.next().await.unwrap().is_some());
-
- // User A hangs up, causing both connections to stop ringing.
- active_call_a
- .update(cx_a, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_none());
- assert!(incoming_call_b2.next().await.unwrap().is_none());
-
- // User A calls user B again.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b1.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_some());
- assert!(incoming_call_b2.next().await.unwrap().is_some());
-
- // User A disconnects, causing both connections to stop ringing.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- assert!(incoming_call_b1.next().await.unwrap().is_none());
- assert!(incoming_call_b2.next().await.unwrap().is_none());
-
- // User A reconnects automatically, then calls user B again.
- server.allow_connections();
- executor.advance_clock(RECEIVE_TIMEOUT);
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b1.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(incoming_call_b1.next().await.unwrap().is_some());
- assert!(incoming_call_b2.next().await.unwrap().is_some());
-
- // User B disconnects all clients, causing user A to no longer see a pending call for them.
- server.forbid_connections();
- server.disconnect_client(client_b1.peer_id().unwrap());
- server.disconnect_client(client_b2.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
-
- active_call_a.read_with(cx_a, |call, _| assert!(call.room().is_none()));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_unshare_project(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "a.txt": "a-contents",
- "b.txt": "b-contents",
- }),
- )
- .await;
-
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- executor.run_until_parked();
-
- assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
-
- project_b
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // When client B leaves the room, the project becomes read-only.
- active_call_b
- .update(cx_b, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
-
- assert!(project_b.read_with(cx_b, |project, _| project.is_read_only()));
-
- // Client C opens the project.
- let project_c = client_c.build_remote_project(project_id, cx_c).await;
-
- // When client A unshares the project, client C's project becomes read-only.
- project_a
- .update(cx_a, |project, cx| project.unshare(cx))
- .unwrap();
- executor.run_until_parked();
-
- assert!(worktree_a.read_with(cx_a, |tree, _| !tree.as_local().unwrap().is_shared()));
-
- assert!(project_c.read_with(cx_c, |project, _| project.is_read_only()));
-
- // Client C can open the project again after client A re-shares.
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_c2 = client_c.build_remote_project(project_id, cx_c).await;
- executor.run_until_parked();
-
- assert!(worktree_a.read_with(cx_a, |tree, _| tree.as_local().unwrap().is_shared()));
- project_c2
- .update(cx_c, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // When client A (the host) leaves the room, the project gets unshared and guests are notified.
- active_call_a
- .update(cx_a, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |project, _| assert!(!project.is_shared()));
-
- project_c2.read_with(cx_c, |project, _| {
- assert!(project.is_read_only());
- assert!(project.collaborators().is_empty());
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_project_reconnect(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- cx_b.update(editor::init);
-
- client_a
- .fs()
- .insert_tree(
- "/root-1",
- json!({
- "dir1": {
- "a.txt": "a",
- "b.txt": "b",
- "subdir1": {
- "c.txt": "c",
- "d.txt": "d",
- "e.txt": "e",
- }
- },
- "dir2": {
- "v.txt": "v",
- },
- "dir3": {
- "w.txt": "w",
- "x.txt": "x",
- "y.txt": "y",
- },
- "dir4": {
- "z.txt": "z",
- },
- }),
- )
- .await;
- client_a
- .fs()
- .insert_tree(
- "/root-2",
- json!({
- "2.txt": "2",
- }),
- )
- .await;
- client_a
- .fs()
- .insert_tree(
- "/root-3",
- json!({
- "3.txt": "3",
- }),
- )
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let (project_a1, _) = client_a.build_local_project("/root-1/dir1", cx_a).await;
- let (project_a2, _) = client_a.build_local_project("/root-2", cx_a).await;
- let (project_a3, _) = client_a.build_local_project("/root-3", cx_a).await;
- let worktree_a1 = project_a1.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
- let project1_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a1.clone(), cx))
- .await
- .unwrap();
- let project2_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a2.clone(), cx))
- .await
- .unwrap();
- let project3_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a3.clone(), cx))
- .await
- .unwrap();
-
- let project_b1 = client_b.build_remote_project(project1_id, cx_b).await;
- let project_b2 = client_b.build_remote_project(project2_id, cx_b).await;
- let project_b3 = client_b.build_remote_project(project3_id, cx_b).await;
- executor.run_until_parked();
-
- let worktree1_id = worktree_a1.read_with(cx_a, |worktree, _| {
- assert!(worktree.as_local().unwrap().is_shared());
- worktree.id()
- });
- let (worktree_a2, _) = project_a1
- .update(cx_a, |p, cx| {
- p.find_or_create_local_worktree("/root-1/dir2", true, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
-
- let worktree2_id = worktree_a2.read_with(cx_a, |tree, _| {
- assert!(tree.as_local().unwrap().is_shared());
- tree.id()
- });
- executor.run_until_parked();
-
- project_b1.read_with(cx_b, |project, cx| {
- assert!(project.worktree_for_id(worktree2_id, cx).is_some())
- });
-
- let buffer_a1 = project_a1
- .update(cx_a, |p, cx| p.open_buffer((worktree1_id, "a.txt"), cx))
- .await
- .unwrap();
- let buffer_b1 = project_b1
- .update(cx_b, |p, cx| p.open_buffer((worktree1_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // Drop client A's connection.
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT);
-
- project_a1.read_with(cx_a, |project, _| {
- assert!(project.is_shared());
- assert_eq!(project.collaborators().len(), 1);
- });
-
- project_b1.read_with(cx_b, |project, _| {
- assert!(!project.is_read_only());
- assert_eq!(project.collaborators().len(), 1);
- });
-
- worktree_a1.read_with(cx_a, |tree, _| {
- assert!(tree.as_local().unwrap().is_shared())
- });
-
- // While client A is disconnected, add and remove files from client A's project.
- client_a
- .fs()
- .insert_tree(
- "/root-1/dir1/subdir2",
- json!({
- "f.txt": "f-contents",
- "g.txt": "g-contents",
- "h.txt": "h-contents",
- "i.txt": "i-contents",
- }),
- )
- .await;
- client_a
- .fs()
- .remove_dir(
- "/root-1/dir1/subdir1".as_ref(),
- RemoveOptions {
- recursive: true,
- ..Default::default()
- },
- )
- .await
- .unwrap();
-
- // While client A is disconnected, add and remove worktrees from client A's project.
- project_a1.update(cx_a, |project, cx| {
- project.remove_worktree(worktree2_id, cx)
- });
- let (worktree_a3, _) = project_a1
- .update(cx_a, |p, cx| {
- p.find_or_create_local_worktree("/root-1/dir3", true, cx)
- })
- .await
- .unwrap();
- worktree_a3
- .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
- .await;
-
- let worktree3_id = worktree_a3.read_with(cx_a, |tree, _| {
- assert!(!tree.as_local().unwrap().is_shared());
- tree.id()
- });
- executor.run_until_parked();
-
- // While client A is disconnected, close project 2
- cx_a.update(|_| drop(project_a2));
-
- // While client A is disconnected, mutate a buffer on both the host and the guest.
- buffer_a1.update(cx_a, |buf, cx| buf.edit([(0..0, "W")], None, cx));
- buffer_b1.update(cx_b, |buf, cx| buf.edit([(1..1, "Z")], None, cx));
- executor.run_until_parked();
-
- // Client A reconnects. Their project is re-shared, and client B re-joins it.
- server.allow_connections();
- client_a
- .authenticate_and_connect(false, &cx_a.to_async())
- .await
- .unwrap();
- executor.run_until_parked();
-
- project_a1.read_with(cx_a, |project, cx| {
- assert!(project.is_shared());
- assert!(worktree_a1.read(cx).as_local().unwrap().is_shared());
- assert_eq!(
- worktree_a1
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.to_str().unwrap())
- .collect::<Vec<_>>(),
- vec![
- "a.txt",
- "b.txt",
- "subdir2",
- "subdir2/f.txt",
- "subdir2/g.txt",
- "subdir2/h.txt",
- "subdir2/i.txt"
- ]
- );
- assert!(worktree_a3.read(cx).as_local().unwrap().is_shared());
- assert_eq!(
- worktree_a3
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.to_str().unwrap())
- .collect::<Vec<_>>(),
- vec!["w.txt", "x.txt", "y.txt"]
- );
- });
-
- project_b1.read_with(cx_b, |project, cx| {
- assert!(!project.is_read_only());
- assert_eq!(
- project
- .worktree_for_id(worktree1_id, cx)
- .unwrap()
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.to_str().unwrap())
- .collect::<Vec<_>>(),
- vec![
- "a.txt",
- "b.txt",
- "subdir2",
- "subdir2/f.txt",
- "subdir2/g.txt",
- "subdir2/h.txt",
- "subdir2/i.txt"
- ]
- );
- assert!(project.worktree_for_id(worktree2_id, cx).is_none());
- assert_eq!(
- project
- .worktree_for_id(worktree3_id, cx)
- .unwrap()
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.to_str().unwrap())
- .collect::<Vec<_>>(),
- vec!["w.txt", "x.txt", "y.txt"]
- );
- });
-
- project_b2.read_with(cx_b, |project, _| assert!(project.is_read_only()));
-
- project_b3.read_with(cx_b, |project, _| assert!(!project.is_read_only()));
-
- buffer_a1.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "WaZ"));
-
- buffer_b1.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "WaZ"));
-
- // Drop client B's connection.
- server.forbid_connections();
- server.disconnect_client(client_b.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT);
-
- // While client B is disconnected, add and remove files from client A's project
- client_a
- .fs()
- .insert_file("/root-1/dir1/subdir2/j.txt", "j-contents".into())
- .await;
- client_a
- .fs()
- .remove_file("/root-1/dir1/subdir2/i.txt".as_ref(), Default::default())
- .await
- .unwrap();
-
- // While client B is disconnected, add and remove worktrees from client A's project.
- let (worktree_a4, _) = project_a1
- .update(cx_a, |p, cx| {
- p.find_or_create_local_worktree("/root-1/dir4", true, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
-
- let worktree4_id = worktree_a4.read_with(cx_a, |tree, _| {
- assert!(tree.as_local().unwrap().is_shared());
- tree.id()
- });
- project_a1.update(cx_a, |project, cx| {
- project.remove_worktree(worktree3_id, cx)
- });
- executor.run_until_parked();
-
- // While client B is disconnected, mutate a buffer on both the host and the guest.
- buffer_a1.update(cx_a, |buf, cx| buf.edit([(1..1, "X")], None, cx));
- buffer_b1.update(cx_b, |buf, cx| buf.edit([(2..2, "Y")], None, cx));
- executor.run_until_parked();
-
- // While disconnected, close project 3
- cx_a.update(|_| drop(project_a3));
-
- // Client B reconnects. They re-join the room and the remaining shared project.
- server.allow_connections();
- client_b
- .authenticate_and_connect(false, &cx_b.to_async())
- .await
- .unwrap();
- executor.run_until_parked();
-
- project_b1.read_with(cx_b, |project, cx| {
- assert!(!project.is_read_only());
- assert_eq!(
- project
- .worktree_for_id(worktree1_id, cx)
- .unwrap()
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.to_str().unwrap())
- .collect::<Vec<_>>(),
- vec![
- "a.txt",
- "b.txt",
- "subdir2",
- "subdir2/f.txt",
- "subdir2/g.txt",
- "subdir2/h.txt",
- "subdir2/j.txt"
- ]
- );
- assert!(project.worktree_for_id(worktree2_id, cx).is_none());
- assert_eq!(
- project
- .worktree_for_id(worktree4_id, cx)
- .unwrap()
- .read(cx)
- .snapshot()
- .paths()
- .map(|p| p.to_str().unwrap())
- .collect::<Vec<_>>(),
- vec!["z.txt"]
- );
- });
-
- project_b3.read_with(cx_b, |project, _| assert!(project.is_read_only()));
-
- buffer_a1.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "WXaYZ"));
-
- buffer_b1.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "WXaYZ"));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_active_call_events(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- client_a.fs().insert_tree("/a", json!({})).await;
- client_b.fs().insert_tree("/b", json!({})).await;
-
- let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
- let (project_b, _) = client_b.build_local_project("/b", cx_b).await;
-
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- let events_a = active_call_events(cx_a);
- let events_b = active_call_events(cx_b);
-
- let project_a_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]);
- assert_eq!(
- mem::take(&mut *events_b.borrow_mut()),
- vec![room::Event::RemoteProjectShared {
- owner: Arc::new(User {
- id: client_a.user_id().unwrap(),
- github_login: "user_a".to_string(),
- avatar_uri: "avatar_a".into(),
- }),
- project_id: project_a_id,
- worktree_root_names: vec!["a".to_string()],
- }]
- );
-
- let project_b_id = active_call_b
- .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- mem::take(&mut *events_a.borrow_mut()),
- vec![room::Event::RemoteProjectShared {
- owner: Arc::new(User {
- id: client_b.user_id().unwrap(),
- github_login: "user_b".to_string(),
- avatar_uri: "avatar_b".into(),
- }),
- project_id: project_b_id,
- worktree_root_names: vec!["b".to_string()]
- }]
- );
- assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]);
-
- // Sharing a project twice is idempotent.
- let project_b_id_2 = active_call_b
- .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
- .await
- .unwrap();
- assert_eq!(project_b_id_2, project_b_id);
- executor.run_until_parked();
- assert_eq!(mem::take(&mut *events_a.borrow_mut()), vec![]);
- assert_eq!(mem::take(&mut *events_b.borrow_mut()), vec![]);
-}
-
-fn active_call_events(cx: &mut TestAppContext) -> Rc<RefCell<Vec<room::Event>>> {
- let events = Rc::new(RefCell::new(Vec::new()));
- let active_call = cx.read(ActiveCall::global);
- cx.update({
- let events = events.clone();
- |cx| {
- cx.subscribe(&active_call, move |_, event, _| {
- events.borrow_mut().push(event.clone())
- })
- .detach()
- }
- });
- events
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_room_location(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- client_a.fs().insert_tree("/a", json!({})).await;
- client_b.fs().insert_tree("/b", json!({})).await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- let a_notified = Rc::new(Cell::new(false));
- cx_a.update({
- let notified = a_notified.clone();
- |cx| {
- cx.observe(&active_call_a, move |_, _| notified.set(true))
- .detach()
- }
- });
-
- let b_notified = Rc::new(Cell::new(false));
- cx_b.update({
- let b_notified = b_notified.clone();
- |cx| {
- cx.observe(&active_call_b, move |_, _| b_notified.set(true))
- .detach()
- }
- });
-
- let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
- active_call_a
- .update(cx_a, |call, cx| call.set_location(Some(&project_a), cx))
- .await
- .unwrap();
- let (project_b, _) = client_b.build_local_project("/b", cx_b).await;
-
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
- assert!(a_notified.take());
- assert_eq!(
- participant_locations(&room_a, cx_a),
- vec![("user_b".to_string(), ParticipantLocation::External)]
- );
- assert!(b_notified.take());
- assert_eq!(
- participant_locations(&room_b, cx_b),
- vec![("user_a".to_string(), ParticipantLocation::UnsharedProject)]
- );
-
- let project_a_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(a_notified.take());
- assert_eq!(
- participant_locations(&room_a, cx_a),
- vec![("user_b".to_string(), ParticipantLocation::External)]
- );
- assert!(b_notified.take());
- assert_eq!(
- participant_locations(&room_b, cx_b),
- vec![(
- "user_a".to_string(),
- ParticipantLocation::SharedProject {
- project_id: project_a_id
- }
- )]
- );
-
- let project_b_id = active_call_b
- .update(cx_b, |call, cx| call.share_project(project_b.clone(), cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(a_notified.take());
- assert_eq!(
- participant_locations(&room_a, cx_a),
- vec![("user_b".to_string(), ParticipantLocation::External)]
- );
- assert!(b_notified.take());
- assert_eq!(
- participant_locations(&room_b, cx_b),
- vec![(
- "user_a".to_string(),
- ParticipantLocation::SharedProject {
- project_id: project_a_id
- }
- )]
- );
-
- active_call_b
- .update(cx_b, |call, cx| call.set_location(Some(&project_b), cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(a_notified.take());
- assert_eq!(
- participant_locations(&room_a, cx_a),
- vec![(
- "user_b".to_string(),
- ParticipantLocation::SharedProject {
- project_id: project_b_id
- }
- )]
- );
- assert!(b_notified.take());
- assert_eq!(
- participant_locations(&room_b, cx_b),
- vec![(
- "user_a".to_string(),
- ParticipantLocation::SharedProject {
- project_id: project_a_id
- }
- )]
- );
-
- active_call_b
- .update(cx_b, |call, cx| call.set_location(None, cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert!(a_notified.take());
- assert_eq!(
- participant_locations(&room_a, cx_a),
- vec![("user_b".to_string(), ParticipantLocation::External)]
- );
- assert!(b_notified.take());
- assert_eq!(
- participant_locations(&room_b, cx_b),
- vec![(
- "user_a".to_string(),
- ParticipantLocation::SharedProject {
- project_id: project_a_id
- }
- )]
- );
-
- fn participant_locations(
- room: &Model<Room>,
- cx: &TestAppContext,
- ) -> Vec<(String, ParticipantLocation)> {
- room.read_with(cx, |room, _| {
- room.remote_participants()
- .values()
- .map(|participant| {
- (
- participant.user.github_login.to_string(),
- participant.location,
- )
- })
- .collect()
- })
- }
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_propagate_saves_and_fs_changes(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
-
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- let rust = Arc::new(Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- ));
- let javascript = Arc::new(Language::new(
- LanguageConfig {
- name: "JavaScript".into(),
- path_suffixes: vec!["js".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- ));
- for client in [&client_a, &client_b, &client_c] {
- client.language_registry().add(rust.clone());
- client.language_registry().add(javascript.clone());
- }
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "file1.rs": "",
- "file2": ""
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-
- let worktree_a = project_a.read_with(cx_a, |p, _| p.worktrees().next().unwrap());
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- // Join that worktree as clients B and C.
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- let project_c = client_c.build_remote_project(project_id, cx_c).await;
-
- let worktree_b = project_b.read_with(cx_b, |p, _| p.worktrees().next().unwrap());
-
- let worktree_c = project_c.read_with(cx_c, |p, _| p.worktrees().next().unwrap());
-
- // Open and edit a buffer as both guests B and C.
- let buffer_b = project_b
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx))
- .await
- .unwrap();
- let buffer_c = project_c
- .update(cx_c, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx))
- .await
- .unwrap();
-
- buffer_b.read_with(cx_b, |buffer, _| {
- assert_eq!(&*buffer.language().unwrap().name(), "Rust");
- });
-
- buffer_c.read_with(cx_c, |buffer, _| {
- assert_eq!(&*buffer.language().unwrap().name(), "Rust");
- });
- buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "i-am-b, ")], None, cx));
- buffer_c.update(cx_c, |buf, cx| buf.edit([(0..0, "i-am-c, ")], None, cx));
-
- // Open and edit that buffer as the host.
- let buffer_a = project_a
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "file1.rs"), cx))
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- buffer_a.read_with(cx_a, |buf, _| assert_eq!(buf.text(), "i-am-c, i-am-b, "));
- buffer_a.update(cx_a, |buf, cx| {
- buf.edit([(buf.len()..buf.len(), "i-am-a")], None, cx)
- });
-
- executor.run_until_parked();
-
- buffer_a.read_with(cx_a, |buf, _| {
- assert_eq!(buf.text(), "i-am-c, i-am-b, i-am-a");
- });
-
- buffer_b.read_with(cx_b, |buf, _| {
- assert_eq!(buf.text(), "i-am-c, i-am-b, i-am-a");
- });
-
- buffer_c.read_with(cx_c, |buf, _| {
- assert_eq!(buf.text(), "i-am-c, i-am-b, i-am-a");
- });
-
- // Edit the buffer as the host and concurrently save as guest B.
- let save_b = project_b.update(cx_b, |project, cx| {
- project.save_buffer(buffer_b.clone(), cx)
- });
- buffer_a.update(cx_a, |buf, cx| buf.edit([(0..0, "hi-a, ")], None, cx));
- save_b.await.unwrap();
- assert_eq!(
- client_a.fs().load("/a/file1.rs".as_ref()).await.unwrap(),
- "hi-a, i-am-c, i-am-b, i-am-a"
- );
-
- executor.run_until_parked();
-
- buffer_a.read_with(cx_a, |buf, _| assert!(!buf.is_dirty()));
-
- buffer_b.read_with(cx_b, |buf, _| assert!(!buf.is_dirty()));
-
- buffer_c.read_with(cx_c, |buf, _| assert!(!buf.is_dirty()));
-
- // Make changes on host's file system, see those changes on guest worktrees.
- client_a
- .fs()
- .rename(
- "/a/file1.rs".as_ref(),
- "/a/file1.js".as_ref(),
- Default::default(),
- )
- .await
- .unwrap();
- client_a
- .fs()
- .rename("/a/file2".as_ref(), "/a/file3".as_ref(), Default::default())
- .await
- .unwrap();
- client_a.fs().insert_file("/a/file4", "4".into()).await;
- executor.run_until_parked();
-
- worktree_a.read_with(cx_a, |tree, _| {
- assert_eq!(
- tree.paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["file1.js", "file3", "file4"]
- )
- });
-
- worktree_b.read_with(cx_b, |tree, _| {
- assert_eq!(
- tree.paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["file1.js", "file3", "file4"]
- )
- });
-
- worktree_c.read_with(cx_c, |tree, _| {
- assert_eq!(
- tree.paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["file1.js", "file3", "file4"]
- )
- });
-
- // Ensure buffer files are updated as well.
-
- buffer_a.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js"));
- assert_eq!(&*buffer.language().unwrap().name(), "JavaScript");
- });
-
- buffer_b.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js"));
- assert_eq!(&*buffer.language().unwrap().name(), "JavaScript");
- });
-
- buffer_c.read_with(cx_c, |buffer, _| {
- assert_eq!(buffer.file().unwrap().path().to_str(), Some("file1.js"));
- assert_eq!(&*buffer.language().unwrap().name(), "JavaScript");
- });
-
- let new_buffer_a = project_a
- .update(cx_a, |p, cx| p.create_buffer("", None, cx))
- .unwrap();
-
- let new_buffer_id = new_buffer_a.read_with(cx_a, |buffer, _| buffer.remote_id());
- let new_buffer_b = project_b
- .update(cx_b, |p, cx| p.open_buffer_by_id(new_buffer_id, cx))
- .await
- .unwrap();
-
- new_buffer_b.read_with(cx_b, |buffer, _| {
- assert!(buffer.file().is_none());
- });
-
- new_buffer_a.update(cx_a, |buffer, cx| {
- buffer.edit([(0..0, "ok")], None, cx);
- });
- project_a
- .update(cx_a, |project, cx| {
- project.save_buffer_as(new_buffer_a.clone(), "/a/file3.rs".into(), cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- new_buffer_b.read_with(cx_b, |buffer_b, _| {
- assert_eq!(
- buffer_b.file().unwrap().path().as_ref(),
- Path::new("file3.rs")
- );
-
- new_buffer_a.read_with(cx_a, |buffer_a, _| {
- assert_eq!(buffer_b.saved_mtime(), buffer_a.saved_mtime());
- assert_eq!(buffer_b.saved_version(), buffer_a.saved_version());
- });
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_git_diff_base_change(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- ".git": {},
- "sub": {
- ".git": {},
- "b.txt": "
- one
- two
- three
- ".unindent(),
- },
- "a.txt": "
- one
- two
- three
- ".unindent(),
- }),
- )
- .await;
-
- let (project_local, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| {
- call.share_project(project_local.clone(), cx)
- })
- .await
- .unwrap();
-
- let project_remote = client_b.build_remote_project(project_id, cx_b).await;
-
- let diff_base = "
- one
- three
- "
- .unindent();
-
- let new_diff_base = "
- one
- two
- "
- .unindent();
-
- client_a.fs().set_index_for_repo(
- Path::new("/dir/.git"),
- &[(Path::new("a.txt"), diff_base.clone())],
- );
-
- // Create the buffer
- let buffer_local_a = project_local
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // Wait for it to catch up to the new diff
- executor.run_until_parked();
-
- // Smoke test diffing
-
- buffer_local_a.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(1..2, "", "two\n")],
- );
- });
-
- // Create remote buffer
- let buffer_remote_a = project_remote
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // Wait remote buffer to catch up to the new diff
- executor.run_until_parked();
-
- // Smoke test diffing
-
- buffer_remote_a.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(1..2, "", "two\n")],
- );
- });
-
- client_a.fs().set_index_for_repo(
- Path::new("/dir/.git"),
- &[(Path::new("a.txt"), new_diff_base.clone())],
- );
-
- // Wait for buffer_local_a to receive it
- executor.run_until_parked();
-
- // Smoke test new diffing
-
- buffer_local_a.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
-
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(2..3, "", "three\n")],
- );
- });
-
- // Smoke test B
-
- buffer_remote_a.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(2..3, "", "three\n")],
- );
- });
-
- //Nested git dir
-
- let diff_base = "
- one
- three
- "
- .unindent();
-
- let new_diff_base = "
- one
- two
- "
- .unindent();
-
- client_a.fs().set_index_for_repo(
- Path::new("/dir/sub/.git"),
- &[(Path::new("b.txt"), diff_base.clone())],
- );
-
- // Create the buffer
- let buffer_local_b = project_local
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
- .await
- .unwrap();
-
- // Wait for it to catch up to the new diff
- executor.run_until_parked();
-
- // Smoke test diffing
-
- buffer_local_b.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(1..2, "", "two\n")],
- );
- });
-
- // Create remote buffer
- let buffer_remote_b = project_remote
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
- .await
- .unwrap();
-
- // Wait remote buffer to catch up to the new diff
- executor.run_until_parked();
-
- // Smoke test diffing
-
- buffer_remote_b.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(1..2, "", "two\n")],
- );
- });
-
- client_a.fs().set_index_for_repo(
- Path::new("/dir/sub/.git"),
- &[(Path::new("b.txt"), new_diff_base.clone())],
- );
-
- // Wait for buffer_local_b to receive it
- executor.run_until_parked();
-
- // Smoke test new diffing
-
- buffer_local_b.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
- println!("{:?}", buffer.as_rope().to_string());
- println!("{:?}", buffer.diff_base());
- println!(
- "{:?}",
- buffer
- .snapshot()
- .git_diff_hunks_in_row_range(0..4)
- .collect::<Vec<_>>()
- );
-
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(2..3, "", "three\n")],
- );
- });
-
- // Smoke test B
-
- buffer_remote_b.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
- git::diff::assert_hunks(
- buffer.snapshot().git_diff_hunks_in_row_range(0..4),
- &buffer,
- &diff_base,
- &[(2..3, "", "three\n")],
- );
- });
-}
-
-#[gpui::test]
-async fn test_git_branch_name(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- ".git": {},
- }),
- )
- .await;
-
- let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| {
- call.share_project(project_local.clone(), cx)
- })
- .await
- .unwrap();
-
- let project_remote = client_b.build_remote_project(project_id, cx_b).await;
- client_a
- .fs()
- .set_branch_name(Path::new("/dir/.git"), Some("branch-1"));
-
- // Wait for it to catch up to the new branch
- executor.run_until_parked();
-
- #[track_caller]
- fn assert_branch(branch_name: Option<impl Into<String>>, project: &Project, cx: &AppContext) {
- let branch_name = branch_name.map(Into::into);
- let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
- assert_eq!(worktrees.len(), 1);
- let worktree = worktrees[0].clone();
- let root_entry = worktree.read(cx).snapshot().root_git_entry().unwrap();
- assert_eq!(root_entry.branch(), branch_name.map(Into::into));
- }
-
- // Smoke test branch reading
-
- project_local.read_with(cx_a, |project, cx| {
- assert_branch(Some("branch-1"), project, cx)
- });
-
- project_remote.read_with(cx_b, |project, cx| {
- assert_branch(Some("branch-1"), project, cx)
- });
-
- client_a
- .fs()
- .set_branch_name(Path::new("/dir/.git"), Some("branch-2"));
-
- // Wait for buffer_local_a to receive it
- executor.run_until_parked();
-
- // Smoke test branch reading
-
- project_local.read_with(cx_a, |project, cx| {
- assert_branch(Some("branch-2"), project, cx)
- });
-
- project_remote.read_with(cx_b, |project, cx| {
- assert_branch(Some("branch-2"), project, cx)
- });
-
- let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
- executor.run_until_parked();
-
- project_remote_c.read_with(cx_c, |project, cx| {
- assert_branch(Some("branch-2"), project, cx)
- });
-}
-
-#[gpui::test]
-async fn test_git_status_sync(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- ".git": {},
- "a.txt": "a",
- "b.txt": "b",
- }),
- )
- .await;
-
- const A_TXT: &'static str = "a.txt";
- const B_TXT: &'static str = "b.txt";
-
- client_a.fs().set_status_for_repo_via_git_operation(
- Path::new("/dir/.git"),
- &[
- (&Path::new(A_TXT), GitFileStatus::Added),
- (&Path::new(B_TXT), GitFileStatus::Added),
- ],
- );
-
- let (project_local, _worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| {
- call.share_project(project_local.clone(), cx)
- })
- .await
- .unwrap();
-
- let project_remote = client_b.build_remote_project(project_id, cx_b).await;
-
- // Wait for it to catch up to the new status
- executor.run_until_parked();
-
- #[track_caller]
- fn assert_status(
- file: &impl AsRef<Path>,
- status: Option<GitFileStatus>,
- project: &Project,
- cx: &AppContext,
- ) {
- let file = file.as_ref();
- let worktrees = project.visible_worktrees(cx).collect::<Vec<_>>();
- assert_eq!(worktrees.len(), 1);
- let worktree = worktrees[0].clone();
- let snapshot = worktree.read(cx).snapshot();
- assert_eq!(snapshot.status_for_file(file), status);
- }
-
- // Smoke test status reading
-
- project_local.read_with(cx_a, |project, cx| {
- assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
- assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
- });
-
- project_remote.read_with(cx_b, |project, cx| {
- assert_status(&Path::new(A_TXT), Some(GitFileStatus::Added), project, cx);
- assert_status(&Path::new(B_TXT), Some(GitFileStatus::Added), project, cx);
- });
-
- client_a.fs().set_status_for_repo_via_working_copy_change(
- Path::new("/dir/.git"),
- &[
- (&Path::new(A_TXT), GitFileStatus::Modified),
- (&Path::new(B_TXT), GitFileStatus::Modified),
- ],
- );
-
- // Wait for buffer_local_a to receive it
- executor.run_until_parked();
-
- // Smoke test status reading
-
- project_local.read_with(cx_a, |project, cx| {
- assert_status(
- &Path::new(A_TXT),
- Some(GitFileStatus::Modified),
- project,
- cx,
- );
- assert_status(
- &Path::new(B_TXT),
- Some(GitFileStatus::Modified),
- project,
- cx,
- );
- });
-
- project_remote.read_with(cx_b, |project, cx| {
- assert_status(
- &Path::new(A_TXT),
- Some(GitFileStatus::Modified),
- project,
- cx,
- );
- assert_status(
- &Path::new(B_TXT),
- Some(GitFileStatus::Modified),
- project,
- cx,
- );
- });
-
- // And synchronization while joining
- let project_remote_c = client_c.build_remote_project(project_id, cx_c).await;
- executor.run_until_parked();
-
- project_remote_c.read_with(cx_c, |project, cx| {
- assert_status(
- &Path::new(A_TXT),
- Some(GitFileStatus::Modified),
- project,
- cx,
- );
- assert_status(
- &Path::new(B_TXT),
- Some(GitFileStatus::Modified),
- project,
- cx,
- );
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_fs_operations(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- "a.txt": "a-contents",
- "b.txt": "b-contents",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- let worktree_a = project_a.read_with(cx_a, |project, _| project.worktrees().next().unwrap());
-
- let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
-
- let entry = project_b
- .update(cx_b, |project, cx| {
- project.create_entry((worktree_id, "c.txt"), false, cx)
- })
- .await
- .unwrap()
- .unwrap();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "c.txt"]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "c.txt"]
- );
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.rename_entry(entry.id, Path::new("d.txt"), cx)
- })
- .await
- .unwrap()
- .unwrap();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "d.txt"]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "d.txt"]
- );
- });
-
- let dir_entry = project_b
- .update(cx_b, |project, cx| {
- project.create_entry((worktree_id, "DIR"), true, cx)
- })
- .await
- .unwrap()
- .unwrap();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["DIR", "a.txt", "b.txt", "d.txt"]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["DIR", "a.txt", "b.txt", "d.txt"]
- );
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.create_entry((worktree_id, "DIR/e.txt"), false, cx)
- })
- .await
- .unwrap()
- .unwrap();
- project_b
- .update(cx_b, |project, cx| {
- project.create_entry((worktree_id, "DIR/SUBDIR"), true, cx)
- })
- .await
- .unwrap()
- .unwrap();
- project_b
- .update(cx_b, |project, cx| {
- project.create_entry((worktree_id, "DIR/SUBDIR/f.txt"), false, cx)
- })
- .await
- .unwrap()
- .unwrap();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- [
- "DIR",
- "DIR/SUBDIR",
- "DIR/SUBDIR/f.txt",
- "DIR/e.txt",
- "a.txt",
- "b.txt",
- "d.txt"
- ]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- [
- "DIR",
- "DIR/SUBDIR",
- "DIR/SUBDIR/f.txt",
- "DIR/e.txt",
- "a.txt",
- "b.txt",
- "d.txt"
- ]
- );
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.copy_entry(entry.id, Path::new("f.txt"), cx)
- })
- .await
- .unwrap()
- .unwrap();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- [
- "DIR",
- "DIR/SUBDIR",
- "DIR/SUBDIR/f.txt",
- "DIR/e.txt",
- "a.txt",
- "b.txt",
- "d.txt",
- "f.txt"
- ]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- [
- "DIR",
- "DIR/SUBDIR",
- "DIR/SUBDIR/f.txt",
- "DIR/e.txt",
- "a.txt",
- "b.txt",
- "d.txt",
- "f.txt"
- ]
- );
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.delete_entry(dir_entry.id, cx).unwrap()
- })
- .await
- .unwrap();
- executor.run_until_parked();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "d.txt", "f.txt"]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "d.txt", "f.txt"]
- );
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.delete_entry(entry.id, cx).unwrap()
- })
- .await
- .unwrap();
-
- worktree_a.read_with(cx_a, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "f.txt"]
- );
- });
-
- worktree_b.read_with(cx_b, |worktree, _| {
- assert_eq!(
- worktree
- .paths()
- .map(|p| p.to_string_lossy())
- .collect::<Vec<_>>(),
- ["a.txt", "b.txt", "f.txt"]
- );
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_local_settings(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // As client A, open a project that contains some local settings files
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- ".zed": {
- "settings.json": r#"{ "tab_size": 2 }"#
- },
- "a": {
- ".zed": {
- "settings.json": r#"{ "tab_size": 8 }"#
- },
- "a.txt": "a-contents",
- },
- "b": {
- "b.txt": "b-contents",
- }
- }),
- )
- .await;
- let (project_a, _) = client_a.build_local_project("/dir", cx_a).await;
- executor.run_until_parked();
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- // As client B, join that project and observe the local settings.
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- let worktree_b = project_b.read_with(cx_b, |project, _| project.worktrees().next().unwrap());
- executor.run_until_parked();
- cx_b.read(|cx| {
- let store = cx.global::<SettingsStore>();
- assert_eq!(
- store
- .local_settings(worktree_b.read(cx).id().to_usize())
- .collect::<Vec<_>>(),
- &[
- (Path::new("").into(), r#"{"tab_size":2}"#.to_string()),
- (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
- ]
- )
- });
-
- // As client A, update a settings file. As Client B, see the changed settings.
- client_a
- .fs()
- .insert_file("/dir/.zed/settings.json", r#"{}"#.into())
- .await;
- executor.run_until_parked();
- cx_b.read(|cx| {
- let store = cx.global::<SettingsStore>();
- assert_eq!(
- store
- .local_settings(worktree_b.read(cx).id().to_usize())
- .collect::<Vec<_>>(),
- &[
- (Path::new("").into(), r#"{}"#.to_string()),
- (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
- ]
- )
- });
-
- // As client A, create and remove some settings files. As client B, see the changed settings.
- client_a
- .fs()
- .remove_file("/dir/.zed/settings.json".as_ref(), Default::default())
- .await
- .unwrap();
- client_a
- .fs()
- .create_dir("/dir/b/.zed".as_ref())
- .await
- .unwrap();
- client_a
- .fs()
- .insert_file("/dir/b/.zed/settings.json", r#"{"tab_size": 4}"#.into())
- .await;
- executor.run_until_parked();
- cx_b.read(|cx| {
- let store = cx.global::<SettingsStore>();
- assert_eq!(
- store
- .local_settings(worktree_b.read(cx).id().to_usize())
- .collect::<Vec<_>>(),
- &[
- (Path::new("a").into(), r#"{"tab_size":8}"#.to_string()),
- (Path::new("b").into(), r#"{"tab_size":4}"#.to_string()),
- ]
- )
- });
-
- // As client B, disconnect.
- server.forbid_connections();
- server.disconnect_client(client_b.peer_id().unwrap());
-
- // As client A, change and remove settings files while client B is disconnected.
- client_a
- .fs()
- .insert_file("/dir/a/.zed/settings.json", r#"{"hard_tabs":true}"#.into())
- .await;
- client_a
- .fs()
- .remove_file("/dir/b/.zed/settings.json".as_ref(), Default::default())
- .await
- .unwrap();
- executor.run_until_parked();
-
- // As client B, reconnect and see the changed settings.
- server.allow_connections();
- executor.advance_clock(RECEIVE_TIMEOUT);
- cx_b.read(|cx| {
- let store = cx.global::<SettingsStore>();
- assert_eq!(
- store
- .local_settings(worktree_b.read(cx).id().to_usize())
- .collect::<Vec<_>>(),
- &[(Path::new("a").into(), r#"{"hard_tabs":true}"#.to_string()),]
- )
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_buffer_conflict_after_save(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- "a.txt": "a-contents",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open a buffer as client B
- let buffer_b = project_b
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "world ")], None, cx));
-
- buffer_b.read_with(cx_b, |buf, _| {
- assert!(buf.is_dirty());
- assert!(!buf.has_conflict());
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.save_buffer(buffer_b.clone(), cx)
- })
- .await
- .unwrap();
-
- buffer_b.read_with(cx_b, |buffer_b, _| assert!(!buffer_b.is_dirty()));
-
- buffer_b.read_with(cx_b, |buf, _| {
- assert!(!buf.has_conflict());
- });
-
- buffer_b.update(cx_b, |buf, cx| buf.edit([(0..0, "hello ")], None, cx));
-
- buffer_b.read_with(cx_b, |buf, _| {
- assert!(buf.is_dirty());
- assert!(!buf.has_conflict());
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_buffer_reloading(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- "a.txt": "a\nb\nc",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open a buffer as client B
- let buffer_b = project_b
- .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- buffer_b.read_with(cx_b, |buf, _| {
- assert!(!buf.is_dirty());
- assert!(!buf.has_conflict());
- assert_eq!(buf.line_ending(), LineEnding::Unix);
- });
-
- let new_contents = Rope::from("d\ne\nf");
- client_a
- .fs()
- .save("/dir/a.txt".as_ref(), &new_contents, LineEnding::Windows)
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- buffer_b.read_with(cx_b, |buf, _| {
- assert_eq!(buf.text(), new_contents.to_string());
- assert!(!buf.is_dirty());
- assert!(!buf.has_conflict());
- assert_eq!(buf.line_ending(), LineEnding::Windows);
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_editing_while_guest_opens_buffer(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree("/dir", json!({ "a.txt": "a-contents" }))
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open a buffer as client A
- let buffer_a = project_a
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // Start opening the same buffer as client B
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer);
-
- // Edit the buffer as client A while client B is still opening it.
- cx_b.executor().simulate_random_delay().await;
- buffer_a.update(cx_a, |buf, cx| buf.edit([(0..0, "X")], None, cx));
- cx_b.executor().simulate_random_delay().await;
- buffer_a.update(cx_a, |buf, cx| buf.edit([(1..1, "Y")], None, cx));
-
- let text = buffer_a.read_with(cx_a, |buf, _| buf.text());
- let buffer_b = buffer_b.await.unwrap();
- executor.run_until_parked();
-
- buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), text));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_leaving_worktree_while_opening_buffer(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree("/dir", json!({ "a.txt": "a-contents" }))
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // See that a guest has joined as client A.
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |p, _| assert_eq!(p.collaborators().len(), 1));
-
- // Begin opening a buffer as client B, but leave the project before the open completes.
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer);
- cx_b.update(|_| drop(project_b));
- drop(buffer_b);
-
- // See that the guest has left.
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |p, _| assert!(p.collaborators().is_empty()));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_canceling_buffer_opening(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/dir",
- json!({
- "a.txt": "abc",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- let buffer_a = project_a
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
- .await
- .unwrap();
-
- // Open a buffer as client B but cancel after a random amount of time.
- let buffer_b = project_b.update(cx_b, |p, cx| {
- p.open_buffer_by_id(buffer_a.read_with(cx_a, |a, _| a.remote_id()), cx)
- });
- executor.simulate_random_delay().await;
- drop(buffer_b);
-
- // Try opening the same buffer again as client B, and ensure we can
- // still do it despite the cancellation above.
- let buffer_b = project_b
- .update(cx_b, |p, cx| {
- p.open_buffer_by_id(buffer_a.read_with(cx_a, |a, _| a.remote_id()), cx)
- })
- .await
- .unwrap();
-
- buffer_b.read_with(cx_b, |buf, _| assert_eq!(buf.text(), "abc"));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_leaving_project(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "a.txt": "a-contents",
- "b.txt": "b-contents",
- }),
- )
- .await;
- let (project_a, _) = client_a.build_local_project("/a", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b1 = client_b.build_remote_project(project_id, cx_b).await;
- let project_c = client_c.build_remote_project(project_id, cx_c).await;
-
- // Client A sees that a guest has joined.
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |project, _| {
- assert_eq!(project.collaborators().len(), 2);
- });
-
- project_b1.read_with(cx_b, |project, _| {
- assert_eq!(project.collaborators().len(), 2);
- });
-
- project_c.read_with(cx_c, |project, _| {
- assert_eq!(project.collaborators().len(), 2);
- });
-
- // Client B opens a buffer.
- let buffer_b1 = project_b1
- .update(cx_b, |project, cx| {
- let worktree_id = project.worktrees().next().unwrap().read(cx).id();
- project.open_buffer((worktree_id, "a.txt"), cx)
- })
- .await
- .unwrap();
-
- buffer_b1.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "a-contents"));
-
- // Drop client B's project and ensure client A and client C observe client B leaving.
- cx_b.update(|_| drop(project_b1));
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |project, _| {
- assert_eq!(project.collaborators().len(), 1);
- });
-
- project_c.read_with(cx_c, |project, _| {
- assert_eq!(project.collaborators().len(), 1);
- });
-
- // Client B re-joins the project and can open buffers as before.
- let project_b2 = client_b.build_remote_project(project_id, cx_b).await;
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |project, _| {
- assert_eq!(project.collaborators().len(), 2);
- });
-
- project_b2.read_with(cx_b, |project, _| {
- assert_eq!(project.collaborators().len(), 2);
- });
-
- project_c.read_with(cx_c, |project, _| {
- assert_eq!(project.collaborators().len(), 2);
- });
-
- let buffer_b2 = project_b2
- .update(cx_b, |project, cx| {
- let worktree_id = project.worktrees().next().unwrap().read(cx).id();
- project.open_buffer((worktree_id, "a.txt"), cx)
- })
- .await
- .unwrap();
-
- buffer_b2.read_with(cx_b, |buffer, _| assert_eq!(buffer.text(), "a-contents"));
-
- // Drop client B's connection and ensure client A and client C observe client B leaving.
- client_b.disconnect(&cx_b.to_async());
- executor.advance_clock(RECONNECT_TIMEOUT);
-
- project_a.read_with(cx_a, |project, _| {
- assert_eq!(project.collaborators().len(), 1);
- });
-
- project_b2.read_with(cx_b, |project, _| {
- assert!(project.is_read_only());
- });
-
- project_c.read_with(cx_c, |project, _| {
- assert_eq!(project.collaborators().len(), 1);
- });
-
- // Client B can't join the project, unless they re-join the room.
- cx_b.spawn(|cx| {
- Project::remote(
- project_id,
- client_b.app_state.client.clone(),
- client_b.user_store().clone(),
- client_b.language_registry().clone(),
- FakeFs::new(cx.background_executor().clone()),
- cx,
- )
- })
- .await
- .unwrap_err();
-
- // Simulate connection loss for client C and ensure client A observes client C leaving the project.
- client_c.wait_for_current_user(cx_c).await;
- server.forbid_connections();
- server.disconnect_client(client_c.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |project, _| {
- assert_eq!(project.collaborators().len(), 0);
- });
-
- project_b2.read_with(cx_b, |project, _| {
- assert!(project.is_read_only());
- });
-
- project_c.read_with(cx_c, |project, _| {
- assert!(project.is_read_only());
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_collaborating_with_diagnostics(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- // Share a project as client A
- client_a
- .fs()
- .insert_tree(
- "/a",
- json!({
- "a.rs": "let one = two",
- "other.rs": "",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
-
- // Cause the language server to start.
- let _buffer = project_a
- .update(cx_a, |project, cx| {
- project.open_buffer(
- ProjectPath {
- worktree_id,
- path: Path::new("other.rs").into(),
- },
- cx,
- )
- })
- .await
- .unwrap();
-
- // Simulate a language server reporting errors for a file.
- let mut fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server
- .receive_notification::<lsp::notification::DidOpenTextDocument>()
- .await;
- fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
- lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
- version: None,
- diagnostics: vec![lsp::Diagnostic {
- severity: Some(lsp::DiagnosticSeverity::WARNING),
- range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
- message: "message 0".to_string(),
- ..Default::default()
- }],
- },
- );
-
- // Client A shares the project and, simultaneously, the language server
- // publishes a diagnostic. This is done to ensure that the server always
- // observes the latest diagnostics for a worktree.
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
- lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
- version: None,
- diagnostics: vec![lsp::Diagnostic {
- severity: Some(lsp::DiagnosticSeverity::ERROR),
- range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
- message: "message 1".to_string(),
- ..Default::default()
- }],
- },
- );
-
- // Join the worktree as client B.
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Wait for server to see the diagnostics update.
- executor.run_until_parked();
-
- // Ensure client B observes the new diagnostics.
-
- project_b.read_with(cx_b, |project, cx| {
- assert_eq!(
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>(),
- &[(
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("a.rs")),
- },
- LanguageServerId(0),
- DiagnosticSummary {
- error_count: 1,
- warning_count: 0,
- ..Default::default()
- },
- )]
- )
- });
-
- // Join project as client C and observe the diagnostics.
- let project_c = client_c.build_remote_project(project_id, cx_c).await;
- let project_c_diagnostic_summaries =
- Rc::new(RefCell::new(project_c.read_with(cx_c, |project, cx| {
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>()
- })));
- project_c.update(cx_c, |_, cx| {
- let summaries = project_c_diagnostic_summaries.clone();
- cx.subscribe(&project_c, {
- move |p, _, event, cx| {
- if let project::Event::DiskBasedDiagnosticsFinished { .. } = event {
- *summaries.borrow_mut() = p.diagnostic_summaries(false, cx).collect();
- }
- }
- })
- .detach();
- });
-
- executor.run_until_parked();
- assert_eq!(
- project_c_diagnostic_summaries.borrow().as_slice(),
- &[(
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("a.rs")),
- },
- LanguageServerId(0),
- DiagnosticSummary {
- error_count: 1,
- warning_count: 0,
- ..Default::default()
- },
- )]
- );
-
- // Simulate a language server reporting more errors for a file.
- fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
- lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
- version: None,
- diagnostics: vec![
- lsp::Diagnostic {
- severity: Some(lsp::DiagnosticSeverity::ERROR),
- range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 7)),
- message: "message 1".to_string(),
- ..Default::default()
- },
- lsp::Diagnostic {
- severity: Some(lsp::DiagnosticSeverity::WARNING),
- range: lsp::Range::new(lsp::Position::new(0, 10), lsp::Position::new(0, 13)),
- message: "message 2".to_string(),
- ..Default::default()
- },
- ],
- },
- );
-
- // Clients B and C get the updated summaries
- executor.run_until_parked();
-
- project_b.read_with(cx_b, |project, cx| {
- assert_eq!(
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>(),
- [(
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("a.rs")),
- },
- LanguageServerId(0),
- DiagnosticSummary {
- error_count: 1,
- warning_count: 1,
- },
- )]
- );
- });
-
- project_c.read_with(cx_c, |project, cx| {
- assert_eq!(
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>(),
- [(
- ProjectPath {
- worktree_id,
- path: Arc::from(Path::new("a.rs")),
- },
- LanguageServerId(0),
- DiagnosticSummary {
- error_count: 1,
- warning_count: 1,
- },
- )]
- );
- });
-
- // Open the file with the errors on client B. They should be present.
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
-
- buffer_b.read_with(cx_b, |buffer, _| {
- assert_eq!(
- buffer
- .snapshot()
- .diagnostics_in_range::<_, Point>(0..buffer.len(), false)
- .collect::<Vec<_>>(),
- &[
- DiagnosticEntry {
- range: Point::new(0, 4)..Point::new(0, 7),
- diagnostic: Diagnostic {
- group_id: 2,
- message: "message 1".to_string(),
- severity: lsp::DiagnosticSeverity::ERROR,
- is_primary: true,
- ..Default::default()
- }
- },
- DiagnosticEntry {
- range: Point::new(0, 10)..Point::new(0, 13),
- diagnostic: Diagnostic {
- group_id: 3,
- severity: lsp::DiagnosticSeverity::WARNING,
- message: "message 2".to_string(),
- is_primary: true,
- ..Default::default()
- }
- }
- ]
- );
- });
-
- // Simulate a language server reporting no errors for a file.
- fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
- lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path("/a/a.rs").unwrap(),
- version: None,
- diagnostics: vec![],
- },
- );
- executor.run_until_parked();
-
- project_a.read_with(cx_a, |project, cx| {
- assert_eq!(
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>(),
- []
- )
- });
-
- project_b.read_with(cx_b, |project, cx| {
- assert_eq!(
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>(),
- []
- )
- });
-
- project_c.read_with(cx_c, |project, cx| {
- assert_eq!(
- project.diagnostic_summaries(false, cx).collect::<Vec<_>>(),
- []
- )
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_collaborating_with_lsp_progress_updates_and_diagnostics_ordering(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language
- .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
- disk_based_diagnostics_progress_token: Some("the-disk-based-token".into()),
- disk_based_diagnostics_sources: vec!["the-disk-based-diagnostics-source".into()],
- ..Default::default()
- }))
- .await;
- client_a.language_registry().add(Arc::new(language));
-
- let file_names = &["one.rs", "two.rs", "three.rs", "four.rs", "five.rs"];
- client_a
- .fs()
- .insert_tree(
- "/test",
- json!({
- "one.rs": "const ONE: usize = 1;",
- "two.rs": "const TWO: usize = 2;",
- "three.rs": "const THREE: usize = 3;",
- "four.rs": "const FOUR: usize = 3;",
- "five.rs": "const FIVE: usize = 3;",
- }),
- )
- .await;
-
- let (project_a, worktree_id) = client_a.build_local_project("/test", cx_a).await;
-
- // Share a project as client A
- let active_call_a = cx_a.read(ActiveCall::global);
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- // Join the project as client B and open all three files.
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- let guest_buffers = futures::future::try_join_all(file_names.iter().map(|file_name| {
- project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, file_name), cx))
- }))
- .await
- .unwrap();
-
- // Simulate a language server reporting errors for a file.
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server
- .request::<lsp::request::WorkDoneProgressCreate>(lsp::WorkDoneProgressCreateParams {
- token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
- })
- .await
- .unwrap();
- fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
- token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
- value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::Begin(
- lsp::WorkDoneProgressBegin {
- title: "Progress Began".into(),
- ..Default::default()
- },
- )),
- });
- for file_name in file_names {
- fake_language_server.notify::<lsp::notification::PublishDiagnostics>(
- lsp::PublishDiagnosticsParams {
- uri: lsp::Url::from_file_path(Path::new("/test").join(file_name)).unwrap(),
- version: None,
- diagnostics: vec![lsp::Diagnostic {
- severity: Some(lsp::DiagnosticSeverity::WARNING),
- source: Some("the-disk-based-diagnostics-source".into()),
- range: lsp::Range::new(lsp::Position::new(0, 0), lsp::Position::new(0, 0)),
- message: "message one".to_string(),
- ..Default::default()
- }],
- },
- );
- }
- fake_language_server.notify::<lsp::notification::Progress>(lsp::ProgressParams {
- token: lsp::NumberOrString::String("the-disk-based-token".to_string()),
- value: lsp::ProgressParamsValue::WorkDone(lsp::WorkDoneProgress::End(
- lsp::WorkDoneProgressEnd { message: None },
- )),
- });
-
- // When the "disk base diagnostics finished" message is received, the buffers'
- // diagnostics are expected to be present.
- let disk_based_diagnostics_finished = Arc::new(AtomicBool::new(false));
- project_b.update(cx_b, {
- let project_b = project_b.clone();
- let disk_based_diagnostics_finished = disk_based_diagnostics_finished.clone();
- move |_, cx| {
- cx.subscribe(&project_b, move |_, _, event, cx| {
- if let project::Event::DiskBasedDiagnosticsFinished { .. } = event {
- disk_based_diagnostics_finished.store(true, SeqCst);
- for buffer in &guest_buffers {
- assert_eq!(
- buffer
- .read(cx)
- .snapshot()
- .diagnostics_in_range::<_, usize>(0..5, false)
- .count(),
- 1,
- "expected a diagnostic for buffer {:?}",
- buffer.read(cx).file().unwrap().path(),
- );
- }
- }
- })
- .detach();
- }
- });
-
- executor.run_until_parked();
- assert!(disk_based_diagnostics_finished.load(SeqCst));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_reloading_buffer_manually(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree("/a", json!({ "a.rs": "let one = 1;" }))
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/a", cx_a).await;
- let buffer_a = project_a
- .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx))
- .await
- .unwrap();
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
- buffer_b.update(cx_b, |buffer, cx| {
- buffer.edit([(4..7, "six")], None, cx);
- buffer.edit([(10..11, "6")], None, cx);
- assert_eq!(buffer.text(), "let six = 6;");
- assert!(buffer.is_dirty());
- assert!(!buffer.has_conflict());
- });
- executor.run_until_parked();
-
- buffer_a.read_with(cx_a, |buffer, _| assert_eq!(buffer.text(), "let six = 6;"));
-
- client_a
- .fs()
- .save(
- "/a/a.rs".as_ref(),
- &Rope::from("let seven = 7;"),
- LineEnding::Unix,
- )
- .await
- .unwrap();
- executor.run_until_parked();
-
- buffer_a.read_with(cx_a, |buffer, _| assert!(buffer.has_conflict()));
-
- buffer_b.read_with(cx_b, |buffer, _| assert!(buffer.has_conflict()));
-
- project_b
- .update(cx_b, |project, cx| {
- project.reload_buffers(HashSet::from_iter([buffer_b.clone()]), true, cx)
- })
- .await
- .unwrap();
-
- buffer_a.read_with(cx_a, |buffer, _| {
- assert_eq!(buffer.text(), "let seven = 7;");
- assert!(!buffer.is_dirty());
- assert!(!buffer.has_conflict());
- });
-
- buffer_b.read_with(cx_b, |buffer, _| {
- assert_eq!(buffer.text(), "let seven = 7;");
- assert!(!buffer.is_dirty());
- assert!(!buffer.has_conflict());
- });
-
- buffer_a.update(cx_a, |buffer, cx| {
- // Undoing on the host is a no-op when the reload was initiated by the guest.
- buffer.undo(cx);
- assert_eq!(buffer.text(), "let seven = 7;");
- assert!(!buffer.is_dirty());
- assert!(!buffer.has_conflict());
- });
- buffer_b.update(cx_b, |buffer, cx| {
- // Undoing on the guest rolls back the buffer to before it was reloaded but the conflict gets cleared.
- buffer.undo(cx);
- assert_eq!(buffer.text(), "let six = 6;");
- assert!(buffer.is_dirty());
- assert!(!buffer.has_conflict());
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_formatting_buffer(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- executor.allow_parking();
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- // Here we insert a fake tree with a directory that exists on disk. This is needed
- // because later we'll invoke a command, which requires passing a working directory
- // that points to a valid location on disk.
- let directory = env::current_dir().unwrap();
- client_a
- .fs()
- .insert_tree(&directory, json!({ "a.rs": "let one = \"two\"" }))
- .await;
- let (project_a, worktree_id) = client_a.build_local_project(&directory, cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
-
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
- Ok(Some(vec![
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 4), lsp::Position::new(0, 4)),
- new_text: "h".to_string(),
- },
- lsp::TextEdit {
- range: lsp::Range::new(lsp::Position::new(0, 7), lsp::Position::new(0, 7)),
- new_text: "y".to_string(),
- },
- ]))
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.format(
- HashSet::from_iter([buffer_b.clone()]),
- true,
- FormatTrigger::Save,
- cx,
- )
- })
- .await
- .unwrap();
-
- // The edits from the LSP are applied, and a final newline is added.
- assert_eq!(
- buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
- "let honey = \"two\"\n"
- );
-
- // Ensure buffer can be formatted using an external command. Notice how the
- // host's configuration is honored as opposed to using the guest's settings.
- cx_a.update(|cx| {
- cx.update_global(|store: &mut SettingsStore, cx| {
- store.update_user_settings::<AllLanguageSettings>(cx, |file| {
- file.defaults.formatter = Some(Formatter::External {
- command: "awk".into(),
- arguments: vec!["{sub(/two/,\"{buffer_path}\")}1".to_string()].into(),
- });
- });
- });
- });
- project_b
- .update(cx_b, |project, cx| {
- project.format(
- HashSet::from_iter([buffer_b.clone()]),
- true,
- FormatTrigger::Save,
- cx,
- )
- })
- .await
- .unwrap();
- assert_eq!(
- buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
- format!("let honey = \"{}/a.rs\"\n", directory.to_str().unwrap())
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_prettier_formatting_buffer(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- prettier_parser_name: Some("test_parser".to_string()),
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let test_plugin = "test_plugin";
- let mut fake_language_servers = language
- .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
- prettier_plugins: vec![test_plugin],
- ..Default::default()
- }))
- .await;
- let language = Arc::new(language);
- client_a.language_registry().add(Arc::clone(&language));
-
- // Here we insert a fake tree with a directory that exists on disk. This is needed
- // because later we'll invoke a command, which requires passing a working directory
- // that points to a valid location on disk.
- let directory = env::current_dir().unwrap();
- let buffer_text = "let one = \"two\"";
- client_a
- .fs()
- .insert_tree(&directory, json!({ "a.rs": buffer_text }))
- .await;
- let (project_a, worktree_id) = client_a.build_local_project(&directory, cx_a).await;
- let prettier_format_suffix = project::TEST_PRETTIER_FORMAT_SUFFIX;
- let open_buffer = project_a.update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_a = cx_a.executor().spawn(open_buffer).await.unwrap();
-
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
-
- cx_a.update(|cx| {
- cx.update_global(|store: &mut SettingsStore, cx| {
- store.update_user_settings::<AllLanguageSettings>(cx, |file| {
- file.defaults.formatter = Some(Formatter::Auto);
- });
- });
- });
- cx_b.update(|cx| {
- cx.update_global(|store: &mut SettingsStore, cx| {
- store.update_user_settings::<AllLanguageSettings>(cx, |file| {
- file.defaults.formatter = Some(Formatter::LanguageServer);
- });
- });
- });
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::Formatting, _, _>(|_, _| async move {
- panic!(
- "Unexpected: prettier should be preferred since it's enabled and language supports it"
- )
- });
-
- project_b
- .update(cx_b, |project, cx| {
- project.format(
- HashSet::from_iter([buffer_b.clone()]),
- true,
- FormatTrigger::Save,
- cx,
- )
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_eq!(
- buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
- buffer_text.to_string() + "\n" + prettier_format_suffix,
- "Prettier formatting was not applied to client buffer after client's request"
- );
-
- project_a
- .update(cx_a, |project, cx| {
- project.format(
- HashSet::from_iter([buffer_a.clone()]),
- true,
- FormatTrigger::Manual,
- cx,
- )
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_eq!(
- buffer_b.read_with(cx_b, |buffer, _| buffer.text()),
- buffer_text.to_string() + "\n" + prettier_format_suffix + "\n" + prettier_format_suffix,
- "Prettier formatting was not applied to client buffer after host's request"
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_definition(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- client_a
- .fs()
- .insert_tree(
- "/root",
- json!({
- "dir-1": {
- "a.rs": "const ONE: usize = b::TWO + b::THREE;",
- },
- "dir-2": {
- "b.rs": "const TWO: c::T2 = 2;\nconst THREE: usize = 3;",
- "c.rs": "type T2 = usize;",
- }
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open the file on client B.
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
-
- // Request the definition of a symbol as the guest.
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::GotoDefinition, _, _>(|_, _| async move {
- Ok(Some(lsp::GotoDefinitionResponse::Scalar(
- lsp::Location::new(
- lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
- lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
- ),
- )))
- });
-
- let definitions_1 = project_b
- .update(cx_b, |p, cx| p.definition(&buffer_b, 23, cx))
- .await
- .unwrap();
- cx_b.read(|cx| {
- assert_eq!(definitions_1.len(), 1);
- assert_eq!(project_b.read(cx).worktrees().count(), 2);
- let target_buffer = definitions_1[0].target.buffer.read(cx);
- assert_eq!(
- target_buffer.text(),
- "const TWO: c::T2 = 2;\nconst THREE: usize = 3;"
- );
- assert_eq!(
- definitions_1[0].target.range.to_point(target_buffer),
- Point::new(0, 6)..Point::new(0, 9)
- );
- });
-
- // Try getting more definitions for the same buffer, ensuring the buffer gets reused from
- // the previous call to `definition`.
- fake_language_server.handle_request::<lsp::request::GotoDefinition, _, _>(|_, _| async move {
- Ok(Some(lsp::GotoDefinitionResponse::Scalar(
- lsp::Location::new(
- lsp::Url::from_file_path("/root/dir-2/b.rs").unwrap(),
- lsp::Range::new(lsp::Position::new(1, 6), lsp::Position::new(1, 11)),
- ),
- )))
- });
-
- let definitions_2 = project_b
- .update(cx_b, |p, cx| p.definition(&buffer_b, 33, cx))
- .await
- .unwrap();
- cx_b.read(|cx| {
- assert_eq!(definitions_2.len(), 1);
- assert_eq!(project_b.read(cx).worktrees().count(), 2);
- let target_buffer = definitions_2[0].target.buffer.read(cx);
- assert_eq!(
- target_buffer.text(),
- "const TWO: c::T2 = 2;\nconst THREE: usize = 3;"
- );
- assert_eq!(
- definitions_2[0].target.range.to_point(target_buffer),
- Point::new(1, 6)..Point::new(1, 11)
- );
- });
- assert_eq!(
- definitions_1[0].target.buffer,
- definitions_2[0].target.buffer
- );
-
- fake_language_server.handle_request::<lsp::request::GotoTypeDefinition, _, _>(
- |req, _| async move {
- assert_eq!(
- req.text_document_position_params.position,
- lsp::Position::new(0, 7)
- );
- Ok(Some(lsp::GotoDefinitionResponse::Scalar(
- lsp::Location::new(
- lsp::Url::from_file_path("/root/dir-2/c.rs").unwrap(),
- lsp::Range::new(lsp::Position::new(0, 5), lsp::Position::new(0, 7)),
- ),
- )))
- },
- );
-
- let type_definitions = project_b
- .update(cx_b, |p, cx| p.type_definition(&buffer_b, 7, cx))
- .await
- .unwrap();
- cx_b.read(|cx| {
- assert_eq!(type_definitions.len(), 1);
- let target_buffer = type_definitions[0].target.buffer.read(cx);
- assert_eq!(target_buffer.text(), "type T2 = usize;");
- assert_eq!(
- type_definitions[0].target.range.to_point(target_buffer),
- Point::new(0, 5)..Point::new(0, 7)
- );
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_references(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- client_a
- .fs()
- .insert_tree(
- "/root",
- json!({
- "dir-1": {
- "one.rs": "const ONE: usize = 1;",
- "two.rs": "const TWO: usize = one::ONE + one::ONE;",
- },
- "dir-2": {
- "three.rs": "const THREE: usize = two::TWO + one::ONE;",
- }
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/root/dir-1", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open the file on client B.
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
-
- // Request references to a symbol as the guest.
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::References, _, _>(|params, _| async move {
- assert_eq!(
- params.text_document_position.text_document.uri.as_str(),
- "file:///root/dir-1/one.rs"
- );
- Ok(Some(vec![
- lsp::Location {
- uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
- range: lsp::Range::new(lsp::Position::new(0, 24), lsp::Position::new(0, 27)),
- },
- lsp::Location {
- uri: lsp::Url::from_file_path("/root/dir-1/two.rs").unwrap(),
- range: lsp::Range::new(lsp::Position::new(0, 35), lsp::Position::new(0, 38)),
- },
- lsp::Location {
- uri: lsp::Url::from_file_path("/root/dir-2/three.rs").unwrap(),
- range: lsp::Range::new(lsp::Position::new(0, 37), lsp::Position::new(0, 40)),
- },
- ]))
- });
-
- let references = project_b
- .update(cx_b, |p, cx| p.references(&buffer_b, 7, cx))
- .await
- .unwrap();
- cx_b.read(|cx| {
- assert_eq!(references.len(), 3);
- assert_eq!(project_b.read(cx).worktrees().count(), 2);
-
- let two_buffer = references[0].buffer.read(cx);
- let three_buffer = references[2].buffer.read(cx);
- assert_eq!(
- two_buffer.file().unwrap().path().as_ref(),
- Path::new("two.rs")
- );
- assert_eq!(references[1].buffer, references[0].buffer);
- assert_eq!(
- three_buffer.file().unwrap().full_path(cx),
- Path::new("/root/dir-2/three.rs")
- );
-
- assert_eq!(references[0].range.to_offset(two_buffer), 24..27);
- assert_eq!(references[1].range.to_offset(two_buffer), 35..38);
- assert_eq!(references[2].range.to_offset(three_buffer), 37..40);
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_project_search(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/root",
- json!({
- "dir-1": {
- "a": "hello world",
- "b": "goodnight moon",
- "c": "a world of goo",
- "d": "world champion of clown world",
- },
- "dir-2": {
- "e": "disney world is fun",
- }
- }),
- )
- .await;
- let (project_a, _) = client_a.build_local_project("/root/dir-1", cx_a).await;
- let (worktree_2, _) = project_a
- .update(cx_a, |p, cx| {
- p.find_or_create_local_worktree("/root/dir-2", true, cx)
- })
- .await
- .unwrap();
- worktree_2
- .read_with(cx_a, |tree, _| tree.as_local().unwrap().scan_complete())
- .await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
-
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Perform a search as the guest.
- let mut results = HashMap::default();
- let mut search_rx = project_b.update(cx_b, |project, cx| {
- project.search(
- SearchQuery::text("world", false, false, false, Vec::new(), Vec::new()).unwrap(),
- cx,
- )
- });
- while let Some((buffer, ranges)) = search_rx.next().await {
- results.entry(buffer).or_insert(ranges);
- }
-
- let mut ranges_by_path = results
- .into_iter()
- .map(|(buffer, ranges)| {
- buffer.read_with(cx_b, |buffer, cx| {
- let path = buffer.file().unwrap().full_path(cx);
- let offset_ranges = ranges
- .into_iter()
- .map(|range| range.to_offset(buffer))
- .collect::<Vec<_>>();
- (path, offset_ranges)
- })
- })
- .collect::<Vec<_>>();
- ranges_by_path.sort_by_key(|(path, _)| path.clone());
-
- assert_eq!(
- ranges_by_path,
- &[
- (PathBuf::from("dir-1/a"), vec![6..11]),
- (PathBuf::from("dir-1/c"), vec![2..7]),
- (PathBuf::from("dir-1/d"), vec![0..5, 24..29]),
- (PathBuf::from("dir-2/e"), vec![7..12]),
- ]
- );
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_document_highlights(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/root-1",
- json!({
- "main.rs": "fn double(number: i32) -> i32 { number + number }",
- }),
- )
- .await;
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open the file on client B.
- let open_b = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_b).await.unwrap();
-
- // Request document highlights as the guest.
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>(
- |params, _| async move {
- assert_eq!(
- params
- .text_document_position_params
- .text_document
- .uri
- .as_str(),
- "file:///root-1/main.rs"
- );
- assert_eq!(
- params.text_document_position_params.position,
- lsp::Position::new(0, 34)
- );
- Ok(Some(vec![
- lsp::DocumentHighlight {
- kind: Some(lsp::DocumentHighlightKind::WRITE),
- range: lsp::Range::new(lsp::Position::new(0, 10), lsp::Position::new(0, 16)),
- },
- lsp::DocumentHighlight {
- kind: Some(lsp::DocumentHighlightKind::READ),
- range: lsp::Range::new(lsp::Position::new(0, 32), lsp::Position::new(0, 38)),
- },
- lsp::DocumentHighlight {
- kind: Some(lsp::DocumentHighlightKind::READ),
- range: lsp::Range::new(lsp::Position::new(0, 41), lsp::Position::new(0, 47)),
- },
- ]))
- },
- );
-
- let highlights = project_b
- .update(cx_b, |p, cx| p.document_highlights(&buffer_b, 34, cx))
- .await
- .unwrap();
-
- buffer_b.read_with(cx_b, |buffer, _| {
- let snapshot = buffer.snapshot();
-
- let highlights = highlights
- .into_iter()
- .map(|highlight| (highlight.kind, highlight.range.to_offset(&snapshot)))
- .collect::<Vec<_>>();
- assert_eq!(
- highlights,
- &[
- (lsp::DocumentHighlightKind::WRITE, 10..16),
- (lsp::DocumentHighlightKind::READ, 32..38),
- (lsp::DocumentHighlightKind::READ, 41..47)
- ]
- )
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_lsp_hover(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- client_a
- .fs()
- .insert_tree(
- "/root-1",
- json!({
- "main.rs": "use std::collections::HashMap;",
- }),
- )
- .await;
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- let (project_a, worktree_id) = client_a.build_local_project("/root-1", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Open the file as the guest
- let open_buffer = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "main.rs"), cx));
- let buffer_b = cx_b.executor().spawn(open_buffer).await.unwrap();
-
- // Request hover information as the guest.
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::HoverRequest, _, _>(
- |params, _| async move {
- assert_eq!(
- params
- .text_document_position_params
- .text_document
- .uri
- .as_str(),
- "file:///root-1/main.rs"
- );
- assert_eq!(
- params.text_document_position_params.position,
- lsp::Position::new(0, 22)
- );
- Ok(Some(lsp::Hover {
- contents: lsp::HoverContents::Array(vec![
- lsp::MarkedString::String("Test hover content.".to_string()),
- lsp::MarkedString::LanguageString(lsp::LanguageString {
- language: "Rust".to_string(),
- value: "let foo = 42;".to_string(),
- }),
- ]),
- range: Some(lsp::Range::new(
- lsp::Position::new(0, 22),
- lsp::Position::new(0, 29),
- )),
- }))
- },
- );
-
- let hover_info = project_b
- .update(cx_b, |p, cx| p.hover(&buffer_b, 22, cx))
- .await
- .unwrap()
- .unwrap();
-
- buffer_b.read_with(cx_b, |buffer, _| {
- let snapshot = buffer.snapshot();
- assert_eq!(hover_info.range.unwrap().to_offset(&snapshot), 22..29);
- assert_eq!(
- hover_info.contents,
- vec![
- project::HoverBlock {
- text: "Test hover content.".to_string(),
- kind: HoverBlockKind::Markdown,
- },
- project::HoverBlock {
- text: "let foo = 42;".to_string(),
- kind: HoverBlockKind::Code {
- language: "Rust".to_string()
- },
- }
- ]
- );
- });
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_project_symbols(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- client_a
- .fs()
- .insert_tree(
- "/code",
- json!({
- "crate-1": {
- "one.rs": "const ONE: usize = 1;",
- },
- "crate-2": {
- "two.rs": "const TWO: usize = 2; const THREE: usize = 3;",
- },
- "private": {
- "passwords.txt": "the-password",
- }
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/code/crate-1", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- // Cause the language server to start.
- let open_buffer_task =
- project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "one.rs"), cx));
- let _buffer = cx_b.executor().spawn(open_buffer_task).await.unwrap();
-
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::WorkspaceSymbolRequest, _, _>(|_, _| async move {
- Ok(Some(lsp::WorkspaceSymbolResponse::Flat(vec![
- #[allow(deprecated)]
- lsp::SymbolInformation {
- name: "TWO".into(),
- location: lsp::Location {
- uri: lsp::Url::from_file_path("/code/crate-2/two.rs").unwrap(),
- range: lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
- },
- kind: lsp::SymbolKind::CONSTANT,
- tags: None,
- container_name: None,
- deprecated: None,
- },
- ])))
- });
-
- // Request the definition of a symbol as the guest.
- let symbols = project_b
- .update(cx_b, |p, cx| p.symbols("two", cx))
- .await
- .unwrap();
- assert_eq!(symbols.len(), 1);
- assert_eq!(symbols[0].name, "TWO");
-
- // Open one of the returned symbols.
- let buffer_b_2 = project_b
- .update(cx_b, |project, cx| {
- project.open_buffer_for_symbol(&symbols[0], cx)
- })
- .await
- .unwrap();
-
- buffer_b_2.read_with(cx_b, |buffer, _| {
- assert_eq!(
- buffer.file().unwrap().path().as_ref(),
- Path::new("../crate-2/two.rs")
- );
- });
-
- // Attempt to craft a symbol and violate host's privacy by opening an arbitrary file.
- let mut fake_symbol = symbols[0].clone();
- fake_symbol.path.path = Path::new("/code/secrets").into();
- let error = project_b
- .update(cx_b, |project, cx| {
- project.open_buffer_for_symbol(&fake_symbol, cx)
- })
- .await
- .unwrap_err();
- assert!(error.to_string().contains("invalid symbol signature"));
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_open_buffer_while_getting_definition_pointing_to_it(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- mut rng: StdRng,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
-
- // Set up a fake language server.
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- Some(tree_sitter_rust::language()),
- );
- let mut fake_language_servers = language.set_fake_lsp_adapter(Default::default()).await;
- client_a.language_registry().add(Arc::new(language));
-
- client_a
- .fs()
- .insert_tree(
- "/root",
- json!({
- "a.rs": "const ONE: usize = b::TWO;",
- "b.rs": "const TWO: usize = 2",
- }),
- )
- .await;
- let (project_a, worktree_id) = client_a.build_local_project("/root", cx_a).await;
- let project_id = active_call_a
- .update(cx_a, |call, cx| call.share_project(project_a.clone(), cx))
- .await
- .unwrap();
- let project_b = client_b.build_remote_project(project_id, cx_b).await;
-
- let open_buffer_task = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.rs"), cx));
- let buffer_b1 = cx_b.executor().spawn(open_buffer_task).await.unwrap();
-
- let fake_language_server = fake_language_servers.next().await.unwrap();
- fake_language_server.handle_request::<lsp::request::GotoDefinition, _, _>(|_, _| async move {
- Ok(Some(lsp::GotoDefinitionResponse::Scalar(
- lsp::Location::new(
- lsp::Url::from_file_path("/root/b.rs").unwrap(),
- lsp::Range::new(lsp::Position::new(0, 6), lsp::Position::new(0, 9)),
- ),
- )))
- });
-
- let definitions;
- let buffer_b2;
- if rng.gen() {
- definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
- buffer_b2 = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx));
- } else {
- buffer_b2 = project_b.update(cx_b, |p, cx| p.open_buffer((worktree_id, "b.rs"), cx));
- definitions = project_b.update(cx_b, |p, cx| p.definition(&buffer_b1, 23, cx));
- }
-
- let buffer_b2 = buffer_b2.await.unwrap();
- let definitions = definitions.await.unwrap();
- assert_eq!(definitions.len(), 1);
- assert_eq!(definitions[0].target.buffer, buffer_b2);
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_contacts(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_c: &mut TestAppContext,
- cx_d: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- let client_d = server.create_client(cx_d, "user_d").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b), (&client_c, cx_c)])
- .await;
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
- let active_call_c = cx_c.read(ActiveCall::global);
- let _active_call_d = cx_d.read(ActiveCall::global);
-
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(contacts(&client_d, cx_d), []);
-
- server.disconnect_client(client_c.peer_id().unwrap());
- server.forbid_connections();
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "offline", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_c".to_string(), "offline", "free")
- ]
- );
- assert_eq!(contacts(&client_c, cx_c), []);
- assert_eq!(contacts(&client_d, cx_d), []);
-
- server.allow_connections();
- client_c
- .authenticate_and_connect(false, &cx_c.to_async())
- .await
- .unwrap();
-
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(contacts(&client_d, cx_d), []);
-
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_b".to_string(), "online", "busy")
- ]
- );
- assert_eq!(contacts(&client_d, cx_d), []);
-
- // Client B and client D become contacts while client B is being called.
- server
- .make_contacts(&mut [(&client_b, cx_b), (&client_d, cx_d)])
- .await;
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "free"),
- ("user_d".to_string(), "online", "free"),
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_b".to_string(), "online", "busy")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "busy")]
- );
-
- active_call_b.update(cx_b, |call, cx| call.decline_incoming(cx).unwrap());
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "free")]
- );
-
- active_call_c
- .update(cx_c, |call, cx| {
- call.invite(client_a.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "busy")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "busy"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "free")]
- );
-
- active_call_a
- .update(cx_a, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "busy")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "busy"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "free")]
- );
-
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "busy")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "busy"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_b".to_string(), "online", "busy")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "busy")]
- );
-
- active_call_a
- .update(cx_a, |call, cx| call.hang_up(cx))
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_c".to_string(), "online", "free"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "free"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "free")]
- );
-
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_a, cx_a),
- [
- ("user_b".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_c".to_string(), "online", "free"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "online", "busy"),
- ("user_b".to_string(), "online", "busy")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "busy")]
- );
-
- server.forbid_connections();
- server.disconnect_client(client_a.peer_id().unwrap());
- executor.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- assert_eq!(contacts(&client_a, cx_a), []);
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "offline", "free"),
- ("user_c".to_string(), "online", "free"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [
- ("user_a".to_string(), "offline", "free"),
- ("user_b".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_d, cx_d),
- [("user_b".to_string(), "online", "free")]
- );
-
- // Test removing a contact
- client_b
- .user_store()
- .update(cx_b, |store, cx| {
- store.remove_contact(client_c.user_id().unwrap(), cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
- assert_eq!(
- contacts(&client_b, cx_b),
- [
- ("user_a".to_string(), "offline", "free"),
- ("user_d".to_string(), "online", "free")
- ]
- );
- assert_eq!(
- contacts(&client_c, cx_c),
- [("user_a".to_string(), "offline", "free"),]
- );
-
- fn contacts(
- client: &TestClient,
- cx: &TestAppContext,
- ) -> Vec<(String, &'static str, &'static str)> {
- client.user_store().read_with(cx, |store, _| {
- store
- .contacts()
- .iter()
- .map(|contact| {
- (
- contact.user.github_login.clone(),
- if contact.online { "online" } else { "offline" },
- if contact.busy { "busy" } else { "free" },
- )
- })
- .collect()
- })
- }
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_contact_requests(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_a2: &mut TestAppContext,
- cx_b: &mut TestAppContext,
- cx_b2: &mut TestAppContext,
- cx_c: &mut TestAppContext,
- cx_c2: &mut TestAppContext,
-) {
- // Connect to a server as 3 clients.
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_a2 = server.create_client(cx_a2, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- let client_b2 = server.create_client(cx_b2, "user_b").await;
- let client_c = server.create_client(cx_c, "user_c").await;
- let client_c2 = server.create_client(cx_c2, "user_c").await;
-
- assert_eq!(client_a.user_id().unwrap(), client_a2.user_id().unwrap());
- assert_eq!(client_b.user_id().unwrap(), client_b2.user_id().unwrap());
- assert_eq!(client_c.user_id().unwrap(), client_c2.user_id().unwrap());
-
- // User A and User C request that user B become their contact.
- client_a
- .user_store()
- .update(cx_a, |store, cx| {
- store.request_contact(client_b.user_id().unwrap(), cx)
- })
- .await
- .unwrap();
- client_c
- .user_store()
- .update(cx_c, |store, cx| {
- store.request_contact(client_b.user_id().unwrap(), cx)
- })
- .await
- .unwrap();
- executor.run_until_parked();
-
- // All users see the pending request appear in all their clients.
- assert_eq!(
- client_a.summarize_contacts(cx_a).outgoing_requests,
- &["user_b"]
- );
- assert_eq!(
- client_a2.summarize_contacts(cx_a2).outgoing_requests,
- &["user_b"]
- );
- assert_eq!(
- client_b.summarize_contacts(cx_b).incoming_requests,
- &["user_a", "user_c"]
- );
- assert_eq!(
- client_b2.summarize_contacts(cx_b2).incoming_requests,
- &["user_a", "user_c"]
- );
- assert_eq!(
- client_c.summarize_contacts(cx_c).outgoing_requests,
- &["user_b"]
- );
- assert_eq!(
- client_c2.summarize_contacts(cx_c2).outgoing_requests,
- &["user_b"]
- );
-
- // Contact requests are present upon connecting (tested here via disconnect/reconnect)
- disconnect_and_reconnect(&client_a, cx_a).await;
- disconnect_and_reconnect(&client_b, cx_b).await;
- disconnect_and_reconnect(&client_c, cx_c).await;
- executor.run_until_parked();
- assert_eq!(
- client_a.summarize_contacts(cx_a).outgoing_requests,
- &["user_b"]
- );
- assert_eq!(
- client_b.summarize_contacts(cx_b).incoming_requests,
- &["user_a", "user_c"]
- );
- assert_eq!(
- client_c.summarize_contacts(cx_c).outgoing_requests,
- &["user_b"]
- );
-
- // User B accepts the request from user A.
- client_b
- .user_store()
- .update(cx_b, |store, cx| {
- store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // User B sees user A as their contact now in all client, and the incoming request from them is removed.
- let contacts_b = client_b.summarize_contacts(cx_b);
- assert_eq!(contacts_b.current, &["user_a"]);
- assert_eq!(contacts_b.incoming_requests, &["user_c"]);
- let contacts_b2 = client_b2.summarize_contacts(cx_b2);
- assert_eq!(contacts_b2.current, &["user_a"]);
- assert_eq!(contacts_b2.incoming_requests, &["user_c"]);
-
- // User A sees user B as their contact now in all clients, and the outgoing request to them is removed.
- let contacts_a = client_a.summarize_contacts(cx_a);
- assert_eq!(contacts_a.current, &["user_b"]);
- assert!(contacts_a.outgoing_requests.is_empty());
- let contacts_a2 = client_a2.summarize_contacts(cx_a2);
- assert_eq!(contacts_a2.current, &["user_b"]);
- assert!(contacts_a2.outgoing_requests.is_empty());
-
- // Contacts are present upon connecting (tested here via disconnect/reconnect)
- disconnect_and_reconnect(&client_a, cx_a).await;
- disconnect_and_reconnect(&client_b, cx_b).await;
- disconnect_and_reconnect(&client_c, cx_c).await;
- executor.run_until_parked();
- assert_eq!(client_a.summarize_contacts(cx_a).current, &["user_b"]);
- assert_eq!(client_b.summarize_contacts(cx_b).current, &["user_a"]);
- assert_eq!(
- client_b.summarize_contacts(cx_b).incoming_requests,
- &["user_c"]
- );
- assert!(client_c.summarize_contacts(cx_c).current.is_empty());
- assert_eq!(
- client_c.summarize_contacts(cx_c).outgoing_requests,
- &["user_b"]
- );
-
- // User B rejects the request from user C.
- client_b
- .user_store()
- .update(cx_b, |store, cx| {
- store.respond_to_contact_request(client_c.user_id().unwrap(), false, cx)
- })
- .await
- .unwrap();
-
- executor.run_until_parked();
-
- // User B doesn't see user C as their contact, and the incoming request from them is removed.
- let contacts_b = client_b.summarize_contacts(cx_b);
- assert_eq!(contacts_b.current, &["user_a"]);
- assert!(contacts_b.incoming_requests.is_empty());
- let contacts_b2 = client_b2.summarize_contacts(cx_b2);
- assert_eq!(contacts_b2.current, &["user_a"]);
- assert!(contacts_b2.incoming_requests.is_empty());
-
- // User C doesn't see user B as their contact, and the outgoing request to them is removed.
- let contacts_c = client_c.summarize_contacts(cx_c);
- assert!(contacts_c.current.is_empty());
- assert!(contacts_c.outgoing_requests.is_empty());
- let contacts_c2 = client_c2.summarize_contacts(cx_c2);
- assert!(contacts_c2.current.is_empty());
- assert!(contacts_c2.outgoing_requests.is_empty());
-
- // Incoming/outgoing requests are not present upon connecting (tested here via disconnect/reconnect)
- disconnect_and_reconnect(&client_a, cx_a).await;
- disconnect_and_reconnect(&client_b, cx_b).await;
- disconnect_and_reconnect(&client_c, cx_c).await;
- executor.run_until_parked();
- assert_eq!(client_a.summarize_contacts(cx_a).current, &["user_b"]);
- assert_eq!(client_b.summarize_contacts(cx_b).current, &["user_a"]);
- assert!(client_b
- .summarize_contacts(cx_b)
- .incoming_requests
- .is_empty());
- assert!(client_c.summarize_contacts(cx_c).current.is_empty());
- assert!(client_c
- .summarize_contacts(cx_c)
- .outgoing_requests
- .is_empty());
-
- async fn disconnect_and_reconnect(client: &TestClient, cx: &mut TestAppContext) {
- client.disconnect(&cx.to_async());
- client.clear_contacts(cx).await;
- client
- .authenticate_and_connect(false, &cx.to_async())
- .await
- .unwrap();
- }
-}
-
-#[gpui::test(iterations = 10)]
-async fn test_join_call_after_screen_was_shared(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
-
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
- server
- .make_contacts(&mut [(&client_a, cx_a), (&client_b, cx_b)])
- .await;
-
- let active_call_a = cx_a.read(ActiveCall::global);
- let active_call_b = cx_b.read(ActiveCall::global);
-
- // Call users B and C from client A.
- active_call_a
- .update(cx_a, |call, cx| {
- call.invite(client_b.user_id().unwrap(), None, cx)
- })
- .await
- .unwrap();
-
- let room_a = active_call_a.read_with(cx_a, |call, _| call.room().unwrap().clone());
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: Default::default(),
- pending: vec!["user_b".to_string()]
- }
- );
-
- // User B receives the call.
-
- let mut incoming_call_b = active_call_b.read_with(cx_b, |call, _| call.incoming());
- let call_b = incoming_call_b.next().await.unwrap().unwrap();
- assert_eq!(call_b.calling_user.github_login, "user_a");
-
- // User A shares their screen
- let display = MacOSDisplay::new();
- active_call_a
- .update(cx_a, |call, cx| {
- call.room().unwrap().update(cx, |room, cx| {
- room.set_display_sources(vec![display.clone()]);
- room.share_screen(cx)
- })
- })
- .await
- .unwrap();
-
- client_b.user_store().update(cx_b, |user_store, _| {
- user_store.clear_cache();
- });
-
- // User B joins the room
- active_call_b
- .update(cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
-
- let room_b = active_call_b.read_with(cx_b, |call, _| call.room().unwrap().clone());
- assert!(incoming_call_b.next().await.unwrap().is_none());
-
- executor.run_until_parked();
- assert_eq!(
- room_participants(&room_a, cx_a),
- RoomParticipants {
- remote: vec!["user_b".to_string()],
- pending: vec![],
- }
- );
- assert_eq!(
- room_participants(&room_b, cx_b),
- RoomParticipants {
- remote: vec!["user_a".to_string()],
- pending: vec![],
- }
- );
-
- // Ensure User B sees User A's screenshare.
-
- room_b.read_with(cx_b, |room, _| {
- assert_eq!(
- room.remote_participants()
- .get(&client_a.user_id().unwrap())
- .unwrap()
- .video_tracks
- .len(),
- 1
- );
- });
-}
@@ -1,160 +0,0 @@
-use std::sync::Arc;
-
-use gpui::{BackgroundExecutor, TestAppContext};
-use notifications::NotificationEvent;
-use parking_lot::Mutex;
-use rpc::{proto, Notification};
-
-use crate::tests::TestServer;
-
-#[gpui::test]
-async fn test_notifications(
- executor: BackgroundExecutor,
- cx_a: &mut TestAppContext,
- cx_b: &mut TestAppContext,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let client_a = server.create_client(cx_a, "user_a").await;
- let client_b = server.create_client(cx_b, "user_b").await;
-
- let notification_events_a = Arc::new(Mutex::new(Vec::new()));
- let notification_events_b = Arc::new(Mutex::new(Vec::new()));
- client_a.notification_store().update(cx_a, |_, cx| {
- let events = notification_events_a.clone();
- cx.subscribe(&cx.handle(), move |_, _, event, _| {
- events.lock().push(event.clone());
- })
- .detach()
- });
- client_b.notification_store().update(cx_b, |_, cx| {
- let events = notification_events_b.clone();
- cx.subscribe(&cx.handle(), move |_, _, event, _| {
- events.lock().push(event.clone());
- })
- .detach()
- });
-
- // Client A sends a contact request to client B.
- client_a
- .user_store()
- .update(cx_a, |store, cx| store.request_contact(client_b.id(), cx))
- .await
- .unwrap();
-
- // Client B receives a contact request notification and responds to the
- // request, accepting it.
- executor.run_until_parked();
- client_b.notification_store().update(cx_b, |store, cx| {
- assert_eq!(store.notification_count(), 1);
- assert_eq!(store.unread_notification_count(), 1);
-
- let entry = store.notification_at(0).unwrap();
- assert_eq!(
- entry.notification,
- Notification::ContactRequest {
- sender_id: client_a.id()
- }
- );
- assert!(!entry.is_read);
- assert_eq!(
- ¬ification_events_b.lock()[0..],
- &[
- NotificationEvent::NewNotification {
- entry: entry.clone(),
- },
- NotificationEvent::NotificationsUpdated {
- old_range: 0..0,
- new_count: 1
- }
- ]
- );
-
- store.respond_to_notification(entry.notification.clone(), true, cx);
- });
-
- // Client B sees the notification is now read, and that they responded.
- executor.run_until_parked();
- client_b.notification_store().read_with(cx_b, |store, _| {
- assert_eq!(store.notification_count(), 1);
- assert_eq!(store.unread_notification_count(), 0);
-
- let entry = store.notification_at(0).unwrap();
- assert!(entry.is_read);
- assert_eq!(entry.response, Some(true));
- assert_eq!(
- ¬ification_events_b.lock()[2..],
- &[
- NotificationEvent::NotificationRead {
- entry: entry.clone(),
- },
- NotificationEvent::NotificationsUpdated {
- old_range: 0..1,
- new_count: 1
- }
- ]
- );
- });
-
- // Client A receives a notification that client B accepted their request.
- client_a.notification_store().read_with(cx_a, |store, _| {
- assert_eq!(store.notification_count(), 1);
- assert_eq!(store.unread_notification_count(), 1);
-
- let entry = store.notification_at(0).unwrap();
- assert_eq!(
- entry.notification,
- Notification::ContactRequestAccepted {
- responder_id: client_b.id()
- }
- );
- assert!(!entry.is_read);
- });
-
- // Client A creates a channel and invites client B to be a member.
- let channel_id = client_a
- .channel_store()
- .update(cx_a, |store, cx| {
- store.create_channel("the-channel", None, cx)
- })
- .await
- .unwrap();
- client_a
- .channel_store()
- .update(cx_a, |store, cx| {
- store.invite_member(channel_id, client_b.id(), proto::ChannelRole::Member, cx)
- })
- .await
- .unwrap();
-
- // Client B receives a channel invitation notification and responds to the
- // invitation, accepting it.
- executor.run_until_parked();
- client_b.notification_store().update(cx_b, |store, cx| {
- assert_eq!(store.notification_count(), 2);
- assert_eq!(store.unread_notification_count(), 1);
-
- let entry = store.notification_at(0).unwrap();
- assert_eq!(
- entry.notification,
- Notification::ChannelInvitation {
- channel_id,
- channel_name: "the-channel".to_string(),
- inviter_id: client_a.id()
- }
- );
- assert!(!entry.is_read);
-
- store.respond_to_notification(entry.notification.clone(), true, cx);
- });
-
- // Client B sees the notification is now read, and that they responded.
- executor.run_until_parked();
- client_b.notification_store().read_with(cx_b, |store, _| {
- assert_eq!(store.notification_count(), 2);
- assert_eq!(store.unread_notification_count(), 0);
-
- let entry = store.notification_at(0).unwrap();
- assert!(entry.is_read);
- assert_eq!(entry.response, Some(true));
- });
-}
@@ -1,288 +0,0 @@
-use crate::db::ChannelRole;
-
-use super::{run_randomized_test, RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
-use anyhow::Result;
-use async_trait::async_trait;
-use gpui::{BackgroundExecutor, SharedString, TestAppContext};
-use rand::prelude::*;
-use serde_derive::{Deserialize, Serialize};
-use std::{
- ops::{Deref, DerefMut, Range},
- rc::Rc,
- sync::Arc,
-};
-use text::Bias;
-
-#[gpui::test(
- iterations = 100,
- on_failure = "crate::tests::save_randomized_test_plan"
-)]
-async fn test_random_channel_buffers(
- cx: &mut TestAppContext,
- executor: BackgroundExecutor,
- rng: StdRng,
-) {
- run_randomized_test::<RandomChannelBufferTest>(cx, executor, rng).await;
-}
-
-struct RandomChannelBufferTest;
-
-#[derive(Clone, Serialize, Deserialize)]
-enum ChannelBufferOperation {
- JoinChannelNotes {
- channel_name: SharedString,
- },
- LeaveChannelNotes {
- channel_name: SharedString,
- },
- EditChannelNotes {
- channel_name: SharedString,
- edits: Vec<(Range<usize>, Arc<str>)>,
- },
- Noop,
-}
-
-const CHANNEL_COUNT: usize = 3;
-
-#[async_trait(?Send)]
-impl RandomizedTest for RandomChannelBufferTest {
- type Operation = ChannelBufferOperation;
-
- async fn initialize(server: &mut TestServer, users: &[UserTestPlan]) {
- let db = &server.app_state.db;
- for ix in 0..CHANNEL_COUNT {
- let id = db
- .create_root_channel(&format!("channel-{ix}"), users[0].user_id)
- .await
- .unwrap();
- for user in &users[1..] {
- db.invite_channel_member(id, user.user_id, users[0].user_id, ChannelRole::Member)
- .await
- .unwrap();
- db.respond_to_channel_invite(id, user.user_id, true)
- .await
- .unwrap();
- }
- }
- }
-
- fn generate_operation(
- client: &TestClient,
- rng: &mut StdRng,
- _: &mut UserTestPlan,
- cx: &TestAppContext,
- ) -> ChannelBufferOperation {
- let channel_store = client.channel_store().clone();
- let mut channel_buffers = client.channel_buffers();
-
- // When signed out, we can't do anything unless a channel buffer is
- // already open.
- if channel_buffers.deref_mut().is_empty()
- && channel_store.read_with(cx, |store, _| store.channel_count() == 0)
- {
- return ChannelBufferOperation::Noop;
- }
-
- loop {
- match rng.gen_range(0..100_u32) {
- 0..=29 => {
- let channel_name = client.channel_store().read_with(cx, |store, cx| {
- store.ordered_channels().find_map(|(_, channel)| {
- if store.has_open_channel_buffer(channel.id, cx) {
- None
- } else {
- Some(channel.name.clone())
- }
- })
- });
- if let Some(channel_name) = channel_name {
- break ChannelBufferOperation::JoinChannelNotes { channel_name };
- }
- }
-
- 30..=40 => {
- if let Some(buffer) = channel_buffers.deref().iter().choose(rng) {
- let channel_name =
- buffer.read_with(cx, |b, cx| b.channel(cx).unwrap().name.clone());
- break ChannelBufferOperation::LeaveChannelNotes { channel_name };
- }
- }
-
- _ => {
- if let Some(buffer) = channel_buffers.deref().iter().choose(rng) {
- break buffer.read_with(cx, |b, cx| {
- let channel_name = b.channel(cx).unwrap().name.clone();
- let edits = b
- .buffer()
- .read_with(cx, |buffer, _| buffer.get_random_edits(rng, 3));
- ChannelBufferOperation::EditChannelNotes {
- channel_name,
- edits,
- }
- });
- }
- }
- }
- }
- }
-
- async fn apply_operation(
- client: &TestClient,
- operation: ChannelBufferOperation,
- cx: &mut TestAppContext,
- ) -> Result<(), TestError> {
- match operation {
- ChannelBufferOperation::JoinChannelNotes { channel_name } => {
- let buffer = client.channel_store().update(cx, |store, cx| {
- let channel_id = store
- .ordered_channels()
- .find(|(_, c)| c.name == channel_name)
- .unwrap()
- .1
- .id;
- if store.has_open_channel_buffer(channel_id, cx) {
- Err(TestError::Inapplicable)
- } else {
- Ok(store.open_channel_buffer(channel_id, cx))
- }
- })?;
-
- log::info!(
- "{}: opening notes for channel {channel_name}",
- client.username
- );
- client.channel_buffers().deref_mut().insert(buffer.await?);
- }
-
- ChannelBufferOperation::LeaveChannelNotes { channel_name } => {
- let buffer = cx.update(|cx| {
- let mut left_buffer = Err(TestError::Inapplicable);
- client.channel_buffers().deref_mut().retain(|buffer| {
- if buffer.read(cx).channel(cx).unwrap().name == channel_name {
- left_buffer = Ok(buffer.clone());
- false
- } else {
- true
- }
- });
- left_buffer
- })?;
-
- log::info!(
- "{}: closing notes for channel {channel_name}",
- client.username
- );
- cx.update(|_| drop(buffer));
- }
-
- ChannelBufferOperation::EditChannelNotes {
- channel_name,
- edits,
- } => {
- let channel_buffer = cx
- .read(|cx| {
- client
- .channel_buffers()
- .deref()
- .iter()
- .find(|buffer| {
- buffer.read(cx).channel(cx).unwrap().name == channel_name
- })
- .cloned()
- })
- .ok_or_else(|| TestError::Inapplicable)?;
-
- log::info!(
- "{}: editing notes for channel {channel_name} with {:?}",
- client.username,
- edits
- );
-
- channel_buffer.update(cx, |buffer, cx| {
- let buffer = buffer.buffer();
- buffer.update(cx, |buffer, cx| {
- let snapshot = buffer.snapshot();
- buffer.edit(
- edits.into_iter().map(|(range, text)| {
- let start = snapshot.clip_offset(range.start, Bias::Left);
- let end = snapshot.clip_offset(range.end, Bias::Right);
- (start..end, text)
- }),
- None,
- cx,
- );
- });
- });
- }
-
- ChannelBufferOperation::Noop => Err(TestError::Inapplicable)?,
- }
- Ok(())
- }
-
- async fn on_quiesce(server: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
- let channels = server.app_state.db.all_channels().await.unwrap();
-
- for (client, client_cx) in clients.iter_mut() {
- client_cx.update(|cx| {
- client
- .channel_buffers()
- .deref_mut()
- .retain(|b| b.read(cx).is_connected());
- });
- }
-
- for (channel_id, channel_name) in channels {
- let mut prev_text: Option<(u64, String)> = None;
-
- let mut collaborator_user_ids = server
- .app_state
- .db
- .get_channel_buffer_collaborators(channel_id)
- .await
- .unwrap()
- .into_iter()
- .map(|id| id.to_proto())
- .collect::<Vec<_>>();
- collaborator_user_ids.sort();
-
- for (client, client_cx) in clients.iter() {
- let user_id = client.user_id().unwrap();
- client_cx.read(|cx| {
- if let Some(channel_buffer) = client
- .channel_buffers()
- .deref()
- .iter()
- .find(|b| b.read(cx).channel_id == channel_id.to_proto())
- {
- let channel_buffer = channel_buffer.read(cx);
-
- // Assert that channel buffer's text matches other clients' copies.
- let text = channel_buffer.buffer().read(cx).text();
- if let Some((prev_user_id, prev_text)) = &prev_text {
- assert_eq!(
- &text,
- prev_text,
- "client {user_id} has different text than client {prev_user_id} for channel {channel_name}",
- );
- } else {
- prev_text = Some((user_id, text.clone()));
- }
-
- // Assert that all clients and the server agree about who is present in the
- // channel buffer.
- let collaborators = channel_buffer.collaborators();
- let mut user_ids =
- collaborators.values().map(|c| c.user_id).collect::<Vec<_>>();
- user_ids.sort();
- assert_eq!(
- user_ids,
- collaborator_user_ids,
- "client {user_id} has different user ids for channel {channel_name} than the server",
- );
- }
- });
- }
- }
- }
-}
@@ -1,1587 +0,0 @@
-use super::{RandomizedTest, TestClient, TestError, TestServer, UserTestPlan};
-use crate::{db::UserId, tests::run_randomized_test};
-use anyhow::{anyhow, Result};
-use async_trait::async_trait;
-use call::ActiveCall;
-use collections::{BTreeMap, HashMap};
-use editor::Bias;
-use fs::{repository::GitFileStatus, FakeFs, Fs as _};
-use futures::StreamExt;
-use gpui::{BackgroundExecutor, Model, TestAppContext};
-use language::{range_to_lsp, FakeLspAdapter, Language, LanguageConfig, PointUtf16};
-use lsp::FakeLanguageServer;
-use pretty_assertions::assert_eq;
-use project::{search::SearchQuery, Project, ProjectPath};
-use rand::{
- distributions::{Alphanumeric, DistString},
- prelude::*,
-};
-use serde::{Deserialize, Serialize};
-use std::{
- ops::{Deref, Range},
- path::{Path, PathBuf},
- rc::Rc,
- sync::Arc,
-};
-use util::ResultExt;
-
-#[gpui::test(
- iterations = 100,
- on_failure = "crate::tests::save_randomized_test_plan"
-)]
-async fn test_random_project_collaboration(
- cx: &mut TestAppContext,
- executor: BackgroundExecutor,
- rng: StdRng,
-) {
- run_randomized_test::<ProjectCollaborationTest>(cx, executor, rng).await;
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-enum ClientOperation {
- AcceptIncomingCall,
- RejectIncomingCall,
- LeaveCall,
- InviteContactToCall {
- user_id: UserId,
- },
- OpenLocalProject {
- first_root_name: String,
- },
- OpenRemoteProject {
- host_id: UserId,
- first_root_name: String,
- },
- AddWorktreeToProject {
- project_root_name: String,
- new_root_path: PathBuf,
- },
- CloseRemoteProject {
- project_root_name: String,
- },
- OpenBuffer {
- project_root_name: String,
- is_local: bool,
- full_path: PathBuf,
- },
- SearchProject {
- project_root_name: String,
- is_local: bool,
- query: String,
- detach: bool,
- },
- EditBuffer {
- project_root_name: String,
- is_local: bool,
- full_path: PathBuf,
- edits: Vec<(Range<usize>, Arc<str>)>,
- },
- CloseBuffer {
- project_root_name: String,
- is_local: bool,
- full_path: PathBuf,
- },
- SaveBuffer {
- project_root_name: String,
- is_local: bool,
- full_path: PathBuf,
- detach: bool,
- },
- RequestLspDataInBuffer {
- project_root_name: String,
- is_local: bool,
- full_path: PathBuf,
- offset: usize,
- kind: LspRequestKind,
- detach: bool,
- },
- CreateWorktreeEntry {
- project_root_name: String,
- is_local: bool,
- full_path: PathBuf,
- is_dir: bool,
- },
- WriteFsEntry {
- path: PathBuf,
- is_dir: bool,
- content: String,
- },
- GitOperation {
- operation: GitOperation,
- },
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-enum GitOperation {
- WriteGitIndex {
- repo_path: PathBuf,
- contents: Vec<(PathBuf, String)>,
- },
- WriteGitBranch {
- repo_path: PathBuf,
- new_branch: Option<String>,
- },
- WriteGitStatuses {
- repo_path: PathBuf,
- statuses: Vec<(PathBuf, GitFileStatus)>,
- git_operation: bool,
- },
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-enum LspRequestKind {
- Rename,
- Completion,
- CodeAction,
- Definition,
- Highlights,
-}
-
-struct ProjectCollaborationTest;
-
-#[async_trait(?Send)]
-impl RandomizedTest for ProjectCollaborationTest {
- type Operation = ClientOperation;
-
- async fn initialize(server: &mut TestServer, users: &[UserTestPlan]) {
- let db = &server.app_state.db;
- for (ix, user_a) in users.iter().enumerate() {
- for user_b in &users[ix + 1..] {
- db.send_contact_request(user_a.user_id, user_b.user_id)
- .await
- .unwrap();
- db.respond_to_contact_request(user_b.user_id, user_a.user_id, true)
- .await
- .unwrap();
- }
- }
- }
-
- fn generate_operation(
- client: &TestClient,
- rng: &mut StdRng,
- plan: &mut UserTestPlan,
- cx: &TestAppContext,
- ) -> ClientOperation {
- let call = cx.read(ActiveCall::global);
- loop {
- match rng.gen_range(0..100_u32) {
- // Mutate the call
- 0..=29 => {
- // Respond to an incoming call
- if call.read_with(cx, |call, _| call.incoming().borrow().is_some()) {
- break if rng.gen_bool(0.7) {
- ClientOperation::AcceptIncomingCall
- } else {
- ClientOperation::RejectIncomingCall
- };
- }
-
- match rng.gen_range(0..100_u32) {
- // Invite a contact to the current call
- 0..=70 => {
- let available_contacts =
- client.user_store().read_with(cx, |user_store, _| {
- user_store
- .contacts()
- .iter()
- .filter(|contact| contact.online && !contact.busy)
- .cloned()
- .collect::<Vec<_>>()
- });
- if !available_contacts.is_empty() {
- let contact = available_contacts.choose(rng).unwrap();
- break ClientOperation::InviteContactToCall {
- user_id: UserId(contact.user.id as i32),
- };
- }
- }
-
- // Leave the current call
- 71.. => {
- if plan.allow_client_disconnection
- && call.read_with(cx, |call, _| call.room().is_some())
- {
- break ClientOperation::LeaveCall;
- }
- }
- }
- }
-
- // Mutate projects
- 30..=59 => match rng.gen_range(0..100_u32) {
- // Open a new project
- 0..=70 => {
- // Open a remote project
- if let Some(room) = call.read_with(cx, |call, _| call.room().cloned()) {
- let existing_remote_project_ids = cx.read(|cx| {
- client
- .remote_projects()
- .iter()
- .map(|p| p.read(cx).remote_id().unwrap())
- .collect::<Vec<_>>()
- });
- let new_remote_projects = room.read_with(cx, |room, _| {
- room.remote_participants()
- .values()
- .flat_map(|participant| {
- participant.projects.iter().filter_map(|project| {
- if existing_remote_project_ids.contains(&project.id) {
- None
- } else {
- Some((
- UserId::from_proto(participant.user.id),
- project.worktree_root_names[0].clone(),
- ))
- }
- })
- })
- .collect::<Vec<_>>()
- });
- if !new_remote_projects.is_empty() {
- let (host_id, first_root_name) =
- new_remote_projects.choose(rng).unwrap().clone();
- break ClientOperation::OpenRemoteProject {
- host_id,
- first_root_name,
- };
- }
- }
- // Open a local project
- else {
- let first_root_name = plan.next_root_dir_name();
- break ClientOperation::OpenLocalProject { first_root_name };
- }
- }
-
- // Close a remote project
- 71..=80 => {
- if !client.remote_projects().is_empty() {
- let project = client.remote_projects().choose(rng).unwrap().clone();
- let first_root_name = root_name_for_project(&project, cx);
- break ClientOperation::CloseRemoteProject {
- project_root_name: first_root_name,
- };
- }
- }
-
- // Mutate project worktrees
- 81.. => match rng.gen_range(0..100_u32) {
- // Add a worktree to a local project
- 0..=50 => {
- let Some(project) = client.local_projects().choose(rng).cloned() else {
- continue;
- };
- let project_root_name = root_name_for_project(&project, cx);
- let mut paths = client.fs().paths(false);
- paths.remove(0);
- let new_root_path = if paths.is_empty() || rng.gen() {
- Path::new("/").join(&plan.next_root_dir_name())
- } else {
- paths.choose(rng).unwrap().clone()
- };
- break ClientOperation::AddWorktreeToProject {
- project_root_name,
- new_root_path,
- };
- }
-
- // Add an entry to a worktree
- _ => {
- let Some(project) = choose_random_project(client, rng) else {
- continue;
- };
- let project_root_name = root_name_for_project(&project, cx);
- let is_local = project.read_with(cx, |project, _| project.is_local());
- let worktree = project.read_with(cx, |project, cx| {
- project
- .worktrees()
- .filter(|worktree| {
- let worktree = worktree.read(cx);
- worktree.is_visible()
- && worktree.entries(false).any(|e| e.is_file())
- && worktree.root_entry().map_or(false, |e| e.is_dir())
- })
- .choose(rng)
- });
- let Some(worktree) = worktree else { continue };
- let is_dir = rng.gen::<bool>();
- let mut full_path =
- worktree.read_with(cx, |w, _| PathBuf::from(w.root_name()));
- full_path.push(gen_file_name(rng));
- if !is_dir {
- full_path.set_extension("rs");
- }
- break ClientOperation::CreateWorktreeEntry {
- project_root_name,
- is_local,
- full_path,
- is_dir,
- };
- }
- },
- },
-
- // Query and mutate buffers
- 60..=90 => {
- let Some(project) = choose_random_project(client, rng) else {
- continue;
- };
- let project_root_name = root_name_for_project(&project, cx);
- let is_local = project.read_with(cx, |project, _| project.is_local());
-
- match rng.gen_range(0..100_u32) {
- // Manipulate an existing buffer
- 0..=70 => {
- let Some(buffer) = client
- .buffers_for_project(&project)
- .iter()
- .choose(rng)
- .cloned()
- else {
- continue;
- };
-
- let full_path = buffer
- .read_with(cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
-
- match rng.gen_range(0..100_u32) {
- // Close the buffer
- 0..=15 => {
- break ClientOperation::CloseBuffer {
- project_root_name,
- is_local,
- full_path,
- };
- }
- // Save the buffer
- 16..=29 if buffer.read_with(cx, |b, _| b.is_dirty()) => {
- let detach = rng.gen_bool(0.3);
- break ClientOperation::SaveBuffer {
- project_root_name,
- is_local,
- full_path,
- detach,
- };
- }
- // Edit the buffer
- 30..=69 => {
- let edits = buffer
- .read_with(cx, |buffer, _| buffer.get_random_edits(rng, 3));
- break ClientOperation::EditBuffer {
- project_root_name,
- is_local,
- full_path,
- edits,
- };
- }
- // Make an LSP request
- _ => {
- let offset = buffer.read_with(cx, |buffer, _| {
- buffer.clip_offset(
- rng.gen_range(0..=buffer.len()),
- language::Bias::Left,
- )
- });
- let detach = rng.gen();
- break ClientOperation::RequestLspDataInBuffer {
- project_root_name,
- full_path,
- offset,
- is_local,
- kind: match rng.gen_range(0..5_u32) {
- 0 => LspRequestKind::Rename,
- 1 => LspRequestKind::Highlights,
- 2 => LspRequestKind::Definition,
- 3 => LspRequestKind::CodeAction,
- 4.. => LspRequestKind::Completion,
- },
- detach,
- };
- }
- }
- }
-
- 71..=80 => {
- let query = rng.gen_range('a'..='z').to_string();
- let detach = rng.gen_bool(0.3);
- break ClientOperation::SearchProject {
- project_root_name,
- is_local,
- query,
- detach,
- };
- }
-
- // Open a buffer
- 81.. => {
- let worktree = project.read_with(cx, |project, cx| {
- project
- .worktrees()
- .filter(|worktree| {
- let worktree = worktree.read(cx);
- worktree.is_visible()
- && worktree.entries(false).any(|e| e.is_file())
- })
- .choose(rng)
- });
- let Some(worktree) = worktree else { continue };
- let full_path = worktree.read_with(cx, |worktree, _| {
- let entry = worktree
- .entries(false)
- .filter(|e| e.is_file())
- .choose(rng)
- .unwrap();
- if entry.path.as_ref() == Path::new("") {
- Path::new(worktree.root_name()).into()
- } else {
- Path::new(worktree.root_name()).join(&entry.path)
- }
- });
- break ClientOperation::OpenBuffer {
- project_root_name,
- is_local,
- full_path,
- };
- }
- }
- }
-
- // Update a git related action
- 91..=95 => {
- break ClientOperation::GitOperation {
- operation: generate_git_operation(rng, client),
- };
- }
-
- // Create or update a file or directory
- 96.. => {
- let is_dir = rng.gen::<bool>();
- let content;
- let mut path;
- let dir_paths = client.fs().directories(false);
-
- if is_dir {
- content = String::new();
- path = dir_paths.choose(rng).unwrap().clone();
- path.push(gen_file_name(rng));
- } else {
- content = Alphanumeric.sample_string(rng, 16);
-
- // Create a new file or overwrite an existing file
- let file_paths = client.fs().files();
- if file_paths.is_empty() || rng.gen_bool(0.5) {
- path = dir_paths.choose(rng).unwrap().clone();
- path.push(gen_file_name(rng));
- path.set_extension("rs");
- } else {
- path = file_paths.choose(rng).unwrap().clone()
- };
- }
- break ClientOperation::WriteFsEntry {
- path,
- is_dir,
- content,
- };
- }
- }
- }
- }
-
- async fn apply_operation(
- client: &TestClient,
- operation: ClientOperation,
- cx: &mut TestAppContext,
- ) -> Result<(), TestError> {
- match operation {
- ClientOperation::AcceptIncomingCall => {
- let active_call = cx.read(ActiveCall::global);
- if active_call.read_with(cx, |call, _| call.incoming().borrow().is_none()) {
- Err(TestError::Inapplicable)?;
- }
-
- log::info!("{}: accepting incoming call", client.username);
- active_call
- .update(cx, |call, cx| call.accept_incoming(cx))
- .await?;
- }
-
- ClientOperation::RejectIncomingCall => {
- let active_call = cx.read(ActiveCall::global);
- if active_call.read_with(cx, |call, _| call.incoming().borrow().is_none()) {
- Err(TestError::Inapplicable)?;
- }
-
- log::info!("{}: declining incoming call", client.username);
- active_call.update(cx, |call, cx| call.decline_incoming(cx))?;
- }
-
- ClientOperation::LeaveCall => {
- let active_call = cx.read(ActiveCall::global);
- if active_call.read_with(cx, |call, _| call.room().is_none()) {
- Err(TestError::Inapplicable)?;
- }
-
- log::info!("{}: hanging up", client.username);
- active_call.update(cx, |call, cx| call.hang_up(cx)).await?;
- }
-
- ClientOperation::InviteContactToCall { user_id } => {
- let active_call = cx.read(ActiveCall::global);
-
- log::info!("{}: inviting {}", client.username, user_id,);
- active_call
- .update(cx, |call, cx| call.invite(user_id.to_proto(), None, cx))
- .await
- .log_err();
- }
-
- ClientOperation::OpenLocalProject { first_root_name } => {
- log::info!(
- "{}: opening local project at {:?}",
- client.username,
- first_root_name
- );
-
- let root_path = Path::new("/").join(&first_root_name);
- client.fs().create_dir(&root_path).await.unwrap();
- client
- .fs()
- .create_file(&root_path.join("main.rs"), Default::default())
- .await
- .unwrap();
- let project = client.build_local_project(root_path, cx).await.0;
- ensure_project_shared(&project, client, cx).await;
- client.local_projects_mut().push(project.clone());
- }
-
- ClientOperation::AddWorktreeToProject {
- project_root_name,
- new_root_path,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: finding/creating local worktree at {:?} to project with root path {}",
- client.username,
- new_root_path,
- project_root_name
- );
-
- ensure_project_shared(&project, client, cx).await;
- if !client.fs().paths(false).contains(&new_root_path) {
- client.fs().create_dir(&new_root_path).await.unwrap();
- }
- project
- .update(cx, |project, cx| {
- project.find_or_create_local_worktree(&new_root_path, true, cx)
- })
- .await
- .unwrap();
- }
-
- ClientOperation::CloseRemoteProject { project_root_name } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: closing remote project with root path {}",
- client.username,
- project_root_name,
- );
-
- let ix = client
- .remote_projects()
- .iter()
- .position(|p| p == &project)
- .unwrap();
- cx.update(|_| {
- client.remote_projects_mut().remove(ix);
- client.buffers().retain(|p, _| *p != project);
- drop(project);
- });
- }
-
- ClientOperation::OpenRemoteProject {
- host_id,
- first_root_name,
- } => {
- let active_call = cx.read(ActiveCall::global);
- let project = active_call
- .update(cx, |call, cx| {
- let room = call.room().cloned()?;
- let participant = room
- .read(cx)
- .remote_participants()
- .get(&host_id.to_proto())?;
- let project_id = participant
- .projects
- .iter()
- .find(|project| project.worktree_root_names[0] == first_root_name)?
- .id;
- Some(room.update(cx, |room, cx| {
- room.join_project(
- project_id,
- client.language_registry().clone(),
- FakeFs::new(cx.background_executor().clone()),
- cx,
- )
- }))
- })
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: joining remote project of user {}, root name {}",
- client.username,
- host_id,
- first_root_name,
- );
-
- let project = project.await?;
- client.remote_projects_mut().push(project.clone());
- }
-
- ClientOperation::CreateWorktreeEntry {
- project_root_name,
- is_local,
- full_path,
- is_dir,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
- let project_path = project_path_for_full_path(&project, &full_path, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: creating {} at path {:?} in {} project {}",
- client.username,
- if is_dir { "dir" } else { "file" },
- full_path,
- if is_local { "local" } else { "remote" },
- project_root_name,
- );
-
- ensure_project_shared(&project, client, cx).await;
- project
- .update(cx, |p, cx| p.create_entry(project_path, is_dir, cx))
- .await?;
- }
-
- ClientOperation::OpenBuffer {
- project_root_name,
- is_local,
- full_path,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
- let project_path = project_path_for_full_path(&project, &full_path, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: opening buffer {:?} in {} project {}",
- client.username,
- full_path,
- if is_local { "local" } else { "remote" },
- project_root_name,
- );
-
- ensure_project_shared(&project, client, cx).await;
- let buffer = project
- .update(cx, |project, cx| project.open_buffer(project_path, cx))
- .await?;
- client.buffers_for_project(&project).insert(buffer);
- }
-
- ClientOperation::EditBuffer {
- project_root_name,
- is_local,
- full_path,
- edits,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
- let buffer = buffer_for_full_path(client, &project, &full_path, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: editing buffer {:?} in {} project {} with {:?}",
- client.username,
- full_path,
- if is_local { "local" } else { "remote" },
- project_root_name,
- edits
- );
-
- ensure_project_shared(&project, client, cx).await;
- buffer.update(cx, |buffer, cx| {
- let snapshot = buffer.snapshot();
- buffer.edit(
- edits.into_iter().map(|(range, text)| {
- let start = snapshot.clip_offset(range.start, Bias::Left);
- let end = snapshot.clip_offset(range.end, Bias::Right);
- (start..end, text)
- }),
- None,
- cx,
- );
- });
- }
-
- ClientOperation::CloseBuffer {
- project_root_name,
- is_local,
- full_path,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
- let buffer = buffer_for_full_path(client, &project, &full_path, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: closing buffer {:?} in {} project {}",
- client.username,
- full_path,
- if is_local { "local" } else { "remote" },
- project_root_name
- );
-
- ensure_project_shared(&project, client, cx).await;
- cx.update(|_| {
- client.buffers_for_project(&project).remove(&buffer);
- drop(buffer);
- });
- }
-
- ClientOperation::SaveBuffer {
- project_root_name,
- is_local,
- full_path,
- detach,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
- let buffer = buffer_for_full_path(client, &project, &full_path, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: saving buffer {:?} in {} project {}, {}",
- client.username,
- full_path,
- if is_local { "local" } else { "remote" },
- project_root_name,
- if detach { "detaching" } else { "awaiting" }
- );
-
- ensure_project_shared(&project, client, cx).await;
- let requested_version = buffer.read_with(cx, |buffer, _| buffer.version());
- let save =
- project.update(cx, |project, cx| project.save_buffer(buffer.clone(), cx));
- let save = cx.spawn(|cx| async move {
- save.await
- .map_err(|err| anyhow!("save request failed: {:?}", err))?;
- assert!(buffer
- .read_with(&cx, |buffer, _| { buffer.saved_version().to_owned() })
- .expect("App should not be dropped")
- .observed_all(&requested_version));
- anyhow::Ok(())
- });
- if detach {
- cx.update(|cx| save.detach_and_log_err(cx));
- } else {
- save.await?;
- }
- }
-
- ClientOperation::RequestLspDataInBuffer {
- project_root_name,
- is_local,
- full_path,
- offset,
- kind,
- detach,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
- let buffer = buffer_for_full_path(client, &project, &full_path, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: request LSP {:?} for buffer {:?} in {} project {}, {}",
- client.username,
- kind,
- full_path,
- if is_local { "local" } else { "remote" },
- project_root_name,
- if detach { "detaching" } else { "awaiting" }
- );
-
- use futures::{FutureExt as _, TryFutureExt as _};
- let offset = buffer.read_with(cx, |b, _| b.clip_offset(offset, Bias::Left));
-
- let process_lsp_request = project.update(cx, |project, cx| match kind {
- LspRequestKind::Rename => project
- .prepare_rename(buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::Completion => project
- .completions(&buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::CodeAction => project
- .code_actions(&buffer, offset..offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::Definition => project
- .definition(&buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- LspRequestKind::Highlights => project
- .document_highlights(&buffer, offset, cx)
- .map_ok(|_| ())
- .boxed(),
- });
- let request = cx.foreground_executor().spawn(process_lsp_request);
- if detach {
- request.detach();
- } else {
- request.await?;
- }
- }
-
- ClientOperation::SearchProject {
- project_root_name,
- is_local,
- query,
- detach,
- } => {
- let project = project_for_root_name(client, &project_root_name, cx)
- .ok_or(TestError::Inapplicable)?;
-
- log::info!(
- "{}: search {} project {} for {:?}, {}",
- client.username,
- if is_local { "local" } else { "remote" },
- project_root_name,
- query,
- if detach { "detaching" } else { "awaiting" }
- );
-
- let mut search = project.update(cx, |project, cx| {
- project.search(
- SearchQuery::text(query, false, false, false, Vec::new(), Vec::new())
- .unwrap(),
- cx,
- )
- });
- drop(project);
- let search = cx.executor().spawn(async move {
- let mut results = HashMap::default();
- while let Some((buffer, ranges)) = search.next().await {
- results.entry(buffer).or_insert(ranges);
- }
- results
- });
- search.await;
- }
-
- ClientOperation::WriteFsEntry {
- path,
- is_dir,
- content,
- } => {
- if !client
- .fs()
- .directories(false)
- .contains(&path.parent().unwrap().to_owned())
- {
- return Err(TestError::Inapplicable);
- }
-
- if is_dir {
- log::info!("{}: creating dir at {:?}", client.username, path);
- client.fs().create_dir(&path).await.unwrap();
- } else {
- let exists = client.fs().metadata(&path).await?.is_some();
- let verb = if exists { "updating" } else { "creating" };
- log::info!("{}: {} file at {:?}", verb, client.username, path);
-
- client
- .fs()
- .save(&path, &content.as_str().into(), text::LineEnding::Unix)
- .await
- .unwrap();
- }
- }
-
- ClientOperation::GitOperation { operation } => match operation {
- GitOperation::WriteGitIndex {
- repo_path,
- contents,
- } => {
- if !client.fs().directories(false).contains(&repo_path) {
- return Err(TestError::Inapplicable);
- }
-
- for (path, _) in contents.iter() {
- if !client.fs().files().contains(&repo_path.join(path)) {
- return Err(TestError::Inapplicable);
- }
- }
-
- log::info!(
- "{}: writing git index for repo {:?}: {:?}",
- client.username,
- repo_path,
- contents
- );
-
- let dot_git_dir = repo_path.join(".git");
- let contents = contents
- .iter()
- .map(|(path, contents)| (path.as_path(), contents.clone()))
- .collect::<Vec<_>>();
- if client.fs().metadata(&dot_git_dir).await?.is_none() {
- client.fs().create_dir(&dot_git_dir).await?;
- }
- client.fs().set_index_for_repo(&dot_git_dir, &contents);
- }
- GitOperation::WriteGitBranch {
- repo_path,
- new_branch,
- } => {
- if !client.fs().directories(false).contains(&repo_path) {
- return Err(TestError::Inapplicable);
- }
-
- log::info!(
- "{}: writing git branch for repo {:?}: {:?}",
- client.username,
- repo_path,
- new_branch
- );
-
- let dot_git_dir = repo_path.join(".git");
- if client.fs().metadata(&dot_git_dir).await?.is_none() {
- client.fs().create_dir(&dot_git_dir).await?;
- }
- client
- .fs()
- .set_branch_name(&dot_git_dir, new_branch.clone());
- }
- GitOperation::WriteGitStatuses {
- repo_path,
- statuses,
- git_operation,
- } => {
- if !client.fs().directories(false).contains(&repo_path) {
- return Err(TestError::Inapplicable);
- }
- for (path, _) in statuses.iter() {
- if !client.fs().files().contains(&repo_path.join(path)) {
- return Err(TestError::Inapplicable);
- }
- }
-
- log::info!(
- "{}: writing git statuses for repo {:?}: {:?}",
- client.username,
- repo_path,
- statuses
- );
-
- let dot_git_dir = repo_path.join(".git");
-
- let statuses = statuses
- .iter()
- .map(|(path, val)| (path.as_path(), val.clone()))
- .collect::<Vec<_>>();
-
- if client.fs().metadata(&dot_git_dir).await?.is_none() {
- client.fs().create_dir(&dot_git_dir).await?;
- }
-
- if git_operation {
- client.fs().set_status_for_repo_via_git_operation(
- &dot_git_dir,
- statuses.as_slice(),
- );
- } else {
- client.fs().set_status_for_repo_via_working_copy_change(
- &dot_git_dir,
- statuses.as_slice(),
- );
- }
- }
- },
- }
- Ok(())
- }
-
- async fn on_client_added(client: &Rc<TestClient>, _: &mut TestAppContext) {
- let mut language = Language::new(
- LanguageConfig {
- name: "Rust".into(),
- path_suffixes: vec!["rs".to_string()],
- ..Default::default()
- },
- None,
- );
- language
- .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
- name: "the-fake-language-server",
- capabilities: lsp::LanguageServer::full_capabilities(),
- initializer: Some(Box::new({
- let fs = client.app_state.fs.clone();
- move |fake_server: &mut FakeLanguageServer| {
- fake_server.handle_request::<lsp::request::Completion, _, _>(
- |_, _| async move {
- Ok(Some(lsp::CompletionResponse::Array(vec![
- lsp::CompletionItem {
- text_edit: Some(lsp::CompletionTextEdit::Edit(
- lsp::TextEdit {
- range: lsp::Range::new(
- lsp::Position::new(0, 0),
- lsp::Position::new(0, 0),
- ),
- new_text: "the-new-text".to_string(),
- },
- )),
- ..Default::default()
- },
- ])))
- },
- );
-
- fake_server.handle_request::<lsp::request::CodeActionRequest, _, _>(
- |_, _| async move {
- Ok(Some(vec![lsp::CodeActionOrCommand::CodeAction(
- lsp::CodeAction {
- title: "the-code-action".to_string(),
- ..Default::default()
- },
- )]))
- },
- );
-
- fake_server.handle_request::<lsp::request::PrepareRenameRequest, _, _>(
- |params, _| async move {
- Ok(Some(lsp::PrepareRenameResponse::Range(lsp::Range::new(
- params.position,
- params.position,
- ))))
- },
- );
-
- fake_server.handle_request::<lsp::request::GotoDefinition, _, _>({
- let fs = fs.clone();
- move |_, cx| {
- let background = cx.background_executor();
- let mut rng = background.rng();
- let count = rng.gen_range::<usize, _>(1..3);
- let files = fs.as_fake().files();
- let files = (0..count)
- .map(|_| files.choose(&mut rng).unwrap().clone())
- .collect::<Vec<_>>();
- async move {
- log::info!("LSP: Returning definitions in files {:?}", &files);
- Ok(Some(lsp::GotoDefinitionResponse::Array(
- files
- .into_iter()
- .map(|file| lsp::Location {
- uri: lsp::Url::from_file_path(file).unwrap(),
- range: Default::default(),
- })
- .collect(),
- )))
- }
- }
- });
-
- fake_server.handle_request::<lsp::request::DocumentHighlightRequest, _, _>(
- move |_, cx| {
- let mut highlights = Vec::new();
- let background = cx.background_executor();
- let mut rng = background.rng();
-
- let highlight_count = rng.gen_range(1..=5);
- for _ in 0..highlight_count {
- let start_row = rng.gen_range(0..100);
- let start_column = rng.gen_range(0..100);
- let end_row = rng.gen_range(0..100);
- let end_column = rng.gen_range(0..100);
- let start = PointUtf16::new(start_row, start_column);
- let end = PointUtf16::new(end_row, end_column);
- let range = if start > end { end..start } else { start..end };
- highlights.push(lsp::DocumentHighlight {
- range: range_to_lsp(range.clone()),
- kind: Some(lsp::DocumentHighlightKind::READ),
- });
- }
- highlights.sort_unstable_by_key(|highlight| {
- (highlight.range.start, highlight.range.end)
- });
- async move { Ok(Some(highlights)) }
- },
- );
- }
- })),
- ..Default::default()
- }))
- .await;
- client.app_state.languages.add(Arc::new(language));
- }
-
- async fn on_quiesce(_: &mut TestServer, clients: &mut [(Rc<TestClient>, TestAppContext)]) {
- for (client, client_cx) in clients.iter() {
- for guest_project in client.remote_projects().iter() {
- guest_project.read_with(client_cx, |guest_project, cx| {
- let host_project = clients.iter().find_map(|(client, cx)| {
- let project = client
- .local_projects()
- .iter()
- .find(|host_project| {
- host_project.read_with(cx, |host_project, _| {
- host_project.remote_id() == guest_project.remote_id()
- })
- })?
- .clone();
- Some((project, cx))
- });
-
- if !guest_project.is_read_only() {
- if let Some((host_project, host_cx)) = host_project {
- let host_worktree_snapshots =
- host_project.read_with(host_cx, |host_project, cx| {
- host_project
- .worktrees()
- .map(|worktree| {
- let worktree = worktree.read(cx);
- (worktree.id(), worktree.snapshot())
- })
- .collect::<BTreeMap<_, _>>()
- });
- let guest_worktree_snapshots = guest_project
- .worktrees()
- .map(|worktree| {
- let worktree = worktree.read(cx);
- (worktree.id(), worktree.snapshot())
- })
- .collect::<BTreeMap<_, _>>();
-
- assert_eq!(
- guest_worktree_snapshots.values().map(|w| w.abs_path()).collect::<Vec<_>>(),
- host_worktree_snapshots.values().map(|w| w.abs_path()).collect::<Vec<_>>(),
- "{} has different worktrees than the host for project {:?}",
- client.username, guest_project.remote_id(),
- );
-
- for (id, host_snapshot) in &host_worktree_snapshots {
- let guest_snapshot = &guest_worktree_snapshots[id];
- assert_eq!(
- guest_snapshot.root_name(),
- host_snapshot.root_name(),
- "{} has different root name than the host for worktree {}, project {:?}",
- client.username,
- id,
- guest_project.remote_id(),
- );
- assert_eq!(
- guest_snapshot.abs_path(),
- host_snapshot.abs_path(),
- "{} has different abs path than the host for worktree {}, project: {:?}",
- client.username,
- id,
- guest_project.remote_id(),
- );
- assert_eq!(
- guest_snapshot.entries(false).collect::<Vec<_>>(),
- host_snapshot.entries(false).collect::<Vec<_>>(),
- "{} has different snapshot than the host for worktree {:?} ({:?}) and project {:?}",
- client.username,
- host_snapshot.abs_path(),
- id,
- guest_project.remote_id(),
- );
- assert_eq!(guest_snapshot.repositories().collect::<Vec<_>>(), host_snapshot.repositories().collect::<Vec<_>>(),
- "{} has different repositories than the host for worktree {:?} and project {:?}",
- client.username,
- host_snapshot.abs_path(),
- guest_project.remote_id(),
- );
- assert_eq!(guest_snapshot.scan_id(), host_snapshot.scan_id(),
- "{} has different scan id than the host for worktree {:?} and project {:?}",
- client.username,
- host_snapshot.abs_path(),
- guest_project.remote_id(),
- );
- }
- }
- }
-
- for buffer in guest_project.opened_buffers() {
- let buffer = buffer.read(cx);
- assert_eq!(
- buffer.deferred_ops_len(),
- 0,
- "{} has deferred operations for buffer {:?} in project {:?}",
- client.username,
- buffer.file().unwrap().full_path(cx),
- guest_project.remote_id(),
- );
- }
- });
- }
-
- let buffers = client.buffers().clone();
- for (guest_project, guest_buffers) in &buffers {
- let project_id = if guest_project.read_with(client_cx, |project, _| {
- project.is_local() || project.is_read_only()
- }) {
- continue;
- } else {
- guest_project
- .read_with(client_cx, |project, _| project.remote_id())
- .unwrap()
- };
- let guest_user_id = client.user_id().unwrap();
-
- let host_project = clients.iter().find_map(|(client, cx)| {
- let project = client
- .local_projects()
- .iter()
- .find(|host_project| {
- host_project.read_with(cx, |host_project, _| {
- host_project.remote_id() == Some(project_id)
- })
- })?
- .clone();
- Some((client.user_id().unwrap(), project, cx))
- });
-
- let (host_user_id, host_project, host_cx) =
- if let Some((host_user_id, host_project, host_cx)) = host_project {
- (host_user_id, host_project, host_cx)
- } else {
- continue;
- };
-
- for guest_buffer in guest_buffers {
- let buffer_id =
- guest_buffer.read_with(client_cx, |buffer, _| buffer.remote_id());
- let host_buffer = host_project.read_with(host_cx, |project, _| {
- project.buffer_for_id(buffer_id).unwrap_or_else(|| {
- panic!(
- "host does not have buffer for guest:{}, peer:{:?}, id:{}",
- client.username,
- client.peer_id(),
- buffer_id
- )
- })
- });
- let path = host_buffer
- .read_with(host_cx, |buffer, cx| buffer.file().unwrap().full_path(cx));
-
- assert_eq!(
- guest_buffer.read_with(client_cx, |buffer, _| buffer.deferred_ops_len()),
- 0,
- "{}, buffer {}, path {:?} has deferred operations",
- client.username,
- buffer_id,
- path,
- );
- assert_eq!(
- guest_buffer.read_with(client_cx, |buffer, _| buffer.text()),
- host_buffer.read_with(host_cx, |buffer, _| buffer.text()),
- "{}, buffer {}, path {:?}, differs from the host's buffer",
- client.username,
- buffer_id,
- path
- );
-
- let host_file = host_buffer.read_with(host_cx, |b, _| b.file().cloned());
- let guest_file = guest_buffer.read_with(client_cx, |b, _| b.file().cloned());
- match (host_file, guest_file) {
- (Some(host_file), Some(guest_file)) => {
- assert_eq!(guest_file.path(), host_file.path());
- assert_eq!(guest_file.is_deleted(), host_file.is_deleted());
- assert_eq!(
- guest_file.mtime(),
- host_file.mtime(),
- "guest {} mtime does not match host {} for path {:?} in project {}",
- guest_user_id,
- host_user_id,
- guest_file.path(),
- project_id,
- );
- }
- (None, None) => {}
- (None, _) => panic!("host's file is None, guest's isn't"),
- (_, None) => panic!("guest's file is None, hosts's isn't"),
- }
-
- let host_diff_base = host_buffer
- .read_with(host_cx, |b, _| b.diff_base().map(ToString::to_string));
- let guest_diff_base = guest_buffer
- .read_with(client_cx, |b, _| b.diff_base().map(ToString::to_string));
- assert_eq!(
- guest_diff_base, host_diff_base,
- "guest {} diff base does not match host's for path {path:?} in project {project_id}",
- client.username
- );
-
- let host_saved_version =
- host_buffer.read_with(host_cx, |b, _| b.saved_version().clone());
- let guest_saved_version =
- guest_buffer.read_with(client_cx, |b, _| b.saved_version().clone());
- assert_eq!(
- guest_saved_version, host_saved_version,
- "guest {} saved version does not match host's for path {path:?} in project {project_id}",
- client.username
- );
-
- let host_saved_version_fingerprint =
- host_buffer.read_with(host_cx, |b, _| b.saved_version_fingerprint());
- let guest_saved_version_fingerprint =
- guest_buffer.read_with(client_cx, |b, _| b.saved_version_fingerprint());
- assert_eq!(
- guest_saved_version_fingerprint, host_saved_version_fingerprint,
- "guest {} saved fingerprint does not match host's for path {path:?} in project {project_id}",
- client.username
- );
-
- let host_saved_mtime = host_buffer.read_with(host_cx, |b, _| b.saved_mtime());
- let guest_saved_mtime =
- guest_buffer.read_with(client_cx, |b, _| b.saved_mtime());
- assert_eq!(
- guest_saved_mtime, host_saved_mtime,
- "guest {} saved mtime does not match host's for path {path:?} in project {project_id}",
- client.username
- );
-
- let host_is_dirty = host_buffer.read_with(host_cx, |b, _| b.is_dirty());
- let guest_is_dirty = guest_buffer.read_with(client_cx, |b, _| b.is_dirty());
- assert_eq!(guest_is_dirty, host_is_dirty,
- "guest {} dirty status does not match host's for path {path:?} in project {project_id}",
- client.username
- );
-
- let host_has_conflict = host_buffer.read_with(host_cx, |b, _| b.has_conflict());
- let guest_has_conflict =
- guest_buffer.read_with(client_cx, |b, _| b.has_conflict());
- assert_eq!(guest_has_conflict, host_has_conflict,
- "guest {} conflict status does not match host's for path {path:?} in project {project_id}",
- client.username
- );
- }
- }
- }
- }
-}
-
-fn generate_git_operation(rng: &mut StdRng, client: &TestClient) -> GitOperation {
- fn generate_file_paths(
- repo_path: &Path,
- rng: &mut StdRng,
- client: &TestClient,
- ) -> Vec<PathBuf> {
- let mut paths = client
- .fs()
- .files()
- .into_iter()
- .filter(|path| path.starts_with(repo_path))
- .collect::<Vec<_>>();
-
- let count = rng.gen_range(0..=paths.len());
- paths.shuffle(rng);
- paths.truncate(count);
-
- paths
- .iter()
- .map(|path| path.strip_prefix(repo_path).unwrap().to_path_buf())
- .collect::<Vec<_>>()
- }
-
- let repo_path = client.fs().directories(false).choose(rng).unwrap().clone();
-
- match rng.gen_range(0..100_u32) {
- 0..=25 => {
- let file_paths = generate_file_paths(&repo_path, rng, client);
-
- let contents = file_paths
- .into_iter()
- .map(|path| (path, Alphanumeric.sample_string(rng, 16)))
- .collect();
-
- GitOperation::WriteGitIndex {
- repo_path,
- contents,
- }
- }
- 26..=63 => {
- let new_branch = (rng.gen_range(0..10) > 3).then(|| Alphanumeric.sample_string(rng, 8));
-
- GitOperation::WriteGitBranch {
- repo_path,
- new_branch,
- }
- }
- 64..=100 => {
- let file_paths = generate_file_paths(&repo_path, rng, client);
-
- let statuses = file_paths
- .into_iter()
- .map(|paths| {
- (
- paths,
- match rng.gen_range(0..3_u32) {
- 0 => GitFileStatus::Added,
- 1 => GitFileStatus::Modified,
- 2 => GitFileStatus::Conflict,
- _ => unreachable!(),
- },
- )
- })
- .collect::<Vec<_>>();
-
- let git_operation = rng.gen::<bool>();
-
- GitOperation::WriteGitStatuses {
- repo_path,
- statuses,
- git_operation,
- }
- }
- _ => unreachable!(),
- }
-}
-
-fn buffer_for_full_path(
- client: &TestClient,
- project: &Model<Project>,
- full_path: &PathBuf,
- cx: &TestAppContext,
-) -> Option<Model<language::Buffer>> {
- client
- .buffers_for_project(project)
- .iter()
- .find(|buffer| {
- buffer.read_with(cx, |buffer, cx| {
- buffer.file().unwrap().full_path(cx) == *full_path
- })
- })
- .cloned()
-}
-
-fn project_for_root_name(
- client: &TestClient,
- root_name: &str,
- cx: &TestAppContext,
-) -> Option<Model<Project>> {
- if let Some(ix) = project_ix_for_root_name(&*client.local_projects().deref(), root_name, cx) {
- return Some(client.local_projects()[ix].clone());
- }
- if let Some(ix) = project_ix_for_root_name(&*client.remote_projects().deref(), root_name, cx) {
- return Some(client.remote_projects()[ix].clone());
- }
- None
-}
-
-fn project_ix_for_root_name(
- projects: &[Model<Project>],
- root_name: &str,
- cx: &TestAppContext,
-) -> Option<usize> {
- projects.iter().position(|project| {
- project.read_with(cx, |project, cx| {
- let worktree = project.visible_worktrees(cx).next().unwrap();
- worktree.read(cx).root_name() == root_name
- })
- })
-}
-
-fn root_name_for_project(project: &Model<Project>, cx: &TestAppContext) -> String {
- project.read_with(cx, |project, cx| {
- project
- .visible_worktrees(cx)
- .next()
- .unwrap()
- .read(cx)
- .root_name()
- .to_string()
- })
-}
-
-fn project_path_for_full_path(
- project: &Model<Project>,
- full_path: &Path,
- cx: &TestAppContext,
-) -> Option<ProjectPath> {
- let mut components = full_path.components();
- let root_name = components.next().unwrap().as_os_str().to_str().unwrap();
- let path = components.as_path().into();
- let worktree_id = project.read_with(cx, |project, cx| {
- project.worktrees().find_map(|worktree| {
- let worktree = worktree.read(cx);
- if worktree.root_name() == root_name {
- Some(worktree.id())
- } else {
- None
- }
- })
- })?;
- Some(ProjectPath { worktree_id, path })
-}
-
-async fn ensure_project_shared(
- project: &Model<Project>,
- client: &TestClient,
- cx: &mut TestAppContext,
-) {
- let first_root_name = root_name_for_project(project, cx);
- let active_call = cx.read(ActiveCall::global);
- if active_call.read_with(cx, |call, _| call.room().is_some())
- && project.read_with(cx, |project, _| project.is_local() && !project.is_shared())
- {
- match active_call
- .update(cx, |call, cx| call.share_project(project.clone(), cx))
- .await
- {
- Ok(project_id) => {
- log::info!(
- "{}: shared project {} with id {}",
- client.username,
- first_root_name,
- project_id
- );
- }
- Err(error) => {
- log::error!(
- "{}: error sharing project {}: {:?}",
- client.username,
- first_root_name,
- error
- );
- }
- }
- }
-}
-
-fn choose_random_project(client: &TestClient, rng: &mut StdRng) -> Option<Model<Project>> {
- client
- .local_projects()
- .deref()
- .iter()
- .chain(client.remote_projects().iter())
- .choose(rng)
- .cloned()
-}
-
-fn gen_file_name(rng: &mut StdRng) -> String {
- let mut name = String::new();
- for _ in 0..10 {
- let letter = rng.gen_range('a'..='z');
- name.push(letter);
- }
- name
-}
@@ -1,677 +0,0 @@
-use crate::{
- db::{self, NewUserParams, UserId},
- rpc::{CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
- tests::{TestClient, TestServer},
-};
-use async_trait::async_trait;
-use futures::StreamExt;
-use gpui::{BackgroundExecutor, Task, TestAppContext};
-use parking_lot::Mutex;
-use rand::prelude::*;
-use rpc::RECEIVE_TIMEOUT;
-use serde::{de::DeserializeOwned, Deserialize, Serialize};
-use settings::SettingsStore;
-use std::{
- env,
- path::PathBuf,
- rc::Rc,
- sync::{
- atomic::{AtomicBool, Ordering::SeqCst},
- Arc,
- },
-};
-
-lazy_static::lazy_static! {
- static ref PLAN_LOAD_PATH: Option<PathBuf> = path_env_var("LOAD_PLAN");
- static ref PLAN_SAVE_PATH: Option<PathBuf> = path_env_var("SAVE_PLAN");
- static ref MAX_PEERS: usize = env::var("MAX_PEERS")
- .map(|i| i.parse().expect("invalid `MAX_PEERS` variable"))
- .unwrap_or(3);
- static ref MAX_OPERATIONS: usize = env::var("OPERATIONS")
- .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
- .unwrap_or(10);
-
-}
-
-static LOADED_PLAN_JSON: Mutex<Option<Vec<u8>>> = Mutex::new(None);
-static LAST_PLAN: Mutex<Option<Box<dyn Send + FnOnce() -> Vec<u8>>>> = Mutex::new(None);
-
-struct TestPlan<T: RandomizedTest> {
- rng: StdRng,
- replay: bool,
- stored_operations: Vec<(StoredOperation<T::Operation>, Arc<AtomicBool>)>,
- max_operations: usize,
- operation_ix: usize,
- users: Vec<UserTestPlan>,
- next_batch_id: usize,
- allow_server_restarts: bool,
- allow_client_reconnection: bool,
- allow_client_disconnection: bool,
-}
-
-pub struct UserTestPlan {
- pub user_id: UserId,
- pub username: String,
- pub allow_client_reconnection: bool,
- pub allow_client_disconnection: bool,
- next_root_id: usize,
- operation_ix: usize,
- online: bool,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-#[serde(untagged)]
-enum StoredOperation<T> {
- Server(ServerOperation),
- Client {
- user_id: UserId,
- batch_id: usize,
- operation: T,
- },
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize)]
-enum ServerOperation {
- AddConnection {
- user_id: UserId,
- },
- RemoveConnection {
- user_id: UserId,
- },
- BounceConnection {
- user_id: UserId,
- },
- RestartServer,
- MutateClients {
- batch_id: usize,
- #[serde(skip_serializing)]
- #[serde(skip_deserializing)]
- user_ids: Vec<UserId>,
- quiesce: bool,
- },
-}
-
-pub enum TestError {
- Inapplicable,
- Other(anyhow::Error),
-}
-
-#[async_trait(?Send)]
-pub trait RandomizedTest: 'static + Sized {
- type Operation: Send + Clone + Serialize + DeserializeOwned;
-
- fn generate_operation(
- client: &TestClient,
- rng: &mut StdRng,
- plan: &mut UserTestPlan,
- cx: &TestAppContext,
- ) -> Self::Operation;
-
- async fn apply_operation(
- client: &TestClient,
- operation: Self::Operation,
- cx: &mut TestAppContext,
- ) -> Result<(), TestError>;
-
- async fn initialize(server: &mut TestServer, users: &[UserTestPlan]);
-
- async fn on_client_added(_client: &Rc<TestClient>, _cx: &mut TestAppContext) {}
-
- async fn on_quiesce(server: &mut TestServer, client: &mut [(Rc<TestClient>, TestAppContext)]);
-}
-
-pub async fn run_randomized_test<T: RandomizedTest>(
- cx: &mut TestAppContext,
- executor: BackgroundExecutor,
- rng: StdRng,
-) {
- let mut server = TestServer::start(executor.clone()).await;
- let plan = TestPlan::<T>::new(&mut server, rng).await;
-
- LAST_PLAN.lock().replace({
- let plan = plan.clone();
- Box::new(move || plan.lock().serialize())
- });
-
- let mut clients = Vec::new();
- let mut client_tasks = Vec::new();
- let mut operation_channels = Vec::new();
- loop {
- let Some((next_operation, applied)) = plan.lock().next_server_operation(&clients) else {
- break;
- };
- applied.store(true, SeqCst);
- let did_apply = TestPlan::apply_server_operation(
- plan.clone(),
- executor.clone(),
- &mut server,
- &mut clients,
- &mut client_tasks,
- &mut operation_channels,
- next_operation,
- cx,
- )
- .await;
- if !did_apply {
- applied.store(false, SeqCst);
- }
- }
-
- drop(operation_channels);
- executor.start_waiting();
- futures::future::join_all(client_tasks).await;
- executor.finish_waiting();
-
- executor.run_until_parked();
- T::on_quiesce(&mut server, &mut clients).await;
-
- for (client, cx) in clients {
- cx.update(|cx| {
- let store = cx.remove_global::<SettingsStore>();
- cx.clear_globals();
- cx.set_global(store);
- drop(client);
- });
- }
- executor.run_until_parked();
-
- if let Some(path) = &*PLAN_SAVE_PATH {
- eprintln!("saved test plan to path {:?}", path);
- std::fs::write(path, plan.lock().serialize()).unwrap();
- }
-}
-
-pub fn save_randomized_test_plan() {
- if let Some(serialize_plan) = LAST_PLAN.lock().take() {
- if let Some(path) = &*PLAN_SAVE_PATH {
- eprintln!("saved test plan to path {:?}", path);
- std::fs::write(path, serialize_plan()).unwrap();
- }
- }
-}
-
-impl<T: RandomizedTest> TestPlan<T> {
- pub async fn new(server: &mut TestServer, mut rng: StdRng) -> Arc<Mutex<Self>> {
- let allow_server_restarts = rng.gen_bool(0.7);
- let allow_client_reconnection = rng.gen_bool(0.7);
- let allow_client_disconnection = rng.gen_bool(0.1);
-
- let mut users = Vec::new();
- for ix in 0..*MAX_PEERS {
- let username = format!("user-{}", ix + 1);
- let user_id = server
- .app_state
- .db
- .create_user(
- &format!("{username}@example.com"),
- false,
- NewUserParams {
- github_login: username.clone(),
- github_user_id: ix as i32,
- },
- )
- .await
- .unwrap()
- .user_id;
- users.push(UserTestPlan {
- user_id,
- username,
- online: false,
- next_root_id: 0,
- operation_ix: 0,
- allow_client_disconnection,
- allow_client_reconnection,
- });
- }
-
- T::initialize(server, &users).await;
-
- let plan = Arc::new(Mutex::new(Self {
- replay: false,
- allow_server_restarts,
- allow_client_reconnection,
- allow_client_disconnection,
- stored_operations: Vec::new(),
- operation_ix: 0,
- next_batch_id: 0,
- max_operations: *MAX_OPERATIONS,
- users,
- rng,
- }));
-
- if let Some(path) = &*PLAN_LOAD_PATH {
- let json = LOADED_PLAN_JSON
- .lock()
- .get_or_insert_with(|| {
- eprintln!("loaded test plan from path {:?}", path);
- std::fs::read(path).unwrap()
- })
- .clone();
- plan.lock().deserialize(json);
- }
-
- plan
- }
-
- fn deserialize(&mut self, json: Vec<u8>) {
- let stored_operations: Vec<StoredOperation<T::Operation>> =
- serde_json::from_slice(&json).unwrap();
- self.replay = true;
- self.stored_operations = stored_operations
- .iter()
- .cloned()
- .enumerate()
- .map(|(i, mut operation)| {
- let did_apply = Arc::new(AtomicBool::new(false));
- if let StoredOperation::Server(ServerOperation::MutateClients {
- batch_id: current_batch_id,
- user_ids,
- ..
- }) = &mut operation
- {
- assert!(user_ids.is_empty());
- user_ids.extend(stored_operations[i + 1..].iter().filter_map(|operation| {
- if let StoredOperation::Client {
- user_id, batch_id, ..
- } = operation
- {
- if batch_id == current_batch_id {
- return Some(user_id);
- }
- }
- None
- }));
- user_ids.sort_unstable();
- }
- (operation, did_apply)
- })
- .collect()
- }
-
- fn serialize(&mut self) -> Vec<u8> {
- // Format each operation as one line
- let mut json = Vec::new();
- json.push(b'[');
- for (operation, applied) in &self.stored_operations {
- if !applied.load(SeqCst) {
- continue;
- }
- if json.len() > 1 {
- json.push(b',');
- }
- json.extend_from_slice(b"\n ");
- serde_json::to_writer(&mut json, operation).unwrap();
- }
- json.extend_from_slice(b"\n]\n");
- json
- }
-
- fn next_server_operation(
- &mut self,
- clients: &[(Rc<TestClient>, TestAppContext)],
- ) -> Option<(ServerOperation, Arc<AtomicBool>)> {
- if self.replay {
- while let Some(stored_operation) = self.stored_operations.get(self.operation_ix) {
- self.operation_ix += 1;
- if let (StoredOperation::Server(operation), applied) = stored_operation {
- return Some((operation.clone(), applied.clone()));
- }
- }
- None
- } else {
- let operation = self.generate_server_operation(clients)?;
- let applied = Arc::new(AtomicBool::new(false));
- self.stored_operations
- .push((StoredOperation::Server(operation.clone()), applied.clone()));
- Some((operation, applied))
- }
- }
-
- fn next_client_operation(
- &mut self,
- client: &TestClient,
- current_batch_id: usize,
- cx: &TestAppContext,
- ) -> Option<(T::Operation, Arc<AtomicBool>)> {
- let current_user_id = client.current_user_id(cx);
- let user_ix = self
- .users
- .iter()
- .position(|user| user.user_id == current_user_id)
- .unwrap();
- let user_plan = &mut self.users[user_ix];
-
- if self.replay {
- while let Some(stored_operation) = self.stored_operations.get(user_plan.operation_ix) {
- user_plan.operation_ix += 1;
- if let (
- StoredOperation::Client {
- user_id, operation, ..
- },
- applied,
- ) = stored_operation
- {
- if user_id == ¤t_user_id {
- return Some((operation.clone(), applied.clone()));
- }
- }
- }
- None
- } else {
- if self.operation_ix == self.max_operations {
- return None;
- }
- self.operation_ix += 1;
- let operation = T::generate_operation(
- client,
- &mut self.rng,
- self.users
- .iter_mut()
- .find(|user| user.user_id == current_user_id)
- .unwrap(),
- cx,
- );
- let applied = Arc::new(AtomicBool::new(false));
- self.stored_operations.push((
- StoredOperation::Client {
- user_id: current_user_id,
- batch_id: current_batch_id,
- operation: operation.clone(),
- },
- applied.clone(),
- ));
- Some((operation, applied))
- }
- }
-
- fn generate_server_operation(
- &mut self,
- clients: &[(Rc<TestClient>, TestAppContext)],
- ) -> Option<ServerOperation> {
- if self.operation_ix == self.max_operations {
- return None;
- }
-
- Some(loop {
- break match self.rng.gen_range(0..100) {
- 0..=29 if clients.len() < self.users.len() => {
- let user = self
- .users
- .iter()
- .filter(|u| !u.online)
- .choose(&mut self.rng)
- .unwrap();
- self.operation_ix += 1;
- ServerOperation::AddConnection {
- user_id: user.user_id,
- }
- }
- 30..=34 if clients.len() > 1 && self.allow_client_disconnection => {
- let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
- let user_id = client.current_user_id(cx);
- self.operation_ix += 1;
- ServerOperation::RemoveConnection { user_id }
- }
- 35..=39 if clients.len() > 1 && self.allow_client_reconnection => {
- let (client, cx) = &clients[self.rng.gen_range(0..clients.len())];
- let user_id = client.current_user_id(cx);
- self.operation_ix += 1;
- ServerOperation::BounceConnection { user_id }
- }
- 40..=44 if self.allow_server_restarts && clients.len() > 1 => {
- self.operation_ix += 1;
- ServerOperation::RestartServer
- }
- _ if !clients.is_empty() => {
- let count = self
- .rng
- .gen_range(1..10)
- .min(self.max_operations - self.operation_ix);
- let batch_id = util::post_inc(&mut self.next_batch_id);
- let mut user_ids = (0..count)
- .map(|_| {
- let ix = self.rng.gen_range(0..clients.len());
- let (client, cx) = &clients[ix];
- client.current_user_id(cx)
- })
- .collect::<Vec<_>>();
- user_ids.sort_unstable();
- ServerOperation::MutateClients {
- user_ids,
- batch_id,
- quiesce: self.rng.gen_bool(0.7),
- }
- }
- _ => continue,
- };
- })
- }
-
- async fn apply_server_operation(
- plan: Arc<Mutex<Self>>,
- deterministic: BackgroundExecutor,
- server: &mut TestServer,
- clients: &mut Vec<(Rc<TestClient>, TestAppContext)>,
- client_tasks: &mut Vec<Task<()>>,
- operation_channels: &mut Vec<futures::channel::mpsc::UnboundedSender<usize>>,
- operation: ServerOperation,
- cx: &mut TestAppContext,
- ) -> bool {
- match operation {
- ServerOperation::AddConnection { user_id } => {
- let username;
- {
- let mut plan = plan.lock();
- let user = plan.user(user_id);
- if user.online {
- return false;
- }
- user.online = true;
- username = user.username.clone();
- };
- log::info!("adding new connection for {}", username);
-
- let mut client_cx = cx.new_app();
-
- let (operation_tx, operation_rx) = futures::channel::mpsc::unbounded();
- let client = Rc::new(server.create_client(&mut client_cx, &username).await);
- operation_channels.push(operation_tx);
- clients.push((client.clone(), client_cx.clone()));
-
- let foreground_executor = client_cx.foreground_executor().clone();
- let simulate_client =
- Self::simulate_client(plan.clone(), client, operation_rx, client_cx);
- client_tasks.push(foreground_executor.spawn(simulate_client));
-
- log::info!("added connection for {}", username);
- }
-
- ServerOperation::RemoveConnection {
- user_id: removed_user_id,
- } => {
- log::info!("simulating full disconnection of user {}", removed_user_id);
- let client_ix = clients
- .iter()
- .position(|(client, cx)| client.current_user_id(cx) == removed_user_id);
- let Some(client_ix) = client_ix else {
- return false;
- };
- let user_connection_ids = server
- .connection_pool
- .lock()
- .user_connection_ids(removed_user_id)
- .collect::<Vec<_>>();
- assert_eq!(user_connection_ids.len(), 1);
- let removed_peer_id = user_connection_ids[0].into();
- let (client, client_cx) = clients.remove(client_ix);
- let client_task = client_tasks.remove(client_ix);
- operation_channels.remove(client_ix);
- server.forbid_connections();
- server.disconnect_client(removed_peer_id);
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- deterministic.start_waiting();
- log::info!("waiting for user {} to exit...", removed_user_id);
- client_task.await;
- deterministic.finish_waiting();
- server.allow_connections();
-
- for project in client.remote_projects().iter() {
- project.read_with(&client_cx, |project, _| {
- assert!(
- project.is_read_only(),
- "project {:?} should be read only",
- project.remote_id()
- )
- });
- }
-
- for (client, cx) in clients {
- let contacts = server
- .app_state
- .db
- .get_contacts(client.current_user_id(cx))
- .await
- .unwrap();
- let pool = server.connection_pool.lock();
- for contact in contacts {
- if let db::Contact::Accepted { user_id, busy, .. } = contact {
- if user_id == removed_user_id {
- assert!(!pool.is_user_online(user_id));
- assert!(!busy);
- }
- }
- }
- }
-
- log::info!("{} removed", client.username);
- plan.lock().user(removed_user_id).online = false;
- client_cx.update(|cx| {
- cx.clear_globals();
- drop(client);
- });
- }
-
- ServerOperation::BounceConnection { user_id } => {
- log::info!("simulating temporary disconnection of user {}", user_id);
- let user_connection_ids = server
- .connection_pool
- .lock()
- .user_connection_ids(user_id)
- .collect::<Vec<_>>();
- if user_connection_ids.is_empty() {
- return false;
- }
- assert_eq!(user_connection_ids.len(), 1);
- let peer_id = user_connection_ids[0].into();
- server.disconnect_client(peer_id);
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- }
-
- ServerOperation::RestartServer => {
- log::info!("simulating server restart");
- server.reset().await;
- deterministic.advance_clock(RECEIVE_TIMEOUT);
- server.start().await.unwrap();
- deterministic.advance_clock(CLEANUP_TIMEOUT);
- let environment = &server.app_state.config.zed_environment;
- let (stale_room_ids, _) = server
- .app_state
- .db
- .stale_server_resource_ids(environment, server.id())
- .await
- .unwrap();
- assert_eq!(stale_room_ids, vec![]);
- }
-
- ServerOperation::MutateClients {
- user_ids,
- batch_id,
- quiesce,
- } => {
- let mut applied = false;
- for user_id in user_ids {
- let client_ix = clients
- .iter()
- .position(|(client, cx)| client.current_user_id(cx) == user_id);
- let Some(client_ix) = client_ix else { continue };
- applied = true;
- if let Err(err) = operation_channels[client_ix].unbounded_send(batch_id) {
- log::error!("error signaling user {user_id}: {err}");
- }
- }
-
- if quiesce && applied {
- deterministic.run_until_parked();
- T::on_quiesce(server, clients).await;
- }
-
- return applied;
- }
- }
- true
- }
-
- async fn simulate_client(
- plan: Arc<Mutex<Self>>,
- client: Rc<TestClient>,
- mut operation_rx: futures::channel::mpsc::UnboundedReceiver<usize>,
- mut cx: TestAppContext,
- ) {
- T::on_client_added(&client, &mut cx).await;
-
- while let Some(batch_id) = operation_rx.next().await {
- let Some((operation, applied)) =
- plan.lock().next_client_operation(&client, batch_id, &cx)
- else {
- break;
- };
- applied.store(true, SeqCst);
- match T::apply_operation(&client, operation, &mut cx).await {
- Ok(()) => {}
- Err(TestError::Inapplicable) => {
- applied.store(false, SeqCst);
- log::info!("skipped operation");
- }
- Err(TestError::Other(error)) => {
- log::error!("{} error: {}", client.username, error);
- }
- }
- cx.executor().simulate_random_delay().await;
- }
- log::info!("{}: done", client.username);
- }
-
- fn user(&mut self, user_id: UserId) -> &mut UserTestPlan {
- self.users
- .iter_mut()
- .find(|user| user.user_id == user_id)
- .unwrap()
- }
-}
-
-impl UserTestPlan {
- pub fn next_root_dir_name(&mut self) -> String {
- let user_id = self.user_id;
- let root_id = util::post_inc(&mut self.next_root_id);
- format!("dir-{user_id}-{root_id}")
- }
-}
-
-impl From<anyhow::Error> for TestError {
- fn from(value: anyhow::Error) -> Self {
- Self::Other(value)
- }
-}
-
-fn path_env_var(name: &str) -> Option<PathBuf> {
- let value = env::var(name).ok()?;
- let mut path = PathBuf::from(value);
- if path.is_relative() {
- let mut abs_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
- abs_path.pop();
- abs_path.pop();
- abs_path.push(path);
- path = abs_path
- }
- Some(path)
-}
@@ -1,616 +0,0 @@
-use crate::{
- db::{tests::TestDb, NewUserParams, UserId},
- executor::Executor,
- rpc::{Server, CLEANUP_TIMEOUT, RECONNECT_TIMEOUT},
- AppState,
-};
-use anyhow::anyhow;
-use call::ActiveCall;
-use channel::{ChannelBuffer, ChannelStore};
-use client::{
- self, proto::PeerId, Client, Connection, Credentials, EstablishConnectionError, UserStore,
-};
-use collections::{HashMap, HashSet};
-use fs::FakeFs;
-use futures::{channel::oneshot, StreamExt as _};
-use gpui::{BackgroundExecutor, Context, Model, TestAppContext, View, VisualTestContext};
-use language::LanguageRegistry;
-use node_runtime::FakeNodeRuntime;
-
-use notifications::NotificationStore;
-use parking_lot::Mutex;
-use project::{Project, WorktreeId};
-use rpc::{proto::ChannelRole, RECEIVE_TIMEOUT};
-use settings::SettingsStore;
-use std::{
- cell::{Ref, RefCell, RefMut},
- env,
- ops::{Deref, DerefMut},
- path::Path,
- sync::{
- atomic::{AtomicBool, AtomicUsize, Ordering::SeqCst},
- Arc,
- },
-};
-use util::http::FakeHttpClient;
-use workspace::{Workspace, WorkspaceStore};
-
-pub struct TestServer {
- pub app_state: Arc<AppState>,
- pub test_live_kit_server: Arc<live_kit_client::TestServer>,
- server: Arc<Server>,
- connection_killers: Arc<Mutex<HashMap<PeerId, Arc<AtomicBool>>>>,
- forbid_connections: Arc<AtomicBool>,
- _test_db: TestDb,
-}
-
-pub struct TestClient {
- pub username: String,
- pub app_state: Arc<workspace::AppState>,
- channel_store: Model<ChannelStore>,
- notification_store: Model<NotificationStore>,
- state: RefCell<TestClientState>,
-}
-
-#[derive(Default)]
-struct TestClientState {
- local_projects: Vec<Model<Project>>,
- remote_projects: Vec<Model<Project>>,
- buffers: HashMap<Model<Project>, HashSet<Model<language::Buffer>>>,
- channel_buffers: HashSet<Model<ChannelBuffer>>,
-}
-
-pub struct ContactsSummary {
- pub current: Vec<String>,
- pub outgoing_requests: Vec<String>,
- pub incoming_requests: Vec<String>,
-}
-
-impl TestServer {
- pub async fn start(deterministic: BackgroundExecutor) -> Self {
- static NEXT_LIVE_KIT_SERVER_ID: AtomicUsize = AtomicUsize::new(0);
-
- let use_postgres = env::var("USE_POSTGRES").ok();
- let use_postgres = use_postgres.as_deref();
- let test_db = if use_postgres == Some("true") || use_postgres == Some("1") {
- TestDb::postgres(deterministic.clone())
- } else {
- TestDb::sqlite(deterministic.clone())
- };
- let live_kit_server_id = NEXT_LIVE_KIT_SERVER_ID.fetch_add(1, SeqCst);
- let live_kit_server = live_kit_client::TestServer::create(
- format!("http://livekit.{}.test", live_kit_server_id),
- format!("devkey-{}", live_kit_server_id),
- format!("secret-{}", live_kit_server_id),
- deterministic.clone(),
- )
- .unwrap();
- let app_state = Self::build_app_state(&test_db, &live_kit_server).await;
- let epoch = app_state
- .db
- .create_server(&app_state.config.zed_environment)
- .await
- .unwrap();
- let server = Server::new(
- epoch,
- app_state.clone(),
- Executor::Deterministic(deterministic.clone()),
- );
- server.start().await.unwrap();
- // Advance clock to ensure the server's cleanup task is finished.
- deterministic.advance_clock(CLEANUP_TIMEOUT);
- Self {
- app_state,
- server,
- connection_killers: Default::default(),
- forbid_connections: Default::default(),
- _test_db: test_db,
- test_live_kit_server: live_kit_server,
- }
- }
-
- pub async fn reset(&self) {
- self.app_state.db.reset();
- let epoch = self
- .app_state
- .db
- .create_server(&self.app_state.config.zed_environment)
- .await
- .unwrap();
- self.server.reset(epoch);
- }
-
- pub async fn create_client(&mut self, cx: &mut TestAppContext, name: &str) -> TestClient {
- cx.update(|cx| {
- if cx.has_global::<SettingsStore>() {
- panic!("Same cx used to create two test clients")
- }
- let settings = SettingsStore::test(cx);
- cx.set_global(settings);
- });
-
- let http = FakeHttpClient::with_404_response();
- let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
- {
- user.id
- } else {
- self.app_state
- .db
- .create_user(
- &format!("{name}@example.com"),
- false,
- NewUserParams {
- github_login: name.into(),
- github_user_id: 0,
- },
- )
- .await
- .expect("creating user failed")
- .user_id
- };
- let client_name = name.to_string();
- let mut client = cx.update(|cx| Client::new(http.clone(), cx));
- let server = self.server.clone();
- let db = self.app_state.db.clone();
- let connection_killers = self.connection_killers.clone();
- let forbid_connections = self.forbid_connections.clone();
-
- Arc::get_mut(&mut client)
- .unwrap()
- .set_id(user_id.to_proto())
- .override_authenticate(move |cx| {
- cx.spawn(|_| async move {
- let access_token = "the-token".to_string();
- Ok(Credentials {
- user_id: user_id.to_proto(),
- access_token,
- })
- })
- })
- .override_establish_connection(move |credentials, cx| {
- assert_eq!(credentials.user_id, user_id.0 as u64);
- assert_eq!(credentials.access_token, "the-token");
-
- let server = server.clone();
- let db = db.clone();
- let connection_killers = connection_killers.clone();
- let forbid_connections = forbid_connections.clone();
- let client_name = client_name.clone();
- cx.spawn(move |cx| async move {
- if forbid_connections.load(SeqCst) {
- Err(EstablishConnectionError::other(anyhow!(
- "server is forbidding connections"
- )))
- } else {
- let (client_conn, server_conn, killed) =
- Connection::in_memory(cx.background_executor().clone());
- let (connection_id_tx, connection_id_rx) = oneshot::channel();
- let user = db
- .get_user_by_id(user_id)
- .await
- .expect("retrieving user failed")
- .unwrap();
- cx.background_executor()
- .spawn(server.handle_connection(
- server_conn,
- client_name,
- user,
- Some(connection_id_tx),
- Executor::Deterministic(cx.background_executor().clone()),
- ))
- .detach();
- let connection_id = connection_id_rx.await.unwrap();
- connection_killers
- .lock()
- .insert(connection_id.into(), killed);
- Ok(client_conn)
- }
- })
- });
-
- let fs = FakeFs::new(cx.executor());
- let user_store = cx.new_model(|cx| UserStore::new(client.clone(), cx));
- let workspace_store = cx.new_model(|cx| WorkspaceStore::new(client.clone(), cx));
- let mut language_registry = LanguageRegistry::test();
- language_registry.set_executor(cx.executor());
- let app_state = Arc::new(workspace::AppState {
- client: client.clone(),
- user_store: user_store.clone(),
- workspace_store,
- languages: Arc::new(language_registry),
- fs: fs.clone(),
- build_window_options: |_, _, _| Default::default(),
- node_runtime: FakeNodeRuntime::new(),
- });
-
- cx.update(|cx| {
- theme::init(theme::LoadThemes::JustBase, cx);
- Project::init(&client, cx);
- client::init(&client, cx);
- language::init(cx);
- editor::init_settings(cx);
- workspace::init(app_state.clone(), cx);
- audio::init((), cx);
- call::init(client.clone(), user_store.clone(), cx);
- channel::init(&client, user_store.clone(), cx);
- notifications::init(client.clone(), user_store, cx);
- });
-
- client
- .authenticate_and_connect(false, &cx.to_async())
- .await
- .unwrap();
-
- let client = TestClient {
- app_state,
- username: name.to_string(),
- channel_store: cx.read(ChannelStore::global).clone(),
- notification_store: cx.read(NotificationStore::global).clone(),
- state: Default::default(),
- };
- client.wait_for_current_user(cx).await;
- client
- }
-
- pub fn disconnect_client(&self, peer_id: PeerId) {
- self.connection_killers
- .lock()
- .remove(&peer_id)
- .unwrap()
- .store(true, SeqCst);
- }
-
- pub fn simulate_long_connection_interruption(
- &self,
- peer_id: PeerId,
- deterministic: BackgroundExecutor,
- ) {
- self.forbid_connections();
- self.disconnect_client(peer_id);
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- self.allow_connections();
- deterministic.advance_clock(RECEIVE_TIMEOUT + RECONNECT_TIMEOUT);
- deterministic.run_until_parked();
- }
-
- pub fn forbid_connections(&self) {
- self.forbid_connections.store(true, SeqCst);
- }
-
- pub fn allow_connections(&self) {
- self.forbid_connections.store(false, SeqCst);
- }
-
- pub async fn make_contacts(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
- for ix in 1..clients.len() {
- let (left, right) = clients.split_at_mut(ix);
- let (client_a, cx_a) = left.last_mut().unwrap();
- for (client_b, cx_b) in right {
- client_a
- .app_state
- .user_store
- .update(*cx_a, |store, cx| {
- store.request_contact(client_b.user_id().unwrap(), cx)
- })
- .await
- .unwrap();
- cx_a.executor().run_until_parked();
- client_b
- .app_state
- .user_store
- .update(*cx_b, |store, cx| {
- store.respond_to_contact_request(client_a.user_id().unwrap(), true, cx)
- })
- .await
- .unwrap();
- }
- }
- }
-
- pub async fn make_channel(
- &self,
- channel: &str,
- parent: Option<u64>,
- admin: (&TestClient, &mut TestAppContext),
- members: &mut [(&TestClient, &mut TestAppContext)],
- ) -> u64 {
- let (_, admin_cx) = admin;
- let channel_id = admin_cx
- .read(ChannelStore::global)
- .update(admin_cx, |channel_store, cx| {
- channel_store.create_channel(channel, parent, cx)
- })
- .await
- .unwrap();
-
- for (member_client, member_cx) in members {
- admin_cx
- .read(ChannelStore::global)
- .update(admin_cx, |channel_store, cx| {
- channel_store.invite_member(
- channel_id,
- member_client.user_id().unwrap(),
- ChannelRole::Member,
- cx,
- )
- })
- .await
- .unwrap();
-
- admin_cx.executor().run_until_parked();
-
- member_cx
- .read(ChannelStore::global)
- .update(*member_cx, |channels, cx| {
- channels.respond_to_channel_invite(channel_id, true, cx)
- })
- .await
- .unwrap();
- }
-
- channel_id
- }
-
- pub async fn make_channel_tree(
- &self,
- channels: &[(&str, Option<&str>)],
- creator: (&TestClient, &mut TestAppContext),
- ) -> Vec<u64> {
- let mut observed_channels = HashMap::default();
- let mut result = Vec::new();
- for (channel, parent) in channels {
- let id;
- if let Some(parent) = parent {
- if let Some(parent_id) = observed_channels.get(parent) {
- id = self
- .make_channel(channel, Some(*parent_id), (creator.0, creator.1), &mut [])
- .await;
- } else {
- panic!(
- "Edge {}->{} referenced before {} was created",
- parent, channel, parent
- )
- }
- } else {
- id = self
- .make_channel(channel, None, (creator.0, creator.1), &mut [])
- .await;
- }
-
- observed_channels.insert(channel, id);
- result.push(id);
- }
-
- result
- }
-
- pub async fn create_room(&self, clients: &mut [(&TestClient, &mut TestAppContext)]) {
- self.make_contacts(clients).await;
-
- let (left, right) = clients.split_at_mut(1);
- let (_client_a, cx_a) = &mut left[0];
- let active_call_a = cx_a.read(ActiveCall::global);
-
- for (client_b, cx_b) in right {
- let user_id_b = client_b.current_user_id(*cx_b).to_proto();
- active_call_a
- .update(*cx_a, |call, cx| call.invite(user_id_b, None, cx))
- .await
- .unwrap();
-
- cx_b.executor().run_until_parked();
- let active_call_b = cx_b.read(ActiveCall::global);
- active_call_b
- .update(*cx_b, |call, cx| call.accept_incoming(cx))
- .await
- .unwrap();
- }
- }
-
- pub async fn build_app_state(
- test_db: &TestDb,
- fake_server: &live_kit_client::TestServer,
- ) -> Arc<AppState> {
- Arc::new(AppState {
- db: test_db.db().clone(),
- live_kit_client: Some(Arc::new(fake_server.create_api_client())),
- config: Default::default(),
- })
- }
-}
-
-impl Deref for TestServer {
- type Target = Server;
-
- fn deref(&self) -> &Self::Target {
- &self.server
- }
-}
-
-impl Drop for TestServer {
- fn drop(&mut self) {
- self.server.teardown();
- self.test_live_kit_server.teardown().unwrap();
- }
-}
-
-impl Deref for TestClient {
- type Target = Arc<Client>;
-
- fn deref(&self) -> &Self::Target {
- &self.app_state.client
- }
-}
-
-impl TestClient {
- pub fn fs(&self) -> &FakeFs {
- self.app_state.fs.as_fake()
- }
-
- pub fn channel_store(&self) -> &Model<ChannelStore> {
- &self.channel_store
- }
-
- pub fn notification_store(&self) -> &Model<NotificationStore> {
- &self.notification_store
- }
-
- pub fn user_store(&self) -> &Model<UserStore> {
- &self.app_state.user_store
- }
-
- pub fn language_registry(&self) -> &Arc<LanguageRegistry> {
- &self.app_state.languages
- }
-
- pub fn client(&self) -> &Arc<Client> {
- &self.app_state.client
- }
-
- pub fn current_user_id(&self, cx: &TestAppContext) -> UserId {
- UserId::from_proto(
- self.app_state
- .user_store
- .read_with(cx, |user_store, _| user_store.current_user().unwrap().id),
- )
- }
-
- pub async fn wait_for_current_user(&self, cx: &TestAppContext) {
- let mut authed_user = self
- .app_state
- .user_store
- .read_with(cx, |user_store, _| user_store.watch_current_user());
- while authed_user.next().await.unwrap().is_none() {}
- }
-
- pub async fn clear_contacts(&self, cx: &mut TestAppContext) {
- self.app_state
- .user_store
- .update(cx, |store, _| store.clear_contacts())
- .await;
- }
-
- pub fn local_projects<'a>(&'a self) -> impl Deref<Target = Vec<Model<Project>>> + 'a {
- Ref::map(self.state.borrow(), |state| &state.local_projects)
- }
-
- pub fn remote_projects<'a>(&'a self) -> impl Deref<Target = Vec<Model<Project>>> + 'a {
- Ref::map(self.state.borrow(), |state| &state.remote_projects)
- }
-
- pub fn local_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<Model<Project>>> + 'a {
- RefMut::map(self.state.borrow_mut(), |state| &mut state.local_projects)
- }
-
- pub fn remote_projects_mut<'a>(&'a self) -> impl DerefMut<Target = Vec<Model<Project>>> + 'a {
- RefMut::map(self.state.borrow_mut(), |state| &mut state.remote_projects)
- }
-
- pub fn buffers_for_project<'a>(
- &'a self,
- project: &Model<Project>,
- ) -> impl DerefMut<Target = HashSet<Model<language::Buffer>>> + 'a {
- RefMut::map(self.state.borrow_mut(), |state| {
- state.buffers.entry(project.clone()).or_default()
- })
- }
-
- pub fn buffers<'a>(
- &'a self,
- ) -> impl DerefMut<Target = HashMap<Model<Project>, HashSet<Model<language::Buffer>>>> + 'a
- {
- RefMut::map(self.state.borrow_mut(), |state| &mut state.buffers)
- }
-
- pub fn channel_buffers<'a>(
- &'a self,
- ) -> impl DerefMut<Target = HashSet<Model<ChannelBuffer>>> + 'a {
- RefMut::map(self.state.borrow_mut(), |state| &mut state.channel_buffers)
- }
-
- pub fn summarize_contacts(&self, cx: &TestAppContext) -> ContactsSummary {
- self.app_state
- .user_store
- .read_with(cx, |store, _| ContactsSummary {
- current: store
- .contacts()
- .iter()
- .map(|contact| contact.user.github_login.clone())
- .collect(),
- outgoing_requests: store
- .outgoing_contact_requests()
- .iter()
- .map(|user| user.github_login.clone())
- .collect(),
- incoming_requests: store
- .incoming_contact_requests()
- .iter()
- .map(|user| user.github_login.clone())
- .collect(),
- })
- }
-
- pub async fn build_local_project(
- &self,
- root_path: impl AsRef<Path>,
- cx: &mut TestAppContext,
- ) -> (Model<Project>, WorktreeId) {
- let project = self.build_empty_local_project(cx);
- let (worktree, _) = project
- .update(cx, |p, cx| {
- p.find_or_create_local_worktree(root_path, true, cx)
- })
- .await
- .unwrap();
- worktree
- .read_with(cx, |tree, _| tree.as_local().unwrap().scan_complete())
- .await;
- (project, worktree.read_with(cx, |tree, _| tree.id()))
- }
-
- pub fn build_empty_local_project(&self, cx: &mut TestAppContext) -> Model<Project> {
- cx.update(|cx| {
- Project::local(
- self.client().clone(),
- self.app_state.node_runtime.clone(),
- self.app_state.user_store.clone(),
- self.app_state.languages.clone(),
- self.app_state.fs.clone(),
- cx,
- )
- })
- }
-
- pub async fn build_remote_project(
- &self,
- host_project_id: u64,
- guest_cx: &mut TestAppContext,
- ) -> Model<Project> {
- let active_call = guest_cx.read(ActiveCall::global);
- let room = active_call.read_with(guest_cx, |call, _| call.room().unwrap().clone());
- room.update(guest_cx, |room, cx| {
- room.join_project(
- host_project_id,
- self.app_state.languages.clone(),
- self.app_state.fs.clone(),
- cx,
- )
- })
- .await
- .unwrap()
- }
-
- pub fn build_workspace<'a>(
- &'a self,
- project: &Model<Project>,
- cx: &'a mut TestAppContext,
- ) -> (View<Workspace>, &'a mut VisualTestContext) {
- cx.add_window_view(|cx| Workspace::new(0, project.clone(), self.app_state.clone(), cx))
- }
-}
-
-impl Drop for TestClient {
- fn drop(&mut self) {
- self.app_state.client.teardown();
- }
-}
@@ -36,14 +36,14 @@ feedback = { path = "../feedback" }
fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
notifications = { path = "../notifications" }
rich_text = { path = "../rich_text" }
picker = { path = "../picker" }
project = { path = "../project" }
recent_projects = { path = "../recent_projects" }
rpc = { path = "../rpc" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
feature_flags = { path = "../feature_flags"}
theme = { package = "theme2", path = "../theme2" }
theme_selector = { path = "../theme_selector" }
@@ -73,7 +73,7 @@ gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
notifications = { path = "../notifications", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
@@ -15,7 +15,7 @@ fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
theme = { package = "theme2", path = "../theme2" }
@@ -29,7 +29,7 @@ gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
editor = { path = "../editor", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
go_to_line = { path = "../go_to_line" }
serde_json.workspace = true
workspace = { path = "../workspace", features = ["test-support"] }
@@ -23,7 +23,7 @@ collections = { path = "../collections" }
# context_menu = { path = "../context_menu" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
lsp = { path = "../lsp" }
node_runtime = { path = "../node_runtime"}
@@ -47,5 +47,5 @@ gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -15,7 +15,7 @@ fs = { path = "../fs" }
zed_actions = { path = "../zed_actions"}
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
workspace = {path = "../workspace" }
@@ -16,7 +16,7 @@ ui = { package = "ui2", path = "../ui2" }
language = { path = "../language" }
lsp = { path = "../lsp" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
workspace = {path = "../workspace" }
@@ -38,7 +38,7 @@ multi_buffer = { path = "../multi_buffer" }
project = { path = "../project" }
rpc = { path = "../rpc" }
rich_text = { path = "../rich_text" }
-settings = { package="settings2", path = "../settings2" }
+settings = { path = "../settings" }
snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
text = { package="text2", path = "../text2" }
@@ -79,7 +79,7 @@ lsp = { path = "../lsp", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
multi_buffer = { path = "../multi_buffer", features = ["test-support"] }
@@ -16,10 +16,10 @@ db = { path = "../db" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
project = { path = "../project" }
search = { path = "../search" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
@@ -13,10 +13,10 @@ editor = { path = "../editor" }
collections = { path = "../collections" }
fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
picker = { path = "../picker" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
text = { package = "text2", path = "../text2" }
util = { path = "../util" }
theme = { package = "theme2", path = "../theme2" }
@@ -9,7 +9,7 @@ path = "src/fs.rs"
[dependencies]
collections = { path = "../collections" }
-rope = { package = "rope2", path = "../rope2" }
+rope = { path = "../rope" }
text = { package = "text2", path = "../text2" }
util = { path = "../util" }
sum_tree = { path = "../sum_tree" }
@@ -11,9 +11,9 @@ doctest = false
[dependencies]
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
serde.workspace = true
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
text = { package = "text2", path = "../text2" }
workspace = { path = "../workspace" }
postage.workspace = true
@@ -13,8 +13,7 @@ editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
-settings2 = { path = "../settings2" }
-
+settings = { path = "../settings" }
anyhow.workspace = true
chrono = "0.4"
dirs = "4.0"
@@ -3,7 +3,7 @@ use chrono::{Datelike, Local, NaiveTime, Timelike};
use gpui::{actions, AppContext, ViewContext};
use schemars::JsonSchema;
use serde::{Deserialize, Serialize};
-use settings2::Settings;
+use settings::Settings;
use std::{
fs::OpenOptions,
path::{Path, PathBuf},
@@ -36,7 +36,7 @@ pub enum HourFormat {
Hour24,
}
-impl settings2::Settings for JournalSettings {
+impl settings::Settings for JournalSettings {
const KEY: Option<&'static str> = Some("journal");
type FileContent = Self;
@@ -29,7 +29,7 @@ git = { path = "../git" }
gpui = { package = "gpui2", path = "../gpui2" }
lsp = { path = "../lsp" }
rpc = { path = "../rpc" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
sum_tree = { path = "../sum_tree" }
text = { package = "text2", path = "../text2" }
theme = { package = "theme2", path = "../theme2" }
@@ -66,7 +66,7 @@ collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
text = { package = "text2", path = "../text2", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
@@ -17,7 +17,7 @@ picker = { path = "../picker" }
project = { path = "../project" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
util = { path = "../util" }
workspace = { path = "../workspace" }
anyhow.workspace = true
@@ -11,7 +11,7 @@ doctest = false
[dependencies]
collections = { path = "../collections" }
editor = { path = "../editor" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
language = { path = "../language" }
project = { path = "../project" }
@@ -9,4 +9,5 @@ path = "src/menu.rs"
doctest = false
[dependencies]
-gpui = { path = "../gpui" }
+gpui = { package = "gpui2", path = "../gpui2" }
+serde = { workspace = true }
@@ -1,4 +1,15 @@
-gpui::actions!(
+use gpui::actions;
+
+// If the zed binary doesn't use anything in this crate, it will be optimized away
+// and the actions won't initialize. So we just provide an empty initialization function
+// to be called from main.
+//
+// These may provide relevant context:
+// https://github.com/rust-lang/rust/issues/47384
+// https://github.com/mmastrac/rust-ctor/issues/280
+pub fn init() {}
+
+actions!(
menu,
[
Cancel,
@@ -1,13 +0,0 @@
-[package]
-name = "menu2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/menu2.rs"
-doctest = false
-
-[dependencies]
-gpui = { package = "gpui2", path = "../gpui2" }
-serde = { workspace = true }
@@ -1,24 +0,0 @@
-use gpui::actions;
-
-// If the zed binary doesn't use anything in this crate, it will be optimized away
-// and the actions won't initialize. So we just provide an empty initialization function
-// to be called from main.
-//
-// These may provide relevant context:
-// https://github.com/rust-lang/rust/issues/47384
-// https://github.com/mmastrac/rust-ctor/issues/280
-pub fn init() {}
-
-actions!(
- menu,
- [
- Cancel,
- Confirm,
- SecondaryConfirm,
- SelectPrev,
- SelectNext,
- SelectFirst,
- SelectLast,
- ShowContextMenu
- ]
-);
@@ -28,7 +28,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
lsp = { path = "../lsp" }
rich_text = { path = "../rich_text" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
snippet = { path = "../snippet" }
sum_tree = { path = "../sum_tree" }
text = { package = "text2", path = "../text2" }
@@ -66,7 +66,7 @@ lsp = { path = "../lsp", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
ctor.workspace = true
env_logger.workspace = true
@@ -25,7 +25,7 @@ db = { path = "../db" }
feature_flags = { path = "../feature_flags" }
gpui = { package = "gpui2", path = "../gpui2" }
rpc = { path = "../rpc" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
sum_tree = { path = "../sum_tree" }
text = { package = "text2", path = "../text2" }
util = { path = "../util" }
@@ -38,5 +38,5 @@ client = { path = "../client", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
@@ -15,7 +15,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
ui = { package = "ui2", path = "../ui2" }
language = { path = "../language" }
picker = { path = "../picker" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
text = { package = "text2", path = "../text2" }
theme = { package = "theme2", path = "../theme2" }
workspace = { path = "../workspace" }
@@ -12,8 +12,8 @@ doctest = false
editor = { path = "../editor" }
ui = { package = "ui2", path = "../ui2" }
gpui = { package = "gpui2", path = "../gpui2" }
-menu = { package = "menu2", path = "../menu2" }
-settings = { package = "settings2", path = "../settings2" }
+menu = { path = "../menu" }
+settings = { path = "../settings" }
util = { path = "../util" }
theme = { package = "theme2", path = "../theme2" }
workspace = { path = "../workspace"}
@@ -36,9 +36,9 @@ lsp = { path = "../lsp" }
node_runtime = { path = "../node_runtime" }
prettier = { path = "../prettier" }
rpc = { path = "../rpc" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
sum_tree = { path = "../sum_tree" }
-terminal = { package = "terminal2", path = "../terminal2" }
+terminal = { path = "../terminal" }
util = { path = "../util" }
aho-corasick = "1.1"
@@ -76,7 +76,7 @@ fs = { path = "../fs", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
prettier = { path = "../prettier", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
@@ -13,10 +13,10 @@ collections = { path = "../collections" }
db = { path = "../db" }
editor = { path = "../editor" }
gpui = { path = "../gpui2", package = "gpui2" }
-menu = { path = "../menu2", package = "menu2" }
+menu = { path = "../menu" }
project = { path = "../project" }
search = { path = "../search" }
-settings = { path = "../settings2", package = "settings2" }
+settings = { path = "../settings" }
theme = { path = "../theme2", package = "theme2" }
ui = { path = "../ui2", package = "ui2" }
util = { path = "../util" }
@@ -15,7 +15,7 @@ gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
project = { path = "../project" }
text = { package = "text2", path = "../text2" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
workspace = { path = "../workspace" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
@@ -28,7 +28,7 @@ smol.workspace = true
[dev-dependencies]
futures.workspace = true
editor = { path = "../editor", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
@@ -14,7 +14,7 @@ fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
picker = { path = "../picker" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
text = { package = "text2", path = "../text2" }
util = { path = "../util"}
theme = { package = "theme2", path = "../theme2" }
@@ -18,4 +18,4 @@ util = { path = "../util" }
[dev-dependencies]
rand.workspace = true
util = { path = "../util", features = ["test-support"] }
-gpui = { path = "../gpui", features = ["test-support"] }
+gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
@@ -906,7 +906,7 @@ impl Chunk {
fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
let mut code_units = self.0.encode_utf16();
- let mut offset = code_units.by_ref().take(target.0 as usize).count();
+ let mut offset = code_units.by_ref().take(target.0).count();
if char::decode_utf16(code_units).next().transpose().is_err() {
match bias {
Bias::Left => offset -= 1,
@@ -1,21 +0,0 @@
-[package]
-name = "rope2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/rope2.rs"
-
-[dependencies]
-bromberg_sl2 = { git = "https://github.com/zed-industries/bromberg_sl2", rev = "950bc5482c216c395049ae33ae4501e08975f17f" }
-smallvec.workspace = true
-sum_tree = { path = "../sum_tree" }
-arrayvec = "0.7.1"
-log.workspace = true
-util = { path = "../util" }
-
-[dev-dependencies]
-rand.workspace = true
-util = { path = "../util", features = ["test-support"] }
-gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
@@ -1,50 +0,0 @@
-use std::ops::{Add, AddAssign, Sub};
-
-#[derive(Copy, Clone, Debug, Default, Eq, PartialEq, Ord, PartialOrd)]
-pub struct OffsetUtf16(pub usize);
-
-impl<'a> Add<&'a Self> for OffsetUtf16 {
- type Output = Self;
-
- fn add(self, other: &'a Self) -> Self::Output {
- Self(self.0 + other.0)
- }
-}
-
-impl Add for OffsetUtf16 {
- type Output = Self;
-
- fn add(self, other: Self) -> Self::Output {
- Self(self.0 + other.0)
- }
-}
-
-impl<'a> Sub<&'a Self> for OffsetUtf16 {
- type Output = Self;
-
- fn sub(self, other: &'a Self) -> Self::Output {
- debug_assert!(*other <= self);
- Self(self.0 - other.0)
- }
-}
-
-impl Sub for OffsetUtf16 {
- type Output = OffsetUtf16;
-
- fn sub(self, other: Self) -> Self::Output {
- debug_assert!(other <= self);
- Self(self.0 - other.0)
- }
-}
-
-impl<'a> AddAssign<&'a Self> for OffsetUtf16 {
- fn add_assign(&mut self, other: &'a Self) {
- self.0 += other.0;
- }
-}
-
-impl AddAssign<Self> for OffsetUtf16 {
- fn add_assign(&mut self, other: Self) {
- self.0 += other.0;
- }
-}
@@ -1,128 +0,0 @@
-use std::{
- cmp::Ordering,
- ops::{Add, AddAssign, Sub},
-};
-
-#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)]
-pub struct Point {
- pub row: u32,
- pub column: u32,
-}
-
-impl Point {
- pub const MAX: Self = Self {
- row: u32::MAX,
- column: u32::MAX,
- };
-
- pub fn new(row: u32, column: u32) -> Self {
- Point { row, column }
- }
-
- pub fn zero() -> Self {
- Point::new(0, 0)
- }
-
- pub fn parse_str(s: &str) -> Self {
- let mut point = Self::zero();
- for (row, line) in s.split('\n').enumerate() {
- point.row = row as u32;
- point.column = line.len() as u32;
- }
- point
- }
-
- pub fn is_zero(&self) -> bool {
- self.row == 0 && self.column == 0
- }
-
- pub fn saturating_sub(self, other: Self) -> Self {
- if self < other {
- Self::zero()
- } else {
- self - other
- }
- }
-}
-
-impl<'a> Add<&'a Self> for Point {
- type Output = Point;
-
- fn add(self, other: &'a Self) -> Self::Output {
- self + *other
- }
-}
-
-impl Add for Point {
- type Output = Point;
-
- fn add(self, other: Self) -> Self::Output {
- if other.row == 0 {
- Point::new(self.row, self.column + other.column)
- } else {
- Point::new(self.row + other.row, other.column)
- }
- }
-}
-
-impl<'a> Sub<&'a Self> for Point {
- type Output = Point;
-
- fn sub(self, other: &'a Self) -> Self::Output {
- self - *other
- }
-}
-
-impl Sub for Point {
- type Output = Point;
-
- fn sub(self, other: Self) -> Self::Output {
- debug_assert!(other <= self);
-
- if self.row == other.row {
- Point::new(0, self.column - other.column)
- } else {
- Point::new(self.row - other.row, self.column)
- }
- }
-}
-
-impl<'a> AddAssign<&'a Self> for Point {
- fn add_assign(&mut self, other: &'a Self) {
- *self += *other;
- }
-}
-
-impl AddAssign<Self> for Point {
- fn add_assign(&mut self, other: Self) {
- if other.row == 0 {
- self.column += other.column;
- } else {
- self.row += other.row;
- self.column = other.column;
- }
- }
-}
-
-impl PartialOrd for Point {
- fn partial_cmp(&self, other: &Point) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for Point {
- #[cfg(target_pointer_width = "64")]
- fn cmp(&self, other: &Point) -> Ordering {
- let a = (self.row as usize) << 32 | self.column as usize;
- let b = (other.row as usize) << 32 | other.column as usize;
- a.cmp(&b)
- }
-
- #[cfg(target_pointer_width = "32")]
- fn cmp(&self, other: &Point) -> Ordering {
- match self.row.cmp(&other.row) {
- Ordering::Equal => self.column.cmp(&other.column),
- comparison @ _ => comparison,
- }
- }
-}
@@ -1,119 +0,0 @@
-use std::{
- cmp::Ordering,
- ops::{Add, AddAssign, Sub},
-};
-
-#[derive(Clone, Copy, Default, Eq, PartialEq, Debug, Hash)]
-pub struct PointUtf16 {
- pub row: u32,
- pub column: u32,
-}
-
-impl PointUtf16 {
- pub const MAX: Self = Self {
- row: u32::MAX,
- column: u32::MAX,
- };
-
- pub fn new(row: u32, column: u32) -> Self {
- PointUtf16 { row, column }
- }
-
- pub fn zero() -> Self {
- PointUtf16::new(0, 0)
- }
-
- pub fn is_zero(&self) -> bool {
- self.row == 0 && self.column == 0
- }
-
- pub fn saturating_sub(self, other: Self) -> Self {
- if self < other {
- Self::zero()
- } else {
- self - other
- }
- }
-}
-
-impl<'a> Add<&'a Self> for PointUtf16 {
- type Output = PointUtf16;
-
- fn add(self, other: &'a Self) -> Self::Output {
- self + *other
- }
-}
-
-impl Add for PointUtf16 {
- type Output = PointUtf16;
-
- fn add(self, other: Self) -> Self::Output {
- if other.row == 0 {
- PointUtf16::new(self.row, self.column + other.column)
- } else {
- PointUtf16::new(self.row + other.row, other.column)
- }
- }
-}
-
-impl<'a> Sub<&'a Self> for PointUtf16 {
- type Output = PointUtf16;
-
- fn sub(self, other: &'a Self) -> Self::Output {
- self - *other
- }
-}
-
-impl Sub for PointUtf16 {
- type Output = PointUtf16;
-
- fn sub(self, other: Self) -> Self::Output {
- debug_assert!(other <= self);
-
- if self.row == other.row {
- PointUtf16::new(0, self.column - other.column)
- } else {
- PointUtf16::new(self.row - other.row, self.column)
- }
- }
-}
-
-impl<'a> AddAssign<&'a Self> for PointUtf16 {
- fn add_assign(&mut self, other: &'a Self) {
- *self += *other;
- }
-}
-
-impl AddAssign<Self> for PointUtf16 {
- fn add_assign(&mut self, other: Self) {
- if other.row == 0 {
- self.column += other.column;
- } else {
- self.row += other.row;
- self.column = other.column;
- }
- }
-}
-
-impl PartialOrd for PointUtf16 {
- fn partial_cmp(&self, other: &PointUtf16) -> Option<Ordering> {
- Some(self.cmp(other))
- }
-}
-
-impl Ord for PointUtf16 {
- #[cfg(target_pointer_width = "64")]
- fn cmp(&self, other: &PointUtf16) -> Ordering {
- let a = (self.row as usize) << 32 | self.column as usize;
- let b = (other.row as usize) << 32 | other.column as usize;
- a.cmp(&b)
- }
-
- #[cfg(target_pointer_width = "32")]
- fn cmp(&self, other: &PointUtf16) -> Ordering {
- match self.row.cmp(&other.row) {
- Ordering::Equal => self.column.cmp(&other.column),
- comparison @ _ => comparison,
- }
- }
-}
@@ -1,1437 +0,0 @@
-mod offset_utf16;
-mod point;
-mod point_utf16;
-mod unclipped;
-
-use arrayvec::ArrayString;
-use bromberg_sl2::HashMatrix;
-use smallvec::SmallVec;
-use std::{
- cmp, fmt, io, mem,
- ops::{AddAssign, Range},
- str,
-};
-use sum_tree::{Bias, Dimension, SumTree};
-use util::debug_panic;
-
-pub use offset_utf16::OffsetUtf16;
-pub use point::Point;
-pub use point_utf16::PointUtf16;
-pub use unclipped::Unclipped;
-
-#[cfg(test)]
-const CHUNK_BASE: usize = 6;
-
-#[cfg(not(test))]
-const CHUNK_BASE: usize = 16;
-
-/// Type alias to [HashMatrix], an implementation of a homomorphic hash function. Two [Rope] instances
-/// containing the same text will produce the same fingerprint. This hash function is special in that
-/// it allows us to hash individual chunks and aggregate them up the [Rope]'s tree, with the resulting
-/// hash being equivalent to hashing all the text contained in the [Rope] at once.
-pub type RopeFingerprint = HashMatrix;
-
-#[derive(Clone, Default)]
-pub struct Rope {
- chunks: SumTree<Chunk>,
-}
-
-impl Rope {
- pub fn new() -> Self {
- Self::default()
- }
-
- pub fn text_fingerprint(text: &str) -> RopeFingerprint {
- bromberg_sl2::hash_strict(text.as_bytes())
- }
-
- pub fn append(&mut self, rope: Rope) {
- let mut chunks = rope.chunks.cursor::<()>();
- chunks.next(&());
- if let Some(chunk) = chunks.item() {
- if self.chunks.last().map_or(false, |c| c.0.len() < CHUNK_BASE)
- || chunk.0.len() < CHUNK_BASE
- {
- self.push(&chunk.0);
- chunks.next(&());
- }
- }
-
- self.chunks.append(chunks.suffix(&()), &());
- self.check_invariants();
- }
-
- pub fn replace(&mut self, range: Range<usize>, text: &str) {
- let mut new_rope = Rope::new();
- let mut cursor = self.cursor(0);
- new_rope.append(cursor.slice(range.start));
- cursor.seek_forward(range.end);
- new_rope.push(text);
- new_rope.append(cursor.suffix());
- *self = new_rope;
- }
-
- pub fn slice(&self, range: Range<usize>) -> Rope {
- let mut cursor = self.cursor(0);
- cursor.seek_forward(range.start);
- cursor.slice(range.end)
- }
-
- pub fn slice_rows(&self, range: Range<u32>) -> Rope {
- //This would be more efficient with a forward advance after the first, but it's fine
- let start = self.point_to_offset(Point::new(range.start, 0));
- let end = self.point_to_offset(Point::new(range.end, 0));
- self.slice(start..end)
- }
-
- pub fn push(&mut self, text: &str) {
- let mut new_chunks = SmallVec::<[_; 16]>::new();
- let mut new_chunk = ArrayString::new();
- for ch in text.chars() {
- if new_chunk.len() + ch.len_utf8() > 2 * CHUNK_BASE {
- new_chunks.push(Chunk(new_chunk));
- new_chunk = ArrayString::new();
- }
-
- new_chunk.push(ch);
- }
- if !new_chunk.is_empty() {
- new_chunks.push(Chunk(new_chunk));
- }
-
- let mut new_chunks = new_chunks.into_iter();
- let mut first_new_chunk = new_chunks.next();
- self.chunks.update_last(
- |last_chunk| {
- if let Some(first_new_chunk_ref) = first_new_chunk.as_mut() {
- if last_chunk.0.len() + first_new_chunk_ref.0.len() <= 2 * CHUNK_BASE {
- last_chunk.0.push_str(&first_new_chunk.take().unwrap().0);
- } else {
- let mut text = ArrayString::<{ 4 * CHUNK_BASE }>::new();
- text.push_str(&last_chunk.0);
- text.push_str(&first_new_chunk_ref.0);
- let (left, right) = text.split_at(find_split_ix(&text));
- last_chunk.0.clear();
- last_chunk.0.push_str(left);
- first_new_chunk_ref.0.clear();
- first_new_chunk_ref.0.push_str(right);
- }
- }
- },
- &(),
- );
-
- self.chunks
- .extend(first_new_chunk.into_iter().chain(new_chunks), &());
- self.check_invariants();
- }
-
- pub fn push_front(&mut self, text: &str) {
- let suffix = mem::replace(self, Rope::from(text));
- self.append(suffix);
- }
-
- fn check_invariants(&self) {
- #[cfg(test)]
- {
- // Ensure all chunks except maybe the last one are not underflowing.
- // Allow some wiggle room for multibyte characters at chunk boundaries.
- let mut chunks = self.chunks.cursor::<()>().peekable();
- while let Some(chunk) = chunks.next() {
- if chunks.peek().is_some() {
- assert!(chunk.0.len() + 3 >= CHUNK_BASE);
- }
- }
- }
- }
-
- pub fn summary(&self) -> TextSummary {
- self.chunks.summary().text.clone()
- }
-
- pub fn len(&self) -> usize {
- self.chunks.extent(&())
- }
-
- pub fn is_empty(&self) -> bool {
- self.len() == 0
- }
-
- pub fn max_point(&self) -> Point {
- self.chunks.extent(&())
- }
-
- pub fn max_point_utf16(&self) -> PointUtf16 {
- self.chunks.extent(&())
- }
-
- pub fn cursor(&self, offset: usize) -> Cursor {
- Cursor::new(self, offset)
- }
-
- pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
- self.chars_at(0)
- }
-
- pub fn chars_at(&self, start: usize) -> impl Iterator<Item = char> + '_ {
- self.chunks_in_range(start..self.len()).flat_map(str::chars)
- }
-
- pub fn reversed_chars_at(&self, start: usize) -> impl Iterator<Item = char> + '_ {
- self.reversed_chunks_in_range(0..start)
- .flat_map(|chunk| chunk.chars().rev())
- }
-
- pub fn bytes_in_range(&self, range: Range<usize>) -> Bytes {
- Bytes::new(self, range, false)
- }
-
- pub fn reversed_bytes_in_range(&self, range: Range<usize>) -> Bytes {
- Bytes::new(self, range, true)
- }
-
- pub fn chunks(&self) -> Chunks {
- self.chunks_in_range(0..self.len())
- }
-
- pub fn chunks_in_range(&self, range: Range<usize>) -> Chunks {
- Chunks::new(self, range, false)
- }
-
- pub fn reversed_chunks_in_range(&self, range: Range<usize>) -> Chunks {
- Chunks::new(self, range, true)
- }
-
- pub fn offset_to_offset_utf16(&self, offset: usize) -> OffsetUtf16 {
- if offset >= self.summary().len {
- return self.summary().len_utf16;
- }
- let mut cursor = self.chunks.cursor::<(usize, OffsetUtf16)>();
- cursor.seek(&offset, Bias::Left, &());
- let overshoot = offset - cursor.start().0;
- cursor.start().1
- + cursor.item().map_or(Default::default(), |chunk| {
- chunk.offset_to_offset_utf16(overshoot)
- })
- }
-
- pub fn offset_utf16_to_offset(&self, offset: OffsetUtf16) -> usize {
- if offset >= self.summary().len_utf16 {
- return self.summary().len;
- }
- let mut cursor = self.chunks.cursor::<(OffsetUtf16, usize)>();
- cursor.seek(&offset, Bias::Left, &());
- let overshoot = offset - cursor.start().0;
- cursor.start().1
- + cursor.item().map_or(Default::default(), |chunk| {
- chunk.offset_utf16_to_offset(overshoot)
- })
- }
-
- pub fn offset_to_point(&self, offset: usize) -> Point {
- if offset >= self.summary().len {
- return self.summary().lines;
- }
- let mut cursor = self.chunks.cursor::<(usize, Point)>();
- cursor.seek(&offset, Bias::Left, &());
- let overshoot = offset - cursor.start().0;
- cursor.start().1
- + cursor
- .item()
- .map_or(Point::zero(), |chunk| chunk.offset_to_point(overshoot))
- }
-
- pub fn offset_to_point_utf16(&self, offset: usize) -> PointUtf16 {
- if offset >= self.summary().len {
- return self.summary().lines_utf16();
- }
- let mut cursor = self.chunks.cursor::<(usize, PointUtf16)>();
- cursor.seek(&offset, Bias::Left, &());
- let overshoot = offset - cursor.start().0;
- cursor.start().1
- + cursor.item().map_or(PointUtf16::zero(), |chunk| {
- chunk.offset_to_point_utf16(overshoot)
- })
- }
-
- pub fn point_to_point_utf16(&self, point: Point) -> PointUtf16 {
- if point >= self.summary().lines {
- return self.summary().lines_utf16();
- }
- let mut cursor = self.chunks.cursor::<(Point, PointUtf16)>();
- cursor.seek(&point, Bias::Left, &());
- let overshoot = point - cursor.start().0;
- cursor.start().1
- + cursor.item().map_or(PointUtf16::zero(), |chunk| {
- chunk.point_to_point_utf16(overshoot)
- })
- }
-
- pub fn point_to_offset(&self, point: Point) -> usize {
- if point >= self.summary().lines {
- return self.summary().len;
- }
- let mut cursor = self.chunks.cursor::<(Point, usize)>();
- cursor.seek(&point, Bias::Left, &());
- let overshoot = point - cursor.start().0;
- cursor.start().1
- + cursor
- .item()
- .map_or(0, |chunk| chunk.point_to_offset(overshoot))
- }
-
- pub fn point_utf16_to_offset(&self, point: PointUtf16) -> usize {
- self.point_utf16_to_offset_impl(point, false)
- }
-
- pub fn unclipped_point_utf16_to_offset(&self, point: Unclipped<PointUtf16>) -> usize {
- self.point_utf16_to_offset_impl(point.0, true)
- }
-
- fn point_utf16_to_offset_impl(&self, point: PointUtf16, clip: bool) -> usize {
- if point >= self.summary().lines_utf16() {
- return self.summary().len;
- }
- let mut cursor = self.chunks.cursor::<(PointUtf16, usize)>();
- cursor.seek(&point, Bias::Left, &());
- let overshoot = point - cursor.start().0;
- cursor.start().1
- + cursor
- .item()
- .map_or(0, |chunk| chunk.point_utf16_to_offset(overshoot, clip))
- }
-
- pub fn unclipped_point_utf16_to_point(&self, point: Unclipped<PointUtf16>) -> Point {
- if point.0 >= self.summary().lines_utf16() {
- return self.summary().lines;
- }
- let mut cursor = self.chunks.cursor::<(PointUtf16, Point)>();
- cursor.seek(&point.0, Bias::Left, &());
- let overshoot = Unclipped(point.0 - cursor.start().0);
- cursor.start().1
- + cursor.item().map_or(Point::zero(), |chunk| {
- chunk.unclipped_point_utf16_to_point(overshoot)
- })
- }
-
- pub fn clip_offset(&self, mut offset: usize, bias: Bias) -> usize {
- let mut cursor = self.chunks.cursor::<usize>();
- cursor.seek(&offset, Bias::Left, &());
- if let Some(chunk) = cursor.item() {
- let mut ix = offset - cursor.start();
- while !chunk.0.is_char_boundary(ix) {
- match bias {
- Bias::Left => {
- ix -= 1;
- offset -= 1;
- }
- Bias::Right => {
- ix += 1;
- offset += 1;
- }
- }
- }
- offset
- } else {
- self.summary().len
- }
- }
-
- pub fn clip_offset_utf16(&self, offset: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
- let mut cursor = self.chunks.cursor::<OffsetUtf16>();
- cursor.seek(&offset, Bias::Right, &());
- if let Some(chunk) = cursor.item() {
- let overshoot = offset - cursor.start();
- *cursor.start() + chunk.clip_offset_utf16(overshoot, bias)
- } else {
- self.summary().len_utf16
- }
- }
-
- pub fn clip_point(&self, point: Point, bias: Bias) -> Point {
- let mut cursor = self.chunks.cursor::<Point>();
- cursor.seek(&point, Bias::Right, &());
- if let Some(chunk) = cursor.item() {
- let overshoot = point - cursor.start();
- *cursor.start() + chunk.clip_point(overshoot, bias)
- } else {
- self.summary().lines
- }
- }
-
- pub fn clip_point_utf16(&self, point: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
- let mut cursor = self.chunks.cursor::<PointUtf16>();
- cursor.seek(&point.0, Bias::Right, &());
- if let Some(chunk) = cursor.item() {
- let overshoot = Unclipped(point.0 - cursor.start());
- *cursor.start() + chunk.clip_point_utf16(overshoot, bias)
- } else {
- self.summary().lines_utf16()
- }
- }
-
- pub fn line_len(&self, row: u32) -> u32 {
- self.clip_point(Point::new(row, u32::MAX), Bias::Left)
- .column
- }
-
- pub fn fingerprint(&self) -> RopeFingerprint {
- self.chunks.summary().fingerprint
- }
-}
-
-impl<'a> From<&'a str> for Rope {
- fn from(text: &'a str) -> Self {
- let mut rope = Self::new();
- rope.push(text);
- rope
- }
-}
-
-impl<'a> FromIterator<&'a str> for Rope {
- fn from_iter<T: IntoIterator<Item = &'a str>>(iter: T) -> Self {
- let mut rope = Rope::new();
- for chunk in iter {
- rope.push(chunk);
- }
- rope
- }
-}
-
-impl From<String> for Rope {
- fn from(text: String) -> Self {
- Rope::from(text.as_str())
- }
-}
-
-impl fmt::Display for Rope {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- for chunk in self.chunks() {
- write!(f, "{}", chunk)?;
- }
- Ok(())
- }
-}
-
-impl fmt::Debug for Rope {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- use std::fmt::Write as _;
-
- write!(f, "\"")?;
- let mut format_string = String::new();
- for chunk in self.chunks() {
- write!(&mut format_string, "{:?}", chunk)?;
- write!(f, "{}", &format_string[1..format_string.len() - 1])?;
- format_string.clear();
- }
- write!(f, "\"")?;
- Ok(())
- }
-}
-
-pub struct Cursor<'a> {
- rope: &'a Rope,
- chunks: sum_tree::Cursor<'a, Chunk, usize>,
- offset: usize,
-}
-
-impl<'a> Cursor<'a> {
- pub fn new(rope: &'a Rope, offset: usize) -> Self {
- let mut chunks = rope.chunks.cursor();
- chunks.seek(&offset, Bias::Right, &());
- Self {
- rope,
- chunks,
- offset,
- }
- }
-
- pub fn seek_forward(&mut self, end_offset: usize) {
- debug_assert!(end_offset >= self.offset);
-
- self.chunks.seek_forward(&end_offset, Bias::Right, &());
- self.offset = end_offset;
- }
-
- pub fn slice(&mut self, end_offset: usize) -> Rope {
- debug_assert!(
- end_offset >= self.offset,
- "cannot slice backwards from {} to {}",
- self.offset,
- end_offset
- );
-
- let mut slice = Rope::new();
- if let Some(start_chunk) = self.chunks.item() {
- let start_ix = self.offset - self.chunks.start();
- let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
- slice.push(&start_chunk.0[start_ix..end_ix]);
- }
-
- if end_offset > self.chunks.end(&()) {
- self.chunks.next(&());
- slice.append(Rope {
- chunks: self.chunks.slice(&end_offset, Bias::Right, &()),
- });
- if let Some(end_chunk) = self.chunks.item() {
- let end_ix = end_offset - self.chunks.start();
- slice.push(&end_chunk.0[..end_ix]);
- }
- }
-
- self.offset = end_offset;
- slice
- }
-
- pub fn summary<D: TextDimension>(&mut self, end_offset: usize) -> D {
- debug_assert!(end_offset >= self.offset);
-
- let mut summary = D::default();
- if let Some(start_chunk) = self.chunks.item() {
- let start_ix = self.offset - self.chunks.start();
- let end_ix = cmp::min(end_offset, self.chunks.end(&())) - self.chunks.start();
- summary.add_assign(&D::from_text_summary(&TextSummary::from(
- &start_chunk.0[start_ix..end_ix],
- )));
- }
-
- if end_offset > self.chunks.end(&()) {
- self.chunks.next(&());
- summary.add_assign(&self.chunks.summary(&end_offset, Bias::Right, &()));
- if let Some(end_chunk) = self.chunks.item() {
- let end_ix = end_offset - self.chunks.start();
- summary.add_assign(&D::from_text_summary(&TextSummary::from(
- &end_chunk.0[..end_ix],
- )));
- }
- }
-
- self.offset = end_offset;
- summary
- }
-
- pub fn suffix(mut self) -> Rope {
- self.slice(self.rope.chunks.extent(&()))
- }
-
- pub fn offset(&self) -> usize {
- self.offset
- }
-}
-
-pub struct Chunks<'a> {
- chunks: sum_tree::Cursor<'a, Chunk, usize>,
- range: Range<usize>,
- reversed: bool,
-}
-
-impl<'a> Chunks<'a> {
- pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
- let mut chunks = rope.chunks.cursor();
- if reversed {
- chunks.seek(&range.end, Bias::Left, &());
- } else {
- chunks.seek(&range.start, Bias::Right, &());
- }
- Self {
- chunks,
- range,
- reversed,
- }
- }
-
- pub fn offset(&self) -> usize {
- if self.reversed {
- self.range.end.min(self.chunks.end(&()))
- } else {
- self.range.start.max(*self.chunks.start())
- }
- }
-
- pub fn seek(&mut self, offset: usize) {
- let bias = if self.reversed {
- Bias::Left
- } else {
- Bias::Right
- };
-
- if offset >= self.chunks.end(&()) {
- self.chunks.seek_forward(&offset, bias, &());
- } else {
- self.chunks.seek(&offset, bias, &());
- }
-
- if self.reversed {
- self.range.end = offset;
- } else {
- self.range.start = offset;
- }
- }
-
- pub fn peek(&self) -> Option<&'a str> {
- let chunk = self.chunks.item()?;
- if self.reversed && self.range.start >= self.chunks.end(&()) {
- return None;
- }
- let chunk_start = *self.chunks.start();
- if self.range.end <= chunk_start {
- return None;
- }
-
- let start = self.range.start.saturating_sub(chunk_start);
- let end = self.range.end - chunk_start;
- Some(&chunk.0[start..chunk.0.len().min(end)])
- }
-}
-
-impl<'a> Iterator for Chunks<'a> {
- type Item = &'a str;
-
- fn next(&mut self) -> Option<Self::Item> {
- let result = self.peek();
- if result.is_some() {
- if self.reversed {
- self.chunks.prev(&());
- } else {
- self.chunks.next(&());
- }
- }
- result
- }
-}
-
-pub struct Bytes<'a> {
- chunks: sum_tree::Cursor<'a, Chunk, usize>,
- range: Range<usize>,
- reversed: bool,
-}
-
-impl<'a> Bytes<'a> {
- pub fn new(rope: &'a Rope, range: Range<usize>, reversed: bool) -> Self {
- let mut chunks = rope.chunks.cursor();
- if reversed {
- chunks.seek(&range.end, Bias::Left, &());
- } else {
- chunks.seek(&range.start, Bias::Right, &());
- }
- Self {
- chunks,
- range,
- reversed,
- }
- }
-
- pub fn peek(&self) -> Option<&'a [u8]> {
- let chunk = self.chunks.item()?;
- if self.reversed && self.range.start >= self.chunks.end(&()) {
- return None;
- }
- let chunk_start = *self.chunks.start();
- if self.range.end <= chunk_start {
- return None;
- }
- let start = self.range.start.saturating_sub(chunk_start);
- let end = self.range.end - chunk_start;
- Some(&chunk.0.as_bytes()[start..chunk.0.len().min(end)])
- }
-}
-
-impl<'a> Iterator for Bytes<'a> {
- type Item = &'a [u8];
-
- fn next(&mut self) -> Option<Self::Item> {
- let result = self.peek();
- if result.is_some() {
- if self.reversed {
- self.chunks.prev(&());
- } else {
- self.chunks.next(&());
- }
- }
- result
- }
-}
-
-impl<'a> io::Read for Bytes<'a> {
- fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
- if let Some(chunk) = self.peek() {
- let len = cmp::min(buf.len(), chunk.len());
- if self.reversed {
- buf[..len].copy_from_slice(&chunk[chunk.len() - len..]);
- buf[..len].reverse();
- self.range.end -= len;
- } else {
- buf[..len].copy_from_slice(&chunk[..len]);
- self.range.start += len;
- }
-
- if len == chunk.len() {
- if self.reversed {
- self.chunks.prev(&());
- } else {
- self.chunks.next(&());
- }
- }
- Ok(len)
- } else {
- Ok(0)
- }
- }
-}
-
-#[derive(Clone, Debug, Default)]
-struct Chunk(ArrayString<{ 2 * CHUNK_BASE }>);
-
-impl Chunk {
- fn offset_to_offset_utf16(&self, target: usize) -> OffsetUtf16 {
- let mut offset = 0;
- let mut offset_utf16 = OffsetUtf16(0);
- for ch in self.0.chars() {
- if offset >= target {
- break;
- }
-
- offset += ch.len_utf8();
- offset_utf16.0 += ch.len_utf16();
- }
- offset_utf16
- }
-
- fn offset_utf16_to_offset(&self, target: OffsetUtf16) -> usize {
- let mut offset_utf16 = OffsetUtf16(0);
- let mut offset = 0;
- for ch in self.0.chars() {
- if offset_utf16 >= target {
- break;
- }
-
- offset += ch.len_utf8();
- offset_utf16.0 += ch.len_utf16();
- }
- offset
- }
-
- fn offset_to_point(&self, target: usize) -> Point {
- let mut offset = 0;
- let mut point = Point::new(0, 0);
- for ch in self.0.chars() {
- if offset >= target {
- break;
- }
-
- if ch == '\n' {
- point.row += 1;
- point.column = 0;
- } else {
- point.column += ch.len_utf8() as u32;
- }
- offset += ch.len_utf8();
- }
- point
- }
-
- fn offset_to_point_utf16(&self, target: usize) -> PointUtf16 {
- let mut offset = 0;
- let mut point = PointUtf16::new(0, 0);
- for ch in self.0.chars() {
- if offset >= target {
- break;
- }
-
- if ch == '\n' {
- point.row += 1;
- point.column = 0;
- } else {
- point.column += ch.len_utf16() as u32;
- }
- offset += ch.len_utf8();
- }
- point
- }
-
- fn point_to_offset(&self, target: Point) -> usize {
- let mut offset = 0;
- let mut point = Point::new(0, 0);
-
- for ch in self.0.chars() {
- if point >= target {
- if point > target {
- debug_panic!("point {target:?} is inside of character {ch:?}");
- }
- break;
- }
-
- if ch == '\n' {
- point.row += 1;
- point.column = 0;
-
- if point.row > target.row {
- debug_panic!(
- "point {target:?} is beyond the end of a line with length {}",
- point.column
- );
- break;
- }
- } else {
- point.column += ch.len_utf8() as u32;
- }
-
- offset += ch.len_utf8();
- }
-
- offset
- }
-
- fn point_to_point_utf16(&self, target: Point) -> PointUtf16 {
- let mut point = Point::zero();
- let mut point_utf16 = PointUtf16::new(0, 0);
- for ch in self.0.chars() {
- if point >= target {
- break;
- }
-
- if ch == '\n' {
- point_utf16.row += 1;
- point_utf16.column = 0;
- point.row += 1;
- point.column = 0;
- } else {
- point_utf16.column += ch.len_utf16() as u32;
- point.column += ch.len_utf8() as u32;
- }
- }
- point_utf16
- }
-
- fn point_utf16_to_offset(&self, target: PointUtf16, clip: bool) -> usize {
- let mut offset = 0;
- let mut point = PointUtf16::new(0, 0);
-
- for ch in self.0.chars() {
- if point == target {
- break;
- }
-
- if ch == '\n' {
- point.row += 1;
- point.column = 0;
-
- if point.row > target.row {
- if !clip {
- debug_panic!(
- "point {target:?} is beyond the end of a line with length {}",
- point.column
- );
- }
- // Return the offset of the newline
- return offset;
- }
- } else {
- point.column += ch.len_utf16() as u32;
- }
-
- if point > target {
- if !clip {
- debug_panic!("point {target:?} is inside of codepoint {ch:?}");
- }
- // Return the offset of the codepoint which we have landed within, bias left
- return offset;
- }
-
- offset += ch.len_utf8();
- }
-
- offset
- }
-
- fn unclipped_point_utf16_to_point(&self, target: Unclipped<PointUtf16>) -> Point {
- let mut point = Point::zero();
- let mut point_utf16 = PointUtf16::zero();
-
- for ch in self.0.chars() {
- if point_utf16 == target.0 {
- break;
- }
-
- if point_utf16 > target.0 {
- // If the point is past the end of a line or inside of a code point,
- // return the last valid point before the target.
- return point;
- }
-
- if ch == '\n' {
- point_utf16 += PointUtf16::new(1, 0);
- point += Point::new(1, 0);
- } else {
- point_utf16 += PointUtf16::new(0, ch.len_utf16() as u32);
- point += Point::new(0, ch.len_utf8() as u32);
- }
- }
-
- point
- }
-
- fn clip_point(&self, target: Point, bias: Bias) -> Point {
- for (row, line) in self.0.split('\n').enumerate() {
- if row == target.row as usize {
- let mut column = target.column.min(line.len() as u32);
- while !line.is_char_boundary(column as usize) {
- match bias {
- Bias::Left => column -= 1,
- Bias::Right => column += 1,
- }
- }
- return Point::new(row as u32, column);
- }
- }
- unreachable!()
- }
-
- fn clip_point_utf16(&self, target: Unclipped<PointUtf16>, bias: Bias) -> PointUtf16 {
- for (row, line) in self.0.split('\n').enumerate() {
- if row == target.0.row as usize {
- let mut code_units = line.encode_utf16();
- let mut column = code_units.by_ref().take(target.0.column as usize).count();
- if char::decode_utf16(code_units).next().transpose().is_err() {
- match bias {
- Bias::Left => column -= 1,
- Bias::Right => column += 1,
- }
- }
- return PointUtf16::new(row as u32, column as u32);
- }
- }
- unreachable!()
- }
-
- fn clip_offset_utf16(&self, target: OffsetUtf16, bias: Bias) -> OffsetUtf16 {
- let mut code_units = self.0.encode_utf16();
- let mut offset = code_units.by_ref().take(target.0).count();
- if char::decode_utf16(code_units).next().transpose().is_err() {
- match bias {
- Bias::Left => offset -= 1,
- Bias::Right => offset += 1,
- }
- }
- OffsetUtf16(offset)
- }
-}
-
-impl sum_tree::Item for Chunk {
- type Summary = ChunkSummary;
-
- fn summary(&self) -> Self::Summary {
- ChunkSummary::from(self.0.as_str())
- }
-}
-
-#[derive(Clone, Debug, Default, Eq, PartialEq)]
-pub struct ChunkSummary {
- text: TextSummary,
- fingerprint: RopeFingerprint,
-}
-
-impl<'a> From<&'a str> for ChunkSummary {
- fn from(text: &'a str) -> Self {
- Self {
- text: TextSummary::from(text),
- fingerprint: Rope::text_fingerprint(text),
- }
- }
-}
-
-impl sum_tree::Summary for ChunkSummary {
- type Context = ();
-
- fn add_summary(&mut self, summary: &Self, _: &()) {
- self.text += &summary.text;
- self.fingerprint = self.fingerprint * summary.fingerprint;
- }
-}
-
-#[derive(Clone, Debug, Default, Eq, PartialEq)]
-pub struct TextSummary {
- pub len: usize,
- pub len_utf16: OffsetUtf16,
- pub lines: Point,
- pub first_line_chars: u32,
- pub last_line_chars: u32,
- pub last_line_len_utf16: u32,
- pub longest_row: u32,
- pub longest_row_chars: u32,
-}
-
-impl TextSummary {
- pub fn lines_utf16(&self) -> PointUtf16 {
- PointUtf16 {
- row: self.lines.row,
- column: self.last_line_len_utf16,
- }
- }
-}
-
-impl<'a> From<&'a str> for TextSummary {
- fn from(text: &'a str) -> Self {
- let mut len_utf16 = OffsetUtf16(0);
- let mut lines = Point::new(0, 0);
- let mut first_line_chars = 0;
- let mut last_line_chars = 0;
- let mut last_line_len_utf16 = 0;
- let mut longest_row = 0;
- let mut longest_row_chars = 0;
- for c in text.chars() {
- len_utf16.0 += c.len_utf16();
-
- if c == '\n' {
- lines += Point::new(1, 0);
- last_line_len_utf16 = 0;
- last_line_chars = 0;
- } else {
- lines.column += c.len_utf8() as u32;
- last_line_len_utf16 += c.len_utf16() as u32;
- last_line_chars += 1;
- }
-
- if lines.row == 0 {
- first_line_chars = last_line_chars;
- }
-
- if last_line_chars > longest_row_chars {
- longest_row = lines.row;
- longest_row_chars = last_line_chars;
- }
- }
-
- TextSummary {
- len: text.len(),
- len_utf16,
- lines,
- first_line_chars,
- last_line_chars,
- last_line_len_utf16,
- longest_row,
- longest_row_chars,
- }
- }
-}
-
-impl sum_tree::Summary for TextSummary {
- type Context = ();
-
- fn add_summary(&mut self, summary: &Self, _: &Self::Context) {
- *self += summary;
- }
-}
-
-impl std::ops::Add<Self> for TextSummary {
- type Output = Self;
-
- fn add(mut self, rhs: Self) -> Self::Output {
- AddAssign::add_assign(&mut self, &rhs);
- self
- }
-}
-
-impl<'a> std::ops::AddAssign<&'a Self> for TextSummary {
- fn add_assign(&mut self, other: &'a Self) {
- let joined_chars = self.last_line_chars + other.first_line_chars;
- if joined_chars > self.longest_row_chars {
- self.longest_row = self.lines.row;
- self.longest_row_chars = joined_chars;
- }
- if other.longest_row_chars > self.longest_row_chars {
- self.longest_row = self.lines.row + other.longest_row;
- self.longest_row_chars = other.longest_row_chars;
- }
-
- if self.lines.row == 0 {
- self.first_line_chars += other.first_line_chars;
- }
-
- if other.lines.row == 0 {
- self.last_line_chars += other.first_line_chars;
- self.last_line_len_utf16 += other.last_line_len_utf16;
- } else {
- self.last_line_chars = other.last_line_chars;
- self.last_line_len_utf16 = other.last_line_len_utf16;
- }
-
- self.len += other.len;
- self.len_utf16 += other.len_utf16;
- self.lines += other.lines;
- }
-}
-
-impl std::ops::AddAssign<Self> for TextSummary {
- fn add_assign(&mut self, other: Self) {
- *self += &other;
- }
-}
-
-pub trait TextDimension: 'static + for<'a> Dimension<'a, ChunkSummary> {
- fn from_text_summary(summary: &TextSummary) -> Self;
- fn add_assign(&mut self, other: &Self);
-}
-
-impl<D1: TextDimension, D2: TextDimension> TextDimension for (D1, D2) {
- fn from_text_summary(summary: &TextSummary) -> Self {
- (
- D1::from_text_summary(summary),
- D2::from_text_summary(summary),
- )
- }
-
- fn add_assign(&mut self, other: &Self) {
- self.0.add_assign(&other.0);
- self.1.add_assign(&other.1);
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChunkSummary> for TextSummary {
- fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
- *self += &summary.text;
- }
-}
-
-impl TextDimension for TextSummary {
- fn from_text_summary(summary: &TextSummary) -> Self {
- summary.clone()
- }
-
- fn add_assign(&mut self, other: &Self) {
- *self += other;
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChunkSummary> for usize {
- fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
- *self += summary.text.len;
- }
-}
-
-impl TextDimension for usize {
- fn from_text_summary(summary: &TextSummary) -> Self {
- summary.len
- }
-
- fn add_assign(&mut self, other: &Self) {
- *self += other;
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChunkSummary> for OffsetUtf16 {
- fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
- *self += summary.text.len_utf16;
- }
-}
-
-impl TextDimension for OffsetUtf16 {
- fn from_text_summary(summary: &TextSummary) -> Self {
- summary.len_utf16
- }
-
- fn add_assign(&mut self, other: &Self) {
- *self += other;
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChunkSummary> for Point {
- fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
- *self += summary.text.lines;
- }
-}
-
-impl TextDimension for Point {
- fn from_text_summary(summary: &TextSummary) -> Self {
- summary.lines
- }
-
- fn add_assign(&mut self, other: &Self) {
- *self += other;
- }
-}
-
-impl<'a> sum_tree::Dimension<'a, ChunkSummary> for PointUtf16 {
- fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
- *self += summary.text.lines_utf16();
- }
-}
-
-impl TextDimension for PointUtf16 {
- fn from_text_summary(summary: &TextSummary) -> Self {
- summary.lines_utf16()
- }
-
- fn add_assign(&mut self, other: &Self) {
- *self += other;
- }
-}
-
-fn find_split_ix(text: &str) -> usize {
- let mut ix = text.len() / 2;
- while !text.is_char_boundary(ix) {
- if ix < 2 * CHUNK_BASE {
- ix += 1;
- } else {
- ix = (text.len() / 2) - 1;
- break;
- }
- }
- while !text.is_char_boundary(ix) {
- ix -= 1;
- }
-
- debug_assert!(ix <= 2 * CHUNK_BASE);
- debug_assert!(text.len() - ix <= 2 * CHUNK_BASE);
- ix
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use rand::prelude::*;
- use std::{cmp::Ordering, env, io::Read};
- use util::RandomCharIter;
- use Bias::{Left, Right};
-
- #[test]
- fn test_all_4_byte_chars() {
- let mut rope = Rope::new();
- let text = "🏀".repeat(256);
- rope.push(&text);
- assert_eq!(rope.text(), text);
- }
-
- #[test]
- fn test_clip() {
- let rope = Rope::from("🧘");
-
- assert_eq!(rope.clip_offset(1, Bias::Left), 0);
- assert_eq!(rope.clip_offset(1, Bias::Right), 4);
- assert_eq!(rope.clip_offset(5, Bias::Right), 4);
-
- assert_eq!(
- rope.clip_point(Point::new(0, 1), Bias::Left),
- Point::new(0, 0)
- );
- assert_eq!(
- rope.clip_point(Point::new(0, 1), Bias::Right),
- Point::new(0, 4)
- );
- assert_eq!(
- rope.clip_point(Point::new(0, 5), Bias::Right),
- Point::new(0, 4)
- );
-
- assert_eq!(
- rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 1)), Bias::Left),
- PointUtf16::new(0, 0)
- );
- assert_eq!(
- rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 1)), Bias::Right),
- PointUtf16::new(0, 2)
- );
- assert_eq!(
- rope.clip_point_utf16(Unclipped(PointUtf16::new(0, 3)), Bias::Right),
- PointUtf16::new(0, 2)
- );
-
- assert_eq!(
- rope.clip_offset_utf16(OffsetUtf16(1), Bias::Left),
- OffsetUtf16(0)
- );
- assert_eq!(
- rope.clip_offset_utf16(OffsetUtf16(1), Bias::Right),
- OffsetUtf16(2)
- );
- assert_eq!(
- rope.clip_offset_utf16(OffsetUtf16(3), Bias::Right),
- OffsetUtf16(2)
- );
- }
-
- #[gpui::test(iterations = 100)]
- fn test_random_rope(mut rng: StdRng) {
- let operations = env::var("OPERATIONS")
- .map(|i| i.parse().expect("invalid `OPERATIONS` variable"))
- .unwrap_or(10);
-
- let mut expected = String::new();
- let mut actual = Rope::new();
- for _ in 0..operations {
- let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
- let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
- let len = rng.gen_range(0..=64);
- let new_text: String = RandomCharIter::new(&mut rng).take(len).collect();
-
- let mut new_actual = Rope::new();
- let mut cursor = actual.cursor(0);
- new_actual.append(cursor.slice(start_ix));
- new_actual.push(&new_text);
- cursor.seek_forward(end_ix);
- new_actual.append(cursor.suffix());
- actual = new_actual;
-
- expected.replace_range(start_ix..end_ix, &new_text);
-
- assert_eq!(actual.text(), expected);
- log::info!("text: {:?}", expected);
-
- for _ in 0..5 {
- let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
- let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
-
- let actual_text = actual.chunks_in_range(start_ix..end_ix).collect::<String>();
- assert_eq!(actual_text, &expected[start_ix..end_ix]);
-
- let mut actual_text = String::new();
- actual
- .bytes_in_range(start_ix..end_ix)
- .read_to_string(&mut actual_text)
- .unwrap();
- assert_eq!(actual_text, &expected[start_ix..end_ix]);
-
- assert_eq!(
- actual
- .reversed_chunks_in_range(start_ix..end_ix)
- .collect::<Vec<&str>>()
- .into_iter()
- .rev()
- .collect::<String>(),
- &expected[start_ix..end_ix]
- );
- }
-
- let mut offset_utf16 = OffsetUtf16(0);
- let mut point = Point::new(0, 0);
- let mut point_utf16 = PointUtf16::new(0, 0);
- for (ix, ch) in expected.char_indices().chain(Some((expected.len(), '\0'))) {
- assert_eq!(actual.offset_to_point(ix), point, "offset_to_point({})", ix);
- assert_eq!(
- actual.offset_to_point_utf16(ix),
- point_utf16,
- "offset_to_point_utf16({})",
- ix
- );
- assert_eq!(
- actual.point_to_offset(point),
- ix,
- "point_to_offset({:?})",
- point
- );
- assert_eq!(
- actual.point_utf16_to_offset(point_utf16),
- ix,
- "point_utf16_to_offset({:?})",
- point_utf16
- );
- assert_eq!(
- actual.offset_to_offset_utf16(ix),
- offset_utf16,
- "offset_to_offset_utf16({:?})",
- ix
- );
- assert_eq!(
- actual.offset_utf16_to_offset(offset_utf16),
- ix,
- "offset_utf16_to_offset({:?})",
- offset_utf16
- );
- if ch == '\n' {
- point += Point::new(1, 0);
- point_utf16 += PointUtf16::new(1, 0);
- } else {
- point.column += ch.len_utf8() as u32;
- point_utf16.column += ch.len_utf16() as u32;
- }
- offset_utf16.0 += ch.len_utf16();
- }
-
- let mut offset_utf16 = OffsetUtf16(0);
- let mut point_utf16 = Unclipped(PointUtf16::zero());
- for unit in expected.encode_utf16() {
- let left_offset = actual.clip_offset_utf16(offset_utf16, Bias::Left);
- let right_offset = actual.clip_offset_utf16(offset_utf16, Bias::Right);
- assert!(right_offset >= left_offset);
- // Ensure translating UTF-16 offsets to UTF-8 offsets doesn't panic.
- actual.offset_utf16_to_offset(left_offset);
- actual.offset_utf16_to_offset(right_offset);
-
- let left_point = actual.clip_point_utf16(point_utf16, Bias::Left);
- let right_point = actual.clip_point_utf16(point_utf16, Bias::Right);
- assert!(right_point >= left_point);
- // Ensure translating valid UTF-16 points to offsets doesn't panic.
- actual.point_utf16_to_offset(left_point);
- actual.point_utf16_to_offset(right_point);
-
- offset_utf16.0 += 1;
- if unit == b'\n' as u16 {
- point_utf16.0 += PointUtf16::new(1, 0);
- } else {
- point_utf16.0 += PointUtf16::new(0, 1);
- }
- }
-
- for _ in 0..5 {
- let end_ix = clip_offset(&expected, rng.gen_range(0..=expected.len()), Right);
- let start_ix = clip_offset(&expected, rng.gen_range(0..=end_ix), Left);
- assert_eq!(
- actual.cursor(start_ix).summary::<TextSummary>(end_ix),
- TextSummary::from(&expected[start_ix..end_ix])
- );
- }
-
- let mut expected_longest_rows = Vec::new();
- let mut longest_line_len = -1_isize;
- for (row, line) in expected.split('\n').enumerate() {
- let row = row as u32;
- assert_eq!(
- actual.line_len(row),
- line.len() as u32,
- "invalid line len for row {}",
- row
- );
-
- let line_char_count = line.chars().count() as isize;
- match line_char_count.cmp(&longest_line_len) {
- Ordering::Less => {}
- Ordering::Equal => expected_longest_rows.push(row),
- Ordering::Greater => {
- longest_line_len = line_char_count;
- expected_longest_rows.clear();
- expected_longest_rows.push(row);
- }
- }
- }
-
- let longest_row = actual.summary().longest_row;
- assert!(
- expected_longest_rows.contains(&longest_row),
- "incorrect longest row {}. expected {:?} with length {}",
- longest_row,
- expected_longest_rows,
- longest_line_len,
- );
- }
- }
-
- fn clip_offset(text: &str, mut offset: usize, bias: Bias) -> usize {
- while !text.is_char_boundary(offset) {
- match bias {
- Bias::Left => offset -= 1,
- Bias::Right => offset += 1,
- }
- }
- offset
- }
-
- impl Rope {
- fn text(&self) -> String {
- let mut text = String::new();
- for chunk in self.chunks.cursor::<()>() {
- text.push_str(&chunk.0);
- }
- text
- }
- }
-}
@@ -1,57 +0,0 @@
-use crate::{ChunkSummary, TextDimension, TextSummary};
-use std::ops::{Add, AddAssign, Sub, SubAssign};
-
-#[derive(Debug, Default, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub struct Unclipped<T>(pub T);
-
-impl<T> From<T> for Unclipped<T> {
- fn from(value: T) -> Self {
- Unclipped(value)
- }
-}
-
-impl<'a, T: sum_tree::Dimension<'a, ChunkSummary>> sum_tree::Dimension<'a, ChunkSummary>
- for Unclipped<T>
-{
- fn add_summary(&mut self, summary: &'a ChunkSummary, _: &()) {
- self.0.add_summary(summary, &());
- }
-}
-
-impl<T: TextDimension> TextDimension for Unclipped<T> {
- fn from_text_summary(summary: &TextSummary) -> Self {
- Unclipped(T::from_text_summary(summary))
- }
-
- fn add_assign(&mut self, other: &Self) {
- TextDimension::add_assign(&mut self.0, &other.0);
- }
-}
-
-impl<T: Add<T, Output = T>> Add<Unclipped<T>> for Unclipped<T> {
- type Output = Unclipped<T>;
-
- fn add(self, rhs: Unclipped<T>) -> Self::Output {
- Unclipped(self.0 + rhs.0)
- }
-}
-
-impl<T: Sub<T, Output = T>> Sub<Unclipped<T>> for Unclipped<T> {
- type Output = Unclipped<T>;
-
- fn sub(self, rhs: Unclipped<T>) -> Self::Output {
- Unclipped(self.0 - rhs.0)
- }
-}
-
-impl<T: AddAssign<T>> AddAssign<Unclipped<T>> for Unclipped<T> {
- fn add_assign(&mut self, rhs: Unclipped<T>) {
- self.0 += rhs.0;
- }
-}
-
-impl<T: SubAssign<T>> SubAssign<Unclipped<T>> for Unclipped<T> {
- fn sub_assign(&mut self, rhs: Unclipped<T>) {
- self.0 -= rhs.0;
- }
-}
@@ -14,9 +14,9 @@ collections = { path = "../collections" }
editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
ui = {package = "ui2", path = "../ui2"}
@@ -17,7 +17,7 @@ project = { path = "../project" }
workspace = { path = "../workspace" }
util = { path = "../util" }
rpc = { path = "../rpc" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
anyhow.workspace = true
postage.workspace = true
futures.workspace = true
@@ -46,7 +46,7 @@ language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
rpc = { path = "../rpc", features = ["test-support"] }
workspace = { path = "../workspace", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"]}
+settings = { path = "../settings", features = ["test-support"]}
rust-embed = { version = "8.0", features = ["include-exclude"] }
client = { path = "../client" }
node_runtime = { path = "../node_runtime"}
@@ -13,7 +13,7 @@ test-support = ["gpui/test-support", "fs/test-support"]
[dependencies]
collections = { path = "../collections" }
-gpui = { path = "../gpui" }
+gpui = {package = "gpui2", path = "../gpui2" }
sqlez = { path = "../sqlez" }
fs = { path = "../fs" }
feature_flags = { path = "../feature_flags" }
@@ -35,7 +35,7 @@ tree-sitter.workspace = true
tree-sitter-json = "*"
[dev-dependencies]
-gpui = { path = "../gpui", features = ["test-support"] }
+gpui = {package = "gpui2", path = "../gpui2", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
indoc.workspace = true
pretty_assertions.workspace = true
@@ -1,7 +1,7 @@
use crate::{settings_store::parse_json_with_comments, SettingsAssets};
use anyhow::{anyhow, Context, Result};
use collections::BTreeMap;
-use gpui::{keymap_matcher::Binding, AppContext, NoAction};
+use gpui::{Action, AppContext, KeyBinding, SharedString};
use schemars::{
gen::{SchemaGenerator, SchemaSettings},
schema::{InstanceType, Schema, SchemaObject, SingleOrVec, SubschemaValidation},
@@ -73,9 +73,9 @@ impl KeymapFile {
"Expected first item in array to be a string."
)));
};
- cx.deserialize_action(&name, Some(data))
+ cx.build_action(&name, Some(data))
}
- Value::String(name) => cx.deserialize_action(&name, None),
+ Value::String(name) => cx.build_action(&name, None),
Value::Null => Ok(no_action()),
_ => {
return Some(Err(anyhow!("Expected two-element array, got {action:?}")))
@@ -87,16 +87,16 @@ impl KeymapFile {
)
})
.log_err()
- .map(|action| Binding::load(&keystroke, action, context.as_deref()))
+ .map(|action| KeyBinding::load(&keystroke, action, context.as_deref()))
})
.collect::<Result<Vec<_>>>()?;
- cx.add_bindings(bindings);
+ cx.bind_keys(bindings);
}
Ok(())
}
- pub fn generate_json_schema(action_names: &[&'static str]) -> serde_json::Value {
+ pub fn generate_json_schema(action_names: &[SharedString]) -> serde_json::Value {
let mut root_schema = SchemaSettings::draft07()
.with(|settings| settings.option_add_null_type = false)
.into_generator()
@@ -138,7 +138,7 @@ impl KeymapFile {
}
fn no_action() -> Box<dyn gpui::Action> {
- Box::new(NoAction {})
+ gpui::NoAction.boxed_clone()
}
#[cfg(test)]
@@ -8,7 +8,7 @@ use util::asset_str;
pub use keymap_file::KeymapFile;
pub use settings_file::*;
-pub use settings_store::{Setting, SettingsJsonSchemaParams, SettingsStore};
+pub use settings_store::{Settings, SettingsJsonSchemaParams, SettingsStore};
#[derive(RustEmbed)]
#[folder = "../../assets"]
@@ -1,31 +1,11 @@
-use crate::{settings_store::SettingsStore, Setting};
+use crate::{settings_store::SettingsStore, KeymapFile, Settings};
use anyhow::Result;
use fs::Fs;
use futures::{channel::mpsc, StreamExt};
-use gpui::{executor::Background, AppContext};
-use std::{
- io::ErrorKind,
- path::{Path, PathBuf},
- str,
- sync::Arc,
- time::Duration,
-};
+use gpui::{AppContext, BackgroundExecutor};
+use std::{io::ErrorKind, path::PathBuf, str, sync::Arc, time::Duration};
use util::{paths, ResultExt};
-pub fn register<T: Setting>(cx: &mut AppContext) {
- cx.update_global::<SettingsStore, _, _>(|store, cx| {
- store.register_setting::<T>(cx);
- });
-}
-
-pub fn get<'a, T: Setting>(cx: &'a AppContext) -> &'a T {
- cx.global::<SettingsStore>().get(None)
-}
-
-pub fn get_local<'a, T: Setting>(location: Option<(usize, &Path)>, cx: &'a AppContext) -> &'a T {
- cx.global::<SettingsStore>().get(location)
-}
-
pub const EMPTY_THEME_NAME: &'static str = "empty-theme";
#[cfg(any(test, feature = "test-support"))]
@@ -36,6 +16,9 @@ pub fn test_settings() -> String {
.unwrap();
util::merge_non_null_json_value_into(
serde_json::json!({
+ "ui_font_family": "Courier",
+ "ui_font_features": {},
+ "ui_font_size": 14,
"buffer_font_family": "Courier",
"buffer_font_features": {},
"buffer_font_size": 14,
@@ -48,7 +31,7 @@ pub fn test_settings() -> String {
}
pub fn watch_config_file(
- executor: Arc<Background>,
+ executor: &BackgroundExecutor,
fs: Arc<dyn Fs>,
path: PathBuf,
) -> mpsc::UnboundedReceiver<String> {
@@ -83,22 +66,27 @@ pub fn handle_settings_file_changes(
mut user_settings_file_rx: mpsc::UnboundedReceiver<String>,
cx: &mut AppContext,
) {
- let user_settings_content = cx.background().block(user_settings_file_rx.next()).unwrap();
- cx.update_global::<SettingsStore, _, _>(|store, cx| {
+ let user_settings_content = cx
+ .background_executor()
+ .block(user_settings_file_rx.next())
+ .unwrap();
+ cx.update_global(|store: &mut SettingsStore, cx| {
store
.set_user_settings(&user_settings_content, cx)
.log_err();
});
cx.spawn(move |mut cx| async move {
while let Some(user_settings_content) = user_settings_file_rx.next().await {
- cx.update(|cx| {
- cx.update_global::<SettingsStore, _, _>(|store, cx| {
- store
- .set_user_settings(&user_settings_content, cx)
- .log_err();
- });
- cx.refresh_windows();
+ eprintln!("settings file changed");
+ let result = cx.update_global(|store: &mut SettingsStore, cx| {
+ store
+ .set_user_settings(&user_settings_content, cx)
+ .log_err();
+ cx.refresh();
});
+ if result.is_err() {
+ break; // App dropped
+ }
}
})
.detach();
@@ -118,29 +106,29 @@ async fn load_settings(fs: &Arc<dyn Fs>) -> Result<String> {
}
}
-pub fn update_settings_file<T: Setting>(
+pub fn update_settings_file<T: Settings>(
fs: Arc<dyn Fs>,
cx: &mut AppContext,
update: impl 'static + Send + FnOnce(&mut T::FileContent),
) {
cx.spawn(|cx| async move {
- let old_text = cx
- .background()
- .spawn({
- let fs = fs.clone();
- async move { load_settings(&fs).await }
- })
- .await?;
-
- let new_text = cx.read(|cx| {
- cx.global::<SettingsStore>()
- .new_text_for_update::<T>(old_text, update)
- });
-
- cx.background()
- .spawn(async move { fs.atomic_write(paths::SETTINGS.clone(), new_text).await })
- .await?;
+ let old_text = load_settings(&fs).await?;
+ let new_text = cx.read_global(|store: &SettingsStore, _cx| {
+ store.new_text_for_update::<T>(old_text, update)
+ })?;
+ fs.atomic_write(paths::SETTINGS.clone(), new_text).await?;
anyhow::Ok(())
})
.detach_and_log_err(cx);
}
+
+pub fn load_default_keymap(cx: &mut AppContext) {
+ for path in ["keymaps/default.json", "keymaps/vim.json"] {
+ KeymapFile::load_asset(path, cx).unwrap();
+ }
+
+ // todo!()
+ // if let Some(asset_path) = settings::get::<BaseKeymap>(cx).asset_path() {
+ // KeymapFile::load_asset(asset_path, cx).unwrap();
+ // }
+}
@@ -18,7 +18,7 @@ use util::{merge_non_null_json_value_into, RangeExt, ResultExt as _};
/// A value that can be defined as a user setting.
///
/// Settings can be loaded from a combination of multiple JSON files.
-pub trait Setting: 'static {
+pub trait Settings: 'static + Send + Sync {
/// The name of a key within the JSON file from which this setting should
/// be deserialized. If this is `None`, then the setting will be deserialized
/// from the root object.
@@ -35,7 +35,7 @@ pub trait Setting: 'static {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<Self>
where
Self: Sized;
@@ -76,6 +76,39 @@ pub trait Setting: 'static {
fn missing_default() -> anyhow::Error {
anyhow::anyhow!("missing default")
}
+
+ fn register(cx: &mut AppContext)
+ where
+ Self: Sized,
+ {
+ cx.update_global(|store: &mut SettingsStore, cx| {
+ store.register_setting::<Self>(cx);
+ });
+ }
+
+ #[track_caller]
+ fn get<'a>(path: Option<(usize, &Path)>, cx: &'a AppContext) -> &'a Self
+ where
+ Self: Sized,
+ {
+ cx.global::<SettingsStore>().get(path)
+ }
+
+ #[track_caller]
+ fn get_global<'a>(cx: &'a AppContext) -> &'a Self
+ where
+ Self: Sized,
+ {
+ cx.global::<SettingsStore>().get(None)
+ }
+
+ #[track_caller]
+ fn override_global<'a>(settings: Self, cx: &'a mut AppContext)
+ where
+ Self: Sized,
+ {
+ cx.global_mut::<SettingsStore>().override_global(settings)
+ }
}
pub struct SettingsJsonSchemaParams<'a> {
@@ -89,7 +122,10 @@ pub struct SettingsStore {
raw_default_settings: serde_json::Value,
raw_user_settings: serde_json::Value,
raw_local_settings: BTreeMap<(usize, Arc<Path>), serde_json::Value>,
- tab_size_callback: Option<(TypeId, Box<dyn Fn(&dyn Any) -> Option<usize>>)>,
+ tab_size_callback: Option<(
+ TypeId,
+ Box<dyn Fn(&dyn Any) -> Option<usize> + Send + Sync + 'static>,
+ )>,
}
impl Default for SettingsStore {
@@ -110,7 +146,7 @@ struct SettingValue<T> {
local_values: Vec<(usize, Arc<Path>, T)>,
}
-trait AnySettingValue {
+trait AnySettingValue: 'static + Send + Sync {
fn key(&self) -> Option<&'static str>;
fn setting_type_name(&self) -> &'static str;
fn deserialize_setting(&self, json: &serde_json::Value) -> Result<DeserializedSetting>;
@@ -118,7 +154,7 @@ trait AnySettingValue {
&self,
default_value: &DeserializedSetting,
custom: &[DeserializedSetting],
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<Box<dyn Any>>;
fn value_for_path(&self, path: Option<(usize, &Path)>) -> &dyn Any;
fn set_global_value(&mut self, value: Box<dyn Any>);
@@ -135,7 +171,7 @@ struct DeserializedSetting(Box<dyn Any>);
impl SettingsStore {
/// Add a new type of setting to the store.
- pub fn register_setting<T: Setting>(&mut self, cx: &AppContext) {
+ pub fn register_setting<T: Settings>(&mut self, cx: &mut AppContext) {
let setting_type_id = TypeId::of::<T>();
let entry = self.setting_values.entry(setting_type_id);
if matches!(entry, hash_map::Entry::Occupied(_)) {
@@ -174,7 +210,7 @@ impl SettingsStore {
///
/// Panics if the given setting type has not been registered, or if there is no
/// value for this setting.
- pub fn get<T: Setting>(&self, path: Option<(usize, &Path)>) -> &T {
+ pub fn get<T: Settings>(&self, path: Option<(usize, &Path)>) -> &T {
self.setting_values
.get(&TypeId::of::<T>())
.unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()))
@@ -186,7 +222,7 @@ impl SettingsStore {
/// Override the global value for a setting.
///
/// The given value will be overwritten if the user settings file changes.
- pub fn override_global<T: Setting>(&mut self, value: T) {
+ pub fn override_global<T: Settings>(&mut self, value: T) {
self.setting_values
.get_mut(&TypeId::of::<T>())
.unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()))
@@ -202,7 +238,7 @@ impl SettingsStore {
}
#[cfg(any(test, feature = "test-support"))]
- pub fn test(cx: &AppContext) -> Self {
+ pub fn test(cx: &mut AppContext) -> Self {
let mut this = Self::default();
this.set_default_settings(&crate::test_settings(), cx)
.unwrap();
@@ -215,9 +251,9 @@ impl SettingsStore {
/// This is only for tests. Normally, settings are only loaded from
/// JSON files.
#[cfg(any(test, feature = "test-support"))]
- pub fn update_user_settings<T: Setting>(
+ pub fn update_user_settings<T: Settings>(
&mut self,
- cx: &AppContext,
+ cx: &mut AppContext,
update: impl FnOnce(&mut T::FileContent),
) {
let old_text = serde_json::to_string(&self.raw_user_settings).unwrap();
@@ -227,7 +263,7 @@ impl SettingsStore {
/// Update the value of a setting in a JSON file, returning the new text
/// for that JSON file.
- pub fn new_text_for_update<T: Setting>(
+ pub fn new_text_for_update<T: Settings>(
&self,
old_text: String,
update: impl FnOnce(&mut T::FileContent),
@@ -242,7 +278,7 @@ impl SettingsStore {
/// Update the value of a setting in a JSON file, returning a list
/// of edits to apply to the JSON file.
- pub fn edits_for_update<T: Setting>(
+ pub fn edits_for_update<T: Settings>(
&self,
text: &str,
update: impl FnOnce(&mut T::FileContent),
@@ -284,7 +320,7 @@ impl SettingsStore {
}
/// Configure the tab sized when updating JSON files.
- pub fn set_json_tab_size_callback<T: Setting>(
+ pub fn set_json_tab_size_callback<T: Settings>(
&mut self,
get_tab_size: fn(&T) -> Option<usize>,
) {
@@ -314,7 +350,7 @@ impl SettingsStore {
pub fn set_default_settings(
&mut self,
default_settings_content: &str,
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<()> {
let settings: serde_json::Value = parse_json_with_comments(default_settings_content)?;
if settings.is_object() {
@@ -330,7 +366,7 @@ impl SettingsStore {
pub fn set_user_settings(
&mut self,
user_settings_content: &str,
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<()> {
let settings: serde_json::Value = parse_json_with_comments(user_settings_content)?;
if settings.is_object() {
@@ -348,7 +384,7 @@ impl SettingsStore {
root_id: usize,
path: Arc<Path>,
settings_content: Option<&str>,
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<()> {
if let Some(content) = settings_content {
self.raw_local_settings
@@ -361,7 +397,7 @@ impl SettingsStore {
}
/// Add or remove a set of local settings via a JSON string.
- pub fn clear_local_settings(&mut self, root_id: usize, cx: &AppContext) -> Result<()> {
+ pub fn clear_local_settings(&mut self, root_id: usize, cx: &mut AppContext) -> Result<()> {
self.raw_local_settings.retain(|k, _| k.0 != root_id);
self.recompute_values(Some((root_id, "".as_ref())), cx)?;
Ok(())
@@ -453,7 +489,7 @@ impl SettingsStore {
fn recompute_values(
&mut self,
changed_local_path: Option<(usize, &Path)>,
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<()> {
// Reload the global and local values for every setting.
let mut user_settings_stack = Vec::<DeserializedSetting>::new();
@@ -541,7 +577,7 @@ impl Debug for SettingsStore {
}
}
-impl<T: Setting> AnySettingValue for SettingValue<T> {
+impl<T: Settings> AnySettingValue for SettingValue<T> {
fn key(&self) -> Option<&'static str> {
T::KEY
}
@@ -554,7 +590,7 @@ impl<T: Setting> AnySettingValue for SettingValue<T> {
&self,
default_value: &DeserializedSetting,
user_values: &[DeserializedSetting],
- cx: &AppContext,
+ cx: &mut AppContext,
) -> Result<Box<dyn Any>> {
let default_value = default_value.0.downcast_ref::<T::FileContent>().unwrap();
let values: SmallVec<[&T::FileContent; 6]> = user_values
@@ -837,6 +873,7 @@ fn to_pretty_json(value: &impl Serialize, indent_size: usize, indent_prefix_len:
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
Ok(serde_json_lenient::from_str(content)?)
}
+
#[cfg(test)]
mod tests {
use super::*;
@@ -1123,7 +1160,7 @@ mod tests {
);
}
- fn check_settings_update<T: Setting>(
+ fn check_settings_update<T: Settings>(
store: &mut SettingsStore,
old_json: String,
update: fn(&mut T::FileContent),
@@ -1153,14 +1190,14 @@ mod tests {
staff: Option<bool>,
}
- impl Setting for UserSettings {
+ impl Settings for UserSettings {
const KEY: Option<&'static str> = Some("user");
type FileContent = UserSettingsJson;
fn load(
default_value: &UserSettingsJson,
user_values: &[&UserSettingsJson],
- _: &AppContext,
+ _: &mut AppContext,
) -> Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
@@ -1169,14 +1206,14 @@ mod tests {
#[derive(Debug, Deserialize, PartialEq)]
struct TurboSetting(bool);
- impl Setting for TurboSetting {
+ impl Settings for TurboSetting {
const KEY: Option<&'static str> = Some("turbo");
type FileContent = Option<bool>;
fn load(
default_value: &Option<bool>,
user_values: &[&Option<bool>],
- _: &AppContext,
+ _: &mut AppContext,
) -> Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
@@ -1196,7 +1233,7 @@ mod tests {
key2: Option<String>,
}
- impl Setting for MultiKeySettings {
+ impl Settings for MultiKeySettings {
const KEY: Option<&'static str> = None;
type FileContent = MultiKeySettingsJson;
@@ -1204,7 +1241,7 @@ mod tests {
fn load(
default_value: &MultiKeySettingsJson,
user_values: &[&MultiKeySettingsJson],
- _: &AppContext,
+ _: &mut AppContext,
) -> Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
@@ -1229,7 +1266,7 @@ mod tests {
pub hour_format: Option<HourFormat>,
}
- impl Setting for JournalSettings {
+ impl Settings for JournalSettings {
const KEY: Option<&'static str> = Some("journal");
type FileContent = JournalSettingsJson;
@@ -1237,7 +1274,7 @@ mod tests {
fn load(
default_value: &JournalSettingsJson,
user_values: &[&JournalSettingsJson],
- _: &AppContext,
+ _: &mut AppContext,
) -> Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
@@ -1255,12 +1292,12 @@ mod tests {
language_setting_2: Option<bool>,
}
- impl Setting for LanguageSettings {
+ impl Settings for LanguageSettings {
const KEY: Option<&'static str> = None;
type FileContent = Self;
- fn load(default_value: &Self, user_values: &[&Self], _: &AppContext) -> Result<Self> {
+ fn load(default_value: &Self, user_values: &[&Self], _: &mut AppContext) -> Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
}
@@ -1,42 +0,0 @@
-[package]
-name = "settings2"
-version = "0.1.0"
-edition = "2021"
-publish = false
-
-[lib]
-path = "src/settings2.rs"
-doctest = false
-
-[features]
-test-support = ["gpui/test-support", "fs/test-support"]
-
-[dependencies]
-collections = { path = "../collections" }
-gpui = {package = "gpui2", path = "../gpui2" }
-sqlez = { path = "../sqlez" }
-fs = { path = "../fs" }
-feature_flags = { path = "../feature_flags" }
-util = { path = "../util" }
-
-anyhow.workspace = true
-futures.workspace = true
-serde_json_lenient = {version = "0.1", features = ["preserve_order", "raw_value"]}
-lazy_static.workspace = true
-postage.workspace = true
-rust-embed.workspace = true
-schemars.workspace = true
-serde.workspace = true
-serde_derive.workspace = true
-serde_json.workspace = true
-smallvec.workspace = true
-toml.workspace = true
-tree-sitter.workspace = true
-tree-sitter-json = "*"
-
-[dev-dependencies]
-gpui = {package = "gpui2", path = "../gpui2", features = ["test-support"] }
-fs = { path = "../fs", features = ["test-support"] }
-indoc.workspace = true
-pretty_assertions.workspace = true
-unindent.workspace = true
@@ -1,163 +0,0 @@
-use crate::{settings_store::parse_json_with_comments, SettingsAssets};
-use anyhow::{anyhow, Context, Result};
-use collections::BTreeMap;
-use gpui::{Action, AppContext, KeyBinding, SharedString};
-use schemars::{
- gen::{SchemaGenerator, SchemaSettings},
- schema::{InstanceType, Schema, SchemaObject, SingleOrVec, SubschemaValidation},
- JsonSchema,
-};
-use serde::Deserialize;
-use serde_json::Value;
-use util::{asset_str, ResultExt};
-
-#[derive(Debug, Deserialize, Default, Clone, JsonSchema)]
-#[serde(transparent)]
-pub struct KeymapFile(Vec<KeymapBlock>);
-
-#[derive(Debug, Deserialize, Default, Clone, JsonSchema)]
-pub struct KeymapBlock {
- #[serde(default)]
- context: Option<String>,
- bindings: BTreeMap<String, KeymapAction>,
-}
-
-#[derive(Debug, Deserialize, Default, Clone)]
-#[serde(transparent)]
-pub struct KeymapAction(Value);
-
-impl JsonSchema for KeymapAction {
- fn schema_name() -> String {
- "KeymapAction".into()
- }
-
- fn json_schema(_: &mut SchemaGenerator) -> Schema {
- Schema::Bool(true)
- }
-}
-
-#[derive(Deserialize)]
-struct ActionWithData(Box<str>, Value);
-
-impl KeymapFile {
- pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> {
- let content = asset_str::<SettingsAssets>(asset_path);
-
- Self::parse(content.as_ref())?.add_to_cx(cx)
- }
-
- pub fn parse(content: &str) -> Result<Self> {
- parse_json_with_comments::<Self>(content)
- }
-
- pub fn add_to_cx(self, cx: &mut AppContext) -> Result<()> {
- for KeymapBlock { context, bindings } in self.0 {
- let bindings = bindings
- .into_iter()
- .filter_map(|(keystroke, action)| {
- let action = action.0;
-
- // This is a workaround for a limitation in serde: serde-rs/json#497
- // We want to deserialize the action data as a `RawValue` so that we can
- // deserialize the action itself dynamically directly from the JSON
- // string. But `RawValue` currently does not work inside of an untagged enum.
- match action {
- Value::Array(items) => {
- let Ok([name, data]): Result<[serde_json::Value; 2], _> =
- items.try_into()
- else {
- return Some(Err(anyhow!("Expected array of length 2")));
- };
- let serde_json::Value::String(name) = name else {
- return Some(Err(anyhow!(
- "Expected first item in array to be a string."
- )));
- };
- cx.build_action(&name, Some(data))
- }
- Value::String(name) => cx.build_action(&name, None),
- Value::Null => Ok(no_action()),
- _ => {
- return Some(Err(anyhow!("Expected two-element array, got {action:?}")))
- }
- }
- .with_context(|| {
- format!(
- "invalid binding value for keystroke {keystroke}, context {context:?}"
- )
- })
- .log_err()
- .map(|action| KeyBinding::load(&keystroke, action, context.as_deref()))
- })
- .collect::<Result<Vec<_>>>()?;
-
- cx.bind_keys(bindings);
- }
- Ok(())
- }
-
- pub fn generate_json_schema(action_names: &[SharedString]) -> serde_json::Value {
- let mut root_schema = SchemaSettings::draft07()
- .with(|settings| settings.option_add_null_type = false)
- .into_generator()
- .into_root_schema_for::<KeymapFile>();
-
- let action_schema = Schema::Object(SchemaObject {
- subschemas: Some(Box::new(SubschemaValidation {
- one_of: Some(vec![
- Schema::Object(SchemaObject {
- instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::String))),
- enum_values: Some(
- action_names
- .iter()
- .map(|name| Value::String(name.to_string()))
- .collect(),
- ),
- ..Default::default()
- }),
- Schema::Object(SchemaObject {
- instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Array))),
- ..Default::default()
- }),
- Schema::Object(SchemaObject {
- instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Null))),
- ..Default::default()
- }),
- ]),
- ..Default::default()
- })),
- ..Default::default()
- });
-
- root_schema
- .definitions
- .insert("KeymapAction".to_owned(), action_schema);
-
- serde_json::to_value(root_schema).unwrap()
- }
-}
-
-fn no_action() -> Box<dyn gpui::Action> {
- gpui::NoAction.boxed_clone()
-}
-
-#[cfg(test)]
-mod tests {
- use crate::KeymapFile;
-
- #[test]
- fn can_deserialize_keymap_with_trailing_comma() {
- let json = indoc::indoc! {"[
- // Standard macOS bindings
- {
- \"bindings\": {
- \"up\": \"menu::SelectPrev\",
- },
- },
- ]
- "
-
- };
- KeymapFile::parse(json).unwrap();
- }
-}
@@ -1,38 +0,0 @@
-mod keymap_file;
-mod settings_file;
-mod settings_store;
-
-use rust_embed::RustEmbed;
-use std::{borrow::Cow, str};
-use util::asset_str;
-
-pub use keymap_file::KeymapFile;
-pub use settings_file::*;
-pub use settings_store::{Settings, SettingsJsonSchemaParams, SettingsStore};
-
-#[derive(RustEmbed)]
-#[folder = "../../assets"]
-#[include = "settings/*"]
-#[include = "keymaps/*"]
-#[exclude = "*.DS_Store"]
-pub struct SettingsAssets;
-
-pub fn default_settings() -> Cow<'static, str> {
- asset_str::<SettingsAssets>("settings/default.json")
-}
-
-pub fn default_keymap() -> Cow<'static, str> {
- asset_str::<SettingsAssets>("keymaps/default.json")
-}
-
-pub fn vim_keymap() -> Cow<'static, str> {
- asset_str::<SettingsAssets>("keymaps/vim.json")
-}
-
-pub fn initial_user_settings_content() -> Cow<'static, str> {
- asset_str::<SettingsAssets>("settings/initial_user_settings.json")
-}
-
-pub fn initial_local_settings_content() -> Cow<'static, str> {
- asset_str::<SettingsAssets>("settings/initial_local_settings.json")
-}
@@ -1,134 +0,0 @@
-use crate::{settings_store::SettingsStore, KeymapFile, Settings};
-use anyhow::Result;
-use fs::Fs;
-use futures::{channel::mpsc, StreamExt};
-use gpui::{AppContext, BackgroundExecutor};
-use std::{io::ErrorKind, path::PathBuf, str, sync::Arc, time::Duration};
-use util::{paths, ResultExt};
-
-pub const EMPTY_THEME_NAME: &'static str = "empty-theme";
-
-#[cfg(any(test, feature = "test-support"))]
-pub fn test_settings() -> String {
- let mut value = crate::settings_store::parse_json_with_comments::<serde_json::Value>(
- crate::default_settings().as_ref(),
- )
- .unwrap();
- util::merge_non_null_json_value_into(
- serde_json::json!({
- "ui_font_family": "Courier",
- "ui_font_features": {},
- "ui_font_size": 14,
- "buffer_font_family": "Courier",
- "buffer_font_features": {},
- "buffer_font_size": 14,
- "theme": EMPTY_THEME_NAME,
- }),
- &mut value,
- );
- value.as_object_mut().unwrap().remove("languages");
- serde_json::to_string(&value).unwrap()
-}
-
-pub fn watch_config_file(
- executor: &BackgroundExecutor,
- fs: Arc<dyn Fs>,
- path: PathBuf,
-) -> mpsc::UnboundedReceiver<String> {
- let (tx, rx) = mpsc::unbounded();
- executor
- .spawn(async move {
- let events = fs.watch(&path, Duration::from_millis(100)).await;
- futures::pin_mut!(events);
-
- let contents = fs.load(&path).await.unwrap_or_default();
- if tx.unbounded_send(contents).is_err() {
- return;
- }
-
- loop {
- if events.next().await.is_none() {
- break;
- }
-
- if let Ok(contents) = fs.load(&path).await {
- if !tx.unbounded_send(contents).is_ok() {
- break;
- }
- }
- }
- })
- .detach();
- rx
-}
-
-pub fn handle_settings_file_changes(
- mut user_settings_file_rx: mpsc::UnboundedReceiver<String>,
- cx: &mut AppContext,
-) {
- let user_settings_content = cx
- .background_executor()
- .block(user_settings_file_rx.next())
- .unwrap();
- cx.update_global(|store: &mut SettingsStore, cx| {
- store
- .set_user_settings(&user_settings_content, cx)
- .log_err();
- });
- cx.spawn(move |mut cx| async move {
- while let Some(user_settings_content) = user_settings_file_rx.next().await {
- eprintln!("settings file changed");
- let result = cx.update_global(|store: &mut SettingsStore, cx| {
- store
- .set_user_settings(&user_settings_content, cx)
- .log_err();
- cx.refresh();
- });
- if result.is_err() {
- break; // App dropped
- }
- }
- })
- .detach();
-}
-
-async fn load_settings(fs: &Arc<dyn Fs>) -> Result<String> {
- match fs.load(&paths::SETTINGS).await {
- result @ Ok(_) => result,
- Err(err) => {
- if let Some(e) = err.downcast_ref::<std::io::Error>() {
- if e.kind() == ErrorKind::NotFound {
- return Ok(crate::initial_user_settings_content().to_string());
- }
- }
- return Err(err);
- }
- }
-}
-
-pub fn update_settings_file<T: Settings>(
- fs: Arc<dyn Fs>,
- cx: &mut AppContext,
- update: impl 'static + Send + FnOnce(&mut T::FileContent),
-) {
- cx.spawn(|cx| async move {
- let old_text = load_settings(&fs).await?;
- let new_text = cx.read_global(|store: &SettingsStore, _cx| {
- store.new_text_for_update::<T>(old_text, update)
- })?;
- fs.atomic_write(paths::SETTINGS.clone(), new_text).await?;
- anyhow::Ok(())
- })
- .detach_and_log_err(cx);
-}
-
-pub fn load_default_keymap(cx: &mut AppContext) {
- for path in ["keymaps/default.json", "keymaps/vim.json"] {
- KeymapFile::load_asset(path, cx).unwrap();
- }
-
- // todo!()
- // if let Some(asset_path) = settings::get::<BaseKeymap>(cx).asset_path() {
- // KeymapFile::load_asset(asset_path, cx).unwrap();
- // }
-}
@@ -1,1304 +0,0 @@
-use anyhow::{anyhow, Context, Result};
-use collections::{btree_map, hash_map, BTreeMap, HashMap};
-use gpui::AppContext;
-use lazy_static::lazy_static;
-use schemars::{gen::SchemaGenerator, schema::RootSchema, JsonSchema};
-use serde::{de::DeserializeOwned, Deserialize as _, Serialize};
-use smallvec::SmallVec;
-use std::{
- any::{type_name, Any, TypeId},
- fmt::Debug,
- ops::Range,
- path::Path,
- str,
- sync::Arc,
-};
-use util::{merge_non_null_json_value_into, RangeExt, ResultExt as _};
-
-/// A value that can be defined as a user setting.
-///
-/// Settings can be loaded from a combination of multiple JSON files.
-pub trait Settings: 'static + Send + Sync {
- /// The name of a key within the JSON file from which this setting should
- /// be deserialized. If this is `None`, then the setting will be deserialized
- /// from the root object.
- const KEY: Option<&'static str>;
-
- /// The type that is stored in an individual JSON file.
- type FileContent: Clone + Default + Serialize + DeserializeOwned + JsonSchema;
-
- /// The logic for combining together values from one or more JSON files into the
- /// final value for this setting.
- ///
- /// The user values are ordered from least specific (the global settings file)
- /// to most specific (the innermost local settings file).
- fn load(
- default_value: &Self::FileContent,
- user_values: &[&Self::FileContent],
- cx: &mut AppContext,
- ) -> Result<Self>
- where
- Self: Sized;
-
- fn json_schema(
- generator: &mut SchemaGenerator,
- _: &SettingsJsonSchemaParams,
- _: &AppContext,
- ) -> RootSchema {
- generator.root_schema_for::<Self::FileContent>()
- }
-
- fn json_merge(
- default_value: &Self::FileContent,
- user_values: &[&Self::FileContent],
- ) -> Result<Self::FileContent> {
- let mut merged = serde_json::Value::Null;
- for value in [default_value].iter().chain(user_values) {
- merge_non_null_json_value_into(serde_json::to_value(value).unwrap(), &mut merged);
- }
- Ok(serde_json::from_value(merged)?)
- }
-
- fn load_via_json_merge(
- default_value: &Self::FileContent,
- user_values: &[&Self::FileContent],
- ) -> Result<Self>
- where
- Self: DeserializeOwned,
- {
- let mut merged = serde_json::Value::Null;
- for value in [default_value].iter().chain(user_values) {
- merge_non_null_json_value_into(serde_json::to_value(value).unwrap(), &mut merged);
- }
- Ok(serde_json::from_value(merged)?)
- }
-
- fn missing_default() -> anyhow::Error {
- anyhow::anyhow!("missing default")
- }
-
- fn register(cx: &mut AppContext)
- where
- Self: Sized,
- {
- cx.update_global(|store: &mut SettingsStore, cx| {
- store.register_setting::<Self>(cx);
- });
- }
-
- #[track_caller]
- fn get<'a>(path: Option<(usize, &Path)>, cx: &'a AppContext) -> &'a Self
- where
- Self: Sized,
- {
- cx.global::<SettingsStore>().get(path)
- }
-
- #[track_caller]
- fn get_global<'a>(cx: &'a AppContext) -> &'a Self
- where
- Self: Sized,
- {
- cx.global::<SettingsStore>().get(None)
- }
-
- #[track_caller]
- fn override_global<'a>(settings: Self, cx: &'a mut AppContext)
- where
- Self: Sized,
- {
- cx.global_mut::<SettingsStore>().override_global(settings)
- }
-}
-
-pub struct SettingsJsonSchemaParams<'a> {
- pub staff_mode: bool,
- pub language_names: &'a [String],
-}
-
-/// A set of strongly-typed setting values defined via multiple JSON files.
-pub struct SettingsStore {
- setting_values: HashMap<TypeId, Box<dyn AnySettingValue>>,
- raw_default_settings: serde_json::Value,
- raw_user_settings: serde_json::Value,
- raw_local_settings: BTreeMap<(usize, Arc<Path>), serde_json::Value>,
- tab_size_callback: Option<(
- TypeId,
- Box<dyn Fn(&dyn Any) -> Option<usize> + Send + Sync + 'static>,
- )>,
-}
-
-impl Default for SettingsStore {
- fn default() -> Self {
- SettingsStore {
- setting_values: Default::default(),
- raw_default_settings: serde_json::json!({}),
- raw_user_settings: serde_json::json!({}),
- raw_local_settings: Default::default(),
- tab_size_callback: Default::default(),
- }
- }
-}
-
-#[derive(Debug)]
-struct SettingValue<T> {
- global_value: Option<T>,
- local_values: Vec<(usize, Arc<Path>, T)>,
-}
-
-trait AnySettingValue: 'static + Send + Sync {
- fn key(&self) -> Option<&'static str>;
- fn setting_type_name(&self) -> &'static str;
- fn deserialize_setting(&self, json: &serde_json::Value) -> Result<DeserializedSetting>;
- fn load_setting(
- &self,
- default_value: &DeserializedSetting,
- custom: &[DeserializedSetting],
- cx: &mut AppContext,
- ) -> Result<Box<dyn Any>>;
- fn value_for_path(&self, path: Option<(usize, &Path)>) -> &dyn Any;
- fn set_global_value(&mut self, value: Box<dyn Any>);
- fn set_local_value(&mut self, root_id: usize, path: Arc<Path>, value: Box<dyn Any>);
- fn json_schema(
- &self,
- generator: &mut SchemaGenerator,
- _: &SettingsJsonSchemaParams,
- cx: &AppContext,
- ) -> RootSchema;
-}
-
-struct DeserializedSetting(Box<dyn Any>);
-
-impl SettingsStore {
- /// Add a new type of setting to the store.
- pub fn register_setting<T: Settings>(&mut self, cx: &mut AppContext) {
- let setting_type_id = TypeId::of::<T>();
- let entry = self.setting_values.entry(setting_type_id);
- if matches!(entry, hash_map::Entry::Occupied(_)) {
- return;
- }
-
- let setting_value = entry.or_insert(Box::new(SettingValue::<T> {
- global_value: None,
- local_values: Vec::new(),
- }));
-
- if let Some(default_settings) = setting_value
- .deserialize_setting(&self.raw_default_settings)
- .log_err()
- {
- let mut user_values_stack = Vec::new();
-
- if let Some(user_settings) = setting_value
- .deserialize_setting(&self.raw_user_settings)
- .log_err()
- {
- user_values_stack = vec![user_settings];
- }
-
- if let Some(setting) = setting_value
- .load_setting(&default_settings, &user_values_stack, cx)
- .context("A default setting must be added to the `default.json` file")
- .log_err()
- {
- setting_value.set_global_value(setting);
- }
- }
- }
-
- /// Get the value of a setting.
- ///
- /// Panics if the given setting type has not been registered, or if there is no
- /// value for this setting.
- pub fn get<T: Settings>(&self, path: Option<(usize, &Path)>) -> &T {
- self.setting_values
- .get(&TypeId::of::<T>())
- .unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()))
- .value_for_path(path)
- .downcast_ref::<T>()
- .expect("no default value for setting type")
- }
-
- /// Override the global value for a setting.
- ///
- /// The given value will be overwritten if the user settings file changes.
- pub fn override_global<T: Settings>(&mut self, value: T) {
- self.setting_values
- .get_mut(&TypeId::of::<T>())
- .unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()))
- .set_global_value(Box::new(value))
- }
-
- /// Get the user's settings as a raw JSON value.
- ///
- /// This is only for debugging and reporting. For user-facing functionality,
- /// use the typed setting interface.
- pub fn raw_user_settings(&self) -> &serde_json::Value {
- &self.raw_user_settings
- }
-
- #[cfg(any(test, feature = "test-support"))]
- pub fn test(cx: &mut AppContext) -> Self {
- let mut this = Self::default();
- this.set_default_settings(&crate::test_settings(), cx)
- .unwrap();
- this.set_user_settings("{}", cx).unwrap();
- this
- }
-
- /// Update the value of a setting in the user's global configuration.
- ///
- /// This is only for tests. Normally, settings are only loaded from
- /// JSON files.
- #[cfg(any(test, feature = "test-support"))]
- pub fn update_user_settings<T: Settings>(
- &mut self,
- cx: &mut AppContext,
- update: impl FnOnce(&mut T::FileContent),
- ) {
- let old_text = serde_json::to_string(&self.raw_user_settings).unwrap();
- let new_text = self.new_text_for_update::<T>(old_text, update);
- self.set_user_settings(&new_text, cx).unwrap();
- }
-
- /// Update the value of a setting in a JSON file, returning the new text
- /// for that JSON file.
- pub fn new_text_for_update<T: Settings>(
- &self,
- old_text: String,
- update: impl FnOnce(&mut T::FileContent),
- ) -> String {
- let edits = self.edits_for_update::<T>(&old_text, update);
- let mut new_text = old_text;
- for (range, replacement) in edits.into_iter() {
- new_text.replace_range(range, &replacement);
- }
- new_text
- }
-
- /// Update the value of a setting in a JSON file, returning a list
- /// of edits to apply to the JSON file.
- pub fn edits_for_update<T: Settings>(
- &self,
- text: &str,
- update: impl FnOnce(&mut T::FileContent),
- ) -> Vec<(Range<usize>, String)> {
- let setting_type_id = TypeId::of::<T>();
-
- let setting = self
- .setting_values
- .get(&setting_type_id)
- .unwrap_or_else(|| panic!("unregistered setting type {}", type_name::<T>()));
- let raw_settings = parse_json_with_comments::<serde_json::Value>(text).unwrap_or_default();
- let old_content = match setting.deserialize_setting(&raw_settings) {
- Ok(content) => content.0.downcast::<T::FileContent>().unwrap(),
- Err(_) => Box::new(T::FileContent::default()),
- };
- let mut new_content = old_content.clone();
- update(&mut new_content);
-
- let old_value = serde_json::to_value(&old_content).unwrap();
- let new_value = serde_json::to_value(new_content).unwrap();
-
- let mut key_path = Vec::new();
- if let Some(key) = T::KEY {
- key_path.push(key);
- }
-
- let mut edits = Vec::new();
- let tab_size = self.json_tab_size();
- let mut text = text.to_string();
- update_value_in_json_text(
- &mut text,
- &mut key_path,
- tab_size,
- &old_value,
- &new_value,
- &mut edits,
- );
- return edits;
- }
-
- /// Configure the tab sized when updating JSON files.
- pub fn set_json_tab_size_callback<T: Settings>(
- &mut self,
- get_tab_size: fn(&T) -> Option<usize>,
- ) {
- self.tab_size_callback = Some((
- TypeId::of::<T>(),
- Box::new(move |value| get_tab_size(value.downcast_ref::<T>().unwrap())),
- ));
- }
-
- fn json_tab_size(&self) -> usize {
- const DEFAULT_JSON_TAB_SIZE: usize = 2;
-
- if let Some((setting_type_id, callback)) = &self.tab_size_callback {
- let setting_value = self.setting_values.get(setting_type_id).unwrap();
- let value = setting_value.value_for_path(None);
- if let Some(value) = callback(value) {
- return value;
- }
- }
-
- DEFAULT_JSON_TAB_SIZE
- }
-
- /// Set the default settings via a JSON string.
- ///
- /// The string should contain a JSON object with a default value for every setting.
- pub fn set_default_settings(
- &mut self,
- default_settings_content: &str,
- cx: &mut AppContext,
- ) -> Result<()> {
- let settings: serde_json::Value = parse_json_with_comments(default_settings_content)?;
- if settings.is_object() {
- self.raw_default_settings = settings;
- self.recompute_values(None, cx)?;
- Ok(())
- } else {
- Err(anyhow!("settings must be an object"))
- }
- }
-
- /// Set the user settings via a JSON string.
- pub fn set_user_settings(
- &mut self,
- user_settings_content: &str,
- cx: &mut AppContext,
- ) -> Result<()> {
- let settings: serde_json::Value = parse_json_with_comments(user_settings_content)?;
- if settings.is_object() {
- self.raw_user_settings = settings;
- self.recompute_values(None, cx)?;
- Ok(())
- } else {
- Err(anyhow!("settings must be an object"))
- }
- }
-
- /// Add or remove a set of local settings via a JSON string.
- pub fn set_local_settings(
- &mut self,
- root_id: usize,
- path: Arc<Path>,
- settings_content: Option<&str>,
- cx: &mut AppContext,
- ) -> Result<()> {
- if let Some(content) = settings_content {
- self.raw_local_settings
- .insert((root_id, path.clone()), parse_json_with_comments(content)?);
- } else {
- self.raw_local_settings.remove(&(root_id, path.clone()));
- }
- self.recompute_values(Some((root_id, &path)), cx)?;
- Ok(())
- }
-
- /// Add or remove a set of local settings via a JSON string.
- pub fn clear_local_settings(&mut self, root_id: usize, cx: &mut AppContext) -> Result<()> {
- self.raw_local_settings.retain(|k, _| k.0 != root_id);
- self.recompute_values(Some((root_id, "".as_ref())), cx)?;
- Ok(())
- }
-
- pub fn local_settings(&self, root_id: usize) -> impl '_ + Iterator<Item = (Arc<Path>, String)> {
- self.raw_local_settings
- .range((root_id, Path::new("").into())..(root_id + 1, Path::new("").into()))
- .map(|((_, path), content)| (path.clone(), serde_json::to_string(content).unwrap()))
- }
-
- pub fn json_schema(
- &self,
- schema_params: &SettingsJsonSchemaParams,
- cx: &AppContext,
- ) -> serde_json::Value {
- use schemars::{
- gen::SchemaSettings,
- schema::{Schema, SchemaObject},
- };
-
- let settings = SchemaSettings::draft07().with(|settings| {
- settings.option_add_null_type = false;
- });
- let mut generator = SchemaGenerator::new(settings);
- let mut combined_schema = RootSchema::default();
-
- for setting_value in self.setting_values.values() {
- let setting_schema = setting_value.json_schema(&mut generator, schema_params, cx);
- combined_schema
- .definitions
- .extend(setting_schema.definitions);
-
- let target_schema = if let Some(key) = setting_value.key() {
- let key_schema = combined_schema
- .schema
- .object()
- .properties
- .entry(key.to_string())
- .or_insert_with(|| Schema::Object(SchemaObject::default()));
- if let Schema::Object(key_schema) = key_schema {
- key_schema
- } else {
- continue;
- }
- } else {
- &mut combined_schema.schema
- };
-
- merge_schema(target_schema, setting_schema.schema);
- }
-
- fn merge_schema(target: &mut SchemaObject, source: SchemaObject) {
- if let Some(source) = source.object {
- let target_properties = &mut target.object().properties;
- for (key, value) in source.properties {
- match target_properties.entry(key) {
- btree_map::Entry::Vacant(e) => {
- e.insert(value);
- }
- btree_map::Entry::Occupied(e) => {
- if let (Schema::Object(target), Schema::Object(src)) =
- (e.into_mut(), value)
- {
- merge_schema(target, src);
- }
- }
- }
- }
- }
-
- overwrite(&mut target.instance_type, source.instance_type);
- overwrite(&mut target.string, source.string);
- overwrite(&mut target.number, source.number);
- overwrite(&mut target.reference, source.reference);
- overwrite(&mut target.array, source.array);
- overwrite(&mut target.enum_values, source.enum_values);
-
- fn overwrite<T>(target: &mut Option<T>, source: Option<T>) {
- if let Some(source) = source {
- *target = Some(source);
- }
- }
- }
-
- serde_json::to_value(&combined_schema).unwrap()
- }
-
- fn recompute_values(
- &mut self,
- changed_local_path: Option<(usize, &Path)>,
- cx: &mut AppContext,
- ) -> Result<()> {
- // Reload the global and local values for every setting.
- let mut user_settings_stack = Vec::<DeserializedSetting>::new();
- let mut paths_stack = Vec::<Option<(usize, &Path)>>::new();
- for setting_value in self.setting_values.values_mut() {
- let default_settings = setting_value.deserialize_setting(&self.raw_default_settings)?;
-
- user_settings_stack.clear();
- paths_stack.clear();
-
- if let Some(user_settings) = setting_value
- .deserialize_setting(&self.raw_user_settings)
- .log_err()
- {
- user_settings_stack.push(user_settings);
- paths_stack.push(None);
- }
-
- // If the global settings file changed, reload the global value for the field.
- if changed_local_path.is_none() {
- if let Some(value) = setting_value
- .load_setting(&default_settings, &user_settings_stack, cx)
- .log_err()
- {
- setting_value.set_global_value(value);
- }
- }
-
- // Reload the local values for the setting.
- for ((root_id, path), local_settings) in &self.raw_local_settings {
- // Build a stack of all of the local values for that setting.
- while let Some(prev_entry) = paths_stack.last() {
- if let Some((prev_root_id, prev_path)) = prev_entry {
- if root_id != prev_root_id || !path.starts_with(prev_path) {
- paths_stack.pop();
- user_settings_stack.pop();
- continue;
- }
- }
- break;
- }
-
- if let Some(local_settings) =
- setting_value.deserialize_setting(&local_settings).log_err()
- {
- paths_stack.push(Some((*root_id, path.as_ref())));
- user_settings_stack.push(local_settings);
-
- // If a local settings file changed, then avoid recomputing local
- // settings for any path outside of that directory.
- if changed_local_path.map_or(false, |(changed_root_id, changed_local_path)| {
- *root_id != changed_root_id || !path.starts_with(changed_local_path)
- }) {
- continue;
- }
-
- if let Some(value) = setting_value
- .load_setting(&default_settings, &user_settings_stack, cx)
- .log_err()
- {
- setting_value.set_local_value(*root_id, path.clone(), value);
- }
- }
- }
- }
- Ok(())
- }
-}
-
-impl Debug for SettingsStore {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- f.debug_struct("SettingsStore")
- .field(
- "types",
- &self
- .setting_values
- .values()
- .map(|value| value.setting_type_name())
- .collect::<Vec<_>>(),
- )
- .field("default_settings", &self.raw_default_settings)
- .field("user_settings", &self.raw_user_settings)
- .field("local_settings", &self.raw_local_settings)
- .finish_non_exhaustive()
- }
-}
-
-impl<T: Settings> AnySettingValue for SettingValue<T> {
- fn key(&self) -> Option<&'static str> {
- T::KEY
- }
-
- fn setting_type_name(&self) -> &'static str {
- type_name::<T>()
- }
-
- fn load_setting(
- &self,
- default_value: &DeserializedSetting,
- user_values: &[DeserializedSetting],
- cx: &mut AppContext,
- ) -> Result<Box<dyn Any>> {
- let default_value = default_value.0.downcast_ref::<T::FileContent>().unwrap();
- let values: SmallVec<[&T::FileContent; 6]> = user_values
- .iter()
- .map(|value| value.0.downcast_ref().unwrap())
- .collect();
- Ok(Box::new(T::load(default_value, &values, cx)?))
- }
-
- fn deserialize_setting(&self, mut json: &serde_json::Value) -> Result<DeserializedSetting> {
- if let Some(key) = T::KEY {
- if let Some(value) = json.get(key) {
- json = value;
- } else {
- let value = T::FileContent::default();
- return Ok(DeserializedSetting(Box::new(value)));
- }
- }
- let value = T::FileContent::deserialize(json)?;
- Ok(DeserializedSetting(Box::new(value)))
- }
-
- fn value_for_path(&self, path: Option<(usize, &Path)>) -> &dyn Any {
- if let Some((root_id, path)) = path {
- for (settings_root_id, settings_path, value) in self.local_values.iter().rev() {
- if root_id == *settings_root_id && path.starts_with(&settings_path) {
- return value;
- }
- }
- }
- self.global_value
- .as_ref()
- .unwrap_or_else(|| panic!("no default value for setting {}", self.setting_type_name()))
- }
-
- fn set_global_value(&mut self, value: Box<dyn Any>) {
- self.global_value = Some(*value.downcast().unwrap());
- }
-
- fn set_local_value(&mut self, root_id: usize, path: Arc<Path>, value: Box<dyn Any>) {
- let value = *value.downcast().unwrap();
- match self
- .local_values
- .binary_search_by_key(&(root_id, &path), |e| (e.0, &e.1))
- {
- Ok(ix) => self.local_values[ix].2 = value,
- Err(ix) => self.local_values.insert(ix, (root_id, path, value)),
- }
- }
-
- fn json_schema(
- &self,
- generator: &mut SchemaGenerator,
- params: &SettingsJsonSchemaParams,
- cx: &AppContext,
- ) -> RootSchema {
- T::json_schema(generator, params, cx)
- }
-}
-
-fn update_value_in_json_text<'a>(
- text: &mut String,
- key_path: &mut Vec<&'a str>,
- tab_size: usize,
- old_value: &'a serde_json::Value,
- new_value: &'a serde_json::Value,
- edits: &mut Vec<(Range<usize>, String)>,
-) {
- // If the old and new values are both objects, then compare them key by key,
- // preserving the comments and formatting of the unchanged parts. Otherwise,
- // replace the old value with the new value.
- if let (serde_json::Value::Object(old_object), serde_json::Value::Object(new_object)) =
- (old_value, new_value)
- {
- for (key, old_sub_value) in old_object.iter() {
- key_path.push(key);
- let new_sub_value = new_object.get(key).unwrap_or(&serde_json::Value::Null);
- update_value_in_json_text(
- text,
- key_path,
- tab_size,
- old_sub_value,
- new_sub_value,
- edits,
- );
- key_path.pop();
- }
- for (key, new_sub_value) in new_object.iter() {
- key_path.push(key);
- if !old_object.contains_key(key) {
- update_value_in_json_text(
- text,
- key_path,
- tab_size,
- &serde_json::Value::Null,
- new_sub_value,
- edits,
- );
- }
- key_path.pop();
- }
- } else if old_value != new_value {
- let mut new_value = new_value.clone();
- if let Some(new_object) = new_value.as_object_mut() {
- new_object.retain(|_, v| !v.is_null());
- }
- let (range, replacement) =
- replace_value_in_json_text(text, &key_path, tab_size, &new_value);
- text.replace_range(range.clone(), &replacement);
- edits.push((range, replacement));
- }
-}
-
-fn replace_value_in_json_text(
- text: &str,
- key_path: &[&str],
- tab_size: usize,
- new_value: &serde_json::Value,
-) -> (Range<usize>, String) {
- const LANGUAGE_OVERRIDES: &'static str = "language_overrides";
- const LANGUAGES: &'static str = "languages";
-
- lazy_static! {
- static ref PAIR_QUERY: tree_sitter::Query = tree_sitter::Query::new(
- &tree_sitter_json::language(),
- "(pair key: (string) @key value: (_) @value)",
- )
- .unwrap();
- }
-
- let mut parser = tree_sitter::Parser::new();
- parser.set_language(&tree_sitter_json::language()).unwrap();
- let syntax_tree = parser.parse(text, None).unwrap();
-
- let mut cursor = tree_sitter::QueryCursor::new();
-
- let has_language_overrides = text.contains(LANGUAGE_OVERRIDES);
-
- let mut depth = 0;
- let mut last_value_range = 0..0;
- let mut first_key_start = None;
- let mut existing_value_range = 0..text.len();
- let matches = cursor.matches(&PAIR_QUERY, syntax_tree.root_node(), text.as_bytes());
- for mat in matches {
- if mat.captures.len() != 2 {
- continue;
- }
-
- let key_range = mat.captures[0].node.byte_range();
- let value_range = mat.captures[1].node.byte_range();
-
- // Don't enter sub objects until we find an exact
- // match for the current keypath
- if last_value_range.contains_inclusive(&value_range) {
- continue;
- }
-
- last_value_range = value_range.clone();
-
- if key_range.start > existing_value_range.end {
- break;
- }
-
- first_key_start.get_or_insert_with(|| key_range.start);
-
- let found_key = text
- .get(key_range.clone())
- .map(|key_text| {
- if key_path[depth] == LANGUAGES && has_language_overrides {
- return key_text == format!("\"{}\"", LANGUAGE_OVERRIDES);
- } else {
- return key_text == format!("\"{}\"", key_path[depth]);
- }
- })
- .unwrap_or(false);
-
- if found_key {
- existing_value_range = value_range;
- // Reset last value range when increasing in depth
- last_value_range = existing_value_range.start..existing_value_range.start;
- depth += 1;
-
- if depth == key_path.len() {
- break;
- } else {
- first_key_start = None;
- }
- }
- }
-
- // We found the exact key we want, insert the new value
- if depth == key_path.len() {
- let new_val = to_pretty_json(&new_value, tab_size, tab_size * depth);
- (existing_value_range, new_val)
- } else {
- // We have key paths, construct the sub objects
- let new_key = if has_language_overrides && key_path[depth] == LANGUAGES {
- LANGUAGE_OVERRIDES
- } else {
- key_path[depth]
- };
-
- // We don't have the key, construct the nested objects
- let mut new_value = serde_json::to_value(new_value).unwrap();
- for key in key_path[(depth + 1)..].iter().rev() {
- if has_language_overrides && key == &LANGUAGES {
- new_value = serde_json::json!({ LANGUAGE_OVERRIDES.to_string(): new_value });
- } else {
- new_value = serde_json::json!({ key.to_string(): new_value });
- }
- }
-
- if let Some(first_key_start) = first_key_start {
- let mut row = 0;
- let mut column = 0;
- for (ix, char) in text.char_indices() {
- if ix == first_key_start {
- break;
- }
- if char == '\n' {
- row += 1;
- column = 0;
- } else {
- column += char.len_utf8();
- }
- }
-
- if row > 0 {
- // depth is 0 based, but division needs to be 1 based.
- let new_val = to_pretty_json(&new_value, column / (depth + 1), column);
- let space = ' ';
- let content = format!("\"{new_key}\": {new_val},\n{space:width$}", width = column);
- (first_key_start..first_key_start, content)
- } else {
- let new_val = serde_json::to_string(&new_value).unwrap();
- let mut content = format!(r#""{new_key}": {new_val},"#);
- content.push(' ');
- (first_key_start..first_key_start, content)
- }
- } else {
- new_value = serde_json::json!({ new_key.to_string(): new_value });
- let indent_prefix_len = 4 * depth;
- let mut new_val = to_pretty_json(&new_value, 4, indent_prefix_len);
- if depth == 0 {
- new_val.push('\n');
- }
-
- (existing_value_range, new_val)
- }
- }
-}
-
-fn to_pretty_json(value: &impl Serialize, indent_size: usize, indent_prefix_len: usize) -> String {
- const SPACES: [u8; 32] = [b' '; 32];
-
- debug_assert!(indent_size <= SPACES.len());
- debug_assert!(indent_prefix_len <= SPACES.len());
-
- let mut output = Vec::new();
- let mut ser = serde_json::Serializer::with_formatter(
- &mut output,
- serde_json::ser::PrettyFormatter::with_indent(&SPACES[0..indent_size.min(SPACES.len())]),
- );
-
- value.serialize(&mut ser).unwrap();
- let text = String::from_utf8(output).unwrap();
-
- let mut adjusted_text = String::new();
- for (i, line) in text.split('\n').enumerate() {
- if i > 0 {
- adjusted_text.push_str(str::from_utf8(&SPACES[0..indent_prefix_len]).unwrap());
- }
- adjusted_text.push_str(line);
- adjusted_text.push('\n');
- }
- adjusted_text.pop();
- adjusted_text
-}
-
-pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
- Ok(serde_json_lenient::from_str(content)?)
-}
-
-#[cfg(test)]
-mod tests {
- use super::*;
- use serde_derive::Deserialize;
- use unindent::Unindent;
-
- #[gpui::test]
- fn test_settings_store_basic(cx: &mut AppContext) {
- let mut store = SettingsStore::default();
- store.register_setting::<UserSettings>(cx);
- store.register_setting::<TurboSetting>(cx);
- store.register_setting::<MultiKeySettings>(cx);
- store
- .set_default_settings(
- r#"{
- "turbo": false,
- "user": {
- "name": "John Doe",
- "age": 30,
- "staff": false
- }
- }"#,
- cx,
- )
- .unwrap();
-
- assert_eq!(store.get::<TurboSetting>(None), &TurboSetting(false));
- assert_eq!(
- store.get::<UserSettings>(None),
- &UserSettings {
- name: "John Doe".to_string(),
- age: 30,
- staff: false,
- }
- );
- assert_eq!(
- store.get::<MultiKeySettings>(None),
- &MultiKeySettings {
- key1: String::new(),
- key2: String::new(),
- }
- );
-
- store
- .set_user_settings(
- r#"{
- "turbo": true,
- "user": { "age": 31 },
- "key1": "a"
- }"#,
- cx,
- )
- .unwrap();
-
- assert_eq!(store.get::<TurboSetting>(None), &TurboSetting(true));
- assert_eq!(
- store.get::<UserSettings>(None),
- &UserSettings {
- name: "John Doe".to_string(),
- age: 31,
- staff: false
- }
- );
-
- store
- .set_local_settings(
- 1,
- Path::new("/root1").into(),
- Some(r#"{ "user": { "staff": true } }"#),
- cx,
- )
- .unwrap();
- store
- .set_local_settings(
- 1,
- Path::new("/root1/subdir").into(),
- Some(r#"{ "user": { "name": "Jane Doe" } }"#),
- cx,
- )
- .unwrap();
-
- store
- .set_local_settings(
- 1,
- Path::new("/root2").into(),
- Some(r#"{ "user": { "age": 42 }, "key2": "b" }"#),
- cx,
- )
- .unwrap();
-
- assert_eq!(
- store.get::<UserSettings>(Some((1, Path::new("/root1/something")))),
- &UserSettings {
- name: "John Doe".to_string(),
- age: 31,
- staff: true
- }
- );
- assert_eq!(
- store.get::<UserSettings>(Some((1, Path::new("/root1/subdir/something")))),
- &UserSettings {
- name: "Jane Doe".to_string(),
- age: 31,
- staff: true
- }
- );
- assert_eq!(
- store.get::<UserSettings>(Some((1, Path::new("/root2/something")))),
- &UserSettings {
- name: "John Doe".to_string(),
- age: 42,
- staff: false
- }
- );
- assert_eq!(
- store.get::<MultiKeySettings>(Some((1, Path::new("/root2/something")))),
- &MultiKeySettings {
- key1: "a".to_string(),
- key2: "b".to_string(),
- }
- );
- }
-
- #[gpui::test]
- fn test_setting_store_assign_json_before_register(cx: &mut AppContext) {
- let mut store = SettingsStore::default();
- store
- .set_default_settings(
- r#"{
- "turbo": true,
- "user": {
- "name": "John Doe",
- "age": 30,
- "staff": false
- },
- "key1": "x"
- }"#,
- cx,
- )
- .unwrap();
- store
- .set_user_settings(r#"{ "turbo": false }"#, cx)
- .unwrap();
- store.register_setting::<UserSettings>(cx);
- store.register_setting::<TurboSetting>(cx);
-
- assert_eq!(store.get::<TurboSetting>(None), &TurboSetting(false));
- assert_eq!(
- store.get::<UserSettings>(None),
- &UserSettings {
- name: "John Doe".to_string(),
- age: 30,
- staff: false,
- }
- );
-
- store.register_setting::<MultiKeySettings>(cx);
- assert_eq!(
- store.get::<MultiKeySettings>(None),
- &MultiKeySettings {
- key1: "x".into(),
- key2: String::new(),
- }
- );
- }
-
- #[gpui::test]
- fn test_setting_store_update(cx: &mut AppContext) {
- let mut store = SettingsStore::default();
- store.register_setting::<MultiKeySettings>(cx);
- store.register_setting::<UserSettings>(cx);
- store.register_setting::<LanguageSettings>(cx);
-
- // entries added and updated
- check_settings_update::<LanguageSettings>(
- &mut store,
- r#"{
- "languages": {
- "JSON": {
- "language_setting_1": true
- }
- }
- }"#
- .unindent(),
- |settings| {
- settings
- .languages
- .get_mut("JSON")
- .unwrap()
- .language_setting_1 = Some(false);
- settings.languages.insert(
- "Rust".into(),
- LanguageSettingEntry {
- language_setting_2: Some(true),
- ..Default::default()
- },
- );
- },
- r#"{
- "languages": {
- "Rust": {
- "language_setting_2": true
- },
- "JSON": {
- "language_setting_1": false
- }
- }
- }"#
- .unindent(),
- cx,
- );
-
- // weird formatting
- check_settings_update::<UserSettings>(
- &mut store,
- r#"{
- "user": { "age": 36, "name": "Max", "staff": true }
- }"#
- .unindent(),
- |settings| settings.age = Some(37),
- r#"{
- "user": { "age": 37, "name": "Max", "staff": true }
- }"#
- .unindent(),
- cx,
- );
-
- // single-line formatting, other keys
- check_settings_update::<MultiKeySettings>(
- &mut store,
- r#"{ "one": 1, "two": 2 }"#.unindent(),
- |settings| settings.key1 = Some("x".into()),
- r#"{ "key1": "x", "one": 1, "two": 2 }"#.unindent(),
- cx,
- );
-
- // empty object
- check_settings_update::<UserSettings>(
- &mut store,
- r#"{
- "user": {}
- }"#
- .unindent(),
- |settings| settings.age = Some(37),
- r#"{
- "user": {
- "age": 37
- }
- }"#
- .unindent(),
- cx,
- );
-
- // no content
- check_settings_update::<UserSettings>(
- &mut store,
- r#""#.unindent(),
- |settings| settings.age = Some(37),
- r#"{
- "user": {
- "age": 37
- }
- }
- "#
- .unindent(),
- cx,
- );
-
- check_settings_update::<UserSettings>(
- &mut store,
- r#"{
- }
- "#
- .unindent(),
- |settings| settings.age = Some(37),
- r#"{
- "user": {
- "age": 37
- }
- }
- "#
- .unindent(),
- cx,
- );
- }
-
- fn check_settings_update<T: Settings>(
- store: &mut SettingsStore,
- old_json: String,
- update: fn(&mut T::FileContent),
- expected_new_json: String,
- cx: &mut AppContext,
- ) {
- store.set_user_settings(&old_json, cx).ok();
- let edits = store.edits_for_update::<T>(&old_json, update);
- let mut new_json = old_json;
- for (range, replacement) in edits.into_iter() {
- new_json.replace_range(range, &replacement);
- }
- pretty_assertions::assert_eq!(new_json, expected_new_json);
- }
-
- #[derive(Debug, PartialEq, Deserialize)]
- struct UserSettings {
- name: String,
- age: u32,
- staff: bool,
- }
-
- #[derive(Default, Clone, Serialize, Deserialize, JsonSchema)]
- struct UserSettingsJson {
- name: Option<String>,
- age: Option<u32>,
- staff: Option<bool>,
- }
-
- impl Settings for UserSettings {
- const KEY: Option<&'static str> = Some("user");
- type FileContent = UserSettingsJson;
-
- fn load(
- default_value: &UserSettingsJson,
- user_values: &[&UserSettingsJson],
- _: &mut AppContext,
- ) -> Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
- }
-
- #[derive(Debug, Deserialize, PartialEq)]
- struct TurboSetting(bool);
-
- impl Settings for TurboSetting {
- const KEY: Option<&'static str> = Some("turbo");
- type FileContent = Option<bool>;
-
- fn load(
- default_value: &Option<bool>,
- user_values: &[&Option<bool>],
- _: &mut AppContext,
- ) -> Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
- }
-
- #[derive(Clone, Debug, PartialEq, Deserialize)]
- struct MultiKeySettings {
- #[serde(default)]
- key1: String,
- #[serde(default)]
- key2: String,
- }
-
- #[derive(Clone, Default, Serialize, Deserialize, JsonSchema)]
- struct MultiKeySettingsJson {
- key1: Option<String>,
- key2: Option<String>,
- }
-
- impl Settings for MultiKeySettings {
- const KEY: Option<&'static str> = None;
-
- type FileContent = MultiKeySettingsJson;
-
- fn load(
- default_value: &MultiKeySettingsJson,
- user_values: &[&MultiKeySettingsJson],
- _: &mut AppContext,
- ) -> Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
- }
-
- #[derive(Debug, Deserialize)]
- struct JournalSettings {
- pub path: String,
- pub hour_format: HourFormat,
- }
-
- #[derive(Clone, Debug, Serialize, Deserialize, JsonSchema)]
- #[serde(rename_all = "snake_case")]
- enum HourFormat {
- Hour12,
- Hour24,
- }
-
- #[derive(Clone, Default, Debug, Serialize, Deserialize, JsonSchema)]
- struct JournalSettingsJson {
- pub path: Option<String>,
- pub hour_format: Option<HourFormat>,
- }
-
- impl Settings for JournalSettings {
- const KEY: Option<&'static str> = Some("journal");
-
- type FileContent = JournalSettingsJson;
-
- fn load(
- default_value: &JournalSettingsJson,
- user_values: &[&JournalSettingsJson],
- _: &mut AppContext,
- ) -> Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
- }
-
- #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
- struct LanguageSettings {
- #[serde(default)]
- languages: HashMap<String, LanguageSettingEntry>,
- }
-
- #[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
- struct LanguageSettingEntry {
- language_setting_1: Option<bool>,
- language_setting_2: Option<bool>,
- }
-
- impl Settings for LanguageSettings {
- const KEY: Option<&'static str> = None;
-
- type FileContent = Self;
-
- fn load(default_value: &Self, user_values: &[&Self], _: &mut AppContext) -> Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
- }
-}
@@ -24,13 +24,13 @@ language = { path = "../language" }
log.workspace = true
rust-embed.workspace = true
serde.workspace = true
-settings2 = { path = "../settings2" }
+settings = { path = "../settings" }
simplelog = "0.9"
smallvec.workspace = true
story = { path = "../story" }
strum = { version = "0.25.0", features = ["derive"] }
theme2 = { path = "../theme2" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
ui = { package = "ui2", path = "../ui2", features = ["stories"] }
util = { path = "../util" }
picker = { path = "../picker" }
@@ -11,7 +11,7 @@ use gpui::{
WindowOptions,
};
use log::LevelFilter;
-use settings2::{default_settings, Settings, SettingsStore};
+use settings::{default_settings, Settings, SettingsStore};
use simplelog::SimpleLogger;
use strum::IntoEnumIterator;
use theme2::{ThemeRegistry, ThemeSettings};
@@ -10,10 +10,10 @@ doctest = false
[dependencies]
-gpui = { path = "../gpui" }
+gpui = { package = "gpui2", path = "../gpui2" }
settings = { path = "../settings" }
db = { path = "../db" }
-theme = { path = "../theme" }
+theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
alacritty_terminal = { git = "https://github.com/zed-industries/alacritty", rev = "33306142195b354ef3485ca2b1d8a85dfc6605ca" }
@@ -1,130 +1,12 @@
-use alacritty_terminal::{ansi::Color as AnsiColor, term::color::Rgb as AlacRgb};
-use gpui::color::Color;
-use theme::TerminalStyle;
+use alacritty_terminal::term::color::Rgb as AlacRgb;
-///Converts a 2, 8, or 24 bit color ANSI color to the GPUI equivalent
-pub fn convert_color(alac_color: &AnsiColor, style: &TerminalStyle) -> Color {
- match alac_color {
- //Named and theme defined colors
- alacritty_terminal::ansi::Color::Named(n) => match n {
- alacritty_terminal::ansi::NamedColor::Black => style.black,
- alacritty_terminal::ansi::NamedColor::Red => style.red,
- alacritty_terminal::ansi::NamedColor::Green => style.green,
- alacritty_terminal::ansi::NamedColor::Yellow => style.yellow,
- alacritty_terminal::ansi::NamedColor::Blue => style.blue,
- alacritty_terminal::ansi::NamedColor::Magenta => style.magenta,
- alacritty_terminal::ansi::NamedColor::Cyan => style.cyan,
- alacritty_terminal::ansi::NamedColor::White => style.white,
- alacritty_terminal::ansi::NamedColor::BrightBlack => style.bright_black,
- alacritty_terminal::ansi::NamedColor::BrightRed => style.bright_red,
- alacritty_terminal::ansi::NamedColor::BrightGreen => style.bright_green,
- alacritty_terminal::ansi::NamedColor::BrightYellow => style.bright_yellow,
- alacritty_terminal::ansi::NamedColor::BrightBlue => style.bright_blue,
- alacritty_terminal::ansi::NamedColor::BrightMagenta => style.bright_magenta,
- alacritty_terminal::ansi::NamedColor::BrightCyan => style.bright_cyan,
- alacritty_terminal::ansi::NamedColor::BrightWhite => style.bright_white,
- alacritty_terminal::ansi::NamedColor::Foreground => style.foreground,
- alacritty_terminal::ansi::NamedColor::Background => style.background,
- alacritty_terminal::ansi::NamedColor::Cursor => style.cursor,
- alacritty_terminal::ansi::NamedColor::DimBlack => style.dim_black,
- alacritty_terminal::ansi::NamedColor::DimRed => style.dim_red,
- alacritty_terminal::ansi::NamedColor::DimGreen => style.dim_green,
- alacritty_terminal::ansi::NamedColor::DimYellow => style.dim_yellow,
- alacritty_terminal::ansi::NamedColor::DimBlue => style.dim_blue,
- alacritty_terminal::ansi::NamedColor::DimMagenta => style.dim_magenta,
- alacritty_terminal::ansi::NamedColor::DimCyan => style.dim_cyan,
- alacritty_terminal::ansi::NamedColor::DimWhite => style.dim_white,
- alacritty_terminal::ansi::NamedColor::BrightForeground => style.bright_foreground,
- alacritty_terminal::ansi::NamedColor::DimForeground => style.dim_foreground,
- },
- //'True' colors
- alacritty_terminal::ansi::Color::Spec(rgb) => Color::new(rgb.r, rgb.g, rgb.b, u8::MAX),
- //8 bit, indexed colors
- alacritty_terminal::ansi::Color::Indexed(i) => get_color_at_index(&(*i as usize), style),
- }
-}
-
-///Converts an 8 bit ANSI color to it's GPUI equivalent.
-///Accepts usize for compatibility with the alacritty::Colors interface,
-///Other than that use case, should only be called with values in the [0,255] range
-pub fn get_color_at_index(index: &usize, style: &TerminalStyle) -> Color {
- match index {
- //0-15 are the same as the named colors above
- 0 => style.black,
- 1 => style.red,
- 2 => style.green,
- 3 => style.yellow,
- 4 => style.blue,
- 5 => style.magenta,
- 6 => style.cyan,
- 7 => style.white,
- 8 => style.bright_black,
- 9 => style.bright_red,
- 10 => style.bright_green,
- 11 => style.bright_yellow,
- 12 => style.bright_blue,
- 13 => style.bright_magenta,
- 14 => style.bright_cyan,
- 15 => style.bright_white,
- //16-231 are mapped to their RGB colors on a 0-5 range per channel
- 16..=231 => {
- let (r, g, b) = rgb_for_index(&(*index as u8)); //Split the index into it's ANSI-RGB components
- let step = (u8::MAX as f32 / 5.).floor() as u8; //Split the RGB range into 5 chunks, with floor so no overflow
- Color::new(r * step, g * step, b * step, u8::MAX) //Map the ANSI-RGB components to an RGB color
- }
- //232-255 are a 24 step grayscale from black to white
- 232..=255 => {
- let i = *index as u8 - 232; //Align index to 0..24
- let step = (u8::MAX as f32 / 24.).floor() as u8; //Split the RGB grayscale values into 24 chunks
- Color::new(i * step, i * step, i * step, u8::MAX) //Map the ANSI-grayscale components to the RGB-grayscale
- }
- //For compatibility with the alacritty::Colors interface
- 256 => style.foreground,
- 257 => style.background,
- 258 => style.cursor,
- 259 => style.dim_black,
- 260 => style.dim_red,
- 261 => style.dim_green,
- 262 => style.dim_yellow,
- 263 => style.dim_blue,
- 264 => style.dim_magenta,
- 265 => style.dim_cyan,
- 266 => style.dim_white,
- 267 => style.bright_foreground,
- 268 => style.black, //'Dim Background', non-standard color
- _ => Color::new(0, 0, 0, 255),
- }
-}
-///Generates the rgb channels in [0, 5] for a given index into the 6x6x6 ANSI color cube
-///See: [8 bit ansi color](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit).
-///
-///Wikipedia gives a formula for calculating the index for a given color:
-///
-///index = 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)
-///
-///This function does the reverse, calculating the r, g, and b components from a given index.
-fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
- debug_assert!((&16..=&231).contains(&i));
- let i = i - 16;
- let r = (i - (i % 36)) / 36;
- let g = ((i % 36) - (i % 6)) / 6;
- let b = (i % 36) % 6;
- (r, g, b)
-}
+use gpui::Rgba;
//Convenience method to convert from a GPUI color to an alacritty Rgb
-pub fn to_alac_rgb(color: Color) -> AlacRgb {
- AlacRgb::new(color.r, color.g, color.g)
-}
-
-#[cfg(test)]
-mod tests {
- #[test]
- fn test_rgb_for_index() {
- //Test every possible value in the color cube
- for i in 16..=231 {
- let (r, g, b) = crate::mappings::colors::rgb_for_index(&(i as u8));
- assert_eq!(i, 16 + 36 * r + 6 * g + b);
- }
- }
+pub fn to_alac_rgb(color: impl Into<Rgba>) -> AlacRgb {
+ let color = color.into();
+ let r = ((color.r * color.a) * 255.) as u8;
+ let g = ((color.g * color.a) * 255.) as u8;
+ let b = ((color.b * color.a) * 255.) as u8;
+ AlacRgb::new(r, g, b)
}
@@ -1,9 +1,9 @@
/// The mappings defined in this file where created from reading the alacritty source
use alacritty_terminal::term::TermMode;
-use gpui::keymap_matcher::Keystroke;
+use gpui::Keystroke;
#[derive(Debug, PartialEq, Eq)]
-pub enum Modifiers {
+enum AlacModifiers {
None,
Alt,
Ctrl,
@@ -12,179 +12,184 @@ pub enum Modifiers {
Other,
}
-impl Modifiers {
+impl AlacModifiers {
fn new(ks: &Keystroke) -> Self {
- match (ks.alt, ks.ctrl, ks.shift, ks.cmd) {
- (false, false, false, false) => Modifiers::None,
- (true, false, false, false) => Modifiers::Alt,
- (false, true, false, false) => Modifiers::Ctrl,
- (false, false, true, false) => Modifiers::Shift,
- (false, true, true, false) => Modifiers::CtrlShift,
- _ => Modifiers::Other,
+ match (
+ ks.modifiers.alt,
+ ks.modifiers.control,
+ ks.modifiers.shift,
+ ks.modifiers.command,
+ ) {
+ (false, false, false, false) => AlacModifiers::None,
+ (true, false, false, false) => AlacModifiers::Alt,
+ (false, true, false, false) => AlacModifiers::Ctrl,
+ (false, false, true, false) => AlacModifiers::Shift,
+ (false, true, true, false) => AlacModifiers::CtrlShift,
+ _ => AlacModifiers::Other,
}
}
fn any(&self) -> bool {
match &self {
- Modifiers::None => false,
- Modifiers::Alt => true,
- Modifiers::Ctrl => true,
- Modifiers::Shift => true,
- Modifiers::CtrlShift => true,
- Modifiers::Other => true,
+ AlacModifiers::None => false,
+ AlacModifiers::Alt => true,
+ AlacModifiers::Ctrl => true,
+ AlacModifiers::Shift => true,
+ AlacModifiers::CtrlShift => true,
+ AlacModifiers::Other => true,
}
}
}
pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> Option<String> {
- let modifiers = Modifiers::new(keystroke);
+ let modifiers = AlacModifiers::new(keystroke);
// Manual Bindings including modifiers
let manual_esc_str = match (keystroke.key.as_ref(), &modifiers) {
//Basic special keys
- ("tab", Modifiers::None) => Some("\x09".to_string()),
- ("escape", Modifiers::None) => Some("\x1b".to_string()),
- ("enter", Modifiers::None) => Some("\x0d".to_string()),
- ("enter", Modifiers::Shift) => Some("\x0d".to_string()),
- ("backspace", Modifiers::None) => Some("\x7f".to_string()),
+ ("tab", AlacModifiers::None) => Some("\x09".to_string()),
+ ("escape", AlacModifiers::None) => Some("\x1b".to_string()),
+ ("enter", AlacModifiers::None) => Some("\x0d".to_string()),
+ ("enter", AlacModifiers::Shift) => Some("\x0d".to_string()),
+ ("backspace", AlacModifiers::None) => Some("\x7f".to_string()),
//Interesting escape codes
- ("tab", Modifiers::Shift) => Some("\x1b[Z".to_string()),
- ("backspace", Modifiers::Alt) => Some("\x1b\x7f".to_string()),
- ("backspace", Modifiers::Shift) => Some("\x7f".to_string()),
- ("home", Modifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
+ ("tab", AlacModifiers::Shift) => Some("\x1b[Z".to_string()),
+ ("backspace", AlacModifiers::Alt) => Some("\x1b\x7f".to_string()),
+ ("backspace", AlacModifiers::Shift) => Some("\x7f".to_string()),
+ ("home", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
Some("\x1b[1;2H".to_string())
}
- ("end", Modifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
+ ("end", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
Some("\x1b[1;2F".to_string())
}
- ("pageup", Modifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
+ ("pageup", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
Some("\x1b[5;2~".to_string())
}
- ("pagedown", Modifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
+ ("pagedown", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
Some("\x1b[6;2~".to_string())
}
- ("home", Modifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
+ ("home", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
Some("\x1bOH".to_string())
}
- ("home", Modifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
+ ("home", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
Some("\x1b[H".to_string())
}
- ("end", Modifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
+ ("end", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
Some("\x1bOF".to_string())
}
- ("end", Modifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
+ ("end", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
Some("\x1b[F".to_string())
}
- ("up", Modifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
+ ("up", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
Some("\x1bOA".to_string())
}
- ("up", Modifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
+ ("up", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
Some("\x1b[A".to_string())
}
- ("down", Modifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
+ ("down", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
Some("\x1bOB".to_string())
}
- ("down", Modifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
+ ("down", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
Some("\x1b[B".to_string())
}
- ("right", Modifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
+ ("right", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
Some("\x1bOC".to_string())
}
- ("right", Modifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
+ ("right", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
Some("\x1b[C".to_string())
}
- ("left", Modifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
+ ("left", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
Some("\x1bOD".to_string())
}
- ("left", Modifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
+ ("left", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
Some("\x1b[D".to_string())
}
- ("back", Modifiers::None) => Some("\x7f".to_string()),
- ("insert", Modifiers::None) => Some("\x1b[2~".to_string()),
- ("delete", Modifiers::None) => Some("\x1b[3~".to_string()),
- ("pageup", Modifiers::None) => Some("\x1b[5~".to_string()),
- ("pagedown", Modifiers::None) => Some("\x1b[6~".to_string()),
- ("f1", Modifiers::None) => Some("\x1bOP".to_string()),
- ("f2", Modifiers::None) => Some("\x1bOQ".to_string()),
- ("f3", Modifiers::None) => Some("\x1bOR".to_string()),
- ("f4", Modifiers::None) => Some("\x1bOS".to_string()),
- ("f5", Modifiers::None) => Some("\x1b[15~".to_string()),
- ("f6", Modifiers::None) => Some("\x1b[17~".to_string()),
- ("f7", Modifiers::None) => Some("\x1b[18~".to_string()),
- ("f8", Modifiers::None) => Some("\x1b[19~".to_string()),
- ("f9", Modifiers::None) => Some("\x1b[20~".to_string()),
- ("f10", Modifiers::None) => Some("\x1b[21~".to_string()),
- ("f11", Modifiers::None) => Some("\x1b[23~".to_string()),
- ("f12", Modifiers::None) => Some("\x1b[24~".to_string()),
- ("f13", Modifiers::None) => Some("\x1b[25~".to_string()),
- ("f14", Modifiers::None) => Some("\x1b[26~".to_string()),
- ("f15", Modifiers::None) => Some("\x1b[28~".to_string()),
- ("f16", Modifiers::None) => Some("\x1b[29~".to_string()),
- ("f17", Modifiers::None) => Some("\x1b[31~".to_string()),
- ("f18", Modifiers::None) => Some("\x1b[32~".to_string()),
- ("f19", Modifiers::None) => Some("\x1b[33~".to_string()),
- ("f20", Modifiers::None) => Some("\x1b[34~".to_string()),
+ ("back", AlacModifiers::None) => Some("\x7f".to_string()),
+ ("insert", AlacModifiers::None) => Some("\x1b[2~".to_string()),
+ ("delete", AlacModifiers::None) => Some("\x1b[3~".to_string()),
+ ("pageup", AlacModifiers::None) => Some("\x1b[5~".to_string()),
+ ("pagedown", AlacModifiers::None) => Some("\x1b[6~".to_string()),
+ ("f1", AlacModifiers::None) => Some("\x1bOP".to_string()),
+ ("f2", AlacModifiers::None) => Some("\x1bOQ".to_string()),
+ ("f3", AlacModifiers::None) => Some("\x1bOR".to_string()),
+ ("f4", AlacModifiers::None) => Some("\x1bOS".to_string()),
+ ("f5", AlacModifiers::None) => Some("\x1b[15~".to_string()),
+ ("f6", AlacModifiers::None) => Some("\x1b[17~".to_string()),
+ ("f7", AlacModifiers::None) => Some("\x1b[18~".to_string()),
+ ("f8", AlacModifiers::None) => Some("\x1b[19~".to_string()),
+ ("f9", AlacModifiers::None) => Some("\x1b[20~".to_string()),
+ ("f10", AlacModifiers::None) => Some("\x1b[21~".to_string()),
+ ("f11", AlacModifiers::None) => Some("\x1b[23~".to_string()),
+ ("f12", AlacModifiers::None) => Some("\x1b[24~".to_string()),
+ ("f13", AlacModifiers::None) => Some("\x1b[25~".to_string()),
+ ("f14", AlacModifiers::None) => Some("\x1b[26~".to_string()),
+ ("f15", AlacModifiers::None) => Some("\x1b[28~".to_string()),
+ ("f16", AlacModifiers::None) => Some("\x1b[29~".to_string()),
+ ("f17", AlacModifiers::None) => Some("\x1b[31~".to_string()),
+ ("f18", AlacModifiers::None) => Some("\x1b[32~".to_string()),
+ ("f19", AlacModifiers::None) => Some("\x1b[33~".to_string()),
+ ("f20", AlacModifiers::None) => Some("\x1b[34~".to_string()),
// NumpadEnter, Action::Esc("\n".into());
//Mappings for caret notation keys
- ("a", Modifiers::Ctrl) => Some("\x01".to_string()), //1
- ("A", Modifiers::CtrlShift) => Some("\x01".to_string()), //1
- ("b", Modifiers::Ctrl) => Some("\x02".to_string()), //2
- ("B", Modifiers::CtrlShift) => Some("\x02".to_string()), //2
- ("c", Modifiers::Ctrl) => Some("\x03".to_string()), //3
- ("C", Modifiers::CtrlShift) => Some("\x03".to_string()), //3
- ("d", Modifiers::Ctrl) => Some("\x04".to_string()), //4
- ("D", Modifiers::CtrlShift) => Some("\x04".to_string()), //4
- ("e", Modifiers::Ctrl) => Some("\x05".to_string()), //5
- ("E", Modifiers::CtrlShift) => Some("\x05".to_string()), //5
- ("f", Modifiers::Ctrl) => Some("\x06".to_string()), //6
- ("F", Modifiers::CtrlShift) => Some("\x06".to_string()), //6
- ("g", Modifiers::Ctrl) => Some("\x07".to_string()), //7
- ("G", Modifiers::CtrlShift) => Some("\x07".to_string()), //7
- ("h", Modifiers::Ctrl) => Some("\x08".to_string()), //8
- ("H", Modifiers::CtrlShift) => Some("\x08".to_string()), //8
- ("i", Modifiers::Ctrl) => Some("\x09".to_string()), //9
- ("I", Modifiers::CtrlShift) => Some("\x09".to_string()), //9
- ("j", Modifiers::Ctrl) => Some("\x0a".to_string()), //10
- ("J", Modifiers::CtrlShift) => Some("\x0a".to_string()), //10
- ("k", Modifiers::Ctrl) => Some("\x0b".to_string()), //11
- ("K", Modifiers::CtrlShift) => Some("\x0b".to_string()), //11
- ("l", Modifiers::Ctrl) => Some("\x0c".to_string()), //12
- ("L", Modifiers::CtrlShift) => Some("\x0c".to_string()), //12
- ("m", Modifiers::Ctrl) => Some("\x0d".to_string()), //13
- ("M", Modifiers::CtrlShift) => Some("\x0d".to_string()), //13
- ("n", Modifiers::Ctrl) => Some("\x0e".to_string()), //14
- ("N", Modifiers::CtrlShift) => Some("\x0e".to_string()), //14
- ("o", Modifiers::Ctrl) => Some("\x0f".to_string()), //15
- ("O", Modifiers::CtrlShift) => Some("\x0f".to_string()), //15
- ("p", Modifiers::Ctrl) => Some("\x10".to_string()), //16
- ("P", Modifiers::CtrlShift) => Some("\x10".to_string()), //16
- ("q", Modifiers::Ctrl) => Some("\x11".to_string()), //17
- ("Q", Modifiers::CtrlShift) => Some("\x11".to_string()), //17
- ("r", Modifiers::Ctrl) => Some("\x12".to_string()), //18
- ("R", Modifiers::CtrlShift) => Some("\x12".to_string()), //18
- ("s", Modifiers::Ctrl) => Some("\x13".to_string()), //19
- ("S", Modifiers::CtrlShift) => Some("\x13".to_string()), //19
- ("t", Modifiers::Ctrl) => Some("\x14".to_string()), //20
- ("T", Modifiers::CtrlShift) => Some("\x14".to_string()), //20
- ("u", Modifiers::Ctrl) => Some("\x15".to_string()), //21
- ("U", Modifiers::CtrlShift) => Some("\x15".to_string()), //21
- ("v", Modifiers::Ctrl) => Some("\x16".to_string()), //22
- ("V", Modifiers::CtrlShift) => Some("\x16".to_string()), //22
- ("w", Modifiers::Ctrl) => Some("\x17".to_string()), //23
- ("W", Modifiers::CtrlShift) => Some("\x17".to_string()), //23
- ("x", Modifiers::Ctrl) => Some("\x18".to_string()), //24
- ("X", Modifiers::CtrlShift) => Some("\x18".to_string()), //24
- ("y", Modifiers::Ctrl) => Some("\x19".to_string()), //25
- ("Y", Modifiers::CtrlShift) => Some("\x19".to_string()), //25
- ("z", Modifiers::Ctrl) => Some("\x1a".to_string()), //26
- ("Z", Modifiers::CtrlShift) => Some("\x1a".to_string()), //26
- ("@", Modifiers::Ctrl) => Some("\x00".to_string()), //0
- ("[", Modifiers::Ctrl) => Some("\x1b".to_string()), //27
- ("\\", Modifiers::Ctrl) => Some("\x1c".to_string()), //28
- ("]", Modifiers::Ctrl) => Some("\x1d".to_string()), //29
- ("^", Modifiers::Ctrl) => Some("\x1e".to_string()), //30
- ("_", Modifiers::Ctrl) => Some("\x1f".to_string()), //31
- ("?", Modifiers::Ctrl) => Some("\x7f".to_string()), //127
+ ("a", AlacModifiers::Ctrl) => Some("\x01".to_string()), //1
+ ("A", AlacModifiers::CtrlShift) => Some("\x01".to_string()), //1
+ ("b", AlacModifiers::Ctrl) => Some("\x02".to_string()), //2
+ ("B", AlacModifiers::CtrlShift) => Some("\x02".to_string()), //2
+ ("c", AlacModifiers::Ctrl) => Some("\x03".to_string()), //3
+ ("C", AlacModifiers::CtrlShift) => Some("\x03".to_string()), //3
+ ("d", AlacModifiers::Ctrl) => Some("\x04".to_string()), //4
+ ("D", AlacModifiers::CtrlShift) => Some("\x04".to_string()), //4
+ ("e", AlacModifiers::Ctrl) => Some("\x05".to_string()), //5
+ ("E", AlacModifiers::CtrlShift) => Some("\x05".to_string()), //5
+ ("f", AlacModifiers::Ctrl) => Some("\x06".to_string()), //6
+ ("F", AlacModifiers::CtrlShift) => Some("\x06".to_string()), //6
+ ("g", AlacModifiers::Ctrl) => Some("\x07".to_string()), //7
+ ("G", AlacModifiers::CtrlShift) => Some("\x07".to_string()), //7
+ ("h", AlacModifiers::Ctrl) => Some("\x08".to_string()), //8
+ ("H", AlacModifiers::CtrlShift) => Some("\x08".to_string()), //8
+ ("i", AlacModifiers::Ctrl) => Some("\x09".to_string()), //9
+ ("I", AlacModifiers::CtrlShift) => Some("\x09".to_string()), //9
+ ("j", AlacModifiers::Ctrl) => Some("\x0a".to_string()), //10
+ ("J", AlacModifiers::CtrlShift) => Some("\x0a".to_string()), //10
+ ("k", AlacModifiers::Ctrl) => Some("\x0b".to_string()), //11
+ ("K", AlacModifiers::CtrlShift) => Some("\x0b".to_string()), //11
+ ("l", AlacModifiers::Ctrl) => Some("\x0c".to_string()), //12
+ ("L", AlacModifiers::CtrlShift) => Some("\x0c".to_string()), //12
+ ("m", AlacModifiers::Ctrl) => Some("\x0d".to_string()), //13
+ ("M", AlacModifiers::CtrlShift) => Some("\x0d".to_string()), //13
+ ("n", AlacModifiers::Ctrl) => Some("\x0e".to_string()), //14
+ ("N", AlacModifiers::CtrlShift) => Some("\x0e".to_string()), //14
+ ("o", AlacModifiers::Ctrl) => Some("\x0f".to_string()), //15
+ ("O", AlacModifiers::CtrlShift) => Some("\x0f".to_string()), //15
+ ("p", AlacModifiers::Ctrl) => Some("\x10".to_string()), //16
+ ("P", AlacModifiers::CtrlShift) => Some("\x10".to_string()), //16
+ ("q", AlacModifiers::Ctrl) => Some("\x11".to_string()), //17
+ ("Q", AlacModifiers::CtrlShift) => Some("\x11".to_string()), //17
+ ("r", AlacModifiers::Ctrl) => Some("\x12".to_string()), //18
+ ("R", AlacModifiers::CtrlShift) => Some("\x12".to_string()), //18
+ ("s", AlacModifiers::Ctrl) => Some("\x13".to_string()), //19
+ ("S", AlacModifiers::CtrlShift) => Some("\x13".to_string()), //19
+ ("t", AlacModifiers::Ctrl) => Some("\x14".to_string()), //20
+ ("T", AlacModifiers::CtrlShift) => Some("\x14".to_string()), //20
+ ("u", AlacModifiers::Ctrl) => Some("\x15".to_string()), //21
+ ("U", AlacModifiers::CtrlShift) => Some("\x15".to_string()), //21
+ ("v", AlacModifiers::Ctrl) => Some("\x16".to_string()), //22
+ ("V", AlacModifiers::CtrlShift) => Some("\x16".to_string()), //22
+ ("w", AlacModifiers::Ctrl) => Some("\x17".to_string()), //23
+ ("W", AlacModifiers::CtrlShift) => Some("\x17".to_string()), //23
+ ("x", AlacModifiers::Ctrl) => Some("\x18".to_string()), //24
+ ("X", AlacModifiers::CtrlShift) => Some("\x18".to_string()), //24
+ ("y", AlacModifiers::Ctrl) => Some("\x19".to_string()), //25
+ ("Y", AlacModifiers::CtrlShift) => Some("\x19".to_string()), //25
+ ("z", AlacModifiers::Ctrl) => Some("\x1a".to_string()), //26
+ ("Z", AlacModifiers::CtrlShift) => Some("\x1a".to_string()), //26
+ ("@", AlacModifiers::Ctrl) => Some("\x00".to_string()), //0
+ ("[", AlacModifiers::Ctrl) => Some("\x1b".to_string()), //27
+ ("\\", AlacModifiers::Ctrl) => Some("\x1c".to_string()), //28
+ ("]", AlacModifiers::Ctrl) => Some("\x1d".to_string()), //29
+ ("^", AlacModifiers::Ctrl) => Some("\x1e".to_string()), //30
+ ("_", AlacModifiers::Ctrl) => Some("\x1f".to_string()), //31
+ ("?", AlacModifiers::Ctrl) => Some("\x7f".to_string()), //127
_ => None,
};
if manual_esc_str.is_some() {
@@ -232,12 +237,12 @@ pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) ->
}
}
- let alt_meta_binding = if alt_is_meta && modifiers == Modifiers::Alt && keystroke.key.is_ascii()
- {
- Some(format!("\x1b{}", keystroke.key))
- } else {
- None
- };
+ let alt_meta_binding =
+ if alt_is_meta && modifiers == AlacModifiers::Alt && keystroke.key.is_ascii() {
+ Some(format!("\x1b{}", keystroke.key))
+ } else {
+ None
+ };
if alt_meta_binding.is_some() {
return alt_meta_binding;
@@ -259,13 +264,13 @@ pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) ->
/// from: https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h2-PC-Style-Function-Keys
fn modifier_code(keystroke: &Keystroke) -> u32 {
let mut modifier_code = 0;
- if keystroke.shift {
+ if keystroke.modifiers.shift {
modifier_code |= 1;
}
- if keystroke.alt {
+ if keystroke.modifiers.alt {
modifier_code |= 1 << 1;
}
- if keystroke.ctrl {
+ if keystroke.modifiers.control {
modifier_code |= 1 << 2;
}
modifier_code + 1
@@ -273,7 +278,7 @@ fn modifier_code(keystroke: &Keystroke) -> u32 {
#[cfg(test)]
mod test {
- use gpui::keymap_matcher::Keystroke;
+ use gpui::Modifiers;
use super::*;
@@ -327,11 +332,13 @@ mod test {
#[test]
fn test_plain_inputs() {
let ks = Keystroke {
- ctrl: false,
- alt: false,
- shift: false,
- cmd: false,
- function: false,
+ modifiers: Modifiers {
+ control: false,
+ alt: false,
+ shift: false,
+ command: false,
+ function: false,
+ },
key: "🖖🏻".to_string(), //2 char string
ime_key: None,
};
@@ -1,52 +1,15 @@
-use std::cmp::{max, min};
+use std::cmp::{self, max, min};
use std::iter::repeat;
use alacritty_terminal::grid::Dimensions;
/// Most of the code, and specifically the constants, in this are copied from Alacritty,
/// with modifications for our circumstances
-use alacritty_terminal::index::{Column as GridCol, Line as GridLine, Point, Side};
+use alacritty_terminal::index::{Column as GridCol, Line as GridLine, Point as AlacPoint, Side};
use alacritty_terminal::term::TermMode;
-use gpui::platform;
-use gpui::scene::MouseScrollWheel;
-use gpui::{
- geometry::vector::Vector2F,
- platform::{MouseButtonEvent, MouseMovedEvent, ScrollWheelEvent},
-};
+use gpui::{px, Modifiers, MouseButton, MouseMoveEvent, Pixels, Point, ScrollWheelEvent};
use crate::TerminalSize;
-struct Modifiers {
- ctrl: bool,
- shift: bool,
- alt: bool,
-}
-
-impl Modifiers {
- fn from_moved(e: &MouseMovedEvent) -> Self {
- Modifiers {
- ctrl: e.ctrl,
- shift: e.shift,
- alt: e.alt,
- }
- }
-
- fn from_button(e: &MouseButtonEvent) -> Self {
- Modifiers {
- ctrl: e.ctrl,
- shift: e.shift,
- alt: e.alt,
- }
- }
-
- fn from_scroll(scroll: &ScrollWheelEvent) -> Self {
- Modifiers {
- ctrl: scroll.ctrl,
- shift: scroll.shift,
- alt: scroll.alt,
- }
- }
-}
-
enum MouseFormat {
SGR,
Normal(bool),
@@ -65,7 +28,7 @@ impl MouseFormat {
}
#[derive(Debug)]
-enum MouseButton {
+enum AlacMouseButton {
LeftButton = 0,
MiddleButton = 1,
RightButton = 2,
@@ -78,56 +41,61 @@ enum MouseButton {
Other = 99,
}
-impl MouseButton {
- fn from_move(e: &MouseMovedEvent) -> Self {
+impl AlacMouseButton {
+ fn from_move(e: &MouseMoveEvent) -> Self {
match e.pressed_button {
Some(b) => match b {
- platform::MouseButton::Left => MouseButton::LeftMove,
- platform::MouseButton::Middle => MouseButton::MiddleMove,
- platform::MouseButton::Right => MouseButton::RightMove,
- platform::MouseButton::Navigate(_) => MouseButton::Other,
+ gpui::MouseButton::Left => AlacMouseButton::LeftMove,
+ gpui::MouseButton::Middle => AlacMouseButton::MiddleMove,
+ gpui::MouseButton::Right => AlacMouseButton::RightMove,
+ gpui::MouseButton::Navigate(_) => AlacMouseButton::Other,
},
- None => MouseButton::NoneMove,
+ None => AlacMouseButton::NoneMove,
}
}
- fn from_button(e: &MouseButtonEvent) -> Self {
- match e.button {
- platform::MouseButton::Left => MouseButton::LeftButton,
- platform::MouseButton::Right => MouseButton::MiddleButton,
- platform::MouseButton::Middle => MouseButton::RightButton,
- platform::MouseButton::Navigate(_) => MouseButton::Other,
+ fn from_button(e: MouseButton) -> Self {
+ match e {
+ gpui::MouseButton::Left => AlacMouseButton::LeftButton,
+ gpui::MouseButton::Right => AlacMouseButton::MiddleButton,
+ gpui::MouseButton::Middle => AlacMouseButton::RightButton,
+ gpui::MouseButton::Navigate(_) => AlacMouseButton::Other,
}
}
fn from_scroll(e: &ScrollWheelEvent) -> Self {
- if e.delta.raw().y() > 0. {
- MouseButton::ScrollUp
+ let is_positive = match e.delta {
+ gpui::ScrollDelta::Pixels(pixels) => pixels.y > px(0.),
+ gpui::ScrollDelta::Lines(lines) => lines.y > 0.,
+ };
+
+ if is_positive {
+ AlacMouseButton::ScrollUp
} else {
- MouseButton::ScrollDown
+ AlacMouseButton::ScrollDown
}
}
fn is_other(&self) -> bool {
match self {
- MouseButton::Other => true,
+ AlacMouseButton::Other => true,
_ => false,
}
}
}
pub fn scroll_report(
- point: Point,
+ point: AlacPoint,
scroll_lines: i32,
- e: &MouseScrollWheel,
+ e: &ScrollWheelEvent,
mode: TermMode,
) -> Option<impl Iterator<Item = Vec<u8>>> {
if mode.intersects(TermMode::MOUSE_MODE) {
mouse_report(
point,
- MouseButton::from_scroll(e),
+ AlacMouseButton::from_scroll(e),
true,
- Modifiers::from_scroll(e),
+ e.modifiers,
MouseFormat::from_mode(mode),
)
.map(|report| repeat(report).take(max(scroll_lines, 1) as usize))
@@ -149,18 +117,19 @@ pub fn alt_scroll(scroll_lines: i32) -> Vec<u8> {
}
pub fn mouse_button_report(
- point: Point,
- e: &MouseButtonEvent,
+ point: AlacPoint,
+ button: gpui::MouseButton,
+ modifiers: Modifiers,
pressed: bool,
mode: TermMode,
) -> Option<Vec<u8>> {
- let button = MouseButton::from_button(e);
+ let button = AlacMouseButton::from_button(button);
if !button.is_other() && mode.intersects(TermMode::MOUSE_MODE) {
mouse_report(
point,
button,
pressed,
- Modifiers::from_button(e),
+ modifiers,
MouseFormat::from_mode(mode),
)
} else {
@@ -168,19 +137,19 @@ pub fn mouse_button_report(
}
}
-pub fn mouse_moved_report(point: Point, e: &MouseMovedEvent, mode: TermMode) -> Option<Vec<u8>> {
- let button = MouseButton::from_move(e);
+pub fn mouse_moved_report(point: AlacPoint, e: &MouseMoveEvent, mode: TermMode) -> Option<Vec<u8>> {
+ let button = AlacMouseButton::from_move(e);
if !button.is_other() && mode.intersects(TermMode::MOUSE_MOTION | TermMode::MOUSE_DRAG) {
//Only drags are reported in drag mode, so block NoneMove.
- if mode.contains(TermMode::MOUSE_DRAG) && matches!(button, MouseButton::NoneMove) {
+ if mode.contains(TermMode::MOUSE_DRAG) && matches!(button, AlacMouseButton::NoneMove) {
None
} else {
mouse_report(
point,
button,
true,
- Modifiers::from_moved(e),
+ e.modifiers,
MouseFormat::from_mode(mode),
)
}
@@ -189,19 +158,26 @@ pub fn mouse_moved_report(point: Point, e: &MouseMovedEvent, mode: TermMode) ->
}
}
-pub fn mouse_side(pos: Vector2F, cur_size: TerminalSize) -> alacritty_terminal::index::Direction {
- if cur_size.cell_width as usize == 0 {
+pub fn mouse_side(
+ pos: Point<Pixels>,
+ cur_size: TerminalSize,
+) -> alacritty_terminal::index::Direction {
+ let cell_width = cur_size.cell_width.floor();
+ if cell_width == px(0.) {
return Side::Right;
}
- let x = pos.0.x() as usize;
- let cell_x = x.saturating_sub(cur_size.cell_width as usize) % cur_size.cell_width as usize;
- let half_cell_width = (cur_size.cell_width / 2.0) as usize;
+
+ let x = pos.x.floor();
+
+ let cell_x = cmp::max(px(0.), x - cell_width) % cell_width;
+ let half_cell_width = (cur_size.cell_width / 2.0).floor();
let additional_padding = (cur_size.width() - cur_size.cell_width * 2.) % cur_size.cell_width;
let end_of_grid = cur_size.width() - cur_size.cell_width - additional_padding;
+
//Width: Pixels or columns?
if cell_x > half_cell_width
// Edge case when mouse leaves the window.
- || x as f32 >= end_of_grid
+ || x >= end_of_grid
{
Side::Right
} else {
@@ -209,18 +185,18 @@ pub fn mouse_side(pos: Vector2F, cur_size: TerminalSize) -> alacritty_terminal::
}
}
-pub fn grid_point(pos: Vector2F, cur_size: TerminalSize, display_offset: usize) -> Point {
- let col = pos.x() / cur_size.cell_width;
- let col = min(GridCol(col as usize), cur_size.last_column());
- let line = pos.y() / cur_size.line_height;
- let line = min(line as i32, cur_size.bottommost_line().0);
- Point::new(GridLine(line - display_offset as i32), col)
+pub fn grid_point(pos: Point<Pixels>, cur_size: TerminalSize, display_offset: usize) -> AlacPoint {
+ let col = GridCol((pos.x / cur_size.cell_width) as usize);
+ let col = min(col, cur_size.last_column());
+ let line = (pos.y / cur_size.line_height) as i32;
+ let line = min(line, cur_size.bottommost_line().0);
+ AlacPoint::new(GridLine(line - display_offset as i32), col)
}
///Generate the bytes to send to the terminal, from the cell location, a mouse event, and the terminal mode
fn mouse_report(
- point: Point,
- button: MouseButton,
+ point: AlacPoint,
+ button: AlacMouseButton,
pressed: bool,
modifiers: Modifiers,
format: MouseFormat,
@@ -236,7 +212,7 @@ fn mouse_report(
if modifiers.alt {
mods += 8;
}
- if modifiers.ctrl {
+ if modifiers.control {
mods += 16;
}
@@ -254,8 +230,8 @@ fn mouse_report(
}
}
-fn normal_mouse_report(point: Point, button: u8, utf8: bool) -> Option<Vec<u8>> {
- let Point { line, column } = point;
+fn normal_mouse_report(point: AlacPoint, button: u8, utf8: bool) -> Option<Vec<u8>> {
+ let AlacPoint { line, column } = point;
let max_point = if utf8 { 2015 } else { 223 };
if line >= max_point || column >= max_point {
@@ -286,7 +262,7 @@ fn normal_mouse_report(point: Point, button: u8, utf8: bool) -> Option<Vec<u8>>
Some(msg)
}
-fn sgr_mouse_report(point: Point, button: u8, pressed: bool) -> String {
+fn sgr_mouse_report(point: AlacPoint, button: u8, pressed: bool) -> String {
let c = if pressed { 'M' } else { 'm' };
let msg = format!(
@@ -299,38 +275,3 @@ fn sgr_mouse_report(point: Point, button: u8, pressed: bool) -> String {
msg
}
-
-#[cfg(test)]
-mod test {
- use crate::mappings::mouse::grid_point;
-
- #[test]
- fn test_mouse_to_selection() {
- let term_width = 100.;
- let term_height = 200.;
- let cell_width = 10.;
- let line_height = 20.;
- let mouse_pos_x = 100.; //Window relative
- let mouse_pos_y = 100.; //Window relative
- let origin_x = 10.;
- let origin_y = 20.;
-
- let cur_size = crate::TerminalSize::new(
- line_height,
- cell_width,
- gpui::geometry::vector::vec2f(term_width, term_height),
- );
-
- let mouse_pos = gpui::geometry::vector::vec2f(mouse_pos_x, mouse_pos_y);
- let origin = gpui::geometry::vector::vec2f(origin_x, origin_y); //Position of terminal window, 1 'cell' in
- let mouse_pos = mouse_pos - origin;
- let point = grid_point(mouse_pos, cur_size, 0);
- assert_eq!(
- point,
- alacritty_terminal::index::Point::new(
- alacritty_terminal::index::Line(((mouse_pos_y - origin_y) / line_height) as i32),
- alacritty_terminal::index::Column(((mouse_pos_x - origin_x) / cell_width) as usize),
- )
- );
- }
-}
@@ -8,7 +8,7 @@ use alacritty_terminal::{
event::{Event as AlacTermEvent, EventListener, Notify, WindowSize},
event_loop::{EventLoop, Msg, Notifier},
grid::{Dimensions, Scroll as AlacScroll},
- index::{Boundary, Column, Direction as AlacDirection, Line, Point},
+ index::{Boundary, Column, Direction as AlacDirection, Line, Point as AlacPoint},
selection::{Selection, SelectionRange, SelectionType},
sync::FairMutex,
term::{
@@ -33,14 +33,16 @@ use mappings::mouse::{
use procinfo::LocalProcessInfo;
use serde::{Deserialize, Serialize};
+use settings::Settings;
use terminal_settings::{AlternateScroll, Shell, TerminalBlink, TerminalSettings};
+use theme::{ActiveTheme, Theme};
use util::truncate_and_trailoff;
use std::{
- cmp::min,
+ cmp::{self, min},
collections::{HashMap, VecDeque},
fmt::Display,
- ops::{Deref, Index, RangeInclusive, Sub},
+ ops::{Deref, Index, RangeInclusive},
os::unix::prelude::AsRawFd,
path::PathBuf,
sync::Arc,
@@ -49,28 +51,36 @@ use std::{
use thiserror::Error;
use gpui::{
- geometry::vector::{vec2f, Vector2F},
- keymap_matcher::Keystroke,
- platform::{Modifiers, MouseButton, MouseMovedEvent, TouchPhase},
- scene::{MouseDown, MouseDrag, MouseScrollWheel, MouseUp},
- AnyWindowHandle, AppContext, ClipboardItem, Entity, ModelContext, Task,
+ actions, black, px, red, AnyWindowHandle, AppContext, Bounds, ClipboardItem, EventEmitter,
+ Hsla, Keystroke, ModelContext, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent,
+ MouseUpEvent, Pixels, Point, Rgba, ScrollWheelEvent, Size, Task, TouchPhase,
};
-use crate::mappings::{
- colors::{get_color_at_index, to_alac_rgb},
- keys::to_esc_str,
-};
+use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
use lazy_static::lazy_static;
+actions!(
+ terminal,
+ [
+ Clear,
+ Copy,
+ Paste,
+ ShowCharacterPalette,
+ SearchTest,
+ SendText,
+ SendKeystroke,
+ ]
+);
+
///Scrolling is unbearably sluggish by default. Alacritty supports a configurable
///Scroll multiplier that is set to 3 by default. This will be removed when I
///Implement scroll bars.
const SCROLL_MULTIPLIER: f32 = 4.;
const MAX_SEARCH_LINES: usize = 100;
-const DEBUG_TERMINAL_WIDTH: f32 = 500.;
-const DEBUG_TERMINAL_HEIGHT: f32 = 30.;
-const DEBUG_CELL_WIDTH: f32 = 5.;
-const DEBUG_LINE_HEIGHT: f32 = 5.;
+const DEBUG_TERMINAL_WIDTH: Pixels = px(500.);
+const DEBUG_TERMINAL_HEIGHT: Pixels = px(30.);
+const DEBUG_CELL_WIDTH: Pixels = px(5.);
+const DEBUG_LINE_HEIGHT: Pixels = px(5.);
lazy_static! {
// Regex Copied from alacritty's ui_config.rs and modified its declaration slightly:
@@ -112,11 +122,11 @@ enum InternalEvent {
Clear,
// FocusNextMatch,
Scroll(AlacScroll),
- ScrollToPoint(Point),
- SetSelection(Option<(Selection, Point)>),
- UpdateSelection(Vector2F),
+ ScrollToAlacPoint(AlacPoint),
+ SetSelection(Option<(Selection, AlacPoint)>),
+ UpdateSelection(Point<Pixels>),
// Adjusted mouse position, should open
- FindHyperlink(Vector2F, bool),
+ FindHyperlink(Point<Pixels>, bool),
Copy,
}
@@ -131,48 +141,46 @@ impl EventListener for ZedListener {
}
pub fn init(cx: &mut AppContext) {
- settings::register::<TerminalSettings>(cx);
+ TerminalSettings::register(cx);
}
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct TerminalSize {
- pub cell_width: f32,
- pub line_height: f32,
- pub height: f32,
- pub width: f32,
+ pub cell_width: Pixels,
+ pub line_height: Pixels,
+ pub size: Size<Pixels>,
}
impl TerminalSize {
- pub fn new(line_height: f32, cell_width: f32, size: Vector2F) -> Self {
+ pub fn new(line_height: Pixels, cell_width: Pixels, size: Size<Pixels>) -> Self {
TerminalSize {
cell_width,
line_height,
- width: size.x(),
- height: size.y(),
+ size,
}
}
pub fn num_lines(&self) -> usize {
- (self.height / self.line_height).floor() as usize
+ f32::from((self.size.height / self.line_height).floor()) as usize
}
pub fn num_columns(&self) -> usize {
- (self.width / self.cell_width).floor() as usize
+ f32::from((self.size.width / self.cell_width).floor()) as usize
}
- pub fn height(&self) -> f32 {
- self.height
+ pub fn height(&self) -> Pixels {
+ self.size.height
}
- pub fn width(&self) -> f32 {
- self.width
+ pub fn width(&self) -> Pixels {
+ self.size.width
}
- pub fn cell_width(&self) -> f32 {
+ pub fn cell_width(&self) -> Pixels {
self.cell_width
}
- pub fn line_height(&self) -> f32 {
+ pub fn line_height(&self) -> Pixels {
self.line_height
}
}
@@ -181,7 +189,10 @@ impl Default for TerminalSize {
TerminalSize::new(
DEBUG_LINE_HEIGHT,
DEBUG_CELL_WIDTH,
- vec2f(DEBUG_TERMINAL_WIDTH, DEBUG_TERMINAL_HEIGHT),
+ Size {
+ width: DEBUG_TERMINAL_WIDTH,
+ height: DEBUG_TERMINAL_HEIGHT,
+ },
)
}
}
@@ -191,8 +202,8 @@ impl From<TerminalSize> for WindowSize {
WindowSize {
num_lines: val.num_lines() as u16,
num_cols: val.num_columns() as u16,
- cell_width: val.cell_width() as u16,
- cell_height: val.line_height() as u16,
+ cell_width: f32::from(val.cell_width()) as u16,
+ cell_height: f32::from(val.line_height()) as u16,
}
}
}
@@ -346,7 +357,7 @@ impl TerminalBuilder {
let pty = match tty::new(
&pty_config,
TerminalSize::default().into(),
- window.id() as u64,
+ window.window_id().as_u64(),
) {
Ok(pty) => pty,
Err(error) => {
@@ -388,7 +399,7 @@ impl TerminalBuilder {
shell_pid,
foreground_process_info: None,
breadcrumb_text: String::new(),
- scroll_px: 0.,
+ scroll_px: px(0.),
last_mouse_position: None,
next_link_id: 0,
selection_phase: SelectionPhase::Ended,
@@ -404,18 +415,21 @@ impl TerminalBuilder {
pub fn subscribe(mut self, cx: &mut ModelContext<Terminal>) -> Terminal {
//Event loop
- cx.spawn_weak(|this, mut cx| async move {
+ cx.spawn(|this, mut cx| async move {
use futures::StreamExt;
while let Some(event) = self.events_rx.next().await {
- this.upgrade(&cx)?.update(&mut cx, |this, cx| {
+ this.update(&mut cx, |this, cx| {
//Process the first event immediately for lowered latency
this.process_event(&event, cx);
- });
+ })?;
'outer: loop {
let mut events = vec![];
- let mut timer = cx.background().timer(Duration::from_millis(4)).fuse();
+ let mut timer = cx
+ .background_executor()
+ .timer(Duration::from_millis(4))
+ .fuse();
let mut wakeup = false;
loop {
futures::select_biased! {
@@ -442,7 +456,7 @@ impl TerminalBuilder {
smol::future::yield_now().await;
break 'outer;
} else {
- this.upgrade(&cx)?.update(&mut cx, |this, cx| {
+ this.update(&mut cx, |this, cx| {
if wakeup {
this.process_event(&AlacTermEvent::Wakeup, cx);
}
@@ -450,13 +464,13 @@ impl TerminalBuilder {
for event in events {
this.process_event(&event, cx);
}
- });
+ })?;
smol::future::yield_now().await;
}
}
}
- Some(())
+ anyhow::Ok(())
})
.detach();
@@ -466,7 +480,7 @@ impl TerminalBuilder {
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct IndexedCell {
- pub point: Point,
+ pub point: AlacPoint,
pub cell: Cell,
}
@@ -496,7 +510,7 @@ pub struct TerminalContent {
#[derive(Clone)]
pub struct HoveredWord {
pub word: String,
- pub word_match: RangeInclusive<Point>,
+ pub word_match: RangeInclusive<AlacPoint>,
pub id: usize,
}
@@ -510,7 +524,7 @@ impl Default for TerminalContent {
selection: Default::default(),
cursor: RenderableCursor {
shape: alacritty_terminal::ansi::CursorShape::Block,
- point: Point::new(Line(0), Column(0)),
+ point: AlacPoint::new(Line(0), Column(0)),
},
cursor_char: Default::default(),
size: Default::default(),
@@ -530,19 +544,19 @@ pub struct Terminal {
term: Arc<FairMutex<Term<ZedListener>>>,
events: VecDeque<InternalEvent>,
/// This is only used for mouse mode cell change detection
- last_mouse: Option<(Point, AlacDirection)>,
+ last_mouse: Option<(AlacPoint, AlacDirection)>,
/// This is only used for terminal hovered word checking
- last_mouse_position: Option<Vector2F>,
- pub matches: Vec<RangeInclusive<Point>>,
+ last_mouse_position: Option<Point<Pixels>>,
+ pub matches: Vec<RangeInclusive<AlacPoint>>,
pub last_content: TerminalContent,
last_synced: Instant,
sync_task: Option<Task<()>>,
- pub selection_head: Option<Point>,
+ pub selection_head: Option<AlacPoint>,
pub breadcrumb_text: String,
shell_pid: u32,
shell_fd: u32,
pub foreground_process_info: Option<LocalProcessInfo>,
- scroll_px: f32,
+ scroll_px: Pixels,
next_link_id: usize,
selection_phase: SelectionPhase,
cmd_pressed: bool,
@@ -630,18 +644,17 @@ impl Terminal {
match event {
InternalEvent::ColorRequest(index, format) => {
let color = term.colors()[*index].unwrap_or_else(|| {
- let term_style = &theme::current(cx).terminal;
- to_alac_rgb(get_color_at_index(index, &term_style))
+ to_alac_rgb(get_color_at_index(*index, cx.theme().as_ref()))
});
self.write_to_pty(format(color))
}
InternalEvent::Resize(mut new_size) => {
- new_size.height = f32::max(new_size.line_height, new_size.height);
- new_size.width = f32::max(new_size.cell_width, new_size.width);
+ new_size.size.height = cmp::max(new_size.line_height, new_size.height());
+ new_size.size.width = cmp::max(new_size.cell_width, new_size.width());
self.last_content.size = new_size.clone();
- self.pty_tx.0.send(Msg::Resize((new_size).into())).ok();
+ self.pty_tx.0.send(Msg::Resize(new_size.into())).ok();
term.resize(new_size);
}
@@ -667,7 +680,7 @@ impl Terminal {
// Reset the cursor
term.grid_mut().cursor.point =
- Point::new(Line(0), term.grid_mut().cursor.point.column);
+ AlacPoint::new(Line(0), term.grid_mut().cursor.point.column);
let new_cursor = term.grid().cursor.point;
// Clear the lines below the new cursor
@@ -712,7 +725,7 @@ impl Terminal {
cx.write_to_clipboard(ClipboardItem::new(txt))
}
}
- InternalEvent::ScrollToPoint(point) => {
+ InternalEvent::ScrollToAlacPoint(point) => {
term.scroll_to_point(*point);
self.refresh_hovered_word();
}
@@ -825,7 +838,7 @@ impl Terminal {
fn update_selected_word(
&mut self,
prev_word: Option<HoveredWord>,
- word_match: RangeInclusive<Point>,
+ word_match: RangeInclusive<AlacPoint>,
word: String,
is_url: bool,
cx: &mut ModelContext<Self>,
@@ -873,11 +886,11 @@ impl Terminal {
self.set_selection(Some((make_selection(&search_match), *search_match.end())));
self.events
- .push_back(InternalEvent::ScrollToPoint(*search_match.start()));
+ .push_back(InternalEvent::ScrollToAlacPoint(*search_match.start()));
}
}
- pub fn select_matches(&mut self, matches: Vec<RangeInclusive<Point>>) {
+ pub fn select_matches(&mut self, matches: Vec<RangeInclusive<AlacPoint>>) {
let matches_to_select = self
.matches
.iter()
@@ -894,13 +907,13 @@ impl Terminal {
pub fn select_all(&mut self) {
let term = self.term.lock();
- let start = Point::new(term.topmost_line(), Column(0));
- let end = Point::new(term.bottommost_line(), term.last_column());
+ let start = AlacPoint::new(term.topmost_line(), Column(0));
+ let end = AlacPoint::new(term.bottommost_line(), term.last_column());
drop(term);
self.set_selection(Some((make_selection(&(start..=end)), end)));
}
- fn set_selection(&mut self, selection: Option<(Selection, Point)>) {
+ fn set_selection(&mut self, selection: Option<(Selection, AlacPoint)>) {
self.events
.push_back(InternalEvent::SetSelection(selection));
}
@@ -954,11 +967,11 @@ impl Terminal {
}
pub fn try_modifiers_change(&mut self, modifiers: &Modifiers) -> bool {
- let changed = self.cmd_pressed != modifiers.cmd;
- if !self.cmd_pressed && modifiers.cmd {
+ let changed = self.cmd_pressed != modifiers.command;
+ if !self.cmd_pressed && modifiers.command {
self.refresh_hovered_word();
}
- self.cmd_pressed = modifiers.cmd;
+ self.cmd_pressed = modifiers.command;
changed
}
@@ -982,17 +995,17 @@ impl Terminal {
term.lock_unfair() //It's been too long, force block
} else if let None = self.sync_task {
//Skip this frame
- let delay = cx.background().timer(Duration::from_millis(16));
- self.sync_task = Some(cx.spawn_weak(|weak_handle, mut cx| async move {
+ let delay = cx.background_executor().timer(Duration::from_millis(16));
+ self.sync_task = Some(cx.spawn(|weak_handle, mut cx| async move {
delay.await;
- cx.update(|cx| {
- if let Some(handle) = weak_handle.upgrade(cx) {
- handle.update(cx, |terminal, cx| {
+ if let Some(handle) = weak_handle.upgrade() {
+ handle
+ .update(&mut cx, |terminal, cx| {
terminal.sync_task.take();
cx.notify();
- });
- }
- });
+ })
+ .ok();
+ }
}));
return;
} else {
@@ -1050,7 +1063,7 @@ impl Terminal {
}
}
- pub fn mouse_changed(&mut self, point: Point, side: AlacDirection) -> bool {
+ pub fn mouse_changed(&mut self, point: AlacPoint, side: AlacDirection) -> bool {
match self.last_mouse {
Some((old_point, old_side)) => {
if old_point == point && old_side == side {
@@ -1071,10 +1084,10 @@ impl Terminal {
self.last_content.mode.intersects(TermMode::MOUSE_MODE) && !shift
}
- pub fn mouse_move(&mut self, e: &MouseMovedEvent, origin: Vector2F) {
- let position = e.position.sub(origin);
+ pub fn mouse_move(&mut self, e: &MouseMoveEvent, origin: Point<Pixels>) {
+ let position = e.position - origin;
self.last_mouse_position = Some(position);
- if self.mouse_mode(e.shift) {
+ if self.mouse_mode(e.modifiers.shift) {
let point = grid_point(
position,
self.last_content.size,
@@ -1092,7 +1105,7 @@ impl Terminal {
}
}
- fn word_from_position(&mut self, position: Option<Vector2F>) {
+ fn word_from_position(&mut self, position: Option<Point<Pixels>>) {
if self.selection_phase == SelectionPhase::Selecting {
self.last_content.last_hovered_word = None;
} else if let Some(position) = position {
@@ -1101,11 +1114,16 @@ impl Terminal {
}
}
- pub fn mouse_drag(&mut self, e: MouseDrag, origin: Vector2F) {
- let position = e.position.sub(origin);
+ pub fn mouse_drag(
+ &mut self,
+ e: &MouseMoveEvent,
+ origin: Point<Pixels>,
+ region: Bounds<Pixels>,
+ ) {
+ let position = e.position - origin;
self.last_mouse_position = Some(position);
- if !self.mouse_mode(e.shift) {
+ if !self.mouse_mode(e.modifiers.shift) {
self.selection_phase = SelectionPhase::Selecting;
// Alacritty has the same ordering, of first updating the selection
// then scrolling 15ms later
@@ -1114,7 +1132,7 @@ impl Terminal {
// Doesn't make sense to scroll the alt screen
if !self.last_content.mode.contains(TermMode::ALT_SCREEN) {
- let scroll_delta = match self.drag_line_delta(e) {
+ let scroll_delta = match self.drag_line_delta(e, region) {
Some(value) => value,
None => return,
};
@@ -1127,34 +1145,36 @@ impl Terminal {
}
}
- fn drag_line_delta(&mut self, e: MouseDrag) -> Option<f32> {
+ fn drag_line_delta(&mut self, e: &MouseMoveEvent, region: Bounds<Pixels>) -> Option<Pixels> {
//TODO: Why do these need to be doubled? Probably the same problem that the IME has
- let top = e.region.origin_y() + (self.last_content.size.line_height * 2.);
- let bottom = e.region.lower_left().y() - (self.last_content.size.line_height * 2.);
- let scroll_delta = if e.position.y() < top {
- (top - e.position.y()).powf(1.1)
- } else if e.position.y() > bottom {
- -((e.position.y() - bottom).powf(1.1))
+ let top = region.origin.y + (self.last_content.size.line_height * 2.);
+ let bottom = region.lower_left().y - (self.last_content.size.line_height * 2.);
+ let scroll_delta = if e.position.y < top {
+ (top - e.position.y).pow(1.1)
+ } else if e.position.y > bottom {
+ -((e.position.y - bottom).pow(1.1))
} else {
return None; //Nothing to do
};
Some(scroll_delta)
}
- pub fn mouse_down(&mut self, e: &MouseDown, origin: Vector2F) {
- let position = e.position.sub(origin);
+ pub fn mouse_down(&mut self, e: &MouseDownEvent, origin: Point<Pixels>) {
+ let position = e.position - origin;
let point = grid_point(
position,
self.last_content.size,
self.last_content.display_offset,
);
- if self.mouse_mode(e.shift) {
- if let Some(bytes) = mouse_button_report(point, e, true, self.last_content.mode) {
+ if self.mouse_mode(e.modifiers.shift) {
+ if let Some(bytes) =
+ mouse_button_report(point, e.button, e.modifiers, true, self.last_content.mode)
+ {
self.pty_tx.notify(bytes);
}
} else if e.button == MouseButton::Left {
- let position = e.position.sub(origin);
+ let position = e.position - origin;
let point = grid_point(
position,
self.last_content.size,
@@ -1182,18 +1202,25 @@ impl Terminal {
}
}
- pub fn mouse_up(&mut self, e: &MouseUp, origin: Vector2F, cx: &mut ModelContext<Self>) {
- let setting = settings::get::<TerminalSettings>(cx);
+ pub fn mouse_up(
+ &mut self,
+ e: &MouseUpEvent,
+ origin: Point<Pixels>,
+ cx: &mut ModelContext<Self>,
+ ) {
+ let setting = TerminalSettings::get_global(cx);
- let position = e.position.sub(origin);
- if self.mouse_mode(e.shift) {
+ let position = e.position - origin;
+ if self.mouse_mode(e.modifiers.shift) {
let point = grid_point(
position,
self.last_content.size,
self.last_content.display_offset,
);
- if let Some(bytes) = mouse_button_report(point, e, false, self.last_content.mode) {
+ if let Some(bytes) =
+ mouse_button_report(point, e.button, e.modifiers, false, self.last_content.mode)
+ {
self.pty_tx.notify(bytes);
}
} else {
@@ -1205,7 +1232,7 @@ impl Terminal {
if self.selection_phase == SelectionPhase::Ended {
let mouse_cell_index = content_index_for_mouse(position, &self.last_content.size);
if let Some(link) = self.last_content.cells[mouse_cell_index].hyperlink() {
- cx.platform().open_url(link.uri());
+ cx.open_url(link.uri());
} else if self.cmd_pressed {
self.events
.push_back(InternalEvent::FindHyperlink(position, true));
@@ -1218,13 +1245,13 @@ impl Terminal {
}
///Scroll the terminal
- pub fn scroll_wheel(&mut self, e: MouseScrollWheel, origin: Vector2F) {
+ pub fn scroll_wheel(&mut self, e: &ScrollWheelEvent, origin: Point<Pixels>) {
let mouse_mode = self.mouse_mode(e.shift);
if let Some(scroll_lines) = self.determine_scroll_lines(&e, mouse_mode) {
if mouse_mode {
let point = grid_point(
- e.position.sub(origin),
+ e.position - origin,
self.last_content.size,
self.last_content.display_offset,
);
@@ -1257,34 +1284,30 @@ impl Terminal {
self.word_from_position(self.last_mouse_position);
}
- fn determine_scroll_lines(&mut self, e: &MouseScrollWheel, mouse_mode: bool) -> Option<i32> {
+ fn determine_scroll_lines(&mut self, e: &ScrollWheelEvent, mouse_mode: bool) -> Option<i32> {
let scroll_multiplier = if mouse_mode { 1. } else { SCROLL_MULTIPLIER };
let line_height = self.last_content.size.line_height;
- match e.phase {
+ match e.touch_phase {
/* Reset scroll state on started */
- Some(TouchPhase::Started) => {
- self.scroll_px = 0.;
+ TouchPhase::Started => {
+ self.scroll_px = px(0.);
None
}
/* Calculate the appropriate scroll lines */
- Some(gpui::platform::TouchPhase::Moved) => {
+ TouchPhase::Moved => {
let old_offset = (self.scroll_px / line_height) as i32;
- self.scroll_px += e.delta.pixel_delta(line_height).y() * scroll_multiplier;
+ self.scroll_px += e.delta.pixel_delta(line_height).y * scroll_multiplier;
let new_offset = (self.scroll_px / line_height) as i32;
// Whenever we hit the edges, reset our stored scroll to 0
// so we can respond to changes in direction quickly
- self.scroll_px %= self.last_content.size.height;
+ self.scroll_px %= self.last_content.size.height();
Some(new_offset - old_offset)
}
- /* Fall back to delta / line_height */
- None => Some(
- ((e.delta.pixel_delta(line_height).y() * scroll_multiplier) / line_height) as i32,
- ),
- _ => None,
+ TouchPhase::Ended => None,
}
}
@@ -1292,9 +1315,9 @@ impl Terminal {
&mut self,
searcher: RegexSearch,
cx: &mut ModelContext<Self>,
- ) -> Task<Vec<RangeInclusive<Point>>> {
+ ) -> Task<Vec<RangeInclusive<AlacPoint>>> {
let term = self.term.clone();
- cx.background().spawn(async move {
+ cx.background_executor().spawn(async move {
let term = term.lock();
all_search_matches(&term, &searcher).collect()
@@ -1344,13 +1367,11 @@ impl Drop for Terminal {
}
}
-impl Entity for Terminal {
- type Event = Event;
-}
+impl EventEmitter<Event> for Terminal {}
/// Based on alacritty/src/display/hint.rs > regex_match_at
/// Retrieve the match, if the specified point is inside the content matching the regex.
-fn regex_match_at<T>(term: &Term<T>, point: Point, regex: &RegexSearch) -> Option<Match> {
+fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &RegexSearch) -> Option<Match> {
visible_regex_match_iter(term, regex).find(|rm| rm.contains(&point))
}
@@ -1362,8 +1383,8 @@ pub fn visible_regex_match_iter<'a, T>(
) -> impl Iterator<Item = Match> + 'a {
let viewport_start = Line(-(term.grid().display_offset() as i32));
let viewport_end = viewport_start + term.bottommost_line();
- let mut start = term.line_search_left(Point::new(viewport_start, Column(0)));
- let mut end = term.line_search_right(Point::new(viewport_end, Column(0)));
+ let mut start = term.line_search_left(AlacPoint::new(viewport_start, Column(0)));
+ let mut end = term.line_search_right(AlacPoint::new(viewport_end, Column(0)));
start.line = start.line.max(viewport_start - MAX_SEARCH_LINES);
end.line = end.line.min(viewport_end + MAX_SEARCH_LINES);
@@ -1372,7 +1393,7 @@ pub fn visible_regex_match_iter<'a, T>(
.take_while(move |rm| rm.start().line <= viewport_end)
}
-fn make_selection(range: &RangeInclusive<Point>) -> Selection {
+fn make_selection(range: &RangeInclusive<AlacPoint>) -> Selection {
let mut selection = Selection::new(SelectionType::Simple, *range.start(), AlacDirection::Left);
selection.update(*range.end(), AlacDirection::Right);
selection
@@ -1382,33 +1403,124 @@ fn all_search_matches<'a, T>(
term: &'a Term<T>,
regex: &'a RegexSearch,
) -> impl Iterator<Item = Match> + 'a {
- let start = Point::new(term.grid().topmost_line(), Column(0));
- let end = Point::new(term.grid().bottommost_line(), term.grid().last_column());
+ let start = AlacPoint::new(term.grid().topmost_line(), Column(0));
+ let end = AlacPoint::new(term.grid().bottommost_line(), term.grid().last_column());
RegexIter::new(start, end, AlacDirection::Right, term, regex)
}
-fn content_index_for_mouse(pos: Vector2F, size: &TerminalSize) -> usize {
- let col = (pos.x() / size.cell_width()).round() as usize;
-
+fn content_index_for_mouse(pos: Point<Pixels>, size: &TerminalSize) -> usize {
+ let col = (pos.x / size.cell_width()).round() as usize;
let clamped_col = min(col, size.columns() - 1);
+ let row = (pos.y / size.line_height()).round() as usize;
+ let clamped_row = min(row, size.screen_lines() - 1);
+ clamped_row * size.columns() + clamped_col
+}
- let row = (pos.y() / size.line_height()).round() as usize;
+///Converts an 8 bit ANSI color to it's GPUI equivalent.
+///Accepts usize for compatibility with the alacritty::Colors interface,
+///Other than that use case, should only be called with values in the [0,255] range
+pub fn get_color_at_index(index: usize, theme: &Theme) -> Hsla {
+ let colors = theme.colors();
+
+ match index {
+ //0-15 are the same as the named colors above
+ 0 => colors.terminal_ansi_black,
+ 1 => colors.terminal_ansi_red,
+ 2 => colors.terminal_ansi_green,
+ 3 => colors.terminal_ansi_yellow,
+ 4 => colors.terminal_ansi_blue,
+ 5 => colors.terminal_ansi_magenta,
+ 6 => colors.terminal_ansi_cyan,
+ 7 => colors.terminal_ansi_white,
+ 8 => colors.terminal_ansi_bright_black,
+ 9 => colors.terminal_ansi_bright_red,
+ 10 => colors.terminal_ansi_bright_green,
+ 11 => colors.terminal_ansi_bright_yellow,
+ 12 => colors.terminal_ansi_bright_blue,
+ 13 => colors.terminal_ansi_bright_magenta,
+ 14 => colors.terminal_ansi_bright_cyan,
+ 15 => colors.terminal_ansi_bright_white,
+ //16-231 are mapped to their RGB colors on a 0-5 range per channel
+ 16..=231 => {
+ let (r, g, b) = rgb_for_index(&(index as u8)); //Split the index into it's ANSI-RGB components
+ let step = (u8::MAX as f32 / 5.).floor() as u8; //Split the RGB range into 5 chunks, with floor so no overflow
+ rgba_color(r * step, g * step, b * step) //Map the ANSI-RGB components to an RGB color
+ }
+ //232-255 are a 24 step grayscale from black to white
+ 232..=255 => {
+ let i = index as u8 - 232; //Align index to 0..24
+ let step = (u8::MAX as f32 / 24.).floor() as u8; //Split the RGB grayscale values into 24 chunks
+ rgba_color(i * step, i * step, i * step) //Map the ANSI-grayscale components to the RGB-grayscale
+ }
+ //For compatibility with the alacritty::Colors interface
+ 256 => colors.text,
+ 257 => colors.background,
+ 258 => theme.players().local().cursor,
+
+ // todo!(more colors)
+ 259 => red(), //style.dim_black,
+ 260 => red(), //style.dim_red,
+ 261 => red(), //style.dim_green,
+ 262 => red(), //style.dim_yellow,
+ 263 => red(), //style.dim_blue,
+ 264 => red(), //style.dim_magenta,
+ 265 => red(), //style.dim_cyan,
+ 266 => red(), //style.dim_white,
+ 267 => red(), //style.bright_foreground,
+ 268 => colors.terminal_ansi_black, //'Dim Background', non-standard color
+
+ _ => black(),
+ }
+}
- let clamped_row = min(row, size.screen_lines() - 1);
+///Generates the rgb channels in [0, 5] for a given index into the 6x6x6 ANSI color cube
+///See: [8 bit ansi color](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit).
+///
+///Wikipedia gives a formula for calculating the index for a given color:
+///
+///index = 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)
+///
+///This function does the reverse, calculating the r, g, and b components from a given index.
+fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
+ debug_assert!((&16..=&231).contains(&i));
+ let i = i - 16;
+ let r = (i - (i % 36)) / 36;
+ let g = ((i % 36) - (i % 6)) / 6;
+ let b = (i % 36) % 6;
+ (r, g, b)
+}
- clamped_row * size.columns() + clamped_col
+pub fn rgba_color(r: u8, g: u8, b: u8) -> Hsla {
+ Rgba {
+ r: (r as f32 / 255.) as f32,
+ g: (g as f32 / 255.) as f32,
+ b: (b as f32 / 255.) as f32,
+ a: 1.,
+ }
+ .into()
}
#[cfg(test)]
mod tests {
use alacritty_terminal::{
- index::{Column, Line, Point},
+ index::{Column, Line, Point as AlacPoint},
term::cell::Cell,
};
- use gpui::geometry::vector::vec2f;
+ use gpui::{point, size, Pixels};
use rand::{distributions::Alphanumeric, rngs::ThreadRng, thread_rng, Rng};
- use crate::{content_index_for_mouse, IndexedCell, TerminalContent, TerminalSize};
+ use crate::{
+ content_index_for_mouse, rgb_for_index, IndexedCell, TerminalContent, TerminalSize,
+ };
+
+ #[test]
+ fn test_rgb_for_index() {
+ //Test every possible value in the color cube
+ for i in 16..=231 {
+ let (r, g, b) = rgb_for_index(&(i as u8));
+ assert_eq!(i, 16 + 36 * r + 6 * g + b);
+ }
+ }
#[test]
fn test_mouse_to_cell_test() {
@@ -1421,10 +1533,12 @@ mod tests {
let cell_size = rng.gen_range(5 * PRECISION..20 * PRECISION) as f32 / PRECISION as f32;
let size = crate::TerminalSize {
- cell_width: cell_size,
- line_height: cell_size,
- height: cell_size * (viewport_cells as f32),
- width: cell_size * (viewport_cells as f32),
+ cell_width: Pixels::from(cell_size),
+ line_height: Pixels::from(cell_size),
+ size: size(
+ Pixels::from(cell_size * (viewport_cells as f32)),
+ Pixels::from(cell_size * (viewport_cells as f32)),
+ ),
};
let cells = get_cells(size, &mut rng);
@@ -1438,9 +1552,9 @@ mod tests {
let row_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32;
let col_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32;
- let mouse_pos = vec2f(
- col as f32 * cell_size + col_offset,
- row as f32 * cell_size + row_offset,
+ let mouse_pos = point(
+ Pixels::from(col as f32 * cell_size + col_offset),
+ Pixels::from(row as f32 * cell_size + row_offset),
);
let content_index = content_index_for_mouse(mouse_pos, &content.size);
@@ -1458,21 +1572,28 @@ mod tests {
let mut rng = thread_rng();
let size = crate::TerminalSize {
- cell_width: 10.,
- line_height: 10.,
- height: 100.,
- width: 100.,
+ cell_width: Pixels::from(10.),
+ line_height: Pixels::from(10.),
+ size: size(Pixels::from(100.), Pixels::from(100.)),
};
let cells = get_cells(size, &mut rng);
let content = convert_cells_to_content(size, &cells);
assert_eq!(
- content.cells[content_index_for_mouse(vec2f(-10., -10.), &content.size)].c,
+ content.cells[content_index_for_mouse(
+ point(Pixels::from(-10.), Pixels::from(-10.)),
+ &content.size
+ )]
+ .c,
cells[0][0]
);
assert_eq!(
- content.cells[content_index_for_mouse(vec2f(1000., 1000.), &content.size)].c,
+ content.cells[content_index_for_mouse(
+ point(Pixels::from(1000.), Pixels::from(1000.)),
+ &content.size
+ )]
+ .c,
cells[9][9]
);
}
@@ -1480,9 +1601,9 @@ mod tests {
fn get_cells(size: TerminalSize, rng: &mut ThreadRng) -> Vec<Vec<char>> {
let mut cells = Vec::new();
- for _ in 0..((size.height() / size.line_height()) as usize) {
+ for _ in 0..(f32::from(size.height() / size.line_height()) as usize) {
let mut row_vec = Vec::new();
- for _ in 0..((size.width() / size.cell_width()) as usize) {
+ for _ in 0..(f32::from(size.width() / size.cell_width()) as usize) {
let cell_char = rng.sample(Alphanumeric) as char;
row_vec.push(cell_char)
}
@@ -1499,7 +1620,7 @@ mod tests {
for col in 0..cells[row].len() {
let cell_char = cells[row][col];
ic.push(IndexedCell {
- point: Point::new(Line(row as i32), Column(col)),
+ point: AlacPoint::new(Line(row as i32), Column(col)),
cell: Cell {
c: cell_char,
..Default::default()
@@ -1,8 +1,7 @@
-use std::{collections::HashMap, path::PathBuf};
-
-use gpui::{fonts, AppContext};
+use gpui::{px, AbsoluteLength, AppContext, FontFeatures, Pixels};
use schemars::JsonSchema;
use serde_derive::{Deserialize, Serialize};
+use std::{collections::HashMap, path::PathBuf};
#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
@@ -16,18 +15,18 @@ pub enum TerminalDockPosition {
pub struct TerminalSettings {
pub shell: Shell,
pub working_directory: WorkingDirectory,
- font_size: Option<f32>,
+ pub font_size: Option<Pixels>,
pub font_family: Option<String>,
pub line_height: TerminalLineHeight,
- pub font_features: Option<fonts::Features>,
+ pub font_features: Option<FontFeatures>,
pub env: HashMap<String, String>,
pub blinking: TerminalBlink,
pub alternate_scroll: AlternateScroll,
pub option_as_meta: bool,
pub copy_on_select: bool,
pub dock: TerminalDockPosition,
- pub default_width: f32,
- pub default_height: f32,
+ pub default_width: Pixels,
+ pub default_height: Pixels,
pub detect_venv: VenvSettings,
}
@@ -79,7 +78,7 @@ pub struct TerminalSettingsContent {
pub font_size: Option<f32>,
pub font_family: Option<String>,
pub line_height: Option<TerminalLineHeight>,
- pub font_features: Option<fonts::Features>,
+ pub font_features: Option<FontFeatures>,
pub env: Option<HashMap<String, String>>,
pub blinking: Option<TerminalBlink>,
pub alternate_scroll: Option<AlternateScroll>,
@@ -91,14 +90,7 @@ pub struct TerminalSettingsContent {
pub detect_venv: Option<VenvSettings>,
}
-impl TerminalSettings {
- pub fn font_size(&self, cx: &AppContext) -> Option<f32> {
- self.font_size
- .map(|size| theme::adjusted_font_size(size, cx))
- }
-}
-
-impl settings::Setting for TerminalSettings {
+impl settings::Settings for TerminalSettings {
const KEY: Option<&'static str> = Some("terminal");
type FileContent = TerminalSettingsContent;
@@ -106,7 +98,7 @@ impl settings::Setting for TerminalSettings {
fn load(
default_value: &Self::FileContent,
user_values: &[&Self::FileContent],
- _: &AppContext,
+ _: &mut AppContext,
) -> anyhow::Result<Self> {
Self::load_via_json_merge(default_value, user_values)
}
@@ -122,12 +114,13 @@ pub enum TerminalLineHeight {
}
impl TerminalLineHeight {
- pub fn value(&self) -> f32 {
- match self {
+ pub fn value(&self) -> AbsoluteLength {
+ let value = match self {
TerminalLineHeight::Comfortable => 1.618,
TerminalLineHeight::Standard => 1.3,
TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
- }
+ };
+ px(value).into()
}
}
@@ -1,17 +1,17 @@
[package]
-name = "terminal2"
+name = "terminal"
version = "0.1.0"
edition = "2021"
publish = false
[lib]
-path = "src/terminal2.rs"
+path = "src/terminal.rs"
doctest = false
[dependencies]
gpui = { package = "gpui2", path = "../gpui2" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
db = { path = "../db" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
@@ -1,12 +0,0 @@
-use alacritty_terminal::term::color::Rgb as AlacRgb;
-
-use gpui::Rgba;
-
-//Convenience method to convert from a GPUI color to an alacritty Rgb
-pub fn to_alac_rgb(color: impl Into<Rgba>) -> AlacRgb {
- let color = color.into();
- let r = ((color.r * color.a) * 255.) as u8;
- let g = ((color.g * color.a) * 255.) as u8;
- let b = ((color.b * color.a) * 255.) as u8;
- AlacRgb::new(r, g, b)
-}
@@ -1,464 +0,0 @@
-/// The mappings defined in this file where created from reading the alacritty source
-use alacritty_terminal::term::TermMode;
-use gpui::Keystroke;
-
-#[derive(Debug, PartialEq, Eq)]
-enum AlacModifiers {
- None,
- Alt,
- Ctrl,
- Shift,
- CtrlShift,
- Other,
-}
-
-impl AlacModifiers {
- fn new(ks: &Keystroke) -> Self {
- match (
- ks.modifiers.alt,
- ks.modifiers.control,
- ks.modifiers.shift,
- ks.modifiers.command,
- ) {
- (false, false, false, false) => AlacModifiers::None,
- (true, false, false, false) => AlacModifiers::Alt,
- (false, true, false, false) => AlacModifiers::Ctrl,
- (false, false, true, false) => AlacModifiers::Shift,
- (false, true, true, false) => AlacModifiers::CtrlShift,
- _ => AlacModifiers::Other,
- }
- }
-
- fn any(&self) -> bool {
- match &self {
- AlacModifiers::None => false,
- AlacModifiers::Alt => true,
- AlacModifiers::Ctrl => true,
- AlacModifiers::Shift => true,
- AlacModifiers::CtrlShift => true,
- AlacModifiers::Other => true,
- }
- }
-}
-
-pub fn to_esc_str(keystroke: &Keystroke, mode: &TermMode, alt_is_meta: bool) -> Option<String> {
- let modifiers = AlacModifiers::new(keystroke);
-
- // Manual Bindings including modifiers
- let manual_esc_str = match (keystroke.key.as_ref(), &modifiers) {
- //Basic special keys
- ("tab", AlacModifiers::None) => Some("\x09".to_string()),
- ("escape", AlacModifiers::None) => Some("\x1b".to_string()),
- ("enter", AlacModifiers::None) => Some("\x0d".to_string()),
- ("enter", AlacModifiers::Shift) => Some("\x0d".to_string()),
- ("backspace", AlacModifiers::None) => Some("\x7f".to_string()),
- //Interesting escape codes
- ("tab", AlacModifiers::Shift) => Some("\x1b[Z".to_string()),
- ("backspace", AlacModifiers::Alt) => Some("\x1b\x7f".to_string()),
- ("backspace", AlacModifiers::Shift) => Some("\x7f".to_string()),
- ("home", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
- Some("\x1b[1;2H".to_string())
- }
- ("end", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
- Some("\x1b[1;2F".to_string())
- }
- ("pageup", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
- Some("\x1b[5;2~".to_string())
- }
- ("pagedown", AlacModifiers::Shift) if mode.contains(TermMode::ALT_SCREEN) => {
- Some("\x1b[6;2~".to_string())
- }
- ("home", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1bOH".to_string())
- }
- ("home", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1b[H".to_string())
- }
- ("end", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1bOF".to_string())
- }
- ("end", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1b[F".to_string())
- }
- ("up", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1bOA".to_string())
- }
- ("up", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1b[A".to_string())
- }
- ("down", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1bOB".to_string())
- }
- ("down", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1b[B".to_string())
- }
- ("right", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1bOC".to_string())
- }
- ("right", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1b[C".to_string())
- }
- ("left", AlacModifiers::None) if mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1bOD".to_string())
- }
- ("left", AlacModifiers::None) if !mode.contains(TermMode::APP_CURSOR) => {
- Some("\x1b[D".to_string())
- }
- ("back", AlacModifiers::None) => Some("\x7f".to_string()),
- ("insert", AlacModifiers::None) => Some("\x1b[2~".to_string()),
- ("delete", AlacModifiers::None) => Some("\x1b[3~".to_string()),
- ("pageup", AlacModifiers::None) => Some("\x1b[5~".to_string()),
- ("pagedown", AlacModifiers::None) => Some("\x1b[6~".to_string()),
- ("f1", AlacModifiers::None) => Some("\x1bOP".to_string()),
- ("f2", AlacModifiers::None) => Some("\x1bOQ".to_string()),
- ("f3", AlacModifiers::None) => Some("\x1bOR".to_string()),
- ("f4", AlacModifiers::None) => Some("\x1bOS".to_string()),
- ("f5", AlacModifiers::None) => Some("\x1b[15~".to_string()),
- ("f6", AlacModifiers::None) => Some("\x1b[17~".to_string()),
- ("f7", AlacModifiers::None) => Some("\x1b[18~".to_string()),
- ("f8", AlacModifiers::None) => Some("\x1b[19~".to_string()),
- ("f9", AlacModifiers::None) => Some("\x1b[20~".to_string()),
- ("f10", AlacModifiers::None) => Some("\x1b[21~".to_string()),
- ("f11", AlacModifiers::None) => Some("\x1b[23~".to_string()),
- ("f12", AlacModifiers::None) => Some("\x1b[24~".to_string()),
- ("f13", AlacModifiers::None) => Some("\x1b[25~".to_string()),
- ("f14", AlacModifiers::None) => Some("\x1b[26~".to_string()),
- ("f15", AlacModifiers::None) => Some("\x1b[28~".to_string()),
- ("f16", AlacModifiers::None) => Some("\x1b[29~".to_string()),
- ("f17", AlacModifiers::None) => Some("\x1b[31~".to_string()),
- ("f18", AlacModifiers::None) => Some("\x1b[32~".to_string()),
- ("f19", AlacModifiers::None) => Some("\x1b[33~".to_string()),
- ("f20", AlacModifiers::None) => Some("\x1b[34~".to_string()),
- // NumpadEnter, Action::Esc("\n".into());
- //Mappings for caret notation keys
- ("a", AlacModifiers::Ctrl) => Some("\x01".to_string()), //1
- ("A", AlacModifiers::CtrlShift) => Some("\x01".to_string()), //1
- ("b", AlacModifiers::Ctrl) => Some("\x02".to_string()), //2
- ("B", AlacModifiers::CtrlShift) => Some("\x02".to_string()), //2
- ("c", AlacModifiers::Ctrl) => Some("\x03".to_string()), //3
- ("C", AlacModifiers::CtrlShift) => Some("\x03".to_string()), //3
- ("d", AlacModifiers::Ctrl) => Some("\x04".to_string()), //4
- ("D", AlacModifiers::CtrlShift) => Some("\x04".to_string()), //4
- ("e", AlacModifiers::Ctrl) => Some("\x05".to_string()), //5
- ("E", AlacModifiers::CtrlShift) => Some("\x05".to_string()), //5
- ("f", AlacModifiers::Ctrl) => Some("\x06".to_string()), //6
- ("F", AlacModifiers::CtrlShift) => Some("\x06".to_string()), //6
- ("g", AlacModifiers::Ctrl) => Some("\x07".to_string()), //7
- ("G", AlacModifiers::CtrlShift) => Some("\x07".to_string()), //7
- ("h", AlacModifiers::Ctrl) => Some("\x08".to_string()), //8
- ("H", AlacModifiers::CtrlShift) => Some("\x08".to_string()), //8
- ("i", AlacModifiers::Ctrl) => Some("\x09".to_string()), //9
- ("I", AlacModifiers::CtrlShift) => Some("\x09".to_string()), //9
- ("j", AlacModifiers::Ctrl) => Some("\x0a".to_string()), //10
- ("J", AlacModifiers::CtrlShift) => Some("\x0a".to_string()), //10
- ("k", AlacModifiers::Ctrl) => Some("\x0b".to_string()), //11
- ("K", AlacModifiers::CtrlShift) => Some("\x0b".to_string()), //11
- ("l", AlacModifiers::Ctrl) => Some("\x0c".to_string()), //12
- ("L", AlacModifiers::CtrlShift) => Some("\x0c".to_string()), //12
- ("m", AlacModifiers::Ctrl) => Some("\x0d".to_string()), //13
- ("M", AlacModifiers::CtrlShift) => Some("\x0d".to_string()), //13
- ("n", AlacModifiers::Ctrl) => Some("\x0e".to_string()), //14
- ("N", AlacModifiers::CtrlShift) => Some("\x0e".to_string()), //14
- ("o", AlacModifiers::Ctrl) => Some("\x0f".to_string()), //15
- ("O", AlacModifiers::CtrlShift) => Some("\x0f".to_string()), //15
- ("p", AlacModifiers::Ctrl) => Some("\x10".to_string()), //16
- ("P", AlacModifiers::CtrlShift) => Some("\x10".to_string()), //16
- ("q", AlacModifiers::Ctrl) => Some("\x11".to_string()), //17
- ("Q", AlacModifiers::CtrlShift) => Some("\x11".to_string()), //17
- ("r", AlacModifiers::Ctrl) => Some("\x12".to_string()), //18
- ("R", AlacModifiers::CtrlShift) => Some("\x12".to_string()), //18
- ("s", AlacModifiers::Ctrl) => Some("\x13".to_string()), //19
- ("S", AlacModifiers::CtrlShift) => Some("\x13".to_string()), //19
- ("t", AlacModifiers::Ctrl) => Some("\x14".to_string()), //20
- ("T", AlacModifiers::CtrlShift) => Some("\x14".to_string()), //20
- ("u", AlacModifiers::Ctrl) => Some("\x15".to_string()), //21
- ("U", AlacModifiers::CtrlShift) => Some("\x15".to_string()), //21
- ("v", AlacModifiers::Ctrl) => Some("\x16".to_string()), //22
- ("V", AlacModifiers::CtrlShift) => Some("\x16".to_string()), //22
- ("w", AlacModifiers::Ctrl) => Some("\x17".to_string()), //23
- ("W", AlacModifiers::CtrlShift) => Some("\x17".to_string()), //23
- ("x", AlacModifiers::Ctrl) => Some("\x18".to_string()), //24
- ("X", AlacModifiers::CtrlShift) => Some("\x18".to_string()), //24
- ("y", AlacModifiers::Ctrl) => Some("\x19".to_string()), //25
- ("Y", AlacModifiers::CtrlShift) => Some("\x19".to_string()), //25
- ("z", AlacModifiers::Ctrl) => Some("\x1a".to_string()), //26
- ("Z", AlacModifiers::CtrlShift) => Some("\x1a".to_string()), //26
- ("@", AlacModifiers::Ctrl) => Some("\x00".to_string()), //0
- ("[", AlacModifiers::Ctrl) => Some("\x1b".to_string()), //27
- ("\\", AlacModifiers::Ctrl) => Some("\x1c".to_string()), //28
- ("]", AlacModifiers::Ctrl) => Some("\x1d".to_string()), //29
- ("^", AlacModifiers::Ctrl) => Some("\x1e".to_string()), //30
- ("_", AlacModifiers::Ctrl) => Some("\x1f".to_string()), //31
- ("?", AlacModifiers::Ctrl) => Some("\x7f".to_string()), //127
- _ => None,
- };
- if manual_esc_str.is_some() {
- return manual_esc_str;
- }
-
- // Automated bindings applying modifiers
- if modifiers.any() {
- let modifier_code = modifier_code(keystroke);
- let modified_esc_str = match keystroke.key.as_ref() {
- "up" => Some(format!("\x1b[1;{}A", modifier_code)),
- "down" => Some(format!("\x1b[1;{}B", modifier_code)),
- "right" => Some(format!("\x1b[1;{}C", modifier_code)),
- "left" => Some(format!("\x1b[1;{}D", modifier_code)),
- "f1" => Some(format!("\x1b[1;{}P", modifier_code)),
- "f2" => Some(format!("\x1b[1;{}Q", modifier_code)),
- "f3" => Some(format!("\x1b[1;{}R", modifier_code)),
- "f4" => Some(format!("\x1b[1;{}S", modifier_code)),
- "F5" => Some(format!("\x1b[15;{}~", modifier_code)),
- "f6" => Some(format!("\x1b[17;{}~", modifier_code)),
- "f7" => Some(format!("\x1b[18;{}~", modifier_code)),
- "f8" => Some(format!("\x1b[19;{}~", modifier_code)),
- "f9" => Some(format!("\x1b[20;{}~", modifier_code)),
- "f10" => Some(format!("\x1b[21;{}~", modifier_code)),
- "f11" => Some(format!("\x1b[23;{}~", modifier_code)),
- "f12" => Some(format!("\x1b[24;{}~", modifier_code)),
- "f13" => Some(format!("\x1b[25;{}~", modifier_code)),
- "f14" => Some(format!("\x1b[26;{}~", modifier_code)),
- "f15" => Some(format!("\x1b[28;{}~", modifier_code)),
- "f16" => Some(format!("\x1b[29;{}~", modifier_code)),
- "f17" => Some(format!("\x1b[31;{}~", modifier_code)),
- "f18" => Some(format!("\x1b[32;{}~", modifier_code)),
- "f19" => Some(format!("\x1b[33;{}~", modifier_code)),
- "f20" => Some(format!("\x1b[34;{}~", modifier_code)),
- _ if modifier_code == 2 => None,
- "insert" => Some(format!("\x1b[2;{}~", modifier_code)),
- "pageup" => Some(format!("\x1b[5;{}~", modifier_code)),
- "pagedown" => Some(format!("\x1b[6;{}~", modifier_code)),
- "end" => Some(format!("\x1b[1;{}F", modifier_code)),
- "home" => Some(format!("\x1b[1;{}H", modifier_code)),
- _ => None,
- };
- if modified_esc_str.is_some() {
- return modified_esc_str;
- }
- }
-
- let alt_meta_binding =
- if alt_is_meta && modifiers == AlacModifiers::Alt && keystroke.key.is_ascii() {
- Some(format!("\x1b{}", keystroke.key))
- } else {
- None
- };
-
- if alt_meta_binding.is_some() {
- return alt_meta_binding;
- }
-
- None
-}
-
-/// Code Modifiers
-/// ---------+---------------------------
-/// 2 | Shift
-/// 3 | Alt
-/// 4 | Shift + Alt
-/// 5 | Control
-/// 6 | Shift + Control
-/// 7 | Alt + Control
-/// 8 | Shift + Alt + Control
-/// ---------+---------------------------
-/// from: https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h2-PC-Style-Function-Keys
-fn modifier_code(keystroke: &Keystroke) -> u32 {
- let mut modifier_code = 0;
- if keystroke.modifiers.shift {
- modifier_code |= 1;
- }
- if keystroke.modifiers.alt {
- modifier_code |= 1 << 1;
- }
- if keystroke.modifiers.control {
- modifier_code |= 1 << 2;
- }
- modifier_code + 1
-}
-
-#[cfg(test)]
-mod test {
- use gpui::Modifiers;
-
- use super::*;
-
- #[test]
- fn test_scroll_keys() {
- //These keys should be handled by the scrolling element directly
- //Need to signify this by returning 'None'
- let shift_pageup = Keystroke::parse("shift-pageup").unwrap();
- let shift_pagedown = Keystroke::parse("shift-pagedown").unwrap();
- let shift_home = Keystroke::parse("shift-home").unwrap();
- let shift_end = Keystroke::parse("shift-end").unwrap();
-
- let none = TermMode::NONE;
- assert_eq!(to_esc_str(&shift_pageup, &none, false), None);
- assert_eq!(to_esc_str(&shift_pagedown, &none, false), None);
- assert_eq!(to_esc_str(&shift_home, &none, false), None);
- assert_eq!(to_esc_str(&shift_end, &none, false), None);
-
- let alt_screen = TermMode::ALT_SCREEN;
- assert_eq!(
- to_esc_str(&shift_pageup, &alt_screen, false),
- Some("\x1b[5;2~".to_string())
- );
- assert_eq!(
- to_esc_str(&shift_pagedown, &alt_screen, false),
- Some("\x1b[6;2~".to_string())
- );
- assert_eq!(
- to_esc_str(&shift_home, &alt_screen, false),
- Some("\x1b[1;2H".to_string())
- );
- assert_eq!(
- to_esc_str(&shift_end, &alt_screen, false),
- Some("\x1b[1;2F".to_string())
- );
-
- let pageup = Keystroke::parse("pageup").unwrap();
- let pagedown = Keystroke::parse("pagedown").unwrap();
- let any = TermMode::ANY;
-
- assert_eq!(
- to_esc_str(&pageup, &any, false),
- Some("\x1b[5~".to_string())
- );
- assert_eq!(
- to_esc_str(&pagedown, &any, false),
- Some("\x1b[6~".to_string())
- );
- }
-
- #[test]
- fn test_plain_inputs() {
- let ks = Keystroke {
- modifiers: Modifiers {
- control: false,
- alt: false,
- shift: false,
- command: false,
- function: false,
- },
- key: "🖖🏻".to_string(), //2 char string
- ime_key: None,
- };
- assert_eq!(to_esc_str(&ks, &TermMode::NONE, false), None);
- }
-
- #[test]
- fn test_application_mode() {
- let app_cursor = TermMode::APP_CURSOR;
- let none = TermMode::NONE;
-
- let up = Keystroke::parse("up").unwrap();
- let down = Keystroke::parse("down").unwrap();
- let left = Keystroke::parse("left").unwrap();
- let right = Keystroke::parse("right").unwrap();
-
- assert_eq!(to_esc_str(&up, &none, false), Some("\x1b[A".to_string()));
- assert_eq!(to_esc_str(&down, &none, false), Some("\x1b[B".to_string()));
- assert_eq!(to_esc_str(&right, &none, false), Some("\x1b[C".to_string()));
- assert_eq!(to_esc_str(&left, &none, false), Some("\x1b[D".to_string()));
-
- assert_eq!(
- to_esc_str(&up, &app_cursor, false),
- Some("\x1bOA".to_string())
- );
- assert_eq!(
- to_esc_str(&down, &app_cursor, false),
- Some("\x1bOB".to_string())
- );
- assert_eq!(
- to_esc_str(&right, &app_cursor, false),
- Some("\x1bOC".to_string())
- );
- assert_eq!(
- to_esc_str(&left, &app_cursor, false),
- Some("\x1bOD".to_string())
- );
- }
-
- #[test]
- fn test_ctrl_codes() {
- let letters_lower = 'a'..='z';
- let letters_upper = 'A'..='Z';
- let mode = TermMode::ANY;
-
- for (lower, upper) in letters_lower.zip(letters_upper) {
- assert_eq!(
- to_esc_str(
- &Keystroke::parse(&format!("ctrl-{}", lower)).unwrap(),
- &mode,
- false
- ),
- to_esc_str(
- &Keystroke::parse(&format!("ctrl-shift-{}", upper)).unwrap(),
- &mode,
- false
- ),
- "On letter: {}/{}",
- lower,
- upper
- )
- }
- }
-
- #[test]
- fn alt_is_meta() {
- let ascii_printable = ' '..='~';
- for character in ascii_printable {
- assert_eq!(
- to_esc_str(
- &Keystroke::parse(&format!("alt-{}", character)).unwrap(),
- &TermMode::NONE,
- true
- )
- .unwrap(),
- format!("\x1b{}", character)
- );
- }
-
- let gpui_keys = [
- "up", "down", "right", "left", "f1", "f2", "f3", "f4", "F5", "f6", "f7", "f8", "f9",
- "f10", "f11", "f12", "f13", "f14", "f15", "f16", "f17", "f18", "f19", "f20", "insert",
- "pageup", "pagedown", "end", "home",
- ];
-
- for key in gpui_keys {
- assert_ne!(
- to_esc_str(
- &Keystroke::parse(&format!("alt-{}", key)).unwrap(),
- &TermMode::NONE,
- true
- )
- .unwrap(),
- format!("\x1b{}", key)
- );
- }
- }
-
- #[test]
- fn test_modifier_code_calc() {
- // Code Modifiers
- // ---------+---------------------------
- // 2 | Shift
- // 3 | Alt
- // 4 | Shift + Alt
- // 5 | Control
- // 6 | Shift + Control
- // 7 | Alt + Control
- // 8 | Shift + Alt + Control
- // ---------+---------------------------
- // from: https://invisible-island.net/xterm/ctlseqs/ctlseqs.html#h2-PC-Style-Function-Keys
- assert_eq!(2, modifier_code(&Keystroke::parse("shift-A").unwrap()));
- assert_eq!(3, modifier_code(&Keystroke::parse("alt-A").unwrap()));
- assert_eq!(4, modifier_code(&Keystroke::parse("shift-alt-A").unwrap()));
- assert_eq!(5, modifier_code(&Keystroke::parse("ctrl-A").unwrap()));
- assert_eq!(6, modifier_code(&Keystroke::parse("shift-ctrl-A").unwrap()));
- assert_eq!(7, modifier_code(&Keystroke::parse("alt-ctrl-A").unwrap()));
- assert_eq!(
- 8,
- modifier_code(&Keystroke::parse("shift-ctrl-alt-A").unwrap())
- );
- }
-}
@@ -1,3 +0,0 @@
-pub mod colors;
-pub mod keys;
-pub mod mouse;
@@ -1,277 +0,0 @@
-use std::cmp::{self, max, min};
-use std::iter::repeat;
-
-use alacritty_terminal::grid::Dimensions;
-/// Most of the code, and specifically the constants, in this are copied from Alacritty,
-/// with modifications for our circumstances
-use alacritty_terminal::index::{Column as GridCol, Line as GridLine, Point as AlacPoint, Side};
-use alacritty_terminal::term::TermMode;
-use gpui::{px, Modifiers, MouseButton, MouseMoveEvent, Pixels, Point, ScrollWheelEvent};
-
-use crate::TerminalSize;
-
-enum MouseFormat {
- SGR,
- Normal(bool),
-}
-
-impl MouseFormat {
- fn from_mode(mode: TermMode) -> Self {
- if mode.contains(TermMode::SGR_MOUSE) {
- MouseFormat::SGR
- } else if mode.contains(TermMode::UTF8_MOUSE) {
- MouseFormat::Normal(true)
- } else {
- MouseFormat::Normal(false)
- }
- }
-}
-
-#[derive(Debug)]
-enum AlacMouseButton {
- LeftButton = 0,
- MiddleButton = 1,
- RightButton = 2,
- LeftMove = 32,
- MiddleMove = 33,
- RightMove = 34,
- NoneMove = 35,
- ScrollUp = 64,
- ScrollDown = 65,
- Other = 99,
-}
-
-impl AlacMouseButton {
- fn from_move(e: &MouseMoveEvent) -> Self {
- match e.pressed_button {
- Some(b) => match b {
- gpui::MouseButton::Left => AlacMouseButton::LeftMove,
- gpui::MouseButton::Middle => AlacMouseButton::MiddleMove,
- gpui::MouseButton::Right => AlacMouseButton::RightMove,
- gpui::MouseButton::Navigate(_) => AlacMouseButton::Other,
- },
- None => AlacMouseButton::NoneMove,
- }
- }
-
- fn from_button(e: MouseButton) -> Self {
- match e {
- gpui::MouseButton::Left => AlacMouseButton::LeftButton,
- gpui::MouseButton::Right => AlacMouseButton::MiddleButton,
- gpui::MouseButton::Middle => AlacMouseButton::RightButton,
- gpui::MouseButton::Navigate(_) => AlacMouseButton::Other,
- }
- }
-
- fn from_scroll(e: &ScrollWheelEvent) -> Self {
- let is_positive = match e.delta {
- gpui::ScrollDelta::Pixels(pixels) => pixels.y > px(0.),
- gpui::ScrollDelta::Lines(lines) => lines.y > 0.,
- };
-
- if is_positive {
- AlacMouseButton::ScrollUp
- } else {
- AlacMouseButton::ScrollDown
- }
- }
-
- fn is_other(&self) -> bool {
- match self {
- AlacMouseButton::Other => true,
- _ => false,
- }
- }
-}
-
-pub fn scroll_report(
- point: AlacPoint,
- scroll_lines: i32,
- e: &ScrollWheelEvent,
- mode: TermMode,
-) -> Option<impl Iterator<Item = Vec<u8>>> {
- if mode.intersects(TermMode::MOUSE_MODE) {
- mouse_report(
- point,
- AlacMouseButton::from_scroll(e),
- true,
- e.modifiers,
- MouseFormat::from_mode(mode),
- )
- .map(|report| repeat(report).take(max(scroll_lines, 1) as usize))
- } else {
- None
- }
-}
-
-pub fn alt_scroll(scroll_lines: i32) -> Vec<u8> {
- let cmd = if scroll_lines > 0 { b'A' } else { b'B' };
-
- let mut content = Vec::with_capacity(scroll_lines.abs() as usize * 3);
- for _ in 0..scroll_lines.abs() {
- content.push(0x1b);
- content.push(b'O');
- content.push(cmd);
- }
- content
-}
-
-pub fn mouse_button_report(
- point: AlacPoint,
- button: gpui::MouseButton,
- modifiers: Modifiers,
- pressed: bool,
- mode: TermMode,
-) -> Option<Vec<u8>> {
- let button = AlacMouseButton::from_button(button);
- if !button.is_other() && mode.intersects(TermMode::MOUSE_MODE) {
- mouse_report(
- point,
- button,
- pressed,
- modifiers,
- MouseFormat::from_mode(mode),
- )
- } else {
- None
- }
-}
-
-pub fn mouse_moved_report(point: AlacPoint, e: &MouseMoveEvent, mode: TermMode) -> Option<Vec<u8>> {
- let button = AlacMouseButton::from_move(e);
-
- if !button.is_other() && mode.intersects(TermMode::MOUSE_MOTION | TermMode::MOUSE_DRAG) {
- //Only drags are reported in drag mode, so block NoneMove.
- if mode.contains(TermMode::MOUSE_DRAG) && matches!(button, AlacMouseButton::NoneMove) {
- None
- } else {
- mouse_report(
- point,
- button,
- true,
- e.modifiers,
- MouseFormat::from_mode(mode),
- )
- }
- } else {
- None
- }
-}
-
-pub fn mouse_side(
- pos: Point<Pixels>,
- cur_size: TerminalSize,
-) -> alacritty_terminal::index::Direction {
- let cell_width = cur_size.cell_width.floor();
- if cell_width == px(0.) {
- return Side::Right;
- }
-
- let x = pos.x.floor();
-
- let cell_x = cmp::max(px(0.), x - cell_width) % cell_width;
- let half_cell_width = (cur_size.cell_width / 2.0).floor();
- let additional_padding = (cur_size.width() - cur_size.cell_width * 2.) % cur_size.cell_width;
- let end_of_grid = cur_size.width() - cur_size.cell_width - additional_padding;
-
- //Width: Pixels or columns?
- if cell_x > half_cell_width
- // Edge case when mouse leaves the window.
- || x >= end_of_grid
- {
- Side::Right
- } else {
- Side::Left
- }
-}
-
-pub fn grid_point(pos: Point<Pixels>, cur_size: TerminalSize, display_offset: usize) -> AlacPoint {
- let col = GridCol((pos.x / cur_size.cell_width) as usize);
- let col = min(col, cur_size.last_column());
- let line = (pos.y / cur_size.line_height) as i32;
- let line = min(line, cur_size.bottommost_line().0);
- AlacPoint::new(GridLine(line - display_offset as i32), col)
-}
-
-///Generate the bytes to send to the terminal, from the cell location, a mouse event, and the terminal mode
-fn mouse_report(
- point: AlacPoint,
- button: AlacMouseButton,
- pressed: bool,
- modifiers: Modifiers,
- format: MouseFormat,
-) -> Option<Vec<u8>> {
- if point.line < 0 {
- return None;
- }
-
- let mut mods = 0;
- if modifiers.shift {
- mods += 4;
- }
- if modifiers.alt {
- mods += 8;
- }
- if modifiers.control {
- mods += 16;
- }
-
- match format {
- MouseFormat::SGR => {
- Some(sgr_mouse_report(point, button as u8 + mods, pressed).into_bytes())
- }
- MouseFormat::Normal(utf8) => {
- if pressed {
- normal_mouse_report(point, button as u8 + mods, utf8)
- } else {
- normal_mouse_report(point, 3 + mods, utf8)
- }
- }
- }
-}
-
-fn normal_mouse_report(point: AlacPoint, button: u8, utf8: bool) -> Option<Vec<u8>> {
- let AlacPoint { line, column } = point;
- let max_point = if utf8 { 2015 } else { 223 };
-
- if line >= max_point || column >= max_point {
- return None;
- }
-
- let mut msg = vec![b'\x1b', b'[', b'M', 32 + button];
-
- let mouse_pos_encode = |pos: usize| -> Vec<u8> {
- let pos = 32 + 1 + pos;
- let first = 0xC0 + pos / 64;
- let second = 0x80 + (pos & 63);
- vec![first as u8, second as u8]
- };
-
- if utf8 && column >= 95 {
- msg.append(&mut mouse_pos_encode(column.0));
- } else {
- msg.push(32 + 1 + column.0 as u8);
- }
-
- if utf8 && line >= 95 {
- msg.append(&mut mouse_pos_encode(line.0 as usize));
- } else {
- msg.push(32 + 1 + line.0 as u8);
- }
-
- Some(msg)
-}
-
-fn sgr_mouse_report(point: AlacPoint, button: u8, pressed: bool) -> String {
- let c = if pressed { 'M' } else { 'm' };
-
- let msg = format!(
- "\x1b[<{};{};{}{}",
- button,
- point.column + 1,
- point.line + 1,
- c
- );
-
- msg
-}
@@ -1,1638 +0,0 @@
-pub mod mappings;
-pub use alacritty_terminal;
-pub mod terminal_settings;
-
-use alacritty_terminal::{
- ansi::{ClearMode, Handler},
- config::{Config, Program, PtyConfig, Scrolling},
- event::{Event as AlacTermEvent, EventListener, Notify, WindowSize},
- event_loop::{EventLoop, Msg, Notifier},
- grid::{Dimensions, Scroll as AlacScroll},
- index::{Boundary, Column, Direction as AlacDirection, Line, Point as AlacPoint},
- selection::{Selection, SelectionRange, SelectionType},
- sync::FairMutex,
- term::{
- cell::Cell,
- color::Rgb,
- search::{Match, RegexIter, RegexSearch},
- RenderableCursor, TermMode,
- },
- tty::{self, setup_env},
- Term,
-};
-use anyhow::{bail, Result};
-
-use futures::{
- channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender},
- FutureExt,
-};
-
-use mappings::mouse::{
- alt_scroll, grid_point, mouse_button_report, mouse_moved_report, mouse_side, scroll_report,
-};
-
-use procinfo::LocalProcessInfo;
-use serde::{Deserialize, Serialize};
-use settings::Settings;
-use terminal_settings::{AlternateScroll, Shell, TerminalBlink, TerminalSettings};
-use theme::{ActiveTheme, Theme};
-use util::truncate_and_trailoff;
-
-use std::{
- cmp::{self, min},
- collections::{HashMap, VecDeque},
- fmt::Display,
- ops::{Deref, Index, RangeInclusive},
- os::unix::prelude::AsRawFd,
- path::PathBuf,
- sync::Arc,
- time::{Duration, Instant},
-};
-use thiserror::Error;
-
-use gpui::{
- actions, black, px, red, AnyWindowHandle, AppContext, Bounds, ClipboardItem, EventEmitter,
- Hsla, Keystroke, ModelContext, Modifiers, MouseButton, MouseDownEvent, MouseMoveEvent,
- MouseUpEvent, Pixels, Point, Rgba, ScrollWheelEvent, Size, Task, TouchPhase,
-};
-
-use crate::mappings::{colors::to_alac_rgb, keys::to_esc_str};
-use lazy_static::lazy_static;
-
-actions!(
- terminal,
- [
- Clear,
- Copy,
- Paste,
- ShowCharacterPalette,
- SearchTest,
- SendText,
- SendKeystroke,
- ]
-);
-
-///Scrolling is unbearably sluggish by default. Alacritty supports a configurable
-///Scroll multiplier that is set to 3 by default. This will be removed when I
-///Implement scroll bars.
-const SCROLL_MULTIPLIER: f32 = 4.;
-const MAX_SEARCH_LINES: usize = 100;
-const DEBUG_TERMINAL_WIDTH: Pixels = px(500.);
-const DEBUG_TERMINAL_HEIGHT: Pixels = px(30.);
-const DEBUG_CELL_WIDTH: Pixels = px(5.);
-const DEBUG_LINE_HEIGHT: Pixels = px(5.);
-
-lazy_static! {
- // Regex Copied from alacritty's ui_config.rs and modified its declaration slightly:
- // * avoid Rust-specific escaping.
- // * use more strict regex for `file://` protocol matching: original regex has `file:` inside, but we want to avoid matching `some::file::module` strings.
- static ref URL_REGEX: RegexSearch = RegexSearch::new(r#"(ipfs:|ipns:|magnet:|mailto:|gemini://|gopher://|https://|http://|news:|file://|git://|ssh:|ftp://)[^\u{0000}-\u{001F}\u{007F}-\u{009F}<>"\s{-}\^⟨⟩`]+"#).unwrap();
-
- static ref WORD_REGEX: RegexSearch = RegexSearch::new(r#"[\w.\[\]:/@\-~]+"#).unwrap();
-}
-
-///Upward flowing events, for changing the title and such
-#[derive(Clone, Debug)]
-pub enum Event {
- TitleChanged,
- BreadcrumbsChanged,
- CloseTerminal,
- Bell,
- Wakeup,
- BlinkChanged,
- SelectionsChanged,
- NewNavigationTarget(Option<MaybeNavigationTarget>),
- Open(MaybeNavigationTarget),
-}
-
-/// A string inside terminal, potentially useful as a URI that can be opened.
-#[derive(Clone, Debug)]
-pub enum MaybeNavigationTarget {
- /// HTTP, git, etc. string determined by the [`URL_REGEX`] regex.
- Url(String),
- /// File system path, absolute or relative, existing or not.
- /// Might have line and column number(s) attached as `file.rs:1:23`
- PathLike(String),
-}
-
-#[derive(Clone)]
-enum InternalEvent {
- ColorRequest(usize, Arc<dyn Fn(Rgb) -> String + Sync + Send + 'static>),
- Resize(TerminalSize),
- Clear,
- // FocusNextMatch,
- Scroll(AlacScroll),
- ScrollToAlacPoint(AlacPoint),
- SetSelection(Option<(Selection, AlacPoint)>),
- UpdateSelection(Point<Pixels>),
- // Adjusted mouse position, should open
- FindHyperlink(Point<Pixels>, bool),
- Copy,
-}
-
-///A translation struct for Alacritty to communicate with us from their event loop
-#[derive(Clone)]
-pub struct ZedListener(UnboundedSender<AlacTermEvent>);
-
-impl EventListener for ZedListener {
- fn send_event(&self, event: AlacTermEvent) {
- self.0.unbounded_send(event).ok();
- }
-}
-
-pub fn init(cx: &mut AppContext) {
- TerminalSettings::register(cx);
-}
-
-#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
-pub struct TerminalSize {
- pub cell_width: Pixels,
- pub line_height: Pixels,
- pub size: Size<Pixels>,
-}
-
-impl TerminalSize {
- pub fn new(line_height: Pixels, cell_width: Pixels, size: Size<Pixels>) -> Self {
- TerminalSize {
- cell_width,
- line_height,
- size,
- }
- }
-
- pub fn num_lines(&self) -> usize {
- f32::from((self.size.height / self.line_height).floor()) as usize
- }
-
- pub fn num_columns(&self) -> usize {
- f32::from((self.size.width / self.cell_width).floor()) as usize
- }
-
- pub fn height(&self) -> Pixels {
- self.size.height
- }
-
- pub fn width(&self) -> Pixels {
- self.size.width
- }
-
- pub fn cell_width(&self) -> Pixels {
- self.cell_width
- }
-
- pub fn line_height(&self) -> Pixels {
- self.line_height
- }
-}
-impl Default for TerminalSize {
- fn default() -> Self {
- TerminalSize::new(
- DEBUG_LINE_HEIGHT,
- DEBUG_CELL_WIDTH,
- Size {
- width: DEBUG_TERMINAL_WIDTH,
- height: DEBUG_TERMINAL_HEIGHT,
- },
- )
- }
-}
-
-impl From<TerminalSize> for WindowSize {
- fn from(val: TerminalSize) -> Self {
- WindowSize {
- num_lines: val.num_lines() as u16,
- num_cols: val.num_columns() as u16,
- cell_width: f32::from(val.cell_width()) as u16,
- cell_height: f32::from(val.line_height()) as u16,
- }
- }
-}
-
-impl Dimensions for TerminalSize {
- /// Note: this is supposed to be for the back buffer's length,
- /// but we exclusively use it to resize the terminal, which does not
- /// use this method. We still have to implement it for the trait though,
- /// hence, this comment.
- fn total_lines(&self) -> usize {
- self.screen_lines()
- }
-
- fn screen_lines(&self) -> usize {
- self.num_lines()
- }
-
- fn columns(&self) -> usize {
- self.num_columns()
- }
-}
-
-#[derive(Error, Debug)]
-pub struct TerminalError {
- pub directory: Option<PathBuf>,
- pub shell: Shell,
- pub source: std::io::Error,
-}
-
-impl TerminalError {
- pub fn fmt_directory(&self) -> String {
- self.directory
- .clone()
- .map(|path| {
- match path
- .into_os_string()
- .into_string()
- .map_err(|os_str| format!("<non-utf8 path> {}", os_str.to_string_lossy()))
- {
- Ok(s) => s,
- Err(s) => s,
- }
- })
- .unwrap_or_else(|| {
- let default_dir =
- dirs::home_dir().map(|buf| buf.into_os_string().to_string_lossy().to_string());
- match default_dir {
- Some(dir) => format!("<none specified, using home directory> {}", dir),
- None => "<none specified, could not find home directory>".to_string(),
- }
- })
- }
-
- pub fn shell_to_string(&self) -> String {
- match &self.shell {
- Shell::System => "<system shell>".to_string(),
- Shell::Program(p) => p.to_string(),
- Shell::WithArguments { program, args } => format!("{} {}", program, args.join(" ")),
- }
- }
-
- pub fn fmt_shell(&self) -> String {
- match &self.shell {
- Shell::System => "<system defined shell>".to_string(),
- Shell::Program(s) => s.to_string(),
- Shell::WithArguments { program, args } => format!("{} {}", program, args.join(" ")),
- }
- }
-}
-
-impl Display for TerminalError {
- fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let dir_string: String = self.fmt_directory();
- let shell = self.fmt_shell();
-
- write!(
- f,
- "Working directory: {} Shell command: `{}`, IOError: {}",
- dir_string, shell, self.source
- )
- }
-}
-
-pub struct TerminalBuilder {
- terminal: Terminal,
- events_rx: UnboundedReceiver<AlacTermEvent>,
-}
-
-impl TerminalBuilder {
- pub fn new(
- working_directory: Option<PathBuf>,
- shell: Shell,
- mut env: HashMap<String, String>,
- blink_settings: Option<TerminalBlink>,
- alternate_scroll: AlternateScroll,
- window: AnyWindowHandle,
- ) -> Result<TerminalBuilder> {
- let pty_config = {
- let alac_shell = match shell.clone() {
- Shell::System => None,
- Shell::Program(program) => Some(Program::Just(program)),
- Shell::WithArguments { program, args } => Some(Program::WithArgs { program, args }),
- };
-
- PtyConfig {
- shell: alac_shell,
- working_directory: working_directory.clone(),
- hold: false,
- }
- };
-
- //TODO: Properly set the current locale,
- env.insert("LC_ALL".to_string(), "en_US.UTF-8".to_string());
- env.insert("ZED_TERM".to_string(), true.to_string());
-
- let alac_scrolling = Scrolling::default();
- // alac_scrolling.set_history((BACK_BUFFER_SIZE * 2) as u32);
-
- let config = Config {
- pty_config: pty_config.clone(),
- env,
- scrolling: alac_scrolling,
- ..Default::default()
- };
-
- setup_env(&config);
-
- //Spawn a task so the Alacritty EventLoop can communicate with us in a view context
- //TODO: Remove with a bounded sender which can be dispatched on &self
- let (events_tx, events_rx) = unbounded();
- //Set up the terminal...
- let mut term = Term::new(
- &config,
- &TerminalSize::default(),
- ZedListener(events_tx.clone()),
- );
-
- //Start off blinking if we need to
- if let Some(TerminalBlink::On) = blink_settings {
- term.set_mode(alacritty_terminal::ansi::Mode::BlinkingCursor)
- }
-
- //Alacritty defaults to alternate scrolling being on, so we just need to turn it off.
- if let AlternateScroll::Off = alternate_scroll {
- term.unset_mode(alacritty_terminal::ansi::Mode::AlternateScroll)
- }
-
- let term = Arc::new(FairMutex::new(term));
-
- //Setup the pty...
- let pty = match tty::new(
- &pty_config,
- TerminalSize::default().into(),
- window.window_id().as_u64(),
- ) {
- Ok(pty) => pty,
- Err(error) => {
- bail!(TerminalError {
- directory: working_directory,
- shell,
- source: error,
- });
- }
- };
-
- let fd = pty.file().as_raw_fd();
- let shell_pid = pty.child().id();
-
- //And connect them together
- let event_loop = EventLoop::new(
- term.clone(),
- ZedListener(events_tx.clone()),
- pty,
- pty_config.hold,
- false,
- );
-
- //Kick things off
- let pty_tx = event_loop.channel();
- let _io_thread = event_loop.spawn();
-
- let terminal = Terminal {
- pty_tx: Notifier(pty_tx),
- term,
- events: VecDeque::with_capacity(10), //Should never get this high.
- last_content: Default::default(),
- last_mouse: None,
- matches: Vec::new(),
- last_synced: Instant::now(),
- sync_task: None,
- selection_head: None,
- shell_fd: fd as u32,
- shell_pid,
- foreground_process_info: None,
- breadcrumb_text: String::new(),
- scroll_px: px(0.),
- last_mouse_position: None,
- next_link_id: 0,
- selection_phase: SelectionPhase::Ended,
- cmd_pressed: false,
- hovered_word: false,
- };
-
- Ok(TerminalBuilder {
- terminal,
- events_rx,
- })
- }
-
- pub fn subscribe(mut self, cx: &mut ModelContext<Terminal>) -> Terminal {
- //Event loop
- cx.spawn(|this, mut cx| async move {
- use futures::StreamExt;
-
- while let Some(event) = self.events_rx.next().await {
- this.update(&mut cx, |this, cx| {
- //Process the first event immediately for lowered latency
- this.process_event(&event, cx);
- })?;
-
- 'outer: loop {
- let mut events = vec![];
- let mut timer = cx
- .background_executor()
- .timer(Duration::from_millis(4))
- .fuse();
- let mut wakeup = false;
- loop {
- futures::select_biased! {
- _ = timer => break,
- event = self.events_rx.next() => {
- if let Some(event) = event {
- if matches!(event, AlacTermEvent::Wakeup) {
- wakeup = true;
- } else {
- events.push(event);
- }
-
- if events.len() > 100 {
- break;
- }
- } else {
- break;
- }
- },
- }
- }
-
- if events.is_empty() && wakeup == false {
- smol::future::yield_now().await;
- break 'outer;
- } else {
- this.update(&mut cx, |this, cx| {
- if wakeup {
- this.process_event(&AlacTermEvent::Wakeup, cx);
- }
-
- for event in events {
- this.process_event(&event, cx);
- }
- })?;
- smol::future::yield_now().await;
- }
- }
- }
-
- anyhow::Ok(())
- })
- .detach();
-
- self.terminal
- }
-}
-
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct IndexedCell {
- pub point: AlacPoint,
- pub cell: Cell,
-}
-
-impl Deref for IndexedCell {
- type Target = Cell;
-
- #[inline]
- fn deref(&self) -> &Cell {
- &self.cell
- }
-}
-
-// TODO: Un-pub
-#[derive(Clone)]
-pub struct TerminalContent {
- pub cells: Vec<IndexedCell>,
- pub mode: TermMode,
- pub display_offset: usize,
- pub selection_text: Option<String>,
- pub selection: Option<SelectionRange>,
- pub cursor: RenderableCursor,
- pub cursor_char: char,
- pub size: TerminalSize,
- pub last_hovered_word: Option<HoveredWord>,
-}
-
-#[derive(Clone)]
-pub struct HoveredWord {
- pub word: String,
- pub word_match: RangeInclusive<AlacPoint>,
- pub id: usize,
-}
-
-impl Default for TerminalContent {
- fn default() -> Self {
- TerminalContent {
- cells: Default::default(),
- mode: Default::default(),
- display_offset: Default::default(),
- selection_text: Default::default(),
- selection: Default::default(),
- cursor: RenderableCursor {
- shape: alacritty_terminal::ansi::CursorShape::Block,
- point: AlacPoint::new(Line(0), Column(0)),
- },
- cursor_char: Default::default(),
- size: Default::default(),
- last_hovered_word: None,
- }
- }
-}
-
-#[derive(PartialEq, Eq)]
-pub enum SelectionPhase {
- Selecting,
- Ended,
-}
-
-pub struct Terminal {
- pty_tx: Notifier,
- term: Arc<FairMutex<Term<ZedListener>>>,
- events: VecDeque<InternalEvent>,
- /// This is only used for mouse mode cell change detection
- last_mouse: Option<(AlacPoint, AlacDirection)>,
- /// This is only used for terminal hovered word checking
- last_mouse_position: Option<Point<Pixels>>,
- pub matches: Vec<RangeInclusive<AlacPoint>>,
- pub last_content: TerminalContent,
- last_synced: Instant,
- sync_task: Option<Task<()>>,
- pub selection_head: Option<AlacPoint>,
- pub breadcrumb_text: String,
- shell_pid: u32,
- shell_fd: u32,
- pub foreground_process_info: Option<LocalProcessInfo>,
- scroll_px: Pixels,
- next_link_id: usize,
- selection_phase: SelectionPhase,
- cmd_pressed: bool,
- hovered_word: bool,
-}
-
-impl Terminal {
- fn process_event(&mut self, event: &AlacTermEvent, cx: &mut ModelContext<Self>) {
- match event {
- AlacTermEvent::Title(title) => {
- self.breadcrumb_text = title.to_string();
- cx.emit(Event::BreadcrumbsChanged);
- }
- AlacTermEvent::ResetTitle => {
- self.breadcrumb_text = String::new();
- cx.emit(Event::BreadcrumbsChanged);
- }
- AlacTermEvent::ClipboardStore(_, data) => {
- cx.write_to_clipboard(ClipboardItem::new(data.to_string()))
- }
- AlacTermEvent::ClipboardLoad(_, format) => self.write_to_pty(format(
- &cx.read_from_clipboard()
- .map(|ci| ci.text().to_string())
- .unwrap_or_else(|| "".to_string()),
- )),
- AlacTermEvent::PtyWrite(out) => self.write_to_pty(out.clone()),
- AlacTermEvent::TextAreaSizeRequest(format) => {
- self.write_to_pty(format(self.last_content.size.into()))
- }
- AlacTermEvent::CursorBlinkingChange => {
- cx.emit(Event::BlinkChanged);
- }
- AlacTermEvent::Bell => {
- cx.emit(Event::Bell);
- }
- AlacTermEvent::Exit => cx.emit(Event::CloseTerminal),
- AlacTermEvent::MouseCursorDirty => {
- //NOOP, Handled in render
- }
- AlacTermEvent::Wakeup => {
- cx.emit(Event::Wakeup);
-
- if self.update_process_info() {
- cx.emit(Event::TitleChanged);
- }
- }
- AlacTermEvent::ColorRequest(idx, fun_ptr) => {
- self.events
- .push_back(InternalEvent::ColorRequest(*idx, fun_ptr.clone()));
- }
- }
- }
-
- /// Update the cached process info, returns whether the Zed-relevant info has changed
- fn update_process_info(&mut self) -> bool {
- let mut pid = unsafe { libc::tcgetpgrp(self.shell_fd as i32) };
- if pid < 0 {
- pid = self.shell_pid as i32;
- }
-
- if let Some(process_info) = LocalProcessInfo::with_root_pid(pid as u32) {
- let res = self
- .foreground_process_info
- .as_ref()
- .map(|old_info| {
- process_info.cwd != old_info.cwd || process_info.name != old_info.name
- })
- .unwrap_or(true);
-
- self.foreground_process_info = Some(process_info.clone());
-
- res
- } else {
- false
- }
- }
-
- ///Takes events from Alacritty and translates them to behavior on this view
- fn process_terminal_event(
- &mut self,
- event: &InternalEvent,
- term: &mut Term<ZedListener>,
- cx: &mut ModelContext<Self>,
- ) {
- match event {
- InternalEvent::ColorRequest(index, format) => {
- let color = term.colors()[*index].unwrap_or_else(|| {
- to_alac_rgb(get_color_at_index(*index, cx.theme().as_ref()))
- });
- self.write_to_pty(format(color))
- }
- InternalEvent::Resize(mut new_size) => {
- new_size.size.height = cmp::max(new_size.line_height, new_size.height());
- new_size.size.width = cmp::max(new_size.cell_width, new_size.width());
-
- self.last_content.size = new_size.clone();
-
- self.pty_tx.0.send(Msg::Resize(new_size.into())).ok();
-
- term.resize(new_size);
- }
- InternalEvent::Clear => {
- // Clear back buffer
- term.clear_screen(ClearMode::Saved);
-
- let cursor = term.grid().cursor.point;
-
- // Clear the lines above
- term.grid_mut().reset_region(..cursor.line);
-
- // Copy the current line up
- let line = term.grid()[cursor.line][..Column(term.grid().columns())]
- .iter()
- .cloned()
- .enumerate()
- .collect::<Vec<(usize, Cell)>>();
-
- for (i, cell) in line {
- term.grid_mut()[Line(0)][Column(i)] = cell;
- }
-
- // Reset the cursor
- term.grid_mut().cursor.point =
- AlacPoint::new(Line(0), term.grid_mut().cursor.point.column);
- let new_cursor = term.grid().cursor.point;
-
- // Clear the lines below the new cursor
- if (new_cursor.line.0 as usize) < term.screen_lines() - 1 {
- term.grid_mut().reset_region((new_cursor.line + 1)..);
- }
-
- cx.emit(Event::Wakeup);
- }
- InternalEvent::Scroll(scroll) => {
- term.scroll_display(*scroll);
- self.refresh_hovered_word();
- }
- InternalEvent::SetSelection(selection) => {
- term.selection = selection.as_ref().map(|(sel, _)| sel.clone());
-
- if let Some((_, head)) = selection {
- self.selection_head = Some(*head);
- }
- cx.emit(Event::SelectionsChanged)
- }
- InternalEvent::UpdateSelection(position) => {
- if let Some(mut selection) = term.selection.take() {
- let point = grid_point(
- *position,
- self.last_content.size,
- term.grid().display_offset(),
- );
-
- let side = mouse_side(*position, self.last_content.size);
-
- selection.update(point, side);
- term.selection = Some(selection);
-
- self.selection_head = Some(point);
- cx.emit(Event::SelectionsChanged)
- }
- }
-
- InternalEvent::Copy => {
- if let Some(txt) = term.selection_to_string() {
- cx.write_to_clipboard(ClipboardItem::new(txt))
- }
- }
- InternalEvent::ScrollToAlacPoint(point) => {
- term.scroll_to_point(*point);
- self.refresh_hovered_word();
- }
- InternalEvent::FindHyperlink(position, open) => {
- let prev_hovered_word = self.last_content.last_hovered_word.take();
-
- let point = grid_point(
- *position,
- self.last_content.size,
- term.grid().display_offset(),
- )
- .grid_clamp(term, Boundary::Grid);
-
- let link = term.grid().index(point).hyperlink();
- let found_word = if link.is_some() {
- let mut min_index = point;
- loop {
- let new_min_index = min_index.sub(term, Boundary::Cursor, 1);
- if new_min_index == min_index {
- break;
- } else if term.grid().index(new_min_index).hyperlink() != link {
- break;
- } else {
- min_index = new_min_index
- }
- }
-
- let mut max_index = point;
- loop {
- let new_max_index = max_index.add(term, Boundary::Cursor, 1);
- if new_max_index == max_index {
- break;
- } else if term.grid().index(new_max_index).hyperlink() != link {
- break;
- } else {
- max_index = new_max_index
- }
- }
-
- let url = link.unwrap().uri().to_owned();
- let url_match = min_index..=max_index;
-
- Some((url, true, url_match))
- } else if let Some(word_match) = regex_match_at(term, point, &WORD_REGEX) {
- let maybe_url_or_path =
- term.bounds_to_string(*word_match.start(), *word_match.end());
- let original_match = word_match.clone();
- let (sanitized_match, sanitized_word) =
- if maybe_url_or_path.starts_with('[') && maybe_url_or_path.ends_with(']') {
- (
- Match::new(
- word_match.start().add(term, Boundary::Cursor, 1),
- word_match.end().sub(term, Boundary::Cursor, 1),
- ),
- maybe_url_or_path[1..maybe_url_or_path.len() - 1].to_owned(),
- )
- } else {
- (word_match, maybe_url_or_path)
- };
-
- let is_url = match regex_match_at(term, point, &URL_REGEX) {
- Some(url_match) => {
- // `]` is a valid symbol in the `file://` URL, so the regex match will include it
- // consider that when ensuring that the URL match is the same as the original word
- if sanitized_match != original_match {
- url_match.start() == sanitized_match.start()
- && url_match.end() == original_match.end()
- } else {
- url_match == sanitized_match
- }
- }
- None => false,
- };
- Some((sanitized_word, is_url, sanitized_match))
- } else {
- None
- };
-
- match found_word {
- Some((maybe_url_or_path, is_url, url_match)) => {
- if *open {
- let target = if is_url {
- MaybeNavigationTarget::Url(maybe_url_or_path)
- } else {
- MaybeNavigationTarget::PathLike(maybe_url_or_path)
- };
- cx.emit(Event::Open(target));
- } else {
- self.update_selected_word(
- prev_hovered_word,
- url_match,
- maybe_url_or_path,
- is_url,
- cx,
- );
- }
- self.hovered_word = true;
- }
- None => {
- if self.hovered_word {
- cx.emit(Event::NewNavigationTarget(None));
- }
- self.hovered_word = false;
- }
- }
- }
- }
- }
-
- fn update_selected_word(
- &mut self,
- prev_word: Option<HoveredWord>,
- word_match: RangeInclusive<AlacPoint>,
- word: String,
- is_url: bool,
- cx: &mut ModelContext<Self>,
- ) {
- if let Some(prev_word) = prev_word {
- if prev_word.word == word && prev_word.word_match == word_match {
- self.last_content.last_hovered_word = Some(HoveredWord {
- word,
- word_match,
- id: prev_word.id,
- });
- return;
- }
- }
-
- self.last_content.last_hovered_word = Some(HoveredWord {
- word: word.clone(),
- word_match,
- id: self.next_link_id(),
- });
- let navigation_target = if is_url {
- MaybeNavigationTarget::Url(word)
- } else {
- MaybeNavigationTarget::PathLike(word)
- };
- cx.emit(Event::NewNavigationTarget(Some(navigation_target)));
- }
-
- fn next_link_id(&mut self) -> usize {
- let res = self.next_link_id;
- self.next_link_id = self.next_link_id.wrapping_add(1);
- res
- }
-
- pub fn last_content(&self) -> &TerminalContent {
- &self.last_content
- }
-
- //To test:
- //- Activate match on terminal (scrolling and selection)
- //- Editor search snapping behavior
-
- pub fn activate_match(&mut self, index: usize) {
- if let Some(search_match) = self.matches.get(index).cloned() {
- self.set_selection(Some((make_selection(&search_match), *search_match.end())));
-
- self.events
- .push_back(InternalEvent::ScrollToAlacPoint(*search_match.start()));
- }
- }
-
- pub fn select_matches(&mut self, matches: Vec<RangeInclusive<AlacPoint>>) {
- let matches_to_select = self
- .matches
- .iter()
- .filter(|self_match| matches.contains(self_match))
- .cloned()
- .collect::<Vec<_>>();
- for match_to_select in matches_to_select {
- self.set_selection(Some((
- make_selection(&match_to_select),
- *match_to_select.end(),
- )));
- }
- }
-
- pub fn select_all(&mut self) {
- let term = self.term.lock();
- let start = AlacPoint::new(term.topmost_line(), Column(0));
- let end = AlacPoint::new(term.bottommost_line(), term.last_column());
- drop(term);
- self.set_selection(Some((make_selection(&(start..=end)), end)));
- }
-
- fn set_selection(&mut self, selection: Option<(Selection, AlacPoint)>) {
- self.events
- .push_back(InternalEvent::SetSelection(selection));
- }
-
- pub fn copy(&mut self) {
- self.events.push_back(InternalEvent::Copy);
- }
-
- pub fn clear(&mut self) {
- self.events.push_back(InternalEvent::Clear)
- }
-
- ///Resize the terminal and the PTY.
- pub fn set_size(&mut self, new_size: TerminalSize) {
- self.events.push_back(InternalEvent::Resize(new_size))
- }
-
- ///Write the Input payload to the tty.
- fn write_to_pty(&self, input: String) {
- self.pty_tx.notify(input.into_bytes());
- }
-
- fn write_bytes_to_pty(&self, input: Vec<u8>) {
- self.pty_tx.notify(input);
- }
-
- pub fn input(&mut self, input: String) {
- self.events
- .push_back(InternalEvent::Scroll(AlacScroll::Bottom));
- self.events.push_back(InternalEvent::SetSelection(None));
-
- self.write_to_pty(input);
- }
-
- pub fn input_bytes(&mut self, input: Vec<u8>) {
- self.events
- .push_back(InternalEvent::Scroll(AlacScroll::Bottom));
- self.events.push_back(InternalEvent::SetSelection(None));
-
- self.write_bytes_to_pty(input);
- }
-
- pub fn try_keystroke(&mut self, keystroke: &Keystroke, alt_is_meta: bool) -> bool {
- let esc = to_esc_str(keystroke, &self.last_content.mode, alt_is_meta);
- if let Some(esc) = esc {
- self.input(esc);
- true
- } else {
- false
- }
- }
-
- pub fn try_modifiers_change(&mut self, modifiers: &Modifiers) -> bool {
- let changed = self.cmd_pressed != modifiers.command;
- if !self.cmd_pressed && modifiers.command {
- self.refresh_hovered_word();
- }
- self.cmd_pressed = modifiers.command;
- changed
- }
-
- ///Paste text into the terminal
- pub fn paste(&mut self, text: &str) {
- let paste_text = if self.last_content.mode.contains(TermMode::BRACKETED_PASTE) {
- format!("{}{}{}", "\x1b[200~", text.replace('\x1b', ""), "\x1b[201~")
- } else {
- text.replace("\r\n", "\r").replace('\n', "\r")
- };
-
- self.input(paste_text);
- }
-
- pub fn try_sync(&mut self, cx: &mut ModelContext<Self>) {
- let term = self.term.clone();
-
- let mut terminal = if let Some(term) = term.try_lock_unfair() {
- term
- } else if self.last_synced.elapsed().as_secs_f32() > 0.25 {
- term.lock_unfair() //It's been too long, force block
- } else if let None = self.sync_task {
- //Skip this frame
- let delay = cx.background_executor().timer(Duration::from_millis(16));
- self.sync_task = Some(cx.spawn(|weak_handle, mut cx| async move {
- delay.await;
- if let Some(handle) = weak_handle.upgrade() {
- handle
- .update(&mut cx, |terminal, cx| {
- terminal.sync_task.take();
- cx.notify();
- })
- .ok();
- }
- }));
- return;
- } else {
- //No lock and delayed rendering already scheduled, nothing to do
- return;
- };
-
- //Note that the ordering of events matters for event processing
- while let Some(e) = self.events.pop_front() {
- self.process_terminal_event(&e, &mut terminal, cx)
- }
-
- self.last_content = Self::make_content(&terminal, &self.last_content);
- self.last_synced = Instant::now();
- }
-
- fn make_content(term: &Term<ZedListener>, last_content: &TerminalContent) -> TerminalContent {
- let content = term.renderable_content();
- TerminalContent {
- cells: content
- .display_iter
- //TODO: Add this once there's a way to retain empty lines
- // .filter(|ic| {
- // !ic.flags.contains(Flags::HIDDEN)
- // && !(ic.bg == Named(NamedColor::Background)
- // && ic.c == ' '
- // && !ic.flags.contains(Flags::INVERSE))
- // })
- .map(|ic| IndexedCell {
- point: ic.point,
- cell: ic.cell.clone(),
- })
- .collect::<Vec<IndexedCell>>(),
- mode: content.mode,
- display_offset: content.display_offset,
- selection_text: term.selection_to_string(),
- selection: content.selection,
- cursor: content.cursor,
- cursor_char: term.grid()[content.cursor.point].c,
- size: last_content.size,
- last_hovered_word: last_content.last_hovered_word.clone(),
- }
- }
-
- pub fn focus_in(&self) {
- if self.last_content.mode.contains(TermMode::FOCUS_IN_OUT) {
- self.write_to_pty("\x1b[I".to_string());
- }
- }
-
- pub fn focus_out(&mut self) {
- self.last_mouse_position = None;
- if self.last_content.mode.contains(TermMode::FOCUS_IN_OUT) {
- self.write_to_pty("\x1b[O".to_string());
- }
- }
-
- pub fn mouse_changed(&mut self, point: AlacPoint, side: AlacDirection) -> bool {
- match self.last_mouse {
- Some((old_point, old_side)) => {
- if old_point == point && old_side == side {
- false
- } else {
- self.last_mouse = Some((point, side));
- true
- }
- }
- None => {
- self.last_mouse = Some((point, side));
- true
- }
- }
- }
-
- pub fn mouse_mode(&self, shift: bool) -> bool {
- self.last_content.mode.intersects(TermMode::MOUSE_MODE) && !shift
- }
-
- pub fn mouse_move(&mut self, e: &MouseMoveEvent, origin: Point<Pixels>) {
- let position = e.position - origin;
- self.last_mouse_position = Some(position);
- if self.mouse_mode(e.modifiers.shift) {
- let point = grid_point(
- position,
- self.last_content.size,
- self.last_content.display_offset,
- );
- let side = mouse_side(position, self.last_content.size);
-
- if self.mouse_changed(point, side) {
- if let Some(bytes) = mouse_moved_report(point, e, self.last_content.mode) {
- self.pty_tx.notify(bytes);
- }
- }
- } else if self.cmd_pressed {
- self.word_from_position(Some(position));
- }
- }
-
- fn word_from_position(&mut self, position: Option<Point<Pixels>>) {
- if self.selection_phase == SelectionPhase::Selecting {
- self.last_content.last_hovered_word = None;
- } else if let Some(position) = position {
- self.events
- .push_back(InternalEvent::FindHyperlink(position, false));
- }
- }
-
- pub fn mouse_drag(
- &mut self,
- e: &MouseMoveEvent,
- origin: Point<Pixels>,
- region: Bounds<Pixels>,
- ) {
- let position = e.position - origin;
- self.last_mouse_position = Some(position);
-
- if !self.mouse_mode(e.modifiers.shift) {
- self.selection_phase = SelectionPhase::Selecting;
- // Alacritty has the same ordering, of first updating the selection
- // then scrolling 15ms later
- self.events
- .push_back(InternalEvent::UpdateSelection(position));
-
- // Doesn't make sense to scroll the alt screen
- if !self.last_content.mode.contains(TermMode::ALT_SCREEN) {
- let scroll_delta = match self.drag_line_delta(e, region) {
- Some(value) => value,
- None => return,
- };
-
- let scroll_lines = (scroll_delta / self.last_content.size.line_height) as i32;
-
- self.events
- .push_back(InternalEvent::Scroll(AlacScroll::Delta(scroll_lines)));
- }
- }
- }
-
- fn drag_line_delta(&mut self, e: &MouseMoveEvent, region: Bounds<Pixels>) -> Option<Pixels> {
- //TODO: Why do these need to be doubled? Probably the same problem that the IME has
- let top = region.origin.y + (self.last_content.size.line_height * 2.);
- let bottom = region.lower_left().y - (self.last_content.size.line_height * 2.);
- let scroll_delta = if e.position.y < top {
- (top - e.position.y).pow(1.1)
- } else if e.position.y > bottom {
- -((e.position.y - bottom).pow(1.1))
- } else {
- return None; //Nothing to do
- };
- Some(scroll_delta)
- }
-
- pub fn mouse_down(&mut self, e: &MouseDownEvent, origin: Point<Pixels>) {
- let position = e.position - origin;
- let point = grid_point(
- position,
- self.last_content.size,
- self.last_content.display_offset,
- );
-
- if self.mouse_mode(e.modifiers.shift) {
- if let Some(bytes) =
- mouse_button_report(point, e.button, e.modifiers, true, self.last_content.mode)
- {
- self.pty_tx.notify(bytes);
- }
- } else if e.button == MouseButton::Left {
- let position = e.position - origin;
- let point = grid_point(
- position,
- self.last_content.size,
- self.last_content.display_offset,
- );
-
- // Use .opposite so that selection is inclusive of the cell clicked.
- let side = mouse_side(position, self.last_content.size);
-
- let selection_type = match e.click_count {
- 0 => return, //This is a release
- 1 => Some(SelectionType::Simple),
- 2 => Some(SelectionType::Semantic),
- 3 => Some(SelectionType::Lines),
- _ => None,
- };
-
- let selection =
- selection_type.map(|selection_type| Selection::new(selection_type, point, side));
-
- if let Some(sel) = selection {
- self.events
- .push_back(InternalEvent::SetSelection(Some((sel, point))));
- }
- }
- }
-
- pub fn mouse_up(
- &mut self,
- e: &MouseUpEvent,
- origin: Point<Pixels>,
- cx: &mut ModelContext<Self>,
- ) {
- let setting = TerminalSettings::get_global(cx);
-
- let position = e.position - origin;
- if self.mouse_mode(e.modifiers.shift) {
- let point = grid_point(
- position,
- self.last_content.size,
- self.last_content.display_offset,
- );
-
- if let Some(bytes) =
- mouse_button_report(point, e.button, e.modifiers, false, self.last_content.mode)
- {
- self.pty_tx.notify(bytes);
- }
- } else {
- if e.button == MouseButton::Left && setting.copy_on_select {
- self.copy();
- }
-
- //Hyperlinks
- if self.selection_phase == SelectionPhase::Ended {
- let mouse_cell_index = content_index_for_mouse(position, &self.last_content.size);
- if let Some(link) = self.last_content.cells[mouse_cell_index].hyperlink() {
- cx.open_url(link.uri());
- } else if self.cmd_pressed {
- self.events
- .push_back(InternalEvent::FindHyperlink(position, true));
- }
- }
- }
-
- self.selection_phase = SelectionPhase::Ended;
- self.last_mouse = None;
- }
-
- ///Scroll the terminal
- pub fn scroll_wheel(&mut self, e: &ScrollWheelEvent, origin: Point<Pixels>) {
- let mouse_mode = self.mouse_mode(e.shift);
-
- if let Some(scroll_lines) = self.determine_scroll_lines(&e, mouse_mode) {
- if mouse_mode {
- let point = grid_point(
- e.position - origin,
- self.last_content.size,
- self.last_content.display_offset,
- );
-
- if let Some(scrolls) =
- scroll_report(point, scroll_lines as i32, &e, self.last_content.mode)
- {
- for scroll in scrolls {
- self.pty_tx.notify(scroll);
- }
- };
- } else if self
- .last_content
- .mode
- .contains(TermMode::ALT_SCREEN | TermMode::ALTERNATE_SCROLL)
- && !e.shift
- {
- self.pty_tx.notify(alt_scroll(scroll_lines))
- } else {
- if scroll_lines != 0 {
- let scroll = AlacScroll::Delta(scroll_lines);
-
- self.events.push_back(InternalEvent::Scroll(scroll));
- }
- }
- }
- }
-
- fn refresh_hovered_word(&mut self) {
- self.word_from_position(self.last_mouse_position);
- }
-
- fn determine_scroll_lines(&mut self, e: &ScrollWheelEvent, mouse_mode: bool) -> Option<i32> {
- let scroll_multiplier = if mouse_mode { 1. } else { SCROLL_MULTIPLIER };
- let line_height = self.last_content.size.line_height;
- match e.touch_phase {
- /* Reset scroll state on started */
- TouchPhase::Started => {
- self.scroll_px = px(0.);
- None
- }
- /* Calculate the appropriate scroll lines */
- TouchPhase::Moved => {
- let old_offset = (self.scroll_px / line_height) as i32;
-
- self.scroll_px += e.delta.pixel_delta(line_height).y * scroll_multiplier;
-
- let new_offset = (self.scroll_px / line_height) as i32;
-
- // Whenever we hit the edges, reset our stored scroll to 0
- // so we can respond to changes in direction quickly
- self.scroll_px %= self.last_content.size.height();
-
- Some(new_offset - old_offset)
- }
- TouchPhase::Ended => None,
- }
- }
-
- pub fn find_matches(
- &mut self,
- searcher: RegexSearch,
- cx: &mut ModelContext<Self>,
- ) -> Task<Vec<RangeInclusive<AlacPoint>>> {
- let term = self.term.clone();
- cx.background_executor().spawn(async move {
- let term = term.lock();
-
- all_search_matches(&term, &searcher).collect()
- })
- }
-
- pub fn title(&self) -> String {
- self.foreground_process_info
- .as_ref()
- .map(|fpi| {
- format!(
- "{} — {}",
- truncate_and_trailoff(
- &fpi.cwd
- .file_name()
- .map(|name| name.to_string_lossy().to_string())
- .unwrap_or_default(),
- 25
- ),
- truncate_and_trailoff(
- &{
- format!(
- "{}{}",
- fpi.name,
- if fpi.argv.len() >= 1 {
- format!(" {}", (&fpi.argv[1..]).join(" "))
- } else {
- "".to_string()
- }
- )
- },
- 25
- )
- )
- })
- .unwrap_or_else(|| "Terminal".to_string())
- }
-
- pub fn can_navigate_to_selected_word(&self) -> bool {
- self.cmd_pressed && self.hovered_word
- }
-}
-
-impl Drop for Terminal {
- fn drop(&mut self) {
- self.pty_tx.0.send(Msg::Shutdown).ok();
- }
-}
-
-impl EventEmitter<Event> for Terminal {}
-
-/// Based on alacritty/src/display/hint.rs > regex_match_at
-/// Retrieve the match, if the specified point is inside the content matching the regex.
-fn regex_match_at<T>(term: &Term<T>, point: AlacPoint, regex: &RegexSearch) -> Option<Match> {
- visible_regex_match_iter(term, regex).find(|rm| rm.contains(&point))
-}
-
-/// Copied from alacritty/src/display/hint.rs:
-/// Iterate over all visible regex matches.
-pub fn visible_regex_match_iter<'a, T>(
- term: &'a Term<T>,
- regex: &'a RegexSearch,
-) -> impl Iterator<Item = Match> + 'a {
- let viewport_start = Line(-(term.grid().display_offset() as i32));
- let viewport_end = viewport_start + term.bottommost_line();
- let mut start = term.line_search_left(AlacPoint::new(viewport_start, Column(0)));
- let mut end = term.line_search_right(AlacPoint::new(viewport_end, Column(0)));
- start.line = start.line.max(viewport_start - MAX_SEARCH_LINES);
- end.line = end.line.min(viewport_end + MAX_SEARCH_LINES);
-
- RegexIter::new(start, end, AlacDirection::Right, term, regex)
- .skip_while(move |rm| rm.end().line < viewport_start)
- .take_while(move |rm| rm.start().line <= viewport_end)
-}
-
-fn make_selection(range: &RangeInclusive<AlacPoint>) -> Selection {
- let mut selection = Selection::new(SelectionType::Simple, *range.start(), AlacDirection::Left);
- selection.update(*range.end(), AlacDirection::Right);
- selection
-}
-
-fn all_search_matches<'a, T>(
- term: &'a Term<T>,
- regex: &'a RegexSearch,
-) -> impl Iterator<Item = Match> + 'a {
- let start = AlacPoint::new(term.grid().topmost_line(), Column(0));
- let end = AlacPoint::new(term.grid().bottommost_line(), term.grid().last_column());
- RegexIter::new(start, end, AlacDirection::Right, term, regex)
-}
-
-fn content_index_for_mouse(pos: Point<Pixels>, size: &TerminalSize) -> usize {
- let col = (pos.x / size.cell_width()).round() as usize;
- let clamped_col = min(col, size.columns() - 1);
- let row = (pos.y / size.line_height()).round() as usize;
- let clamped_row = min(row, size.screen_lines() - 1);
- clamped_row * size.columns() + clamped_col
-}
-
-///Converts an 8 bit ANSI color to it's GPUI equivalent.
-///Accepts usize for compatibility with the alacritty::Colors interface,
-///Other than that use case, should only be called with values in the [0,255] range
-pub fn get_color_at_index(index: usize, theme: &Theme) -> Hsla {
- let colors = theme.colors();
-
- match index {
- //0-15 are the same as the named colors above
- 0 => colors.terminal_ansi_black,
- 1 => colors.terminal_ansi_red,
- 2 => colors.terminal_ansi_green,
- 3 => colors.terminal_ansi_yellow,
- 4 => colors.terminal_ansi_blue,
- 5 => colors.terminal_ansi_magenta,
- 6 => colors.terminal_ansi_cyan,
- 7 => colors.terminal_ansi_white,
- 8 => colors.terminal_ansi_bright_black,
- 9 => colors.terminal_ansi_bright_red,
- 10 => colors.terminal_ansi_bright_green,
- 11 => colors.terminal_ansi_bright_yellow,
- 12 => colors.terminal_ansi_bright_blue,
- 13 => colors.terminal_ansi_bright_magenta,
- 14 => colors.terminal_ansi_bright_cyan,
- 15 => colors.terminal_ansi_bright_white,
- //16-231 are mapped to their RGB colors on a 0-5 range per channel
- 16..=231 => {
- let (r, g, b) = rgb_for_index(&(index as u8)); //Split the index into it's ANSI-RGB components
- let step = (u8::MAX as f32 / 5.).floor() as u8; //Split the RGB range into 5 chunks, with floor so no overflow
- rgba_color(r * step, g * step, b * step) //Map the ANSI-RGB components to an RGB color
- }
- //232-255 are a 24 step grayscale from black to white
- 232..=255 => {
- let i = index as u8 - 232; //Align index to 0..24
- let step = (u8::MAX as f32 / 24.).floor() as u8; //Split the RGB grayscale values into 24 chunks
- rgba_color(i * step, i * step, i * step) //Map the ANSI-grayscale components to the RGB-grayscale
- }
- //For compatibility with the alacritty::Colors interface
- 256 => colors.text,
- 257 => colors.background,
- 258 => theme.players().local().cursor,
-
- // todo!(more colors)
- 259 => red(), //style.dim_black,
- 260 => red(), //style.dim_red,
- 261 => red(), //style.dim_green,
- 262 => red(), //style.dim_yellow,
- 263 => red(), //style.dim_blue,
- 264 => red(), //style.dim_magenta,
- 265 => red(), //style.dim_cyan,
- 266 => red(), //style.dim_white,
- 267 => red(), //style.bright_foreground,
- 268 => colors.terminal_ansi_black, //'Dim Background', non-standard color
-
- _ => black(),
- }
-}
-
-///Generates the rgb channels in [0, 5] for a given index into the 6x6x6 ANSI color cube
-///See: [8 bit ansi color](https://en.wikipedia.org/wiki/ANSI_escape_code#8-bit).
-///
-///Wikipedia gives a formula for calculating the index for a given color:
-///
-///index = 16 + 36 × r + 6 × g + b (0 ≤ r, g, b ≤ 5)
-///
-///This function does the reverse, calculating the r, g, and b components from a given index.
-fn rgb_for_index(i: &u8) -> (u8, u8, u8) {
- debug_assert!((&16..=&231).contains(&i));
- let i = i - 16;
- let r = (i - (i % 36)) / 36;
- let g = ((i % 36) - (i % 6)) / 6;
- let b = (i % 36) % 6;
- (r, g, b)
-}
-
-pub fn rgba_color(r: u8, g: u8, b: u8) -> Hsla {
- Rgba {
- r: (r as f32 / 255.) as f32,
- g: (g as f32 / 255.) as f32,
- b: (b as f32 / 255.) as f32,
- a: 1.,
- }
- .into()
-}
-
-#[cfg(test)]
-mod tests {
- use alacritty_terminal::{
- index::{Column, Line, Point as AlacPoint},
- term::cell::Cell,
- };
- use gpui::{point, size, Pixels};
- use rand::{distributions::Alphanumeric, rngs::ThreadRng, thread_rng, Rng};
-
- use crate::{
- content_index_for_mouse, rgb_for_index, IndexedCell, TerminalContent, TerminalSize,
- };
-
- #[test]
- fn test_rgb_for_index() {
- //Test every possible value in the color cube
- for i in 16..=231 {
- let (r, g, b) = rgb_for_index(&(i as u8));
- assert_eq!(i, 16 + 36 * r + 6 * g + b);
- }
- }
-
- #[test]
- fn test_mouse_to_cell_test() {
- let mut rng = thread_rng();
- const ITERATIONS: usize = 10;
- const PRECISION: usize = 1000;
-
- for _ in 0..ITERATIONS {
- let viewport_cells = rng.gen_range(15..20);
- let cell_size = rng.gen_range(5 * PRECISION..20 * PRECISION) as f32 / PRECISION as f32;
-
- let size = crate::TerminalSize {
- cell_width: Pixels::from(cell_size),
- line_height: Pixels::from(cell_size),
- size: size(
- Pixels::from(cell_size * (viewport_cells as f32)),
- Pixels::from(cell_size * (viewport_cells as f32)),
- ),
- };
-
- let cells = get_cells(size, &mut rng);
- let content = convert_cells_to_content(size, &cells);
-
- for row in 0..(viewport_cells - 1) {
- let row = row as usize;
- for col in 0..(viewport_cells - 1) {
- let col = col as usize;
-
- let row_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32;
- let col_offset = rng.gen_range(0..PRECISION) as f32 / PRECISION as f32;
-
- let mouse_pos = point(
- Pixels::from(col as f32 * cell_size + col_offset),
- Pixels::from(row as f32 * cell_size + row_offset),
- );
-
- let content_index = content_index_for_mouse(mouse_pos, &content.size);
- let mouse_cell = content.cells[content_index].c;
- let real_cell = cells[row][col];
-
- assert_eq!(mouse_cell, real_cell);
- }
- }
- }
- }
-
- #[test]
- fn test_mouse_to_cell_clamp() {
- let mut rng = thread_rng();
-
- let size = crate::TerminalSize {
- cell_width: Pixels::from(10.),
- line_height: Pixels::from(10.),
- size: size(Pixels::from(100.), Pixels::from(100.)),
- };
-
- let cells = get_cells(size, &mut rng);
- let content = convert_cells_to_content(size, &cells);
-
- assert_eq!(
- content.cells[content_index_for_mouse(
- point(Pixels::from(-10.), Pixels::from(-10.)),
- &content.size
- )]
- .c,
- cells[0][0]
- );
- assert_eq!(
- content.cells[content_index_for_mouse(
- point(Pixels::from(1000.), Pixels::from(1000.)),
- &content.size
- )]
- .c,
- cells[9][9]
- );
- }
-
- fn get_cells(size: TerminalSize, rng: &mut ThreadRng) -> Vec<Vec<char>> {
- let mut cells = Vec::new();
-
- for _ in 0..(f32::from(size.height() / size.line_height()) as usize) {
- let mut row_vec = Vec::new();
- for _ in 0..(f32::from(size.width() / size.cell_width()) as usize) {
- let cell_char = rng.sample(Alphanumeric) as char;
- row_vec.push(cell_char)
- }
- cells.push(row_vec)
- }
-
- cells
- }
-
- fn convert_cells_to_content(size: TerminalSize, cells: &Vec<Vec<char>>) -> TerminalContent {
- let mut ic = Vec::new();
-
- for row in 0..cells.len() {
- for col in 0..cells[row].len() {
- let cell_char = cells[row][col];
- ic.push(IndexedCell {
- point: AlacPoint::new(Line(row as i32), Column(col)),
- cell: Cell {
- c: cell_char,
- ..Default::default()
- },
- });
- }
- }
-
- TerminalContent {
- cells: ic,
- size,
- ..Default::default()
- }
- }
-}
@@ -1,157 +0,0 @@
-use gpui::{px, AbsoluteLength, AppContext, FontFeatures, Pixels};
-use schemars::JsonSchema;
-use serde_derive::{Deserialize, Serialize};
-use std::{collections::HashMap, path::PathBuf};
-
-#[derive(Copy, Clone, Debug, Serialize, Deserialize, JsonSchema, PartialEq, Eq)]
-#[serde(rename_all = "snake_case")]
-pub enum TerminalDockPosition {
- Left,
- Bottom,
- Right,
-}
-
-#[derive(Deserialize)]
-pub struct TerminalSettings {
- pub shell: Shell,
- pub working_directory: WorkingDirectory,
- pub font_size: Option<Pixels>,
- pub font_family: Option<String>,
- pub line_height: TerminalLineHeight,
- pub font_features: Option<FontFeatures>,
- pub env: HashMap<String, String>,
- pub blinking: TerminalBlink,
- pub alternate_scroll: AlternateScroll,
- pub option_as_meta: bool,
- pub copy_on_select: bool,
- pub dock: TerminalDockPosition,
- pub default_width: Pixels,
- pub default_height: Pixels,
- pub detect_venv: VenvSettings,
-}
-
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum VenvSettings {
- #[default]
- Off,
- On {
- activate_script: Option<ActivateScript>,
- directories: Option<Vec<PathBuf>>,
- },
-}
-
-pub struct VenvSettingsContent<'a> {
- pub activate_script: ActivateScript,
- pub directories: &'a [PathBuf],
-}
-
-impl VenvSettings {
- pub fn as_option(&self) -> Option<VenvSettingsContent> {
- match self {
- VenvSettings::Off => None,
- VenvSettings::On {
- activate_script,
- directories,
- } => Some(VenvSettingsContent {
- activate_script: activate_script.unwrap_or(ActivateScript::Default),
- directories: directories.as_deref().unwrap_or(&[]),
- }),
- }
- }
-}
-
-#[derive(Clone, Copy, Debug, Default, Serialize, Deserialize, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum ActivateScript {
- #[default]
- Default,
- Csh,
- Fish,
- Nushell,
-}
-
-#[derive(Clone, Debug, Default, Serialize, Deserialize, JsonSchema)]
-pub struct TerminalSettingsContent {
- pub shell: Option<Shell>,
- pub working_directory: Option<WorkingDirectory>,
- pub font_size: Option<f32>,
- pub font_family: Option<String>,
- pub line_height: Option<TerminalLineHeight>,
- pub font_features: Option<FontFeatures>,
- pub env: Option<HashMap<String, String>>,
- pub blinking: Option<TerminalBlink>,
- pub alternate_scroll: Option<AlternateScroll>,
- pub option_as_meta: Option<bool>,
- pub copy_on_select: Option<bool>,
- pub dock: Option<TerminalDockPosition>,
- pub default_width: Option<f32>,
- pub default_height: Option<f32>,
- pub detect_venv: Option<VenvSettings>,
-}
-
-impl settings::Settings for TerminalSettings {
- const KEY: Option<&'static str> = Some("terminal");
-
- type FileContent = TerminalSettingsContent;
-
- fn load(
- default_value: &Self::FileContent,
- user_values: &[&Self::FileContent],
- _: &mut AppContext,
- ) -> anyhow::Result<Self> {
- Self::load_via_json_merge(default_value, user_values)
- }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, JsonSchema, Default)]
-#[serde(rename_all = "snake_case")]
-pub enum TerminalLineHeight {
- #[default]
- Comfortable,
- Standard,
- Custom(f32),
-}
-
-impl TerminalLineHeight {
- pub fn value(&self) -> AbsoluteLength {
- let value = match self {
- TerminalLineHeight::Comfortable => 1.618,
- TerminalLineHeight::Standard => 1.3,
- TerminalLineHeight::Custom(line_height) => f32::max(*line_height, 1.),
- };
- px(value).into()
- }
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum TerminalBlink {
- Off,
- TerminalControlled,
- On,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum Shell {
- System,
- Program(String),
- WithArguments { program: String, args: Vec<String> },
-}
-
-#[derive(Clone, Copy, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum AlternateScroll {
- On,
- Off,
-}
-
-#[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq, JsonSchema)]
-#[serde(rename_all = "snake_case")]
-pub enum WorkingDirectory {
- CurrentProjectDirectory,
- FirstProjectDirectory,
- AlwaysHome,
- Always { directory: String },
-}
@@ -14,13 +14,13 @@ language = { path = "../language" }
gpui = { package = "gpui2", path = "../gpui2" }
project = { path = "../project" }
# search = { path = "../search" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
util = { path = "../util" }
workspace = { path = "../workspace" }
db = { path = "../db" }
procinfo = { git = "https://github.com/zed-industries/wezterm", rev = "5cd757e5f2eb039ed0c6bb6512223e69d5efc64d", default-features = false }
-terminal = { package = "terminal2", path = "../terminal2" }
+terminal = { path = "../terminal" }
ui = { package = "ui2", path = "../ui2" }
smallvec.workspace = true
smol.workspace = true
@@ -14,7 +14,7 @@ test-support = ["rand"]
[dependencies]
clock = { path = "../clock" }
collections = { path = "../collections" }
-rope = { package = "rope2", path = "../rope2" }
+rope = { path = "../rope" }
sum_tree = { path = "../sum_tree" }
util = { path = "../util" }
@@ -29,7 +29,7 @@ schemars.workspace = true
serde.workspace = true
serde_derive.workspace = true
serde_json.workspace = true
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
story = { path = "../story", optional = true }
toml.workspace = true
uuid.workspace = true
@@ -39,4 +39,4 @@ itertools = { version = "0.11.0", optional = true }
[dev-dependencies]
gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
@@ -16,7 +16,7 @@ fs = { path = "../fs" }
fuzzy = { path = "../fuzzy" }
gpui = { package = "gpui2", path = "../gpui2" }
picker = { path = "../picker" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2" }
util = { path = "../util" }
@@ -13,9 +13,9 @@ anyhow.workspace = true
chrono = "0.4"
gpui = { package = "gpui2", path = "../gpui2" }
itertools = { version = "0.11.0", optional = true }
-menu = { package = "menu2", path = "../menu2"}
+menu = { path = "../menu"}
serde.workspace = true
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
smallvec.workspace = true
story = { path = "../story", optional = true }
strum = { version = "0.25.0", features = ["derive"] }
@@ -30,7 +30,7 @@ editor = { path = "../editor" }
gpui = { package = "gpui2", path = "../gpui2" }
language = { path = "../language" }
search = { path = "../search" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
workspace = { path = "../workspace" }
theme = { package = "theme2", path = "../theme2" }
ui = { package = "ui2", path = "../ui2"}
@@ -47,7 +47,7 @@ gpui = { package = "gpui2", path = "../gpui2", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
util = { path = "../util", features = ["test-support"] }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
workspace = { path = "../workspace", features = ["test-support"] }
theme = { package = "theme2", path = "../theme2", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
@@ -20,7 +20,7 @@ ui = { package = "ui2", path = "../ui2" }
db = { path = "../db" }
install_cli = { path = "../install_cli" }
project = { path = "../project" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
theme = { package = "theme2", path = "../theme2" }
theme_selector = { path = "../theme_selector" }
util = { path = "../util" }
@@ -31,8 +31,8 @@ language = { path = "../language" }
#menu = { path = "../menu" }
node_runtime = { path = "../node_runtime" }
project = { path = "../project" }
-settings = { path = "../settings2", package = "settings2" }
-terminal = { path = "../terminal2", package = "terminal2" }
+settings = { path = "../settings" }
+terminal = { path = "../terminal" }
theme = { path = "../theme2", package = "theme2" }
util = { path = "../util" }
ui = { package = "ui2", path = "../ui2" }
@@ -58,7 +58,7 @@ call = { path = "../call", features = ["test-support"] }
client = { path = "../client", features = ["test-support"] }
gpui = { path = "../gpui2", package = "gpui2", features = ["test-support"] }
project = { path = "../project", features = ["test-support"] }
-settings = { path = "../settings2", package = "settings2", features = ["test-support"] }
+settings = { path = "../settings", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
db = { path = "../db", features = ["test-support"] }
@@ -16,7 +16,7 @@ path = "src/main.rs"
[dependencies]
ai = { path = "../ai"}
-audio = { package = "audio2", path = "../audio2" }
+audio = { path = "../audio" }
activity_indicator = { path = "../activity_indicator"}
auto_update = { path = "../auto_update" }
breadcrumbs = { path = "../breadcrumbs" }
@@ -46,7 +46,7 @@ journal = { path = "../journal" }
language = { path = "../language" }
language_selector = { path = "../language_selector" }
lsp = { path = "../lsp" }
-menu = { package = "menu2", path = "../menu2" }
+menu = { path = "../menu" }
language_tools = { path = "../language_tools" }
node_runtime = { path = "../node_runtime" }
notifications = { path = "../notifications" }
@@ -58,9 +58,9 @@ project_panel = { path = "../project_panel" }
project_symbols = { path = "../project_symbols" }
quick_action_bar = { path = "../quick_action_bar" }
recent_projects = { path = "../recent_projects" }
-rope = { package = "rope2", path = "../rope2"}
+rope = { path = "../rope"}
rpc = { path = "../rpc" }
-settings = { package = "settings2", path = "../settings2" }
+settings = { path = "../settings" }
feature_flags = { path = "../feature_flags" }
sum_tree = { path = "../sum_tree" }
shellexpand = "2.1.0"
@@ -11,7 +11,7 @@ graph_file=target/crate-graph.html
cargo depgraph \
--workspace-only \
--offline \
- --root=zed,cli,collab2 \
+ --root=zed,cli,collab \
--dedup-transitive-deps \
| dot -Tsvg > $graph_file