Detailed changes
@@ -56,6 +56,7 @@ jobs:
MACOS_CERTIFICATE_PASSWORD: ${{ secrets.MACOS_CERTIFICATE_PASSWORD }}
APPLE_NOTARIZATION_USERNAME: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
APPLE_NOTARIZATION_PASSWORD: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
+ ZED_AMPLITUDE_API_KEY: ${{ secrets.ZED_AMPLITUDE_API_KEY }}
steps:
- name: Install Rust
run: |
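The new `ZED_AMPLITUDE_API_KEY` secret is exported into the release build environment, presumably so the key can be captured at compile time by the telemetry module added further down. A minimal sketch of the resolution order it implies (the helper name here is illustrative; the real code keeps this in a `lazy_static`):

```rust
// Sketch: a runtime env var wins; otherwise the value captured by option_env!
// when the release binary was compiled is used. If neither is set, telemetry
// simply stays disabled.
fn amplitude_api_key() -> Option<String> {
    std::env::var("ZED_AMPLITUDE_API_KEY")
        .ok()
        .or_else(|| option_env!("ZED_AMPLITUDE_API_KEY").map(|key| key.to_string()))
}

fn main() {
    println!("telemetry enabled: {}", amplitude_api_key().is_some());
}
```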
@@ -0,0 +1,22 @@
+on:
+ release:
+ types: [published]
+
+jobs:
+ message:
+ runs-on: ubuntu-latest
+ steps:
+ - name: Discord Webhook Action
+ uses: tsickert/discord-webhook@v5.3.0
+ with:
+ webhook-url: ${{ secrets.DISCORD_WEBHOOK_URL }}
+ content: |
+ 📣 Zed ${{ github.event.release.name }} was just released!
+
+ Restart your Zed or head to https://zed.dev/releases to grab it.
+
+ ```md
+ ### Changelog
+
+ ${{ github.event.release.body }}
+ ```
@@ -959,6 +959,7 @@ dependencies = [
"async-recursion",
"async-tungstenite",
"collections",
+ "db",
"futures",
"gpui",
"image",
@@ -969,13 +970,16 @@ dependencies = [
"postage",
"rand 0.8.5",
"rpc",
+ "serde",
"smol",
"sum_tree",
+ "tempfile",
"thiserror",
"time 0.3.11",
"tiny_http",
"url",
"util",
+ "uuid 1.1.2",
]
[[package]]
@@ -1042,6 +1046,7 @@ dependencies = [
"env_logger",
"envy",
"futures",
+ "git",
"gpui",
"hyper",
"language",
@@ -1072,6 +1077,7 @@ dependencies = [
"tracing",
"tracing-log",
"tracing-subscriber",
+ "unindent",
"util",
"workspace",
]
@@ -1495,6 +1501,19 @@ dependencies = [
"matches",
]
+[[package]]
+name = "db"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "collections",
+ "gpui",
+ "parking_lot 0.11.2",
+ "rocksdb",
+ "tempdir",
+]
+
[[package]]
name = "deflate"
version = "0.8.6"
@@ -1672,6 +1691,7 @@ dependencies = [
"env_logger",
"futures",
"fuzzy",
+ "git",
"gpui",
"indoc",
"itertools",
@@ -1694,6 +1714,8 @@ dependencies = [
"text",
"theme",
"tree-sitter",
+ "tree-sitter-html",
+ "tree-sitter-javascript",
"tree-sitter-rust",
"unindent",
"util",
@@ -2199,6 +2221,39 @@ dependencies = [
"stable_deref_trait",
]
+[[package]]
+name = "git"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "async-trait",
+ "clock",
+ "collections",
+ "futures",
+ "git2",
+ "lazy_static",
+ "log",
+ "parking_lot 0.11.2",
+ "smol",
+ "sum_tree",
+ "text",
+ "unindent",
+ "util",
+]
+
+[[package]]
+name = "git2"
+version = "0.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2994bee4a3a6a51eb90c218523be382fd7ea09b16380b9312e9dbe955ff7c7d1"
+dependencies = [
+ "bitflags",
+ "libc",
+ "libgit2-sys",
+ "log",
+ "url",
+]
+
[[package]]
name = "glob"
version = "0.3.0"
@@ -2815,6 +2870,7 @@ dependencies = [
"env_logger",
"futures",
"fuzzy",
+ "git",
"gpui",
"lazy_static",
"log",
@@ -2834,6 +2890,8 @@ dependencies = [
"text",
"theme",
"tree-sitter",
+ "tree-sitter-html",
+ "tree-sitter-javascript",
"tree-sitter-json 0.19.0",
"tree-sitter-python",
"tree-sitter-rust",
@@ -2869,6 +2927,18 @@ version = "0.2.126"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836"
+[[package]]
+name = "libgit2-sys"
+version = "0.14.0+1.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "47a00859c70c8a4f7218e6d1cc32875c4b55f6799445b842b0d8ed5e4c3d959b"
+dependencies = [
+ "cc",
+ "libc",
+ "libz-sys",
+ "pkg-config",
+]
+
[[package]]
name = "libloading"
version = "0.7.3"
@@ -3941,9 +4011,11 @@ dependencies = [
"client",
"clock",
"collections",
+ "db",
"fsevent",
"futures",
"fuzzy",
+ "git",
"gpui",
"ignore",
"language",
@@ -5999,6 +6071,15 @@ dependencies = [
"tree-sitter",
]
+[[package]]
+name = "tree-sitter-css"
+version = "0.19.0"
+source = "git+https://github.com/tree-sitter/tree-sitter-css?rev=769203d0f9abe1a9a691ac2b9fe4bb4397a73c51#769203d0f9abe1a9a691ac2b9fe4bb4397a73c51"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
[[package]]
name = "tree-sitter-elixir"
version = "0.19.0"
@@ -6017,6 +6098,26 @@ dependencies = [
"tree-sitter",
]
+[[package]]
+name = "tree-sitter-html"
+version = "0.19.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "184e6b77953a354303dc87bf5fe36558c83569ce92606e7b382a0dc1b7443443"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
+[[package]]
+name = "tree-sitter-javascript"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2490fab08630b2c8943c320f7b63473cbf65511c8d83aec551beb9b4375906ed"
+dependencies = [
+ "cc",
+ "tree-sitter",
+]
+
[[package]]
name = "tree-sitter-json"
version = "0.19.0"
@@ -6306,6 +6407,8 @@ version = "0.1.0"
dependencies = [
"anyhow",
"futures",
+ "git2",
+ "lazy_static",
"log",
"rand 0.8.5",
"serde_json",
@@ -6326,6 +6429,9 @@ name = "uuid"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f"
+dependencies = [
+ "getrandom 0.2.7",
+]
[[package]]
name = "valuable"
@@ -7122,7 +7228,7 @@ dependencies = [
[[package]]
name = "zed"
-version = "0.55.0"
+version = "0.59.0"
dependencies = [
"activity_indicator",
"anyhow",
@@ -7198,8 +7304,10 @@ dependencies = [
"tree-sitter",
"tree-sitter-c",
"tree-sitter-cpp",
+ "tree-sitter-css",
"tree-sitter-elixir",
"tree-sitter-go",
+ "tree-sitter-html",
"tree-sitter-json 0.20.0",
"tree-sitter-markdown",
"tree-sitter-python",
@@ -74,6 +74,15 @@
"hard_tabs": false,
// How many columns a tab should occupy.
"tab_size": 4,
+ // Git gutter behavior configuration.
+ "git": {
+ // Control whether the git gutter is shown. May take 2 values:
+ // 1. Show the gutter
+ // "git_gutter": "tracked_files"
+ // 2. Hide the gutter
+ // "git_gutter": "hide"
+ "git_gutter": "tracked_files"
+ },
// Settings specific to the terminal
"terminal": {
// What shell to use when opening a terminal. May take 3 values:
@@ -12,6 +12,7 @@ test-support = ["collections/test-support", "gpui/test-support", "rpc/test-suppo
[dependencies]
collections = { path = "../collections" }
+db = { path = "../db" }
gpui = { path = "../gpui" }
util = { path = "../util" }
rpc = { path = "../rpc" }
@@ -31,7 +32,10 @@ smol = "1.2.5"
thiserror = "1.0.29"
time = { version = "0.3", features = ["serde", "serde-well-known"] }
tiny_http = "0.8"
+uuid = { version = "1.1.2", features = ["v4"] }
url = "2.2"
+serde = { version = "*", features = ["derive"] }
+tempfile = "3"
[dev-dependencies]
collections = { path = "../collections", features = ["test-support"] }
@@ -601,7 +601,7 @@ mod tests {
let user_id = 5;
let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client.clone());
+ let client = cx.update(|cx| Client::new(http_client.clone(), cx));
let server = FakeServer::for_client(user_id, &client, cx).await;
Channel::init(&client);
@@ -3,6 +3,7 @@ pub mod test;
pub mod channel;
pub mod http;
+pub mod telemetry;
pub mod user;
use anyhow::{anyhow, Context, Result};
@@ -11,10 +12,12 @@ use async_tungstenite::tungstenite::{
error::Error as WebsocketError,
http::{Request, StatusCode},
};
+use db::Db;
use futures::{future::LocalBoxFuture, FutureExt, SinkExt, StreamExt, TryStreamExt};
use gpui::{
- actions, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle, AnyWeakViewHandle, AsyncAppContext,
- Entity, ModelContext, ModelHandle, MutableAppContext, Task, View, ViewContext, ViewHandle,
+ actions, serde_json::Value, AnyModelHandle, AnyViewHandle, AnyWeakModelHandle,
+ AnyWeakViewHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
+ MutableAppContext, Task, View, ViewContext, ViewHandle,
};
use http::HttpClient;
use lazy_static::lazy_static;
@@ -28,9 +31,11 @@ use std::{
convert::TryFrom,
fmt::Write as _,
future::Future,
+ path::PathBuf,
sync::{Arc, Weak},
time::{Duration, Instant},
};
+use telemetry::Telemetry;
use thiserror::Error;
use url::Url;
use util::{ResultExt, TryFutureExt};
@@ -51,11 +56,16 @@ pub const ZED_SECRET_CLIENT_TOKEN: &str = "618033988749894";
actions!(client, [Authenticate]);
-pub fn init(rpc: Arc<Client>, cx: &mut MutableAppContext) {
- cx.add_global_action(move |_: &Authenticate, cx| {
- let rpc = rpc.clone();
- cx.spawn(|cx| async move { rpc.authenticate_and_connect(true, &cx).log_err().await })
+pub fn init(client: Arc<Client>, cx: &mut MutableAppContext) {
+ cx.add_global_action({
+ let client = client.clone();
+ move |_: &Authenticate, cx| {
+ let client = client.clone();
+ cx.spawn(
+ |cx| async move { client.authenticate_and_connect(true, &cx).log_err().await },
+ )
.detach();
+ }
});
}
@@ -63,6 +73,7 @@ pub struct Client {
id: usize,
peer: Arc<Peer>,
http: Arc<dyn HttpClient>,
+ telemetry: Arc<Telemetry>,
state: RwLock<ClientState>,
#[allow(clippy::type_complexity)]
@@ -232,10 +243,11 @@ impl Drop for Subscription {
}
impl Client {
- pub fn new(http: Arc<dyn HttpClient>) -> Arc<Self> {
+ pub fn new(http: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
Arc::new(Self {
id: 0,
peer: Peer::new(),
+ telemetry: Telemetry::new(http.clone(), cx),
http,
state: Default::default(),
@@ -339,6 +351,7 @@ impl Client {
}));
}
Status::SignedOut | Status::UpgradeRequired => {
+ self.telemetry.set_authenticated_user_info(None, false);
state._reconnect_task.take();
}
_ => {}
@@ -618,6 +631,9 @@ impl Client {
if credentials.is_none() && try_keychain {
credentials = read_credentials_from_keychain(cx);
read_from_keychain = credentials.is_some();
+ if read_from_keychain {
+ self.report_event("read credentials from keychain", Default::default());
+ }
}
if credentials.is_none() {
let mut status_rx = self.status();
@@ -901,6 +917,7 @@ impl Client {
) -> Task<Result<Credentials>> {
let platform = cx.platform();
let executor = cx.background();
+ let telemetry = self.telemetry.clone();
executor.clone().spawn(async move {
// Generate a pair of asymmetric encryption keys. The public key will be used by the
            // zed server to encrypt the user's access token, so that it can't be intercepted by
@@ -979,6 +996,8 @@ impl Client {
.context("failed to decrypt access token")?;
platform.activate(true);
+ telemetry.report_event("authenticate with browser", Default::default());
+
Ok(Credentials {
user_id: user_id.parse()?,
access_token,
@@ -1043,6 +1062,18 @@ impl Client {
log::debug!("rpc respond. client_id:{}. name:{}", self.id, T::NAME);
self.peer.respond_with_error(receipt, error)
}
+
+ pub fn start_telemetry(&self, db: Arc<Db>) {
+ self.telemetry.start(db);
+ }
+
+ pub fn report_event(&self, kind: &str, properties: Value) {
+ self.telemetry.report_event(kind, properties)
+ }
+
+ pub fn telemetry_log_file_path(&self) -> Option<PathBuf> {
+ self.telemetry.log_file_path()
+ }
}
impl AnyWeakEntityHandle {
@@ -1108,7 +1139,7 @@ mod tests {
cx.foreground().forbid_parking();
let user_id = 5;
- let client = Client::new(FakeHttpClient::with_404_response());
+ let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let server = FakeServer::for_client(user_id, &client, cx).await;
let mut status = client.status();
assert!(matches!(
@@ -1147,7 +1178,7 @@ mod tests {
let auth_count = Arc::new(Mutex::new(0));
let dropped_auth_count = Arc::new(Mutex::new(0));
- let client = Client::new(FakeHttpClient::with_404_response());
+ let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
client.override_authenticate({
let auth_count = auth_count.clone();
let dropped_auth_count = dropped_auth_count.clone();
@@ -1196,7 +1227,7 @@ mod tests {
cx.foreground().forbid_parking();
let user_id = 5;
- let client = Client::new(FakeHttpClient::with_404_response());
+ let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let server = FakeServer::for_client(user_id, &client, cx).await;
let (done_tx1, mut done_rx1) = smol::channel::unbounded();
@@ -1242,7 +1273,7 @@ mod tests {
cx.foreground().forbid_parking();
let user_id = 5;
- let client = Client::new(FakeHttpClient::with_404_response());
+ let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let server = FakeServer::for_client(user_id, &client, cx).await;
let model = cx.add_model(|_| Model::default());
@@ -1270,7 +1301,7 @@ mod tests {
cx.foreground().forbid_parking();
let user_id = 5;
- let client = Client::new(FakeHttpClient::with_404_response());
+ let client = cx.update(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let server = FakeServer::for_client(user_id, &client, cx).await;
let model = cx.add_model(|_| Model::default());
@@ -0,0 +1,283 @@
+use crate::http::HttpClient;
+use db::Db;
+use gpui::{
+ executor::Background,
+ serde_json::{self, value::Map, Value},
+ AppContext, Task,
+};
+use isahc::Request;
+use lazy_static::lazy_static;
+use parking_lot::Mutex;
+use serde::Serialize;
+use serde_json::json;
+use std::{
+ io::Write,
+ mem,
+ path::PathBuf,
+ sync::Arc,
+ time::{Duration, SystemTime, UNIX_EPOCH},
+};
+use tempfile::NamedTempFile;
+use util::{post_inc, ResultExt, TryFutureExt};
+use uuid::Uuid;
+
+pub struct Telemetry {
+ http_client: Arc<dyn HttpClient>,
+ executor: Arc<Background>,
+ session_id: u128,
+ state: Mutex<TelemetryState>,
+}
+
+#[derive(Default)]
+struct TelemetryState {
+ metrics_id: Option<Arc<str>>,
+ device_id: Option<Arc<str>>,
+ app_version: Option<Arc<str>>,
+ os_version: Option<Arc<str>>,
+ os_name: &'static str,
+ queue: Vec<AmplitudeEvent>,
+ next_event_id: usize,
+ flush_task: Option<Task<()>>,
+ log_file: Option<NamedTempFile>,
+}
+
+const AMPLITUDE_EVENTS_URL: &'static str = "https://api2.amplitude.com/batch";
+
+lazy_static! {
+ static ref AMPLITUDE_API_KEY: Option<String> = std::env::var("ZED_AMPLITUDE_API_KEY")
+ .ok()
+ .or_else(|| option_env!("ZED_AMPLITUDE_API_KEY").map(|key| key.to_string()));
+}
+
+#[derive(Serialize)]
+struct AmplitudeEventBatch {
+ api_key: &'static str,
+ events: Vec<AmplitudeEvent>,
+}
+
+#[derive(Serialize)]
+struct AmplitudeEvent {
+ #[serde(skip_serializing_if = "Option::is_none")]
+ user_id: Option<Arc<str>>,
+ device_id: Option<Arc<str>>,
+ event_type: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ event_properties: Option<Map<String, Value>>,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ user_properties: Option<Map<String, Value>>,
+ os_name: &'static str,
+ os_version: Option<Arc<str>>,
+ app_version: Option<Arc<str>>,
+ platform: &'static str,
+ event_id: usize,
+ session_id: u128,
+ time: u128,
+}
+
+#[cfg(debug_assertions)]
+const MAX_QUEUE_LEN: usize = 1;
+
+#[cfg(not(debug_assertions))]
+const MAX_QUEUE_LEN: usize = 10;
+
+#[cfg(debug_assertions)]
+const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(1);
+
+#[cfg(not(debug_assertions))]
+const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30);
+
+impl Telemetry {
+ pub fn new(client: Arc<dyn HttpClient>, cx: &AppContext) -> Arc<Self> {
+ let platform = cx.platform();
+ let this = Arc::new(Self {
+ http_client: client,
+ executor: cx.background().clone(),
+ session_id: SystemTime::now()
+ .duration_since(UNIX_EPOCH)
+ .unwrap()
+ .as_millis(),
+ state: Mutex::new(TelemetryState {
+ os_version: platform
+ .os_version()
+ .log_err()
+ .map(|v| v.to_string().into()),
+ os_name: platform.os_name().into(),
+ app_version: platform
+ .app_version()
+ .log_err()
+ .map(|v| v.to_string().into()),
+ device_id: None,
+ queue: Default::default(),
+ flush_task: Default::default(),
+ next_event_id: 0,
+ log_file: None,
+ metrics_id: None,
+ }),
+ });
+
+ if AMPLITUDE_API_KEY.is_some() {
+ this.executor
+ .spawn({
+ let this = this.clone();
+ async move {
+ if let Some(tempfile) = NamedTempFile::new().log_err() {
+ this.state.lock().log_file = Some(tempfile);
+ }
+ }
+ })
+ .detach();
+ }
+
+ this
+ }
+
+ pub fn log_file_path(&self) -> Option<PathBuf> {
+ Some(self.state.lock().log_file.as_ref()?.path().to_path_buf())
+ }
+
+ pub fn start(self: &Arc<Self>, db: Arc<Db>) {
+ let this = self.clone();
+ self.executor
+ .spawn(
+ async move {
+ let device_id = if let Some(device_id) = db
+ .read(["device_id"])?
+ .into_iter()
+ .flatten()
+ .next()
+ .and_then(|bytes| String::from_utf8(bytes).ok())
+ {
+ device_id
+ } else {
+ let device_id = Uuid::new_v4().to_string();
+ db.write([("device_id", device_id.as_bytes())])?;
+ device_id
+ };
+
+ let device_id = Some(Arc::from(device_id));
+ let mut state = this.state.lock();
+ state.device_id = device_id.clone();
+ for event in &mut state.queue {
+ event.device_id = device_id.clone();
+ }
+ if !state.queue.is_empty() {
+ drop(state);
+ this.flush();
+ }
+
+ anyhow::Ok(())
+ }
+ .log_err(),
+ )
+ .detach();
+ }
+
+ pub fn set_authenticated_user_info(
+ self: &Arc<Self>,
+ metrics_id: Option<String>,
+ is_staff: bool,
+ ) {
+ let is_signed_in = metrics_id.is_some();
+ self.state.lock().metrics_id = metrics_id.map(|s| s.into());
+ if is_signed_in {
+ self.report_event_with_user_properties(
+ "$identify",
+ Default::default(),
+ json!({ "$set": { "staff": is_staff } }),
+ )
+ }
+ }
+
+ pub fn report_event(self: &Arc<Self>, kind: &str, properties: Value) {
+ self.report_event_with_user_properties(kind, properties, Default::default());
+ }
+
+ fn report_event_with_user_properties(
+ self: &Arc<Self>,
+ kind: &str,
+ properties: Value,
+ user_properties: Value,
+ ) {
+ if AMPLITUDE_API_KEY.is_none() {
+ return;
+ }
+
+ let mut state = self.state.lock();
+ let event = AmplitudeEvent {
+ event_type: kind.to_string(),
+ time: SystemTime::now()
+ .duration_since(UNIX_EPOCH)
+ .unwrap()
+ .as_millis(),
+ session_id: self.session_id,
+ event_properties: if let Value::Object(properties) = properties {
+ Some(properties)
+ } else {
+ None
+ },
+ user_properties: if let Value::Object(user_properties) = user_properties {
+ Some(user_properties)
+ } else {
+ None
+ },
+ user_id: state.metrics_id.clone(),
+ device_id: state.device_id.clone(),
+ os_name: state.os_name,
+ platform: "Zed",
+ os_version: state.os_version.clone(),
+ app_version: state.app_version.clone(),
+ event_id: post_inc(&mut state.next_event_id),
+ };
+ state.queue.push(event);
+ if state.device_id.is_some() {
+ if state.queue.len() >= MAX_QUEUE_LEN {
+ drop(state);
+ self.flush();
+ } else {
+ let this = self.clone();
+ let executor = self.executor.clone();
+ state.flush_task = Some(self.executor.spawn(async move {
+ executor.timer(DEBOUNCE_INTERVAL).await;
+ this.flush();
+ }));
+ }
+ }
+ }
+
+ fn flush(self: &Arc<Self>) {
+ let mut state = self.state.lock();
+ let events = mem::take(&mut state.queue);
+ state.flush_task.take();
+ drop(state);
+
+ if let Some(api_key) = AMPLITUDE_API_KEY.as_ref() {
+ let this = self.clone();
+ self.executor
+ .spawn(
+ async move {
+ let mut json_bytes = Vec::new();
+
+ if let Some(file) = &mut this.state.lock().log_file {
+ let file = file.as_file_mut();
+ for event in &events {
+ json_bytes.clear();
+ serde_json::to_writer(&mut json_bytes, event)?;
+ file.write_all(&json_bytes)?;
+ file.write(b"\n")?;
+ }
+ }
+
+ let batch = AmplitudeEventBatch { api_key, events };
+ json_bytes.clear();
+ serde_json::to_writer(&mut json_bytes, &batch)?;
+ let request =
+ Request::post(AMPLITUDE_EVENTS_URL).body(json_bytes.into())?;
+ this.http_client.send(request).await?;
+ Ok(())
+ }
+ .log_err(),
+ )
+ .detach();
+ }
+ }
+}
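The constants above give telemetry a simple batching policy: in release builds, events flush as soon as the queue reaches ten entries, otherwise a 30-second debounce timer flushes whatever has accumulated (debug builds drop both limits to one event and one second). A standalone sketch of that decision, assuming an illustrative `Queue` type; the real code arms the timer as a `Task` on the background executor:

```rust
use std::time::{Duration, Instant};

// Release-build values from telemetry.rs above.
const MAX_QUEUE_LEN: usize = 10;
const DEBOUNCE_INTERVAL: Duration = Duration::from_secs(30);

/// Illustrative stand-in for the event queue held in `TelemetryState`.
struct Queue<E> {
    events: Vec<E>,
    flush_deadline: Option<Instant>,
}

impl<E> Queue<E> {
    /// Returns true when the caller should flush immediately; otherwise the
    /// caller should (re)arm a timer for `flush_deadline`.
    fn push(&mut self, event: E) -> bool {
        self.events.push(event);
        if self.events.len() >= MAX_QUEUE_LEN {
            self.flush_deadline = None;
            true
        } else {
            self.flush_deadline = Some(Instant::now() + DEBOUNCE_INTERVAL);
            false
        }
    }
}

fn main() {
    let mut queue = Queue { events: Vec::new(), flush_deadline: None };
    for n in 1..=MAX_QUEUE_LEN {
        let flush_now = queue.push(n);
        println!("queued {n} event(s), flush now: {flush_now}");
    }
}
```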
@@ -6,7 +6,10 @@ use anyhow::{anyhow, Result};
use futures::{future::BoxFuture, stream::BoxStream, Future, StreamExt};
use gpui::{executor, ModelHandle, TestAppContext};
use parking_lot::Mutex;
-use rpc::{proto, ConnectionId, Peer, Receipt, TypedEnvelope};
+use rpc::{
+ proto::{self, GetPrivateUserInfo, GetPrivateUserInfoResponse},
+ ConnectionId, Peer, Receipt, TypedEnvelope,
+};
use std::{fmt, rc::Rc, sync::Arc};
pub struct FakeServer {
@@ -93,6 +96,7 @@ impl FakeServer {
.authenticate_and_connect(false, &cx.to_async())
.await
.unwrap();
+
server
}
@@ -126,26 +130,45 @@ impl FakeServer {
#[allow(clippy::await_holding_lock)]
pub async fn receive<M: proto::EnvelopedMessage>(&self) -> Result<TypedEnvelope<M>> {
self.executor.start_waiting();
- let message = self
- .state
- .lock()
- .incoming
- .as_mut()
- .expect("not connected")
- .next()
- .await
- .ok_or_else(|| anyhow!("other half hung up"))?;
- self.executor.finish_waiting();
- let type_name = message.payload_type_name();
- Ok(*message
- .into_any()
- .downcast::<TypedEnvelope<M>>()
- .unwrap_or_else(|_| {
- panic!(
- "fake server received unexpected message type: {:?}",
- type_name
- );
- }))
+
+ loop {
+ let message = self
+ .state
+ .lock()
+ .incoming
+ .as_mut()
+ .expect("not connected")
+ .next()
+ .await
+ .ok_or_else(|| anyhow!("other half hung up"))?;
+ self.executor.finish_waiting();
+ let type_name = message.payload_type_name();
+ let message = message.into_any();
+
+ if message.is::<TypedEnvelope<M>>() {
+ return Ok(*message.downcast().unwrap());
+ }
+
+ if message.is::<TypedEnvelope<GetPrivateUserInfo>>() {
+ self.respond(
+ message
+ .downcast::<TypedEnvelope<GetPrivateUserInfo>>()
+ .unwrap()
+ .receipt(),
+ GetPrivateUserInfoResponse {
+ metrics_id: "the-metrics-id".into(),
+ staff: false,
+ },
+ )
+ .await;
+ continue;
+ }
+
+ panic!(
+ "fake server received unexpected message type: {:?}",
+ type_name
+ );
+ }
}
pub async fn respond<T: proto::RequestMessage>(
@@ -135,10 +135,21 @@ impl UserStore {
match status {
Status::Connected { .. } => {
if let Some((this, user_id)) = this.upgrade(&cx).zip(client.user_id()) {
- let user = this
+ let fetch_user = this
.update(&mut cx, |this, cx| this.get_user(user_id, cx))
- .log_err()
- .await;
+ .log_err();
+ let fetch_metrics_id =
+ client.request(proto::GetPrivateUserInfo {}).log_err();
+ let (user, info) = futures::join!(fetch_user, fetch_metrics_id);
+ if let Some(info) = info {
+ client.telemetry.set_authenticated_user_info(
+ Some(info.metrics_id),
+ info.staff,
+ );
+ } else {
+ client.telemetry.set_authenticated_user_info(None, false);
+ }
+ client.telemetry.report_event("sign in", Default::default());
current_user_tx.send(user).await.ok();
}
}
@@ -1,5 +1,5 @@
[package]
-authors = ["Nathan Sobo <nathan@warp.dev>"]
+authors = ["Nathan Sobo <nathan@zed.dev>"]
default-run = "collab"
edition = "2021"
name = "collab"
@@ -26,6 +26,7 @@ base64 = "0.13"
clap = { version = "3.1", features = ["derive"], optional = true }
envy = "0.4.2"
futures = "0.3"
+git = { path = "../git" }
hyper = "0.14"
lazy_static = "1.4"
lipsum = { version = "0.8", optional = true }
@@ -66,11 +67,13 @@ rpc = { path = "../rpc", features = ["test-support"] }
settings = { path = "../settings", features = ["test-support"] }
theme = { path = "../theme" }
workspace = { path = "../workspace", features = ["test-support"] }
+git = { path = "../git", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.9"
util = { path = "../util" }
lazy_static = "1.4"
serde_json = { version = "1.0", features = ["preserve_order"] }
+unindent = "0.1"
[features]
seed-support = ["clap", "lipsum", "reqwest"]
@@ -0,0 +1,27 @@
+CREATE TABLE IF NOT EXISTS "signups" (
+ "id" SERIAL PRIMARY KEY,
+ "email_address" VARCHAR NOT NULL,
+ "email_confirmation_code" VARCHAR(64) NOT NULL,
+ "email_confirmation_sent" BOOLEAN NOT NULL,
+ "created_at" TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "device_id" VARCHAR,
+ "user_id" INTEGER REFERENCES users (id) ON DELETE CASCADE,
+ "inviting_user_id" INTEGER REFERENCES users (id) ON DELETE SET NULL,
+
+ "platform_mac" BOOLEAN NOT NULL,
+ "platform_linux" BOOLEAN NOT NULL,
+ "platform_windows" BOOLEAN NOT NULL,
+ "platform_unknown" BOOLEAN NOT NULL,
+
+ "editor_features" VARCHAR[],
+ "programming_languages" VARCHAR[]
+);
+
+CREATE UNIQUE INDEX "index_signups_on_email_address" ON "signups" ("email_address");
+CREATE INDEX "index_signups_on_email_confirmation_sent" ON "signups" ("email_confirmation_sent");
+
+ALTER TABLE "users"
+ ADD "github_user_id" INTEGER;
+
+CREATE INDEX "index_users_on_email_address" ON "users" ("email_address");
+CREATE INDEX "index_users_on_github_user_id" ON "users" ("github_user_id");
@@ -0,0 +1,2 @@
+ALTER TABLE "users"
+ ADD "metrics_id" uuid NOT NULL DEFAULT gen_random_uuid();
@@ -1,6 +1,6 @@
use crate::{
auth,
- db::{ProjectId, User, UserId},
+ db::{Invite, NewUserParams, ProjectId, Signup, User, UserId, WaitlistSummary},
rpc::{self, ResultExt},
AppState, Error, Result,
};
@@ -24,13 +24,10 @@ use tracing::instrument;
pub fn routes(rpc_server: &Arc<rpc::Server>, state: Arc<AppState>) -> Router<Body> {
Router::new()
+ .route("/user", get(get_authenticated_user))
.route("/users", get(get_users).post(create_user))
- .route(
- "/users/:id",
- put(update_user).delete(destroy_user).get(get_user),
- )
+ .route("/users/:id", put(update_user).delete(destroy_user))
.route("/users/:id/access_tokens", post(create_access_token))
- .route("/bulk_users", post(create_users))
.route("/users_with_no_invites", get(get_users_with_no_invites))
.route("/invite_codes/:code", get(get_user_for_invite_code))
.route("/panic", post(trace_panic))
@@ -45,6 +42,11 @@ pub fn routes(rpc_server: &Arc<rpc::Server>, state: Arc<AppState>) -> Router<Bod
)
.route("/user_activity/counts", get(get_active_user_counts))
.route("/project_metadata", get(get_project_metadata))
+ .route("/signups", post(create_signup))
+ .route("/signups_summary", get(get_waitlist_summary))
+ .route("/user_invites", post(create_invite_from_code))
+ .route("/unsent_invites", get(get_unsent_invites))
+ .route("/sent_invites", post(record_sent_invites))
.layer(
ServiceBuilder::new()
.layer(Extension(state))
@@ -84,6 +86,31 @@ pub async fn validate_api_token<B>(req: Request<B>, next: Next<B>) -> impl IntoR
Ok::<_, Error>(next.run(req).await)
}
+#[derive(Debug, Deserialize)]
+struct AuthenticatedUserParams {
+ github_user_id: i32,
+ github_login: String,
+}
+
+#[derive(Debug, Serialize)]
+struct AuthenticatedUserResponse {
+ user: User,
+ metrics_id: String,
+}
+
+async fn get_authenticated_user(
+ Query(params): Query<AuthenticatedUserParams>,
+ Extension(app): Extension<Arc<AppState>>,
+) -> Result<Json<AuthenticatedUserResponse>> {
+ let user = app
+ .db
+ .get_user_by_github_account(&params.github_login, Some(params.github_user_id))
+ .await?
+ .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "user not found".into()))?;
+ let metrics_id = app.db.get_user_metrics_id(user.id).await?;
+ return Ok(Json(AuthenticatedUserResponse { user, metrics_id }));
+}
+
#[derive(Debug, Deserialize)]
struct GetUsersQueryParams {
query: Option<String>,
@@ -108,48 +135,76 @@ async fn get_users(
#[derive(Deserialize, Debug)]
struct CreateUserParams {
+ github_user_id: i32,
github_login: String,
- invite_code: Option<String>,
- email_address: Option<String>,
+ email_address: String,
+ email_confirmation_code: Option<String>,
+ #[serde(default)]
admin: bool,
+ #[serde(default)]
+ invite_count: i32,
+}
+
+#[derive(Serialize, Debug)]
+struct CreateUserResponse {
+ user: User,
+ signup_device_id: Option<String>,
+ metrics_id: String,
}
async fn create_user(
Json(params): Json<CreateUserParams>,
Extension(app): Extension<Arc<AppState>>,
Extension(rpc_server): Extension<Arc<rpc::Server>>,
-) -> Result<Json<User>> {
- let user_id = if let Some(invite_code) = params.invite_code {
- let invitee_id = app
- .db
- .redeem_invite_code(
- &invite_code,
- ¶ms.github_login,
- params.email_address.as_deref(),
- )
- .await?;
- rpc_server
- .invite_code_redeemed(&invite_code, invitee_id)
- .await
- .trace_err();
- invitee_id
- } else {
+) -> Result<Json<CreateUserResponse>> {
+ let user = NewUserParams {
+ github_login: params.github_login,
+ github_user_id: params.github_user_id,
+ invite_count: params.invite_count,
+ };
+
+ // Creating a user via the normal signup process
+ let result = if let Some(email_confirmation_code) = params.email_confirmation_code {
app.db
- .create_user(
- ¶ms.github_login,
- params.email_address.as_deref(),
- params.admin,
+ .create_user_from_invite(
+ &Invite {
+ email_address: params.email_address,
+ email_confirmation_code,
+ },
+ user,
)
.await?
+ }
+ // Creating a user as an admin
+ else if params.admin {
+ app.db
+ .create_user(¶ms.email_address, false, user)
+ .await?
+ } else {
+ Err(Error::Http(
+ StatusCode::UNPROCESSABLE_ENTITY,
+ "email confirmation code is required".into(),
+ ))?
};
+ if let Some(inviter_id) = result.inviting_user_id {
+ rpc_server
+ .invite_code_redeemed(inviter_id, result.user_id)
+ .await
+ .trace_err();
+ }
+
let user = app
.db
- .get_user_by_id(user_id)
+ .get_user_by_id(result.user_id)
.await?
.ok_or_else(|| anyhow!("couldn't find the user we just created"))?;
- Ok(Json(user))
+ Ok(Json(CreateUserResponse {
+ user,
+ metrics_id: result.metrics_id,
+ signup_device_id: result.signup_device_id,
+ }))
}
#[derive(Deserialize)]
@@ -171,7 +226,9 @@ async fn update_user(
}
if let Some(invite_count) = params.invite_count {
- app.db.set_invite_count(user_id, invite_count).await?;
+ app.db
+ .set_invite_count_for_user(user_id, invite_count)
+ .await?;
rpc_server.invite_count_updated(user_id).await.trace_err();
}
@@ -186,54 +243,6 @@ async fn destroy_user(
Ok(())
}
-async fn get_user(
- Path(login): Path<String>,
- Extension(app): Extension<Arc<AppState>>,
-) -> Result<Json<User>> {
- let user = app
- .db
- .get_user_by_github_login(&login)
- .await?
- .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "User not found".to_string()))?;
- Ok(Json(user))
-}
-
-#[derive(Deserialize)]
-struct CreateUsersParams {
- users: Vec<CreateUsersEntry>,
-}
-
-#[derive(Deserialize)]
-struct CreateUsersEntry {
- github_login: String,
- email_address: String,
- invite_count: usize,
-}
-
-async fn create_users(
- Json(params): Json<CreateUsersParams>,
- Extension(app): Extension<Arc<AppState>>,
-) -> Result<Json<Vec<User>>> {
- let user_ids = app
- .db
- .create_users(
- params
- .users
- .into_iter()
- .map(|params| {
- (
- params.github_login,
- params.email_address,
- params.invite_count,
- )
- })
- .collect(),
- )
- .await?;
- let users = app.db.get_users_by_ids(user_ids).await?;
- Ok(Json(users))
-}
-
#[derive(Debug, Deserialize)]
struct GetUsersWithNoInvites {
invited_by_another_user: bool,
@@ -368,22 +377,24 @@ struct CreateAccessTokenResponse {
}
async fn create_access_token(
- Path(login): Path<String>,
+ Path(user_id): Path<UserId>,
Query(params): Query<CreateAccessTokenQueryParams>,
Extension(app): Extension<Arc<AppState>>,
) -> Result<Json<CreateAccessTokenResponse>> {
- // request.require_token().await?;
-
let user = app
.db
- .get_user_by_github_login(&login)
+ .get_user_by_id(user_id)
.await?
.ok_or_else(|| anyhow!("user not found"))?;
let mut user_id = user.id;
if let Some(impersonate) = params.impersonate {
if user.admin {
- if let Some(impersonated_user) = app.db.get_user_by_github_login(&impersonate).await? {
+ if let Some(impersonated_user) = app
+ .db
+ .get_user_by_github_account(&impersonate, None)
+ .await?
+ {
user_id = impersonated_user.id;
} else {
return Err(Error::Http(
@@ -415,3 +426,59 @@ async fn get_user_for_invite_code(
) -> Result<Json<User>> {
Ok(Json(app.db.get_user_for_invite_code(&code).await?))
}
+
+async fn create_signup(
+ Json(params): Json<Signup>,
+ Extension(app): Extension<Arc<AppState>>,
+) -> Result<()> {
+ app.db.create_signup(params).await?;
+ Ok(())
+}
+
+async fn get_waitlist_summary(
+ Extension(app): Extension<Arc<AppState>>,
+) -> Result<Json<WaitlistSummary>> {
+ Ok(Json(app.db.get_waitlist_summary().await?))
+}
+
+#[derive(Deserialize)]
+pub struct CreateInviteFromCodeParams {
+ invite_code: String,
+ email_address: String,
+ device_id: Option<String>,
+}
+
+async fn create_invite_from_code(
+ Json(params): Json<CreateInviteFromCodeParams>,
+ Extension(app): Extension<Arc<AppState>>,
+) -> Result<Json<Invite>> {
+ Ok(Json(
+ app.db
+ .create_invite_from_code(
+ &params.invite_code,
+ &params.email_address,
+ params.device_id.as_deref(),
+ )
+ .await?,
+ ))
+}
+
+#[derive(Deserialize)]
+pub struct GetUnsentInvitesParams {
+ pub count: usize,
+}
+
+async fn get_unsent_invites(
+ Query(params): Query<GetUnsentInvitesParams>,
+ Extension(app): Extension<Arc<AppState>>,
+) -> Result<Json<Vec<Invite>>> {
+ Ok(Json(app.db.get_unsent_invites(params.count).await?))
+}
+
+async fn record_sent_invites(
+ Json(params): Json<Vec<Invite>>,
+ Extension(app): Extension<Arc<AppState>>,
+) -> Result<()> {
+ app.db.record_sent_invites(&params).await?;
+ Ok(())
+}
@@ -11,7 +11,7 @@ mod db;
#[derive(Debug, Deserialize)]
struct GitHubUser {
- id: usize,
+ id: i32,
login: String,
email: Option<String>,
}
@@ -26,8 +26,11 @@ async fn main() {
let github_token = std::env::var("GITHUB_TOKEN").expect("missing GITHUB_TOKEN env var");
let client = reqwest::Client::new();
- let current_user =
+ let mut current_user =
fetch_github::<GitHubUser>(&client, &github_token, "https://api.github.com/user").await;
+ current_user
+ .email
+ .get_or_insert_with(|| "placeholder@example.com".to_string());
let staff_users = fetch_github::<Vec<GitHubUser>>(
&client,
&github_token,
@@ -64,16 +67,24 @@ async fn main() {
let mut zed_user_ids = Vec::<UserId>::new();
for (github_user, admin) in zed_users {
if let Some(user) = db
- .get_user_by_github_login(&github_user.login)
+ .get_user_by_github_account(&github_user.login, Some(github_user.id))
.await
.expect("failed to fetch user")
{
zed_user_ids.push(user.id);
- } else {
+ } else if let Some(email) = &github_user.email {
zed_user_ids.push(
- db.create_user(&github_user.login, github_user.email.as_deref(), admin)
- .await
- .expect("failed to insert user"),
+ db.create_user(
+ email,
+ admin,
+ db::NewUserParams {
+ github_login: github_user.login,
+ github_user_id: github_user.id,
+ invite_count: 5,
+ },
+ )
+ .await
+ .expect("failed to insert user"),
);
}
}
@@ -1,5 +1,3 @@
-use std::{cmp, ops::Range, time::Duration};
-
use crate::{Error, Result};
use anyhow::{anyhow, Context};
use async_trait::async_trait;
@@ -8,37 +6,52 @@ use collections::HashMap;
use futures::StreamExt;
use serde::{Deserialize, Serialize};
pub use sqlx::postgres::PgPoolOptions as DbOptions;
-use sqlx::{types::Uuid, FromRow, QueryBuilder, Row};
+use sqlx::{types::Uuid, FromRow, QueryBuilder};
+use std::{cmp, ops::Range, time::Duration};
use time::{OffsetDateTime, PrimitiveDateTime};
#[async_trait]
pub trait Db: Send + Sync {
async fn create_user(
&self,
- github_login: &str,
- email_address: Option<&str>,
+ email_address: &str,
admin: bool,
- ) -> Result<UserId>;
+ params: NewUserParams,
+ ) -> Result<NewUserResult>;
async fn get_all_users(&self, page: u32, limit: u32) -> Result<Vec<User>>;
- async fn create_users(&self, users: Vec<(String, String, usize)>) -> Result<Vec<UserId>>;
async fn fuzzy_search_users(&self, query: &str, limit: u32) -> Result<Vec<User>>;
async fn get_user_by_id(&self, id: UserId) -> Result<Option<User>>;
+ async fn get_user_metrics_id(&self, id: UserId) -> Result<String>;
async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<User>>;
async fn get_users_with_no_invites(&self, invited_by_another_user: bool) -> Result<Vec<User>>;
- async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>>;
+ async fn get_user_by_github_account(
+ &self,
+ github_login: &str,
+ github_user_id: Option<i32>,
+ ) -> Result<Option<User>>;
async fn set_user_is_admin(&self, id: UserId, is_admin: bool) -> Result<()>;
async fn set_user_connected_once(&self, id: UserId, connected_once: bool) -> Result<()>;
async fn destroy_user(&self, id: UserId) -> Result<()>;
- async fn set_invite_count(&self, id: UserId, count: u32) -> Result<()>;
+ async fn set_invite_count_for_user(&self, id: UserId, count: u32) -> Result<()>;
async fn get_invite_code_for_user(&self, id: UserId) -> Result<Option<(String, u32)>>;
async fn get_user_for_invite_code(&self, code: &str) -> Result<User>;
- async fn redeem_invite_code(
+ async fn create_invite_from_code(
&self,
code: &str,
- login: &str,
- email_address: Option<&str>,
- ) -> Result<UserId>;
+ email_address: &str,
+ device_id: Option<&str>,
+ ) -> Result<Invite>;
+
+ async fn create_signup(&self, signup: Signup) -> Result<()>;
+ async fn get_waitlist_summary(&self) -> Result<WaitlistSummary>;
+ async fn get_unsent_invites(&self, count: usize) -> Result<Vec<Invite>>;
+ async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()>;
+ async fn create_user_from_invite(
+ &self,
+ invite: &Invite,
+ user: NewUserParams,
+ ) -> Result<NewUserResult>;
/// Registers a new project for the given user.
async fn register_project(&self, host_user_id: UserId) -> Result<ProjectId>;
@@ -115,8 +128,8 @@ pub trait Db: Send + Sync {
max_access_token_count: usize,
) -> Result<()>;
async fn get_access_token_hashes(&self, user_id: UserId) -> Result<Vec<String>>;
- #[cfg(any(test, feature = "seed-support"))]
+ #[cfg(any(test, feature = "seed-support"))]
async fn find_org_by_slug(&self, slug: &str) -> Result<Option<Org>>;
#[cfg(any(test, feature = "seed-support"))]
async fn create_org(&self, name: &str, slug: &str) -> Result<OrgId>;
@@ -130,6 +143,7 @@ pub trait Db: Send + Sync {
async fn get_accessible_channels(&self, user_id: UserId) -> Result<Vec<Channel>>;
async fn can_user_access_channel(&self, user_id: UserId, channel_id: ChannelId)
-> Result<bool>;
+
#[cfg(any(test, feature = "seed-support"))]
async fn add_channel_member(
&self,
@@ -151,10 +165,12 @@ pub trait Db: Send + Sync {
count: usize,
before_id: Option<MessageId>,
) -> Result<Vec<ChannelMessage>>;
+
#[cfg(test)]
async fn teardown(&self, url: &str);
+
#[cfg(test)]
- fn as_fake(&self) -> Option<&tests::FakeDb>;
+ fn as_fake(&self) -> Option<&FakeDb>;
}
pub struct PostgresDb {
@@ -170,6 +186,18 @@ impl PostgresDb {
.context("failed to connect to postgres database")?;
Ok(Self { pool })
}
+
+ pub fn fuzzy_like_string(string: &str) -> String {
+ let mut result = String::with_capacity(string.len() * 2 + 1);
+ for c in string.chars() {
+ if c.is_alphanumeric() {
+ result.push('%');
+ result.push(c);
+ }
+ }
+ result.push('%');
+ result
+ }
}
#[async_trait]
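`fuzzy_like_string`, moved onto `PostgresDb` above, interleaves `%` wildcards between the alphanumeric characters of the search query, producing a SQL `LIKE` pattern that matches any login containing those characters in order. Copied out here so the behavior can be checked standalone:

```rust
fn fuzzy_like_string(string: &str) -> String {
    let mut result = String::with_capacity(string.len() * 2 + 1);
    for c in string.chars() {
        if c.is_alphanumeric() {
            result.push('%');
            result.push(c);
        }
    }
    result.push('%');
    result
}

fn main() {
    // Punctuation and whitespace are dropped; only alphanumerics survive.
    assert_eq!(fuzzy_like_string("max b."), "%m%a%x%b%");
    println!("ok");
}
```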
@@ -178,23 +206,29 @@ impl Db for PostgresDb {
async fn create_user(
&self,
- github_login: &str,
- email_address: Option<&str>,
+ email_address: &str,
admin: bool,
- ) -> Result<UserId> {
+ params: NewUserParams,
+ ) -> Result<NewUserResult> {
let query = "
- INSERT INTO users (github_login, email_address, admin)
- VALUES ($1, $2, $3)
+ INSERT INTO users (email_address, github_login, github_user_id, admin)
+ VALUES ($1, $2, $3, $4)
ON CONFLICT (github_login) DO UPDATE SET github_login = excluded.github_login
- RETURNING id
+ RETURNING id, metrics_id::text
";
- Ok(sqlx::query_scalar(query)
- .bind(github_login)
+ let (user_id, metrics_id): (UserId, String) = sqlx::query_as(query)
.bind(email_address)
+ .bind(params.github_login)
+ .bind(params.github_user_id)
.bind(admin)
.fetch_one(&self.pool)
- .await
- .map(UserId)?)
+ .await?;
+ Ok(NewUserResult {
+ user_id,
+ metrics_id,
+ signup_device_id: None,
+ inviting_user_id: None,
+ })
}
async fn get_all_users(&self, page: u32, limit: u32) -> Result<Vec<User>> {
@@ -206,43 +240,8 @@ impl Db for PostgresDb {
.await?)
}
- async fn create_users(&self, users: Vec<(String, String, usize)>) -> Result<Vec<UserId>> {
- let mut query = QueryBuilder::new(
- "INSERT INTO users (github_login, email_address, admin, invite_code, invite_count)",
- );
- query.push_values(
- users,
- |mut query, (github_login, email_address, invite_count)| {
- query
- .push_bind(github_login)
- .push_bind(email_address)
- .push_bind(false)
- .push_bind(random_invite_code())
- .push_bind(invite_count as i32);
- },
- );
- query.push(
- "
- ON CONFLICT (github_login) DO UPDATE SET
- github_login = excluded.github_login,
- invite_count = excluded.invite_count,
- invite_code = CASE WHEN users.invite_code IS NULL
- THEN excluded.invite_code
- ELSE users.invite_code
- END
- RETURNING id
- ",
- );
-
- let rows = query.build().fetch_all(&self.pool).await?;
- Ok(rows
- .into_iter()
- .filter_map(|row| row.try_get::<UserId, _>(0).ok())
- .collect())
- }
-
async fn fuzzy_search_users(&self, name_query: &str, limit: u32) -> Result<Vec<User>> {
- let like_string = fuzzy_like_string(name_query);
+ let like_string = Self::fuzzy_like_string(name_query);
let query = "
SELECT users.*
FROM users
@@ -263,6 +262,18 @@ impl Db for PostgresDb {
Ok(users.into_iter().next())
}
+ async fn get_user_metrics_id(&self, id: UserId) -> Result<String> {
+ let query = "
+ SELECT metrics_id::text
+ FROM users
+ WHERE id = $1
+ ";
+ Ok(sqlx::query_scalar(query)
+ .bind(id)
+ .fetch_one(&self.pool)
+ .await?)
+ }
+
async fn get_users_by_ids(&self, ids: Vec<UserId>) -> Result<Vec<User>> {
let ids = ids.into_iter().map(|id| id.0).collect::<Vec<_>>();
let query = "
@@ -290,12 +301,53 @@ impl Db for PostgresDb {
Ok(sqlx::query_as(&query).fetch_all(&self.pool).await?)
}
- async fn get_user_by_github_login(&self, github_login: &str) -> Result<Option<User>> {
- let query = "SELECT * FROM users WHERE github_login = $1 LIMIT 1";
- Ok(sqlx::query_as(query)
+ async fn get_user_by_github_account(
+ &self,
+ github_login: &str,
+ github_user_id: Option<i32>,
+ ) -> Result<Option<User>> {
+ if let Some(github_user_id) = github_user_id {
+ let mut user = sqlx::query_as::<_, User>(
+ "
+ UPDATE users
+ SET github_login = $1
+ WHERE github_user_id = $2
+ RETURNING *
+ ",
+ )
+ .bind(github_login)
+ .bind(github_user_id)
+ .fetch_optional(&self.pool)
+ .await?;
+
+ if user.is_none() {
+ user = sqlx::query_as::<_, User>(
+ "
+ UPDATE users
+ SET github_user_id = $1
+ WHERE github_login = $2
+ RETURNING *
+ ",
+ )
+ .bind(github_user_id)
+ .bind(github_login)
+ .fetch_optional(&self.pool)
+ .await?;
+ }
+
+ Ok(user)
+ } else {
+ Ok(sqlx::query_as(
+ "
+ SELECT * FROM users
+ WHERE github_login = $1
+ LIMIT 1
+ ",
+ )
.bind(github_login)
.fetch_optional(&self.pool)
.await?)
+ }
}
async fn set_user_is_admin(&self, id: UserId, is_admin: bool) -> Result<()> {
@@ -333,9 +385,207 @@ impl Db for PostgresDb {
.map(drop)?)
}
+ // signups
+
+ async fn create_signup(&self, signup: Signup) -> Result<()> {
+ sqlx::query(
+ "
+ INSERT INTO signups
+ (
+ email_address,
+ email_confirmation_code,
+ email_confirmation_sent,
+ platform_linux,
+ platform_mac,
+ platform_windows,
+ platform_unknown,
+ editor_features,
+ programming_languages,
+ device_id
+ )
+ VALUES
+ ($1, $2, 'f', $3, $4, $5, 'f', $6, $7, $8)
+ RETURNING id
+ ",
+ )
+ .bind(&signup.email_address)
+ .bind(&random_email_confirmation_code())
+ .bind(&signup.platform_linux)
+ .bind(&signup.platform_mac)
+ .bind(&signup.platform_windows)
+ .bind(&signup.editor_features)
+ .bind(&signup.programming_languages)
+ .bind(&signup.device_id)
+ .execute(&self.pool)
+ .await?;
+ Ok(())
+ }
+
+ async fn get_waitlist_summary(&self) -> Result<WaitlistSummary> {
+ Ok(sqlx::query_as(
+ "
+ SELECT
+ COUNT(*) as count,
+ COALESCE(SUM(CASE WHEN platform_linux THEN 1 ELSE 0 END), 0) as linux_count,
+ COALESCE(SUM(CASE WHEN platform_mac THEN 1 ELSE 0 END), 0) as mac_count,
+ COALESCE(SUM(CASE WHEN platform_windows THEN 1 ELSE 0 END), 0) as windows_count
+ FROM (
+ SELECT *
+ FROM signups
+ WHERE
+ NOT email_confirmation_sent
+ ) AS unsent
+ ",
+ )
+ .fetch_one(&self.pool)
+ .await?)
+ }
+
+ async fn get_unsent_invites(&self, count: usize) -> Result<Vec<Invite>> {
+ Ok(sqlx::query_as(
+ "
+ SELECT
+ email_address, email_confirmation_code
+ FROM signups
+ WHERE
+ NOT email_confirmation_sent AND
+ platform_mac
+ LIMIT $1
+ ",
+ )
+ .bind(count as i32)
+ .fetch_all(&self.pool)
+ .await?)
+ }
+
+ async fn record_sent_invites(&self, invites: &[Invite]) -> Result<()> {
+ sqlx::query(
+ "
+ UPDATE signups
+ SET email_confirmation_sent = 't'
+ WHERE email_address = ANY ($1)
+ ",
+ )
+ .bind(
+ &invites
+ .iter()
+ .map(|s| s.email_address.as_str())
+ .collect::<Vec<_>>(),
+ )
+ .execute(&self.pool)
+ .await?;
+ Ok(())
+ }
+
+ async fn create_user_from_invite(
+ &self,
+ invite: &Invite,
+ user: NewUserParams,
+ ) -> Result<NewUserResult> {
+ let mut tx = self.pool.begin().await?;
+
+ let (signup_id, existing_user_id, inviting_user_id, signup_device_id): (
+ i32,
+ Option<UserId>,
+ Option<UserId>,
+ Option<String>,
+ ) = sqlx::query_as(
+ "
+ SELECT id, user_id, inviting_user_id, device_id
+ FROM signups
+ WHERE
+ email_address = $1 AND
+ email_confirmation_code = $2
+ ",
+ )
+ .bind(&invite.email_address)
+ .bind(&invite.email_confirmation_code)
+ .fetch_optional(&mut tx)
+ .await?
+ .ok_or_else(|| Error::Http(StatusCode::NOT_FOUND, "no such invite".to_string()))?;
+
+ if existing_user_id.is_some() {
+ Err(Error::Http(
+ StatusCode::UNPROCESSABLE_ENTITY,
+ "invitation already redeemed".to_string(),
+ ))?;
+ }
+
+ let (user_id, metrics_id): (UserId, String) = sqlx::query_as(
+ "
+ INSERT INTO users
+ (email_address, github_login, github_user_id, admin, invite_count, invite_code)
+ VALUES
+ ($1, $2, $3, 'f', $4, $5)
+ RETURNING id, metrics_id::text
+ ",
+ )
+ .bind(&invite.email_address)
+ .bind(&user.github_login)
+ .bind(&user.github_user_id)
+ .bind(&user.invite_count)
+ .bind(random_invite_code())
+ .fetch_one(&mut tx)
+ .await?;
+
+ sqlx::query(
+ "
+ UPDATE signups
+ SET user_id = $1
+ WHERE id = $2
+ ",
+ )
+ .bind(&user_id)
+ .bind(&signup_id)
+ .execute(&mut tx)
+ .await?;
+
+ if let Some(inviting_user_id) = inviting_user_id {
+ let id: Option<UserId> = sqlx::query_scalar(
+ "
+ UPDATE users
+ SET invite_count = invite_count - 1
+ WHERE id = $1 AND invite_count > 0
+ RETURNING id
+ ",
+ )
+ .bind(&inviting_user_id)
+ .fetch_optional(&mut tx)
+ .await?;
+
+ if id.is_none() {
+ Err(Error::Http(
+ StatusCode::UNAUTHORIZED,
+ "no invites remaining".to_string(),
+ ))?;
+ }
+
+ sqlx::query(
+ "
+ INSERT INTO contacts
+ (user_id_a, user_id_b, a_to_b, should_notify, accepted)
+ VALUES
+ ($1, $2, 't', 't', 't')
+ ",
+ )
+ .bind(inviting_user_id)
+ .bind(user_id)
+ .execute(&mut tx)
+ .await?;
+ }
+
+ tx.commit().await?;
+ Ok(NewUserResult {
+ user_id,
+ metrics_id,
+ inviting_user_id,
+ signup_device_id,
+ })
+ }
+
// invite codes
- async fn set_invite_count(&self, id: UserId, count: u32) -> Result<()> {
+ async fn set_invite_count_for_user(&self, id: UserId, count: u32) -> Result<()> {
let mut tx = self.pool.begin().await?;
if count > 0 {
sqlx::query(
@@ -403,83 +653,89 @@ impl Db for PostgresDb {
})
}
- async fn redeem_invite_code(
+ async fn create_invite_from_code(
&self,
code: &str,
- login: &str,
- email_address: Option<&str>,
- ) -> Result<UserId> {
+ email_address: &str,
+ device_id: Option<&str>,
+ ) -> Result<Invite> {
let mut tx = self.pool.begin().await?;
- let inviter_id: Option<UserId> = sqlx::query_scalar(
+ let existing_user: Option<UserId> = sqlx::query_scalar(
"
- UPDATE users
- SET invite_count = invite_count - 1
- WHERE
- invite_code = $1 AND
- invite_count > 0
- RETURNING id
+ SELECT id
+ FROM users
+ WHERE email_address = $1
",
)
- .bind(code)
+ .bind(email_address)
.fetch_optional(&mut tx)
.await?;
+ if existing_user.is_some() {
+ Err(anyhow!("email address is already in use"))?;
+ }
- let inviter_id = match inviter_id {
- Some(inviter_id) => inviter_id,
- None => {
- if sqlx::query_scalar::<_, i32>("SELECT 1 FROM users WHERE invite_code = $1")
- .bind(code)
- .fetch_optional(&mut tx)
- .await?
- .is_some()
- {
- Err(Error::Http(
- StatusCode::UNAUTHORIZED,
- "no invites remaining".to_string(),
- ))?
- } else {
- Err(Error::Http(
- StatusCode::NOT_FOUND,
- "invite code not found".to_string(),
- ))?
- }
- }
- };
-
- let invitee_id = sqlx::query_scalar(
+ let row: Option<(UserId, i32)> = sqlx::query_as(
"
- INSERT INTO users
- (github_login, email_address, admin, inviter_id, invite_code, invite_count)
- VALUES
- ($1, $2, 'f', $3, $4, $5)
- RETURNING id
+ SELECT id, invite_count
+ FROM users
+ WHERE invite_code = $1
",
)
- .bind(login)
- .bind(email_address)
- .bind(inviter_id)
- .bind(random_invite_code())
- .bind(5)
- .fetch_one(&mut tx)
- .await
- .map(UserId)?;
+ .bind(code)
+ .fetch_optional(&mut tx)
+ .await?;
- sqlx::query(
+ let (inviter_id, invite_count) = match row {
+ Some(row) => row,
+ None => Err(Error::Http(
+ StatusCode::NOT_FOUND,
+ "invite code not found".to_string(),
+ ))?,
+ };
+
+ if invite_count == 0 {
+ Err(Error::Http(
+ StatusCode::UNAUTHORIZED,
+ "no invites remaining".to_string(),
+ ))?;
+ }
+
+ let email_confirmation_code: String = sqlx::query_scalar(
"
- INSERT INTO contacts
- (user_id_a, user_id_b, a_to_b, should_notify, accepted)
- VALUES
- ($1, $2, 't', 't', 't')
+ INSERT INTO signups
+ (
+ email_address,
+ email_confirmation_code,
+ email_confirmation_sent,
+ inviting_user_id,
+ platform_linux,
+ platform_mac,
+ platform_windows,
+ platform_unknown,
+ device_id
+ )
+ VALUES
+ ($1, $2, 'f', $3, 'f', 'f', 'f', 't', $4)
+ ON CONFLICT (email_address)
+ DO UPDATE SET
+ inviting_user_id = excluded.inviting_user_id
+ RETURNING email_confirmation_code
",
)
- .bind(inviter_id)
- .bind(invitee_id)
- .execute(&mut tx)
+ .bind(&email_address)
+ .bind(&random_email_confirmation_code())
+ .bind(&inviter_id)
+ .bind(&device_id)
+ .fetch_one(&mut tx)
.await?;
tx.commit().await?;
- Ok(invitee_id)
+
+ Ok(Invite {
+ email_address: email_address.into(),
+ email_confirmation_code,
+ })
}
// projects
@@ -1291,7 +1547,7 @@ impl Db for PostgresDb {
}
#[cfg(test)]
- fn as_fake(&self) -> Option<&tests::FakeDb> {
+ fn as_fake(&self) -> Option<&FakeDb> {
None
}
}
@@ -1344,6 +1600,7 @@ id_type!(UserId);
pub struct User {
pub id: UserId,
pub github_login: String,
+ pub github_user_id: Option<i32>,
pub email_address: Option<String>,
pub admin: bool,
pub invite_code: Option<String>,
@@ -1368,19 +1625,19 @@ pub struct UserActivitySummary {
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct ProjectActivitySummary {
- id: ProjectId,
- duration: Duration,
- max_collaborators: usize,
+ pub id: ProjectId,
+ pub duration: Duration,
+ pub max_collaborators: usize,
}
#[derive(Clone, Debug, PartialEq, Serialize)]
pub struct UserActivityPeriod {
- project_id: ProjectId,
+ pub project_id: ProjectId,
#[serde(with = "time::serde::iso8601")]
- start: OffsetDateTime,
+ pub start: OffsetDateTime,
#[serde(with = "time::serde::iso8601")]
- end: OffsetDateTime,
- extensions: HashMap<String, usize>,
+ pub end: OffsetDateTime,
+ pub extensions: HashMap<String, usize>,
}
id_type!(OrgId);
@@ -1442,28 +1699,67 @@ pub struct IncomingContactRequest {
pub should_notify: bool,
}
-fn fuzzy_like_string(string: &str) -> String {
- let mut result = String::with_capacity(string.len() * 2 + 1);
- for c in string.chars() {
- if c.is_alphanumeric() {
- result.push('%');
- result.push(c);
- }
- }
- result.push('%');
- result
+#[derive(Clone, Deserialize)]
+pub struct Signup {
+ pub email_address: String,
+ pub platform_mac: bool,
+ pub platform_windows: bool,
+ pub platform_linux: bool,
+ pub editor_features: Vec<String>,
+ pub programming_languages: Vec<String>,
+ pub device_id: Option<String>,
+}
+
+#[derive(Clone, Debug, PartialEq, Deserialize, Serialize, FromRow)]
+pub struct WaitlistSummary {
+ #[sqlx(default)]
+ pub count: i64,
+ #[sqlx(default)]
+ pub linux_count: i64,
+ #[sqlx(default)]
+ pub mac_count: i64,
+ #[sqlx(default)]
+ pub windows_count: i64,
+}
+
+#[derive(FromRow, PartialEq, Debug, Serialize, Deserialize)]
+pub struct Invite {
+ pub email_address: String,
+ pub email_confirmation_code: String,
+}
+
+#[derive(Debug, Serialize, Deserialize)]
+pub struct NewUserParams {
+ pub github_login: String,
+ pub github_user_id: i32,
+ pub invite_count: i32,
+}
+
+#[derive(Debug)]
+pub struct NewUserResult {
+ pub user_id: UserId,
+ pub metrics_id: String,
+ pub inviting_user_id: Option<UserId>,
+ pub signup_device_id: Option<String>,
}
fn random_invite_code() -> String {
nanoid::nanoid!(16)
}
+fn random_email_confirmation_code() -> String {
+ nanoid::nanoid!(64)
+}
+
+#[cfg(test)]
+pub use test::*;
+
#[cfg(test)]
-pub mod tests {
+mod test {
use super::*;
use anyhow::anyhow;
use collections::BTreeMap;
- use gpui::executor::{Background, Deterministic};
+ use gpui::executor::Background;
use lazy_static::lazy_static;
use parking_lot::Mutex;
use rand::prelude::*;
@@ -1474,900 +1770,30 @@ pub mod tests {
use std::{path::Path, sync::Arc};
use util::post_inc;
- #[tokio::test(flavor = "multi_thread")]
- async fn test_get_users_by_ids() {
- for test_db in [
- TestDb::postgres().await,
- TestDb::fake(build_background_executor()),
- ] {
- let db = test_db.db();
-
- let user = db.create_user("user", None, false).await.unwrap();
- let friend1 = db.create_user("friend-1", None, false).await.unwrap();
- let friend2 = db.create_user("friend-2", None, false).await.unwrap();
- let friend3 = db.create_user("friend-3", None, false).await.unwrap();
-
- assert_eq!(
- db.get_users_by_ids(vec![user, friend1, friend2, friend3])
- .await
- .unwrap(),
- vec![
- User {
- id: user,
- github_login: "user".to_string(),
- admin: false,
- ..Default::default()
- },
- User {
- id: friend1,
- github_login: "friend-1".to_string(),
- admin: false,
- ..Default::default()
- },
- User {
- id: friend2,
- github_login: "friend-2".to_string(),
- admin: false,
- ..Default::default()
- },
- User {
- id: friend3,
- github_login: "friend-3".to_string(),
- admin: false,
- ..Default::default()
- }
- ]
- );
- }
+ pub struct FakeDb {
+ background: Arc<Background>,
+ pub users: Mutex<BTreeMap<UserId, User>>,
+ pub projects: Mutex<BTreeMap<ProjectId, Project>>,
+ pub worktree_extensions: Mutex<BTreeMap<(ProjectId, u64, String), u32>>,
+ pub orgs: Mutex<BTreeMap<OrgId, Org>>,
+ pub org_memberships: Mutex<BTreeMap<(OrgId, UserId), bool>>,
+ pub channels: Mutex<BTreeMap<ChannelId, Channel>>,
+ pub channel_memberships: Mutex<BTreeMap<(ChannelId, UserId), bool>>,
+ pub channel_messages: Mutex<BTreeMap<MessageId, ChannelMessage>>,
+ pub contacts: Mutex<Vec<FakeContact>>,
+ next_channel_message_id: Mutex<i32>,
+ next_user_id: Mutex<i32>,
+ next_org_id: Mutex<i32>,
+ next_channel_id: Mutex<i32>,
+ next_project_id: Mutex<i32>,
}
- #[tokio::test(flavor = "multi_thread")]
- async fn test_create_users() {
- let db = TestDb::postgres().await;
- let db = db.db();
-
- // Create the first batch of users, ensuring invite counts are assigned
- // correctly and the respective invite codes are unique.
- let user_ids_batch_1 = db
- .create_users(vec![
- ("user1".to_string(), "hi@user1.com".to_string(), 5),
- ("user2".to_string(), "hi@user2.com".to_string(), 4),
- ("user3".to_string(), "hi@user3.com".to_string(), 3),
- ])
- .await
- .unwrap();
- assert_eq!(user_ids_batch_1.len(), 3);
-
- let users = db.get_users_by_ids(user_ids_batch_1.clone()).await.unwrap();
- assert_eq!(users.len(), 3);
- assert_eq!(users[0].github_login, "user1");
- assert_eq!(users[0].email_address.as_deref(), Some("hi@user1.com"));
- assert_eq!(users[0].invite_count, 5);
- assert_eq!(users[1].github_login, "user2");
- assert_eq!(users[1].email_address.as_deref(), Some("hi@user2.com"));
- assert_eq!(users[1].invite_count, 4);
- assert_eq!(users[2].github_login, "user3");
- assert_eq!(users[2].email_address.as_deref(), Some("hi@user3.com"));
- assert_eq!(users[2].invite_count, 3);
-
- let invite_code_1 = users[0].invite_code.clone().unwrap();
- let invite_code_2 = users[1].invite_code.clone().unwrap();
- let invite_code_3 = users[2].invite_code.clone().unwrap();
- assert_ne!(invite_code_1, invite_code_2);
- assert_ne!(invite_code_1, invite_code_3);
- assert_ne!(invite_code_2, invite_code_3);
-
- // Create the second batch of users and include a user that is already in the database, ensuring
- // the invite count for the existing user is updated without changing their invite code.
- let user_ids_batch_2 = db
- .create_users(vec![
- ("user2".to_string(), "hi@user2.com".to_string(), 10),
- ("user4".to_string(), "hi@user4.com".to_string(), 2),
- ])
- .await
- .unwrap();
- assert_eq!(user_ids_batch_2.len(), 2);
- assert_eq!(user_ids_batch_2[0], user_ids_batch_1[1]);
-
- let users = db.get_users_by_ids(user_ids_batch_2).await.unwrap();
- assert_eq!(users.len(), 2);
- assert_eq!(users[0].github_login, "user2");
- assert_eq!(users[0].email_address.as_deref(), Some("hi@user2.com"));
- assert_eq!(users[0].invite_count, 10);
- assert_eq!(users[0].invite_code, Some(invite_code_2.clone()));
- assert_eq!(users[1].github_login, "user4");
- assert_eq!(users[1].email_address.as_deref(), Some("hi@user4.com"));
- assert_eq!(users[1].invite_count, 2);
-
- let invite_code_4 = users[1].invite_code.clone().unwrap();
- assert_ne!(invite_code_4, invite_code_1);
- assert_ne!(invite_code_4, invite_code_2);
- assert_ne!(invite_code_4, invite_code_3);
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_worktree_extensions() {
- let test_db = TestDb::postgres().await;
- let db = test_db.db();
-
- let user = db.create_user("user_1", None, false).await.unwrap();
- let project = db.register_project(user).await.unwrap();
-
- db.update_worktree_extensions(project, 100, Default::default())
- .await
- .unwrap();
- db.update_worktree_extensions(
- project,
- 100,
- [("rs".to_string(), 5), ("md".to_string(), 3)]
- .into_iter()
- .collect(),
- )
- .await
- .unwrap();
- db.update_worktree_extensions(
- project,
- 100,
- [("rs".to_string(), 6), ("md".to_string(), 5)]
- .into_iter()
- .collect(),
- )
- .await
- .unwrap();
- db.update_worktree_extensions(
- project,
- 101,
- [("ts".to_string(), 2), ("md".to_string(), 1)]
- .into_iter()
- .collect(),
- )
- .await
- .unwrap();
-
- assert_eq!(
- db.get_project_extensions(project).await.unwrap(),
- [
- (
- 100,
- [("rs".into(), 6), ("md".into(), 5),]
- .into_iter()
- .collect::<HashMap<_, _>>()
- ),
- (
- 101,
- [("ts".into(), 2), ("md".into(), 1),]
- .into_iter()
- .collect::<HashMap<_, _>>()
- )
- ]
- .into_iter()
- .collect()
- );
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_user_activity() {
- let test_db = TestDb::postgres().await;
- let db = test_db.db();
-
- let user_1 = db.create_user("user_1", None, false).await.unwrap();
- let user_2 = db.create_user("user_2", None, false).await.unwrap();
- let user_3 = db.create_user("user_3", None, false).await.unwrap();
- let project_1 = db.register_project(user_1).await.unwrap();
- db.update_worktree_extensions(
- project_1,
- 1,
- HashMap::from_iter([("rs".into(), 5), ("md".into(), 7)]),
- )
- .await
- .unwrap();
- let project_2 = db.register_project(user_2).await.unwrap();
- let t0 = OffsetDateTime::now_utc() - Duration::from_secs(60 * 60);
-
- // User 2 opens a project
- let t1 = t0 + Duration::from_secs(10);
- db.record_user_activity(t0..t1, &[(user_2, project_2)])
- .await
- .unwrap();
-
- let t2 = t1 + Duration::from_secs(10);
- db.record_user_activity(t1..t2, &[(user_2, project_2)])
- .await
- .unwrap();
-
- // User 1 joins the project
- let t3 = t2 + Duration::from_secs(10);
- db.record_user_activity(t2..t3, &[(user_2, project_2), (user_1, project_2)])
- .await
- .unwrap();
-
- // User 1 opens another project
- let t4 = t3 + Duration::from_secs(10);
- db.record_user_activity(
- t3..t4,
- &[
- (user_2, project_2),
- (user_1, project_2),
- (user_1, project_1),
- ],
- )
- .await
- .unwrap();
-
- // User 3 joins that project
- let t5 = t4 + Duration::from_secs(10);
- db.record_user_activity(
- t4..t5,
- &[
- (user_2, project_2),
- (user_1, project_2),
- (user_1, project_1),
- (user_3, project_1),
- ],
- )
- .await
- .unwrap();
-
- // User 2 leaves
- let t6 = t5 + Duration::from_secs(5);
- db.record_user_activity(t5..t6, &[(user_1, project_1), (user_3, project_1)])
- .await
- .unwrap();
-
- let t7 = t6 + Duration::from_secs(60);
- let t8 = t7 + Duration::from_secs(10);
- db.record_user_activity(t7..t8, &[(user_1, project_1)])
- .await
- .unwrap();
-
- assert_eq!(
- db.get_top_users_activity_summary(t0..t6, 10).await.unwrap(),
- &[
- UserActivitySummary {
- id: user_1,
- github_login: "user_1".to_string(),
- project_activity: vec![
- ProjectActivitySummary {
- id: project_1,
- duration: Duration::from_secs(25),
- max_collaborators: 2
- },
- ProjectActivitySummary {
- id: project_2,
- duration: Duration::from_secs(30),
- max_collaborators: 2
- }
- ]
- },
- UserActivitySummary {
- id: user_2,
- github_login: "user_2".to_string(),
- project_activity: vec![ProjectActivitySummary {
- id: project_2,
- duration: Duration::from_secs(50),
- max_collaborators: 2
- }]
- },
- UserActivitySummary {
- id: user_3,
- github_login: "user_3".to_string(),
- project_activity: vec![ProjectActivitySummary {
- id: project_1,
- duration: Duration::from_secs(15),
- max_collaborators: 2
- }]
- },
- ]
- );
-
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(56), false)
- .await
- .unwrap(),
- 0
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(56), true)
- .await
- .unwrap(),
- 0
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(54), false)
- .await
- .unwrap(),
- 1
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(54), true)
- .await
- .unwrap(),
- 1
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(30), false)
- .await
- .unwrap(),
- 2
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(30), true)
- .await
- .unwrap(),
- 2
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(10), false)
- .await
- .unwrap(),
- 3
- );
- assert_eq!(
- db.get_active_user_count(t0..t6, Duration::from_secs(10), true)
- .await
- .unwrap(),
- 3
- );
- assert_eq!(
- db.get_active_user_count(t0..t1, Duration::from_secs(5), false)
- .await
- .unwrap(),
- 1
- );
- assert_eq!(
- db.get_active_user_count(t0..t1, Duration::from_secs(5), true)
- .await
- .unwrap(),
- 0
- );
-
- assert_eq!(
- db.get_user_activity_timeline(t3..t6, user_1).await.unwrap(),
- &[
- UserActivityPeriod {
- project_id: project_1,
- start: t3,
- end: t6,
- extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]),
- },
- UserActivityPeriod {
- project_id: project_2,
- start: t3,
- end: t5,
- extensions: Default::default(),
- },
- ]
- );
- assert_eq!(
- db.get_user_activity_timeline(t0..t8, user_1).await.unwrap(),
- &[
- UserActivityPeriod {
- project_id: project_2,
- start: t2,
- end: t5,
- extensions: Default::default(),
- },
- UserActivityPeriod {
- project_id: project_1,
- start: t3,
- end: t6,
- extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]),
- },
- UserActivityPeriod {
- project_id: project_1,
- start: t7,
- end: t8,
- extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]),
- },
- ]
- );
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_recent_channel_messages() {
- for test_db in [
- TestDb::postgres().await,
- TestDb::fake(build_background_executor()),
- ] {
- let db = test_db.db();
- let user = db.create_user("user", None, false).await.unwrap();
- let org = db.create_org("org", "org").await.unwrap();
- let channel = db.create_org_channel(org, "channel").await.unwrap();
- for i in 0..10 {
- db.create_channel_message(
- channel,
- user,
- &i.to_string(),
- OffsetDateTime::now_utc(),
- i,
- )
- .await
- .unwrap();
- }
-
- let messages = db.get_channel_messages(channel, 5, None).await.unwrap();
- assert_eq!(
- messages.iter().map(|m| &m.body).collect::<Vec<_>>(),
- ["5", "6", "7", "8", "9"]
- );
-
- let prev_messages = db
- .get_channel_messages(channel, 4, Some(messages[0].id))
- .await
- .unwrap();
- assert_eq!(
- prev_messages.iter().map(|m| &m.body).collect::<Vec<_>>(),
- ["1", "2", "3", "4"]
- );
- }
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_channel_message_nonces() {
- for test_db in [
- TestDb::postgres().await,
- TestDb::fake(build_background_executor()),
- ] {
- let db = test_db.db();
- let user = db.create_user("user", None, false).await.unwrap();
- let org = db.create_org("org", "org").await.unwrap();
- let channel = db.create_org_channel(org, "channel").await.unwrap();
-
- let msg1_id = db
- .create_channel_message(channel, user, "1", OffsetDateTime::now_utc(), 1)
- .await
- .unwrap();
- let msg2_id = db
- .create_channel_message(channel, user, "2", OffsetDateTime::now_utc(), 2)
- .await
- .unwrap();
- let msg3_id = db
- .create_channel_message(channel, user, "3", OffsetDateTime::now_utc(), 1)
- .await
- .unwrap();
- let msg4_id = db
- .create_channel_message(channel, user, "4", OffsetDateTime::now_utc(), 2)
- .await
- .unwrap();
-
- assert_ne!(msg1_id, msg2_id);
- assert_eq!(msg1_id, msg3_id);
- assert_eq!(msg2_id, msg4_id);
- }
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_create_access_tokens() {
- let test_db = TestDb::postgres().await;
- let db = test_db.db();
- let user = db.create_user("the-user", None, false).await.unwrap();
-
- db.create_access_token_hash(user, "h1", 3).await.unwrap();
- db.create_access_token_hash(user, "h2", 3).await.unwrap();
- assert_eq!(
- db.get_access_token_hashes(user).await.unwrap(),
- &["h2".to_string(), "h1".to_string()]
- );
-
- db.create_access_token_hash(user, "h3", 3).await.unwrap();
- assert_eq!(
- db.get_access_token_hashes(user).await.unwrap(),
- &["h3".to_string(), "h2".to_string(), "h1".to_string(),]
- );
-
- db.create_access_token_hash(user, "h4", 3).await.unwrap();
- assert_eq!(
- db.get_access_token_hashes(user).await.unwrap(),
- &["h4".to_string(), "h3".to_string(), "h2".to_string(),]
- );
-
- db.create_access_token_hash(user, "h5", 3).await.unwrap();
- assert_eq!(
- db.get_access_token_hashes(user).await.unwrap(),
- &["h5".to_string(), "h4".to_string(), "h3".to_string()]
- );
- }
-
- #[test]
- fn test_fuzzy_like_string() {
- assert_eq!(fuzzy_like_string("abcd"), "%a%b%c%d%");
- assert_eq!(fuzzy_like_string("x y"), "%x%y%");
- assert_eq!(fuzzy_like_string(" z "), "%z%");
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_fuzzy_search_users() {
- let test_db = TestDb::postgres().await;
- let db = test_db.db();
- for github_login in [
- "California",
- "colorado",
- "oregon",
- "washington",
- "florida",
- "delaware",
- "rhode-island",
- ] {
- db.create_user(github_login, None, false).await.unwrap();
- }
-
- assert_eq!(
- fuzzy_search_user_names(db, "clr").await,
- &["colorado", "California"]
- );
- assert_eq!(
- fuzzy_search_user_names(db, "ro").await,
- &["rhode-island", "colorado", "oregon"],
- );
-
- async fn fuzzy_search_user_names(db: &Arc<dyn Db>, query: &str) -> Vec<String> {
- db.fuzzy_search_users(query, 10)
- .await
- .unwrap()
- .into_iter()
- .map(|user| user.github_login)
- .collect::<Vec<_>>()
- }
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_add_contacts() {
- for test_db in [
- TestDb::postgres().await,
- TestDb::fake(build_background_executor()),
- ] {
- let db = test_db.db();
-
- let user_1 = db.create_user("user1", None, false).await.unwrap();
- let user_2 = db.create_user("user2", None, false).await.unwrap();
- let user_3 = db.create_user("user3", None, false).await.unwrap();
-
- // User starts with no contacts
- assert_eq!(db.get_contacts(user_1).await.unwrap(), vec![]);
-
- // User requests a contact. Both users see the pending request.
- db.send_contact_request(user_1, user_2).await.unwrap();
- assert!(!db.has_contact(user_1, user_2).await.unwrap());
- assert!(!db.has_contact(user_2, user_1).await.unwrap());
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Outgoing { user_id: user_2 }],
- );
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Incoming {
- user_id: user_1,
- should_notify: true
- }]
- );
-
- // User 2 dismisses the contact request notification without accepting or rejecting.
- // We shouldn't notify them again.
- db.dismiss_contact_notification(user_1, user_2)
- .await
- .unwrap_err();
- db.dismiss_contact_notification(user_2, user_1)
- .await
- .unwrap();
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Incoming {
- user_id: user_1,
- should_notify: false
- }]
- );
-
- // User can't accept their own contact request
- db.respond_to_contact_request(user_1, user_2, true)
- .await
- .unwrap_err();
-
- // User accepts a contact request. Both users see the contact.
- db.respond_to_contact_request(user_2, user_1, true)
- .await
- .unwrap();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_2,
- should_notify: true
- }],
- );
- assert!(db.has_contact(user_1, user_2).await.unwrap());
- assert!(db.has_contact(user_2, user_1).await.unwrap());
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- should_notify: false,
- }]
- );
-
- // Users cannot re-request existing contacts.
- db.send_contact_request(user_1, user_2).await.unwrap_err();
- db.send_contact_request(user_2, user_1).await.unwrap_err();
-
- // Users can't dismiss notifications of them accepting other users' requests.
- db.dismiss_contact_notification(user_2, user_1)
- .await
- .unwrap_err();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_2,
- should_notify: true,
- }]
- );
-
- // Users can dismiss notifications of other users accepting their requests.
- db.dismiss_contact_notification(user_1, user_2)
- .await
- .unwrap();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_2,
- should_notify: false,
- },]
- );
-
- // Users send each other concurrent contact requests and
- // see that they are immediately accepted.
- db.send_contact_request(user_1, user_3).await.unwrap();
- db.send_contact_request(user_3, user_1).await.unwrap();
- assert_eq!(
- db.get_contacts(user_1).await.unwrap(),
- &[
- Contact::Accepted {
- user_id: user_2,
- should_notify: false,
- },
- Contact::Accepted {
- user_id: user_3,
- should_notify: false
- },
- ]
- );
- assert_eq!(
- db.get_contacts(user_3).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- should_notify: false
- }],
- );
-
- // User declines a contact request. Both users see that it is gone.
- db.send_contact_request(user_2, user_3).await.unwrap();
- db.respond_to_contact_request(user_3, user_2, false)
- .await
- .unwrap();
- assert!(!db.has_contact(user_2, user_3).await.unwrap());
- assert!(!db.has_contact(user_3, user_2).await.unwrap());
- assert_eq!(
- db.get_contacts(user_2).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- should_notify: false
- }]
- );
- assert_eq!(
- db.get_contacts(user_3).await.unwrap(),
- &[Contact::Accepted {
- user_id: user_1,
- should_notify: false
- }],
- );
- }
- }
-
- #[tokio::test(flavor = "multi_thread")]
- async fn test_invite_codes() {
- let postgres = TestDb::postgres().await;
- let db = postgres.db();
- let user1 = db.create_user("user-1", None, false).await.unwrap();
-
- // Initially, user 1 has no invite code
- assert_eq!(db.get_invite_code_for_user(user1).await.unwrap(), None);
-
- // Setting invite count to 0 when no code is assigned does not assign a new code
- db.set_invite_count(user1, 0).await.unwrap();
- assert!(db.get_invite_code_for_user(user1).await.unwrap().is_none());
-
- // User 1 creates an invite code that can be used twice.
- db.set_invite_count(user1, 2).await.unwrap();
- let (invite_code, invite_count) =
- db.get_invite_code_for_user(user1).await.unwrap().unwrap();
- assert_eq!(invite_count, 2);
-
- // User 2 redeems the invite code and becomes a contact of user 1.
- let user2 = db
- .redeem_invite_code(&invite_code, "user-2", None)
- .await
- .unwrap();
- let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
- assert_eq!(invite_count, 1);
- assert_eq!(
- db.get_contacts(user1).await.unwrap(),
- [Contact::Accepted {
- user_id: user2,
- should_notify: true
- }]
- );
- assert_eq!(
- db.get_contacts(user2).await.unwrap(),
- [Contact::Accepted {
- user_id: user1,
- should_notify: false
- }]
- );
-
- // User 3 redeems the invite code and becomes a contact of user 1.
- let user3 = db
- .redeem_invite_code(&invite_code, "user-3", None)
- .await
- .unwrap();
- let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
- assert_eq!(invite_count, 0);
- assert_eq!(
- db.get_contacts(user1).await.unwrap(),
- [
- Contact::Accepted {
- user_id: user2,
- should_notify: true
- },
- Contact::Accepted {
- user_id: user3,
- should_notify: true
- }
- ]
- );
- assert_eq!(
- db.get_contacts(user3).await.unwrap(),
- [Contact::Accepted {
- user_id: user1,
- should_notify: false
- }]
- );
-
-    // Trying to redeem the code for the third time results in an error.
- db.redeem_invite_code(&invite_code, "user-4", None)
- .await
- .unwrap_err();
-
- // Invite count can be updated after the code has been created.
- db.set_invite_count(user1, 2).await.unwrap();
- let (latest_code, invite_count) =
- db.get_invite_code_for_user(user1).await.unwrap().unwrap();
- assert_eq!(latest_code, invite_code); // Invite code doesn't change when we increment above 0
- assert_eq!(invite_count, 2);
-
- // User 4 can now redeem the invite code and becomes a contact of user 1.
- let user4 = db
- .redeem_invite_code(&invite_code, "user-4", None)
- .await
- .unwrap();
- let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
- assert_eq!(invite_count, 1);
- assert_eq!(
- db.get_contacts(user1).await.unwrap(),
- [
- Contact::Accepted {
- user_id: user2,
- should_notify: true
- },
- Contact::Accepted {
- user_id: user3,
- should_notify: true
- },
- Contact::Accepted {
- user_id: user4,
- should_notify: true
- }
- ]
- );
- assert_eq!(
- db.get_contacts(user4).await.unwrap(),
- [Contact::Accepted {
- user_id: user1,
- should_notify: false
- }]
- );
-
- // An existing user cannot redeem invite codes.
- db.redeem_invite_code(&invite_code, "user-2", None)
- .await
- .unwrap_err();
- let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
- assert_eq!(invite_count, 1);
-
- // Ensure invited users get invite codes too.
- assert_eq!(
- db.get_invite_code_for_user(user2).await.unwrap().unwrap().1,
- 5
- );
- assert_eq!(
- db.get_invite_code_for_user(user3).await.unwrap().unwrap().1,
- 5
- );
- assert_eq!(
- db.get_invite_code_for_user(user4).await.unwrap().unwrap().1,
- 5
- );
- }
-
- pub struct TestDb {
- pub db: Option<Arc<dyn Db>>,
- pub url: String,
- }
-
- impl TestDb {
- #[allow(clippy::await_holding_lock)]
- pub async fn postgres() -> Self {
- lazy_static! {
- static ref LOCK: Mutex<()> = Mutex::new(());
- }
-
- let _guard = LOCK.lock();
- let mut rng = StdRng::from_entropy();
- let name = format!("zed-test-{}", rng.gen::<u128>());
- let url = format!("postgres://postgres@localhost/{}", name);
- let migrations_path = Path::new(concat!(env!("CARGO_MANIFEST_DIR"), "/migrations"));
- Postgres::create_database(&url)
- .await
- .expect("failed to create test db");
- let db = PostgresDb::new(&url, 5).await.unwrap();
- let migrator = Migrator::new(migrations_path).await.unwrap();
- migrator.run(&db.pool).await.unwrap();
- Self {
- db: Some(Arc::new(db)),
- url,
- }
- }
-
- pub fn fake(background: Arc<Background>) -> Self {
- Self {
- db: Some(Arc::new(FakeDb::new(background))),
- url: Default::default(),
- }
- }
-
- pub fn db(&self) -> &Arc<dyn Db> {
- self.db.as_ref().unwrap()
- }
- }
-
- impl Drop for TestDb {
- fn drop(&mut self) {
- if let Some(db) = self.db.take() {
- futures::executor::block_on(db.teardown(&self.url));
- }
- }
- }
-
- pub struct FakeDb {
- background: Arc<Background>,
- pub users: Mutex<BTreeMap<UserId, User>>,
- pub projects: Mutex<BTreeMap<ProjectId, Project>>,
- pub worktree_extensions: Mutex<BTreeMap<(ProjectId, u64, String), u32>>,
- pub orgs: Mutex<BTreeMap<OrgId, Org>>,
- pub org_memberships: Mutex<BTreeMap<(OrgId, UserId), bool>>,
- pub channels: Mutex<BTreeMap<ChannelId, Channel>>,
- pub channel_memberships: Mutex<BTreeMap<(ChannelId, UserId), bool>>,
- pub channel_messages: Mutex<BTreeMap<MessageId, ChannelMessage>>,
- pub contacts: Mutex<Vec<FakeContact>>,
- next_channel_message_id: Mutex<i32>,
- next_user_id: Mutex<i32>,
- next_org_id: Mutex<i32>,
- next_channel_id: Mutex<i32>,
- next_project_id: Mutex<i32>,
- }
-
- #[derive(Debug)]
- pub struct FakeContact {
- pub requester_id: UserId,
- pub responder_id: UserId,
- pub accepted: bool,
- pub should_notify: bool,
+ #[derive(Debug)]
+ pub struct FakeContact {
+ pub requester_id: UserId,
+ pub responder_id: UserId,
+ pub accepted: bool,
+ pub should_notify: bool,
}
impl FakeDb {
@@ -0,0 +1,1186 @@
+use super::db::*;
+use collections::HashMap;
+use gpui::executor::{Background, Deterministic};
+use std::{sync::Arc, time::Duration};
+use time::OffsetDateTime;
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_get_users_by_ids() {
+ for test_db in [
+ TestDb::postgres().await,
+ TestDb::fake(build_background_executor()),
+ ] {
+ let db = test_db.db();
+
+ let mut user_ids = Vec::new();
+ for i in 1..=4 {
+ user_ids.push(
+ db.create_user(
+ &format!("user{i}@example.com"),
+ false,
+ NewUserParams {
+ github_login: format!("user{i}"),
+ github_user_id: i,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id,
+ );
+ }
+
+ assert_eq!(
+ db.get_users_by_ids(user_ids.clone()).await.unwrap(),
+ vec![
+ User {
+ id: user_ids[0],
+ github_login: "user1".to_string(),
+ github_user_id: Some(1),
+ email_address: Some("user1@example.com".to_string()),
+ admin: false,
+ ..Default::default()
+ },
+ User {
+ id: user_ids[1],
+ github_login: "user2".to_string(),
+ github_user_id: Some(2),
+ email_address: Some("user2@example.com".to_string()),
+ admin: false,
+ ..Default::default()
+ },
+ User {
+ id: user_ids[2],
+ github_login: "user3".to_string(),
+ github_user_id: Some(3),
+ email_address: Some("user3@example.com".to_string()),
+ admin: false,
+ ..Default::default()
+ },
+ User {
+ id: user_ids[3],
+ github_login: "user4".to_string(),
+ github_user_id: Some(4),
+ email_address: Some("user4@example.com".to_string()),
+ admin: false,
+ ..Default::default()
+ }
+ ]
+ );
+ }
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_get_user_by_github_account() {
+ for test_db in [
+ TestDb::postgres().await,
+ TestDb::fake(build_background_executor()),
+ ] {
+ let db = test_db.db();
+ let user_id1 = db
+ .create_user(
+ "user1@example.com",
+ false,
+ NewUserParams {
+ github_login: "login1".into(),
+ github_user_id: 101,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let user_id2 = db
+ .create_user(
+ "user2@example.com",
+ false,
+ NewUserParams {
+ github_login: "login2".into(),
+ github_user_id: 102,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ let user = db
+ .get_user_by_github_account("login1", None)
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(user.id, user_id1);
+ assert_eq!(&user.github_login, "login1");
+ assert_eq!(user.github_user_id, Some(101));
+
+ assert!(db
+ .get_user_by_github_account("non-existent-login", None)
+ .await
+ .unwrap()
+ .is_none());
+
+ let user = db
+ .get_user_by_github_account("the-new-login2", Some(102))
+ .await
+ .unwrap()
+ .unwrap();
+ assert_eq!(user.id, user_id2);
+ assert_eq!(&user.github_login, "the-new-login2");
+ assert_eq!(user.github_user_id, Some(102));
+ }
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_worktree_extensions() {
+ let test_db = TestDb::postgres().await;
+ let db = test_db.db();
+
+ let user = db
+ .create_user(
+ "u1@example.com",
+ false,
+ NewUserParams {
+ github_login: "u1".into(),
+ github_user_id: 0,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let project = db.register_project(user).await.unwrap();
+
+ db.update_worktree_extensions(project, 100, Default::default())
+ .await
+ .unwrap();
+ db.update_worktree_extensions(
+ project,
+ 100,
+ [("rs".to_string(), 5), ("md".to_string(), 3)]
+ .into_iter()
+ .collect(),
+ )
+ .await
+ .unwrap();
+ db.update_worktree_extensions(
+ project,
+ 100,
+ [("rs".to_string(), 6), ("md".to_string(), 5)]
+ .into_iter()
+ .collect(),
+ )
+ .await
+ .unwrap();
+ db.update_worktree_extensions(
+ project,
+ 101,
+ [("ts".to_string(), 2), ("md".to_string(), 1)]
+ .into_iter()
+ .collect(),
+ )
+ .await
+ .unwrap();
+
+ assert_eq!(
+ db.get_project_extensions(project).await.unwrap(),
+ [
+ (
+ 100,
+ [("rs".into(), 6), ("md".into(), 5),]
+ .into_iter()
+ .collect::<HashMap<_, _>>()
+ ),
+ (
+ 101,
+ [("ts".into(), 2), ("md".into(), 1),]
+ .into_iter()
+ .collect::<HashMap<_, _>>()
+ )
+ ]
+ .into_iter()
+ .collect()
+ );
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_user_activity() {
+ let test_db = TestDb::postgres().await;
+ let db = test_db.db();
+
+ let mut user_ids = Vec::new();
+ for i in 0..=2 {
+ user_ids.push(
+ db.create_user(
+ &format!("user{i}@example.com"),
+ false,
+ NewUserParams {
+ github_login: format!("user{i}"),
+ github_user_id: i,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id,
+ );
+ }
+
+ let project_1 = db.register_project(user_ids[0]).await.unwrap();
+ db.update_worktree_extensions(
+ project_1,
+ 1,
+ HashMap::from_iter([("rs".into(), 5), ("md".into(), 7)]),
+ )
+ .await
+ .unwrap();
+ let project_2 = db.register_project(user_ids[1]).await.unwrap();
+ let t0 = OffsetDateTime::now_utc() - Duration::from_secs(60 * 60);
+
+ // User 2 opens a project
+ let t1 = t0 + Duration::from_secs(10);
+ db.record_user_activity(t0..t1, &[(user_ids[1], project_2)])
+ .await
+ .unwrap();
+
+ let t2 = t1 + Duration::from_secs(10);
+ db.record_user_activity(t1..t2, &[(user_ids[1], project_2)])
+ .await
+ .unwrap();
+
+ // User 1 joins the project
+ let t3 = t2 + Duration::from_secs(10);
+ db.record_user_activity(
+ t2..t3,
+ &[(user_ids[1], project_2), (user_ids[0], project_2)],
+ )
+ .await
+ .unwrap();
+
+ // User 1 opens another project
+ let t4 = t3 + Duration::from_secs(10);
+ db.record_user_activity(
+ t3..t4,
+ &[
+ (user_ids[1], project_2),
+ (user_ids[0], project_2),
+ (user_ids[0], project_1),
+ ],
+ )
+ .await
+ .unwrap();
+
+ // User 3 joins that project
+ let t5 = t4 + Duration::from_secs(10);
+ db.record_user_activity(
+ t4..t5,
+ &[
+ (user_ids[1], project_2),
+ (user_ids[0], project_2),
+ (user_ids[0], project_1),
+ (user_ids[2], project_1),
+ ],
+ )
+ .await
+ .unwrap();
+
+ // User 2 leaves
+ let t6 = t5 + Duration::from_secs(5);
+ db.record_user_activity(
+ t5..t6,
+ &[(user_ids[0], project_1), (user_ids[2], project_1)],
+ )
+ .await
+ .unwrap();
+
+ let t7 = t6 + Duration::from_secs(60);
+ let t8 = t7 + Duration::from_secs(10);
+ db.record_user_activity(t7..t8, &[(user_ids[0], project_1)])
+ .await
+ .unwrap();
+
+ assert_eq!(
+ db.get_top_users_activity_summary(t0..t6, 10).await.unwrap(),
+ &[
+ UserActivitySummary {
+ id: user_ids[0],
+ github_login: "user0".to_string(),
+ project_activity: vec![
+ ProjectActivitySummary {
+ id: project_1,
+ duration: Duration::from_secs(25),
+ max_collaborators: 2
+ },
+ ProjectActivitySummary {
+ id: project_2,
+ duration: Duration::from_secs(30),
+ max_collaborators: 2
+ }
+ ]
+ },
+ UserActivitySummary {
+ id: user_ids[1],
+ github_login: "user1".to_string(),
+ project_activity: vec![ProjectActivitySummary {
+ id: project_2,
+ duration: Duration::from_secs(50),
+ max_collaborators: 2
+ }]
+ },
+ UserActivitySummary {
+ id: user_ids[2],
+ github_login: "user2".to_string(),
+ project_activity: vec![ProjectActivitySummary {
+ id: project_1,
+ duration: Duration::from_secs(15),
+ max_collaborators: 2
+ }]
+ },
+ ]
+ );
+
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(56), false)
+ .await
+ .unwrap(),
+ 0
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(56), true)
+ .await
+ .unwrap(),
+ 0
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(54), false)
+ .await
+ .unwrap(),
+ 1
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(54), true)
+ .await
+ .unwrap(),
+ 1
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(30), false)
+ .await
+ .unwrap(),
+ 2
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(30), true)
+ .await
+ .unwrap(),
+ 2
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(10), false)
+ .await
+ .unwrap(),
+ 3
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t6, Duration::from_secs(10), true)
+ .await
+ .unwrap(),
+ 3
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t1, Duration::from_secs(5), false)
+ .await
+ .unwrap(),
+ 1
+ );
+ assert_eq!(
+ db.get_active_user_count(t0..t1, Duration::from_secs(5), true)
+ .await
+ .unwrap(),
+ 0
+ );
+
+ assert_eq!(
+ db.get_user_activity_timeline(t3..t6, user_ids[0])
+ .await
+ .unwrap(),
+ &[
+ UserActivityPeriod {
+ project_id: project_1,
+ start: t3,
+ end: t6,
+ extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]),
+ },
+ UserActivityPeriod {
+ project_id: project_2,
+ start: t3,
+ end: t5,
+ extensions: Default::default(),
+ },
+ ]
+ );
+ assert_eq!(
+ db.get_user_activity_timeline(t0..t8, user_ids[0])
+ .await
+ .unwrap(),
+ &[
+ UserActivityPeriod {
+ project_id: project_2,
+ start: t2,
+ end: t5,
+ extensions: Default::default(),
+ },
+ UserActivityPeriod {
+ project_id: project_1,
+ start: t3,
+ end: t6,
+ extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]),
+ },
+ UserActivityPeriod {
+ project_id: project_1,
+ start: t7,
+ end: t8,
+ extensions: HashMap::from_iter([("rs".to_string(), 5), ("md".to_string(), 7)]),
+ },
+ ]
+ );
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_recent_channel_messages() {
+ for test_db in [
+ TestDb::postgres().await,
+ TestDb::fake(build_background_executor()),
+ ] {
+ let db = test_db.db();
+ let user = db
+ .create_user(
+ "u@example.com",
+ false,
+ NewUserParams {
+ github_login: "u".into(),
+ github_user_id: 1,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let org = db.create_org("org", "org").await.unwrap();
+ let channel = db.create_org_channel(org, "channel").await.unwrap();
+ for i in 0..10 {
+ db.create_channel_message(channel, user, &i.to_string(), OffsetDateTime::now_utc(), i)
+ .await
+ .unwrap();
+ }
+
+ let messages = db.get_channel_messages(channel, 5, None).await.unwrap();
+ assert_eq!(
+ messages.iter().map(|m| &m.body).collect::<Vec<_>>(),
+ ["5", "6", "7", "8", "9"]
+ );
+
+ let prev_messages = db
+ .get_channel_messages(channel, 4, Some(messages[0].id))
+ .await
+ .unwrap();
+ assert_eq!(
+ prev_messages.iter().map(|m| &m.body).collect::<Vec<_>>(),
+ ["1", "2", "3", "4"]
+ );
+ }
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_channel_message_nonces() {
+ for test_db in [
+ TestDb::postgres().await,
+ TestDb::fake(build_background_executor()),
+ ] {
+ let db = test_db.db();
+ let user = db
+ .create_user(
+ "user@example.com",
+ false,
+ NewUserParams {
+ github_login: "user".into(),
+ github_user_id: 1,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+ let org = db.create_org("org", "org").await.unwrap();
+ let channel = db.create_org_channel(org, "channel").await.unwrap();
+
+ let msg1_id = db
+ .create_channel_message(channel, user, "1", OffsetDateTime::now_utc(), 1)
+ .await
+ .unwrap();
+ let msg2_id = db
+ .create_channel_message(channel, user, "2", OffsetDateTime::now_utc(), 2)
+ .await
+ .unwrap();
+ let msg3_id = db
+ .create_channel_message(channel, user, "3", OffsetDateTime::now_utc(), 1)
+ .await
+ .unwrap();
+ let msg4_id = db
+ .create_channel_message(channel, user, "4", OffsetDateTime::now_utc(), 2)
+ .await
+ .unwrap();
+
+ assert_ne!(msg1_id, msg2_id);
+ assert_eq!(msg1_id, msg3_id);
+ assert_eq!(msg2_id, msg4_id);
+ }
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_create_access_tokens() {
+ let test_db = TestDb::postgres().await;
+ let db = test_db.db();
+ let user = db
+ .create_user(
+ "u1@example.com",
+ false,
+ NewUserParams {
+ github_login: "u1".into(),
+ github_user_id: 1,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ db.create_access_token_hash(user, "h1", 3).await.unwrap();
+ db.create_access_token_hash(user, "h2", 3).await.unwrap();
+ assert_eq!(
+ db.get_access_token_hashes(user).await.unwrap(),
+ &["h2".to_string(), "h1".to_string()]
+ );
+
+ db.create_access_token_hash(user, "h3", 3).await.unwrap();
+ assert_eq!(
+ db.get_access_token_hashes(user).await.unwrap(),
+ &["h3".to_string(), "h2".to_string(), "h1".to_string(),]
+ );
+
+ db.create_access_token_hash(user, "h4", 3).await.unwrap();
+ assert_eq!(
+ db.get_access_token_hashes(user).await.unwrap(),
+ &["h4".to_string(), "h3".to_string(), "h2".to_string(),]
+ );
+
+ db.create_access_token_hash(user, "h5", 3).await.unwrap();
+ assert_eq!(
+ db.get_access_token_hashes(user).await.unwrap(),
+ &["h5".to_string(), "h4".to_string(), "h3".to_string()]
+ );
+}
+
+#[test]
+fn test_fuzzy_like_string() {
+ assert_eq!(PostgresDb::fuzzy_like_string("abcd"), "%a%b%c%d%");
+ assert_eq!(PostgresDb::fuzzy_like_string("x y"), "%x%y%");
+ assert_eq!(PostgresDb::fuzzy_like_string(" z "), "%z%");
+}
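For reference, a minimal helper consistent with the assertions above could look like the sketch below (the real `PostgresDb::fuzzy_like_string` may differ; only the expected outputs are taken from this test):

```rust
// Sketch only: build a SQL LIKE/ILIKE pattern with a wildcard between every
// non-whitespace character, so a query like "clr" can match "colorado".
fn fuzzy_like_string(string: &str) -> String {
    let mut pattern = String::with_capacity(string.len() * 2 + 1);
    for c in string.chars() {
        if !c.is_whitespace() {
            pattern.push('%');
            pattern.push(c);
        }
    }
    pattern.push('%');
    pattern
}
```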
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_fuzzy_search_users() {
+ let test_db = TestDb::postgres().await;
+ let db = test_db.db();
+ for (i, github_login) in [
+ "California",
+ "colorado",
+ "oregon",
+ "washington",
+ "florida",
+ "delaware",
+ "rhode-island",
+ ]
+ .into_iter()
+ .enumerate()
+ {
+ db.create_user(
+ &format!("{github_login}@example.com"),
+ false,
+ NewUserParams {
+ github_login: github_login.into(),
+ github_user_id: i as i32,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap();
+ }
+
+ assert_eq!(
+ fuzzy_search_user_names(db, "clr").await,
+ &["colorado", "California"]
+ );
+ assert_eq!(
+ fuzzy_search_user_names(db, "ro").await,
+ &["rhode-island", "colorado", "oregon"],
+ );
+
+ async fn fuzzy_search_user_names(db: &Arc<dyn Db>, query: &str) -> Vec<String> {
+ db.fuzzy_search_users(query, 10)
+ .await
+ .unwrap()
+ .into_iter()
+ .map(|user| user.github_login)
+ .collect::<Vec<_>>()
+ }
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_add_contacts() {
+ for test_db in [
+ TestDb::postgres().await,
+ TestDb::fake(build_background_executor()),
+ ] {
+ let db = test_db.db();
+
+ let mut user_ids = Vec::new();
+ for i in 0..3 {
+ user_ids.push(
+ db.create_user(
+ &format!("user{i}@example.com"),
+ false,
+ NewUserParams {
+ github_login: format!("user{i}"),
+ github_user_id: i,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id,
+ );
+ }
+
+ let user_1 = user_ids[0];
+ let user_2 = user_ids[1];
+ let user_3 = user_ids[2];
+
+ // User starts with no contacts
+ assert_eq!(db.get_contacts(user_1).await.unwrap(), &[]);
+
+ // User requests a contact. Both users see the pending request.
+ db.send_contact_request(user_1, user_2).await.unwrap();
+ assert!(!db.has_contact(user_1, user_2).await.unwrap());
+ assert!(!db.has_contact(user_2, user_1).await.unwrap());
+ assert_eq!(
+ db.get_contacts(user_1).await.unwrap(),
+ &[Contact::Outgoing { user_id: user_2 }],
+ );
+ assert_eq!(
+ db.get_contacts(user_2).await.unwrap(),
+ &[Contact::Incoming {
+ user_id: user_1,
+ should_notify: true
+ }]
+ );
+
+ // User 2 dismisses the contact request notification without accepting or rejecting.
+ // We shouldn't notify them again.
+ db.dismiss_contact_notification(user_1, user_2)
+ .await
+ .unwrap_err();
+ db.dismiss_contact_notification(user_2, user_1)
+ .await
+ .unwrap();
+ assert_eq!(
+ db.get_contacts(user_2).await.unwrap(),
+ &[Contact::Incoming {
+ user_id: user_1,
+ should_notify: false
+ }]
+ );
+
+ // User can't accept their own contact request
+ db.respond_to_contact_request(user_1, user_2, true)
+ .await
+ .unwrap_err();
+
+ // User accepts a contact request. Both users see the contact.
+ db.respond_to_contact_request(user_2, user_1, true)
+ .await
+ .unwrap();
+ assert_eq!(
+ db.get_contacts(user_1).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_2,
+ should_notify: true
+ }],
+ );
+ assert!(db.has_contact(user_1, user_2).await.unwrap());
+ assert!(db.has_contact(user_2, user_1).await.unwrap());
+ assert_eq!(
+ db.get_contacts(user_2).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_1,
+ should_notify: false,
+ }]
+ );
+
+ // Users cannot re-request existing contacts.
+ db.send_contact_request(user_1, user_2).await.unwrap_err();
+ db.send_contact_request(user_2, user_1).await.unwrap_err();
+
+        // Users can't dismiss notifications about their own acceptance of other users' requests.
+ db.dismiss_contact_notification(user_2, user_1)
+ .await
+ .unwrap_err();
+ assert_eq!(
+ db.get_contacts(user_1).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_2,
+ should_notify: true,
+ }]
+ );
+
+ // Users can dismiss notifications of other users accepting their requests.
+ db.dismiss_contact_notification(user_1, user_2)
+ .await
+ .unwrap();
+ assert_eq!(
+ db.get_contacts(user_1).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_2,
+ should_notify: false,
+ }]
+ );
+
+ // Users send each other concurrent contact requests and
+ // see that they are immediately accepted.
+ db.send_contact_request(user_1, user_3).await.unwrap();
+ db.send_contact_request(user_3, user_1).await.unwrap();
+ assert_eq!(
+ db.get_contacts(user_1).await.unwrap(),
+ &[
+ Contact::Accepted {
+ user_id: user_2,
+ should_notify: false,
+ },
+ Contact::Accepted {
+ user_id: user_3,
+ should_notify: false
+ }
+ ]
+ );
+ assert_eq!(
+ db.get_contacts(user_3).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_1,
+ should_notify: false
+ }],
+ );
+
+ // User declines a contact request. Both users see that it is gone.
+ db.send_contact_request(user_2, user_3).await.unwrap();
+ db.respond_to_contact_request(user_3, user_2, false)
+ .await
+ .unwrap();
+ assert!(!db.has_contact(user_2, user_3).await.unwrap());
+ assert!(!db.has_contact(user_3, user_2).await.unwrap());
+ assert_eq!(
+ db.get_contacts(user_2).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_1,
+ should_notify: false
+ }]
+ );
+ assert_eq!(
+ db.get_contacts(user_3).await.unwrap(),
+ &[Contact::Accepted {
+ user_id: user_1,
+ should_notify: false
+ }],
+ );
+ }
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_invite_codes() {
+ let postgres = TestDb::postgres().await;
+ let db = postgres.db();
+ let NewUserResult { user_id: user1, .. } = db
+ .create_user(
+ "user1@example.com",
+ false,
+ NewUserParams {
+ github_login: "user1".into(),
+ github_user_id: 0,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap();
+
+ // Initially, user 1 has no invite code
+ assert_eq!(db.get_invite_code_for_user(user1).await.unwrap(), None);
+
+ // Setting invite count to 0 when no code is assigned does not assign a new code
+ db.set_invite_count_for_user(user1, 0).await.unwrap();
+ assert!(db.get_invite_code_for_user(user1).await.unwrap().is_none());
+
+ // User 1 creates an invite code that can be used twice.
+ db.set_invite_count_for_user(user1, 2).await.unwrap();
+ let (invite_code, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
+ assert_eq!(invite_count, 2);
+
+ // User 2 redeems the invite code and becomes a contact of user 1.
+ let user2_invite = db
+ .create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
+ .await
+ .unwrap();
+ let NewUserResult {
+ user_id: user2,
+ inviting_user_id,
+ signup_device_id,
+ metrics_id,
+ } = db
+ .create_user_from_invite(
+ &user2_invite,
+ NewUserParams {
+ github_login: "user2".into(),
+ github_user_id: 2,
+ invite_count: 7,
+ },
+ )
+ .await
+ .unwrap();
+ let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
+ assert_eq!(invite_count, 1);
+ assert_eq!(inviting_user_id, Some(user1));
+ assert_eq!(signup_device_id.unwrap(), "user-2-device-id");
+ assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id);
+ assert_eq!(
+ db.get_contacts(user1).await.unwrap(),
+ [Contact::Accepted {
+ user_id: user2,
+ should_notify: true
+ }]
+ );
+ assert_eq!(
+ db.get_contacts(user2).await.unwrap(),
+ [Contact::Accepted {
+ user_id: user1,
+ should_notify: false
+ }]
+ );
+ assert_eq!(
+ db.get_invite_code_for_user(user2).await.unwrap().unwrap().1,
+ 7
+ );
+
+ // User 3 redeems the invite code and becomes a contact of user 1.
+ let user3_invite = db
+ .create_invite_from_code(&invite_code, "user3@example.com", None)
+ .await
+ .unwrap();
+ let NewUserResult {
+ user_id: user3,
+ inviting_user_id,
+ signup_device_id,
+ ..
+ } = db
+ .create_user_from_invite(
+ &user3_invite,
+ NewUserParams {
+ github_login: "user-3".into(),
+ github_user_id: 3,
+ invite_count: 3,
+ },
+ )
+ .await
+ .unwrap();
+ let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
+ assert_eq!(invite_count, 0);
+ assert_eq!(inviting_user_id, Some(user1));
+ assert!(signup_device_id.is_none());
+ assert_eq!(
+ db.get_contacts(user1).await.unwrap(),
+ [
+ Contact::Accepted {
+ user_id: user2,
+ should_notify: true
+ },
+ Contact::Accepted {
+ user_id: user3,
+ should_notify: true
+ }
+ ]
+ );
+ assert_eq!(
+ db.get_contacts(user3).await.unwrap(),
+ [Contact::Accepted {
+ user_id: user1,
+ should_notify: false
+ }]
+ );
+ assert_eq!(
+ db.get_invite_code_for_user(user3).await.unwrap().unwrap().1,
+ 3
+ );
+
+    // Trying to redeem the code for the third time results in an error.
+ db.create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
+ .await
+ .unwrap_err();
+
+ // Invite count can be updated after the code has been created.
+ db.set_invite_count_for_user(user1, 2).await.unwrap();
+ let (latest_code, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
+ assert_eq!(latest_code, invite_code); // Invite code doesn't change when we increment above 0
+ assert_eq!(invite_count, 2);
+
+ // User 4 can now redeem the invite code and becomes a contact of user 1.
+ let user4_invite = db
+ .create_invite_from_code(&invite_code, "user4@example.com", Some("user-4-device-id"))
+ .await
+ .unwrap();
+ let user4 = db
+ .create_user_from_invite(
+ &user4_invite,
+ NewUserParams {
+ github_login: "user-4".into(),
+ github_user_id: 4,
+ invite_count: 5,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
+
+ let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
+ assert_eq!(invite_count, 1);
+ assert_eq!(
+ db.get_contacts(user1).await.unwrap(),
+ [
+ Contact::Accepted {
+ user_id: user2,
+ should_notify: true
+ },
+ Contact::Accepted {
+ user_id: user3,
+ should_notify: true
+ },
+ Contact::Accepted {
+ user_id: user4,
+ should_notify: true
+ }
+ ]
+ );
+ assert_eq!(
+ db.get_contacts(user4).await.unwrap(),
+ [Contact::Accepted {
+ user_id: user1,
+ should_notify: false
+ }]
+ );
+ assert_eq!(
+ db.get_invite_code_for_user(user4).await.unwrap().unwrap().1,
+ 5
+ );
+
+ // An existing user cannot redeem invite codes.
+ db.create_invite_from_code(&invite_code, "user2@example.com", Some("user-2-device-id"))
+ .await
+ .unwrap_err();
+ let (_, invite_count) = db.get_invite_code_for_user(user1).await.unwrap().unwrap();
+ assert_eq!(invite_count, 1);
+}
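The test above exercises the new two-step redemption flow: an `Invite` is minted from the code, then the user is created from that invite. A condensed sketch of the happy path, using only the calls shown in this test (error handling elided; `code` and `inviter` are assumed to come from `get_invite_code_for_user`):

```rust
// Sketch of the redemption flow; not a separate API, just the calls above in order.
let invite = db
    .create_invite_from_code(&code, "new-user@example.com", Some("device-id"))
    .await?;
let NewUserResult { inviting_user_id, .. } = db
    .create_user_from_invite(
        &invite,
        NewUserParams {
            github_login: "new-user".into(),
            github_user_id: 42,
            invite_count: 5,
        },
    )
    .await?;
assert_eq!(inviting_user_id, Some(inviter));
```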
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_signups() {
+ let postgres = TestDb::postgres().await;
+ let db = postgres.db();
+
+ // people sign up on the waitlist
+ for i in 0..8 {
+ db.create_signup(Signup {
+ email_address: format!("person-{i}@example.com"),
+ platform_mac: true,
+ platform_linux: i % 2 == 0,
+ platform_windows: i % 4 == 0,
+ editor_features: vec!["speed".into()],
+ programming_languages: vec!["rust".into(), "c".into()],
+ device_id: Some(format!("device_id_{i}")),
+ })
+ .await
+ .unwrap();
+ }
+
+ assert_eq!(
+ db.get_waitlist_summary().await.unwrap(),
+ WaitlistSummary {
+ count: 8,
+ mac_count: 8,
+ linux_count: 4,
+ windows_count: 2,
+ }
+ );
+
+ // retrieve the next batch of signup emails to send
+ let signups_batch1 = db.get_unsent_invites(3).await.unwrap();
+ let addresses = signups_batch1
+ .iter()
+ .map(|s| &s.email_address)
+ .collect::<Vec<_>>();
+ assert_eq!(
+ addresses,
+ &[
+ "person-0@example.com",
+ "person-1@example.com",
+ "person-2@example.com"
+ ]
+ );
+ assert_ne!(
+ signups_batch1[0].email_confirmation_code,
+ signups_batch1[1].email_confirmation_code
+ );
+
+ // the waitlist isn't updated until we record that the emails
+ // were successfully sent.
+ let signups_batch = db.get_unsent_invites(3).await.unwrap();
+ assert_eq!(signups_batch, signups_batch1);
+
+ // once the emails go out, we can retrieve the next batch
+ // of signups.
+ db.record_sent_invites(&signups_batch1).await.unwrap();
+ let signups_batch2 = db.get_unsent_invites(3).await.unwrap();
+ let addresses = signups_batch2
+ .iter()
+ .map(|s| &s.email_address)
+ .collect::<Vec<_>>();
+ assert_eq!(
+ addresses,
+ &[
+ "person-3@example.com",
+ "person-4@example.com",
+ "person-5@example.com"
+ ]
+ );
+
+ // the sent invites are excluded from the summary.
+ assert_eq!(
+ db.get_waitlist_summary().await.unwrap(),
+ WaitlistSummary {
+ count: 5,
+ mac_count: 5,
+ linux_count: 2,
+ windows_count: 1,
+ }
+ );
+
+ // user completes the signup process by providing their
+ // github account.
+ let NewUserResult {
+ user_id,
+ inviting_user_id,
+ signup_device_id,
+ ..
+ } = db
+ .create_user_from_invite(
+ &Invite {
+ email_address: signups_batch1[0].email_address.clone(),
+ email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(),
+ },
+ NewUserParams {
+ github_login: "person-0".into(),
+ github_user_id: 0,
+ invite_count: 5,
+ },
+ )
+ .await
+ .unwrap();
+ let user = db.get_user_by_id(user_id).await.unwrap().unwrap();
+ assert!(inviting_user_id.is_none());
+ assert_eq!(user.github_login, "person-0");
+ assert_eq!(user.email_address.as_deref(), Some("person-0@example.com"));
+ assert_eq!(user.invite_count, 5);
+ assert_eq!(signup_device_id.unwrap(), "device_id_0");
+
+ // cannot redeem the same signup again.
+ db.create_user_from_invite(
+ &Invite {
+ email_address: signups_batch1[0].email_address.clone(),
+ email_confirmation_code: signups_batch1[0].email_confirmation_code.clone(),
+ },
+ NewUserParams {
+ github_login: "some-other-github_account".into(),
+ github_user_id: 1,
+ invite_count: 5,
+ },
+ )
+ .await
+ .unwrap_err();
+
+ // cannot redeem a signup with the wrong confirmation code.
+ db.create_user_from_invite(
+ &Invite {
+ email_address: signups_batch1[1].email_address.clone(),
+ email_confirmation_code: "the-wrong-code".to_string(),
+ },
+ NewUserParams {
+ github_login: "person-1".into(),
+ github_user_id: 2,
+ invite_count: 5,
+ },
+ )
+ .await
+ .unwrap_err();
+}
+
+#[tokio::test(flavor = "multi_thread")]
+async fn test_metrics_id() {
+ let postgres = TestDb::postgres().await;
+ let db = postgres.db();
+
+ let NewUserResult {
+ user_id: user1,
+ metrics_id: metrics_id1,
+ ..
+ } = db
+ .create_user(
+ "person1@example.com",
+ false,
+ NewUserParams {
+ github_login: "person1".into(),
+ github_user_id: 101,
+ invite_count: 5,
+ },
+ )
+ .await
+ .unwrap();
+ let NewUserResult {
+ user_id: user2,
+ metrics_id: metrics_id2,
+ ..
+ } = db
+ .create_user(
+ "person2@example.com",
+ false,
+ NewUserParams {
+ github_login: "person2".into(),
+ github_user_id: 102,
+ invite_count: 5,
+ },
+ )
+ .await
+ .unwrap();
+
+ assert_eq!(db.get_user_metrics_id(user1).await.unwrap(), metrics_id1);
+ assert_eq!(db.get_user_metrics_id(user2).await.unwrap(), metrics_id2);
+ assert_eq!(metrics_id1.len(), 36);
+ assert_eq!(metrics_id2.len(), 36);
+ assert_ne!(metrics_id1, metrics_id2);
+}
+
+fn build_background_executor() -> Arc<Background> {
+ Deterministic::new(0).build_background()
+}
@@ -1,5 +1,5 @@
use crate::{
- db::{tests::TestDb, ProjectId, UserId},
+ db::{NewUserParams, ProjectId, TestDb, UserId},
rpc::{Executor, Server, Store},
AppState,
};
@@ -52,6 +52,7 @@ use std::{
time::Duration,
};
use theme::ThemeRegistry;
+use unindent::Unindent as _;
use workspace::{Item, SplitDirection, ToggleFollow, Workspace};
#[ctor::ctor]
@@ -329,6 +330,7 @@ async fn test_room_uniqueness(
})
.await
.unwrap();
+ deterministic.run_until_parked();
let call_b2 = incoming_call_b.next().await.unwrap().unwrap();
assert_eq!(call_b2.caller.github_login, "user_c");
}
@@ -1174,6 +1176,258 @@ async fn test_propagate_saves_and_fs_changes(
.await;
}
+#[gpui::test(iterations = 10)]
+async fn test_git_diff_base_change(
+ executor: Arc<Deterministic>,
+ cx_a: &mut TestAppContext,
+ cx_b: &mut TestAppContext,
+) {
+ executor.forbid_parking();
+ let mut server = TestServer::start(cx_a.foreground(), cx_a.background()).await;
+ let client_a = server.create_client(cx_a, "user_a").await;
+ let client_b = server.create_client(cx_b, "user_b").await;
+ server
+ .create_room(&mut [(&client_a, cx_a), (&client_b, cx_b)])
+ .await;
+ let active_call_a = cx_a.read(ActiveCall::global);
+
+ client_a
+ .fs
+ .insert_tree(
+ "/dir",
+ json!({
+ ".git": {},
+ "sub": {
+ ".git": {},
+ "b.txt": "
+ one
+ two
+ three
+ ".unindent(),
+ },
+ "a.txt": "
+ one
+ two
+ three
+ ".unindent(),
+ }),
+ )
+ .await;
+
+ let (project_local, worktree_id) = client_a.build_local_project("/dir", cx_a).await;
+ let project_id = active_call_a
+ .update(cx_a, |call, cx| {
+ call.share_project(project_local.clone(), cx)
+ })
+ .await
+ .unwrap();
+
+ let project_remote = client_b.build_remote_project(project_id, cx_b).await;
+
+ let diff_base = "
+ one
+ three
+ "
+ .unindent();
+
+ let new_diff_base = "
+ one
+ two
+ "
+ .unindent();
+
+ client_a
+ .fs
+ .as_fake()
+ .set_index_for_repo(
+ Path::new("/dir/.git"),
+ &[(Path::new("a.txt"), diff_base.clone())],
+ )
+ .await;
+
+ // Create the buffer
+ let buffer_local_a = project_local
+ .update(cx_a, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .await
+ .unwrap();
+
+ // Wait for it to catch up to the new diff
+ executor.run_until_parked();
+
+ // Smoke test diffing
+ buffer_local_a.read_with(cx_a, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(1..2, "", "two\n")],
+ );
+ });
+
+ // Create remote buffer
+ let buffer_remote_a = project_remote
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "a.txt"), cx))
+ .await
+ .unwrap();
+
+    // Wait for the remote buffer to catch up to the new diff
+ executor.run_until_parked();
+
+ // Smoke test diffing
+ buffer_remote_a.read_with(cx_b, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(1..2, "", "two\n")],
+ );
+ });
+
+ client_a
+ .fs
+ .as_fake()
+ .set_index_for_repo(
+ Path::new("/dir/.git"),
+ &[(Path::new("a.txt"), new_diff_base.clone())],
+ )
+ .await;
+
+ // Wait for buffer_local_a to receive it
+ executor.run_until_parked();
+
+ // Smoke test new diffing
+ buffer_local_a.read_with(cx_a, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
+
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(2..3, "", "three\n")],
+ );
+ });
+
+    // Smoke test the new diff on the remote buffer
+ buffer_remote_a.read_with(cx_b, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(2..3, "", "three\n")],
+ );
+ });
+
+    // Nested git dir
+
+ let diff_base = "
+ one
+ three
+ "
+ .unindent();
+
+ let new_diff_base = "
+ one
+ two
+ "
+ .unindent();
+
+ client_a
+ .fs
+ .as_fake()
+ .set_index_for_repo(
+ Path::new("/dir/sub/.git"),
+ &[(Path::new("b.txt"), diff_base.clone())],
+ )
+ .await;
+
+ // Create the buffer
+ let buffer_local_b = project_local
+ .update(cx_a, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
+ .await
+ .unwrap();
+
+ // Wait for it to catch up to the new diff
+ executor.run_until_parked();
+
+ // Smoke test diffing
+ buffer_local_b.read_with(cx_a, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(1..2, "", "two\n")],
+ );
+ });
+
+ // Create remote buffer
+ let buffer_remote_b = project_remote
+ .update(cx_b, |p, cx| p.open_buffer((worktree_id, "sub/b.txt"), cx))
+ .await
+ .unwrap();
+
+    // Wait for the remote buffer to catch up to the new diff
+ executor.run_until_parked();
+
+ // Smoke test diffing
+ buffer_remote_b.read_with(cx_b, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(diff_base.as_ref()));
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(1..2, "", "two\n")],
+ );
+ });
+
+ client_a
+ .fs
+ .as_fake()
+ .set_index_for_repo(
+ Path::new("/dir/sub/.git"),
+ &[(Path::new("b.txt"), new_diff_base.clone())],
+ )
+ .await;
+
+ // Wait for buffer_local_b to receive it
+ executor.run_until_parked();
+
+ // Smoke test new diffing
+ buffer_local_b.read_with(cx_a, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
+ println!("{:?}", buffer.as_rope().to_string());
+ println!("{:?}", buffer.diff_base());
+ println!(
+ "{:?}",
+ buffer
+ .snapshot()
+ .git_diff_hunks_in_range(0..4)
+ .collect::<Vec<_>>()
+ );
+
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(2..3, "", "three\n")],
+ );
+ });
+
+    // Smoke test the new diff on the remote buffer
+ buffer_remote_b.read_with(cx_b, |buffer, _| {
+ assert_eq!(buffer.diff_base(), Some(new_diff_base.as_ref()));
+ git::diff::assert_hunks(
+ buffer.snapshot().git_diff_hunks_in_range(0..4),
+ &buffer,
+ &diff_base,
+ &[(2..3, "", "three\n")],
+ );
+ });
+}
+
#[gpui::test(iterations = 10)]
async fn test_fs_operations(
executor: Arc<Deterministic>,
@@ -5092,7 +5346,19 @@ async fn test_random_collaboration(
let mut server = TestServer::start(cx.foreground(), cx.background()).await;
let db = server.app_state.db.clone();
- let room_creator_user_id = db.create_user("room-creator", None, false).await.unwrap();
+ let room_creator_user_id = db
+ .create_user(
+ "room-creator@example.com",
+ false,
+ NewUserParams {
+ github_login: "room-creator".into(),
+ github_user_id: 0,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
let mut available_guests = vec![
"guest-1".to_string(),
"guest-2".to_string(),
@@ -5100,11 +5366,24 @@ async fn test_random_collaboration(
"guest-4".to_string(),
];
- for username in Some(&"host".to_string())
+ for (ix, username) in Some(&"host".to_string())
.into_iter()
.chain(&available_guests)
+ .enumerate()
{
- let user_id = db.create_user(username, None, false).await.unwrap();
+ let user_id = db
+ .create_user(
+ &format!("{username}@example.com"),
+ false,
+ NewUserParams {
+ github_login: username.into(),
+ github_user_id: (ix + 1) as i32,
+ invite_count: 0,
+ },
+ )
+ .await
+ .unwrap()
+ .user_id;
server
.app_state
.db
@@ -5632,18 +5911,31 @@ impl TestServer {
});
let http = FakeHttpClient::with_404_response();
- let user_id = if let Ok(Some(user)) = self.app_state.db.get_user_by_github_login(name).await
+ let user_id = if let Ok(Some(user)) = self
+ .app_state
+ .db
+ .get_user_by_github_account(name, None)
+ .await
{
user.id
} else {
self.app_state
.db
- .create_user(name, None, false)
+ .create_user(
+ &format!("{name}@example.com"),
+ false,
+ NewUserParams {
+ github_login: name.into(),
+ github_user_id: 0,
+ invite_count: 0,
+ },
+ )
.await
.unwrap()
+ .user_id
};
let client_name = name.to_string();
- let mut client = Client::new(http.clone());
+ let mut client = cx.read(|cx| Client::new(http.clone(), cx));
let server = self.server.clone();
let db = self.app_state.db.clone();
let connection_killers = self.connection_killers.clone();
@@ -4,6 +4,8 @@ mod db;
mod env;
mod rpc;
+#[cfg(test)]
+mod db_tests;
#[cfg(test)]
mod integration_tests;
@@ -206,7 +206,9 @@ impl Server {
.add_request_handler(Server::follow)
.add_message_handler(Server::unfollow)
.add_message_handler(Server::update_followers)
- .add_request_handler(Server::get_channel_messages);
+ .add_request_handler(Server::get_channel_messages)
+ .add_message_handler(Server::update_diff_base)
+ .add_request_handler(Server::get_private_user_info);
Arc::new(server)
}
@@ -528,27 +530,30 @@ impl Server {
pub async fn invite_code_redeemed(
self: &Arc<Self>,
- code: &str,
+ inviter_id: UserId,
invitee_id: UserId,
) -> Result<()> {
- let user = self.app_state.db.get_user_for_invite_code(code).await?;
- let store = self.store().await;
- let invitee_contact = store.contact_for_user(invitee_id, true);
- for connection_id in store.connection_ids_for_user(user.id) {
- self.peer.send(
- connection_id,
- proto::UpdateContacts {
- contacts: vec![invitee_contact.clone()],
- ..Default::default()
- },
- )?;
- self.peer.send(
- connection_id,
- proto::UpdateInviteInfo {
- url: format!("{}{}", self.app_state.invite_link_prefix, code),
- count: user.invite_count as u32,
- },
- )?;
+ if let Some(user) = self.app_state.db.get_user_by_id(inviter_id).await? {
+ if let Some(code) = &user.invite_code {
+ let store = self.store().await;
+ let invitee_contact = store.contact_for_user(invitee_id, true);
+ for connection_id in store.connection_ids_for_user(inviter_id) {
+ self.peer.send(
+ connection_id,
+ proto::UpdateContacts {
+ contacts: vec![invitee_contact.clone()],
+ ..Default::default()
+ },
+ )?;
+ self.peer.send(
+ connection_id,
+ proto::UpdateInviteInfo {
+ url: format!("{}{}", self.app_state.invite_link_prefix, &code),
+ count: user.invite_count as u32,
+ },
+ )?;
+ }
+ }
}
Ok(())
}
@@ -1427,7 +1432,7 @@ impl Server {
let users = match query.len() {
0 => vec![],
1 | 2 => db
- .get_user_by_github_login(&query)
+ .get_user_by_github_account(&query, None)
.await?
.into_iter()
.collect(),
@@ -1750,6 +1755,44 @@ impl Server {
Ok(())
}
+ async fn update_diff_base(
+ self: Arc<Server>,
+ request: TypedEnvelope<proto::UpdateDiffBase>,
+ ) -> Result<()> {
+ let receiver_ids = self.store().await.project_connection_ids(
+ ProjectId::from_proto(request.payload.project_id),
+ request.sender_id,
+ )?;
+ broadcast(request.sender_id, receiver_ids, |connection_id| {
+ self.peer
+ .forward_send(request.sender_id, connection_id, request.payload.clone())
+ });
+ Ok(())
+ }
+
+ async fn get_private_user_info(
+ self: Arc<Self>,
+ request: TypedEnvelope<proto::GetPrivateUserInfo>,
+ response: Response<proto::GetPrivateUserInfo>,
+ ) -> Result<()> {
+ let user_id = self
+ .store()
+ .await
+ .user_id_for_connection(request.sender_id)?;
+ let metrics_id = self.app_state.db.get_user_metrics_id(user_id).await?;
+ let user = self
+ .app_state
+ .db
+ .get_user_by_id(user_id)
+ .await?
+ .ok_or_else(|| anyhow!("user not found"))?;
+ response.send(proto::GetPrivateUserInfoResponse {
+ metrics_id,
+ staff: user.admin,
+ })?;
+ Ok(())
+ }
+
pub(crate) async fn store(&self) -> StoreGuard<'_> {
#[cfg(test)]
tokio::task::yield_now().await;
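
The new `update_diff_base` handler follows the same broadcast pattern as the other project message handlers: collect the connection ids attached to the project, then forward the sender's payload to every connection except the sender's own. A minimal sketch of that fan-out; `ConnectionId` and the `send` callback here are illustrative stand-ins, not the rpc crate's actual types:

```rust
// Hypothetical stand-ins for illustration; not the rpc crate's actual types.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
struct ConnectionId(u32);

/// Forward a payload to every receiver except the original sender, collecting
/// per-connection errors instead of aborting on the first failure.
fn broadcast<E>(
    sender: ConnectionId,
    receivers: impl IntoIterator<Item = ConnectionId>,
    mut send: impl FnMut(ConnectionId) -> Result<(), E>,
) -> Vec<E> {
    receivers
        .into_iter()
        .filter(|id| *id != sender)
        .filter_map(|id| send(id).err())
        .collect()
}

fn main() {
    let errors = broadcast(
        ConnectionId(1),
        [ConnectionId(1), ConnectionId(2), ConnectionId(3)],
        |id| {
            println!("forwarding UpdateDiffBase to {id:?}");
            Ok::<(), ()>(())
        },
    );
    assert!(errors.is_empty());
}
```
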
@@ -0,0 +1,22 @@
+[package]
+name = "db"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+path = "src/db.rs"
+doctest = false
+
+[features]
+test-support = []
+
+[dependencies]
+collections = { path = "../collections" }
+anyhow = "1.0.57"
+async-trait = "0.1"
+parking_lot = "0.11.1"
+rocksdb = "0.18"
+
+[dev-dependencies]
+gpui = { path = "../gpui", features = ["test-support"] }
+tempdir = { version = "0.3.7" }
@@ -25,6 +25,7 @@ clock = { path = "../clock" }
collections = { path = "../collections" }
context_menu = { path = "../context_menu" }
fuzzy = { path = "../fuzzy" }
+git = { path = "../git" }
gpui = { path = "../gpui" }
language = { path = "../language" }
lsp = { path = "../lsp" }
@@ -51,6 +52,8 @@ serde = { version = "1.0", features = ["derive", "rc"] }
smallvec = { version = "1.6", features = ["union"] }
smol = "1.2"
tree-sitter-rust = { version = "*", optional = true }
+tree-sitter-html = { version = "*", optional = true }
+tree-sitter-javascript = { version = "*", optional = true }
[dev-dependencies]
text = { path = "../text", features = ["test-support"] }
@@ -67,3 +70,5 @@ rand = "0.8"
unindent = "0.1.7"
tree-sitter = "0.20"
tree-sitter-rust = "0.20"
+tree-sitter-html = "0.19"
+tree-sitter-javascript = "0.20"
@@ -274,6 +274,7 @@ impl FoldMap {
if buffer.edit_count() != new_buffer.edit_count()
|| buffer.parse_count() != new_buffer.parse_count()
|| buffer.diagnostics_update_count() != new_buffer.diagnostics_update_count()
+ || buffer.git_diff_update_count() != new_buffer.git_diff_update_count()
|| buffer.trailing_excerpt_update_count()
!= new_buffer.trailing_excerpt_update_count()
{
@@ -9,6 +9,8 @@ pub mod movement;
mod multi_buffer;
pub mod selections_collection;
+#[cfg(test)]
+mod editor_tests;
#[cfg(any(test, feature = "test-support"))]
pub mod test;
@@ -30,6 +32,7 @@ use gpui::{
geometry::vector::{vec2f, Vector2F},
impl_actions, impl_internal_actions,
platform::CursorStyle,
+ serde_json::json,
text_layout, AnyViewHandle, AppContext, AsyncAppContext, ClipboardItem, Element, ElementBox,
Entity, ModelHandle, MouseButton, MutableAppContext, RenderContext, Subscription, Task, View,
ViewContext, ViewHandle, WeakViewHandle,
@@ -409,7 +412,7 @@ pub struct Editor {
add_selections_state: Option<AddSelectionsState>,
select_next_state: Option<SelectNextState>,
selection_history: SelectionHistory,
- autoclose_stack: InvalidationStack<BracketPairState>,
+ autoclose_regions: Vec<AutocloseRegion>,
snippet_stack: InvalidationStack<SnippetState>,
select_larger_syntax_node_stack: Vec<Box<[Selection<usize>]>>,
ime_transaction: Option<TransactionId>,
@@ -568,8 +571,10 @@ struct SelectNextState {
done: bool,
}
-struct BracketPairState {
- ranges: Vec<Range<Anchor>>,
+#[derive(Debug)]
+struct AutocloseRegion {
+ selection_id: usize,
+ range: Range<Anchor>,
pair: BracketPair,
}
@@ -1009,7 +1014,7 @@ impl Editor {
add_selections_state: None,
select_next_state: None,
selection_history: Default::default(),
- autoclose_stack: Default::default(),
+ autoclose_regions: Default::default(),
snippet_stack: Default::default(),
select_larger_syntax_node_stack: Vec::new(),
ime_transaction: Default::default(),
@@ -1058,6 +1063,7 @@ impl Editor {
let editor_created_event = EditorCreated(cx.handle());
cx.emit_global(editor_created_event);
+ this.report_event("open editor", cx);
this
}
@@ -1114,7 +1120,7 @@ impl Editor {
&self,
point: T,
cx: &'a AppContext,
- ) -> Option<&'a Arc<Language>> {
+ ) -> Option<Arc<Language>> {
self.buffer.read(cx).language_at(point, cx)
}
@@ -1399,8 +1405,7 @@ impl Editor {
self.add_selections_state = None;
self.select_next_state = None;
self.select_larger_syntax_node_stack.clear();
- self.autoclose_stack
- .invalidate(&self.selections.disjoint_anchors(), buffer);
+ self.invalidate_autoclose_regions(&self.selections.disjoint_anchors(), buffer);
self.snippet_stack
.invalidate(&self.selections.disjoint_anchors(), buffer);
self.take_rename(false, cx);
@@ -1847,15 +1852,159 @@ impl Editor {
return;
}
- if !self.skip_autoclose_end(text, cx) {
- self.transact(cx, |this, cx| {
- if !this.surround_with_bracket_pair(text, cx) {
- this.insert(text, cx);
- this.autoclose_bracket_pairs(cx);
+ let text: Arc<str> = text.into();
+ let selections = self.selections.all_adjusted(cx);
+ let mut edits = Vec::new();
+ let mut new_selections = Vec::with_capacity(selections.len());
+ let mut new_autoclose_regions = Vec::new();
+ let snapshot = self.buffer.read(cx).read(cx);
+
+ for (selection, autoclose_region) in
+ self.selections_with_autoclose_regions(selections, &snapshot)
+ {
+ if let Some(language) = snapshot.language_at(selection.head()) {
+ // Determine if the inserted text matches the opening or closing
+ // bracket of any of this language's bracket pairs.
+ let mut bracket_pair = None;
+ let mut is_bracket_pair_start = false;
+ for pair in language.brackets() {
+ if pair.start.ends_with(text.as_ref()) {
+ bracket_pair = Some(pair.clone());
+ is_bracket_pair_start = true;
+ break;
+ } else if pair.end.as_str() == text.as_ref() {
+ bracket_pair = Some(pair.clone());
+ break;
+ }
}
- });
- self.trigger_completion_on_input(text, cx);
+
+ if let Some(bracket_pair) = bracket_pair {
+ if selection.is_empty() {
+ if is_bracket_pair_start {
+ let prefix_len = bracket_pair.start.len() - text.len();
+
+ // If the inserted text is a suffix of an opening bracket and the
+ // selection is preceded by the rest of the opening bracket, then
+ // insert the closing bracket.
+ let following_text_allows_autoclose = snapshot
+ .chars_at(selection.start)
+ .next()
+ .map_or(true, |c| language.should_autoclose_before(c));
+ let preceding_text_matches_prefix = prefix_len == 0
+ || (selection.start.column >= (prefix_len as u32)
+ && snapshot.contains_str_at(
+ Point::new(
+ selection.start.row,
+ selection.start.column - (prefix_len as u32),
+ ),
+ &bracket_pair.start[..prefix_len],
+ ));
+ if following_text_allows_autoclose && preceding_text_matches_prefix {
+ let anchor = snapshot.anchor_before(selection.end);
+ new_selections
+ .push((selection.map(|_| anchor.clone()), text.len()));
+ new_autoclose_regions.push((
+ anchor.clone(),
+ text.len(),
+ selection.id,
+ bracket_pair.clone(),
+ ));
+ edits.push((
+ selection.range(),
+ format!("{}{}", text, bracket_pair.end).into(),
+ ));
+ continue;
+ }
+ } else if let Some(region) = autoclose_region {
+ // If the selection is followed by an auto-inserted closing bracket,
+ // then don't insert anything else; just move the selection past the
+ // closing bracket.
+ let should_skip = selection.end == region.range.end.to_point(&snapshot);
+ if should_skip {
+ let anchor = snapshot.anchor_after(selection.end);
+ new_selections.push((
+ selection.map(|_| anchor.clone()),
+ region.pair.end.len(),
+ ));
+ continue;
+ }
+ }
+ }
+ // If an opening bracket is typed while text is selected, then
+ // surround that text with the bracket pair.
+ else if is_bracket_pair_start {
+ edits.push((selection.start..selection.start, text.clone()));
+ edits.push((
+ selection.end..selection.end,
+ bracket_pair.end.as_str().into(),
+ ));
+ new_selections.push((
+ Selection {
+ id: selection.id,
+ start: snapshot.anchor_after(selection.start),
+ end: snapshot.anchor_before(selection.end),
+ reversed: selection.reversed,
+ goal: selection.goal,
+ },
+ 0,
+ ));
+ continue;
+ }
+ }
+ }
+
+ // If not handling any auto-close operation, then just replace the selected
+ // text with the given input and move the selection to the end of the
+ // newly inserted text.
+ let anchor = snapshot.anchor_after(selection.end);
+ new_selections.push((selection.map(|_| anchor.clone()), 0));
+ edits.push((selection.start..selection.end, text.clone()));
}
+
+ drop(snapshot);
+ self.transact(cx, |this, cx| {
+ this.buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, Some(AutoindentMode::EachLine), cx);
+ });
+
+ let new_anchor_selections = new_selections.iter().map(|e| &e.0);
+ let new_selection_deltas = new_selections.iter().map(|e| e.1);
+ let snapshot = this.buffer.read(cx).read(cx);
+ let new_selections = resolve_multiple::<usize, _>(new_anchor_selections, &snapshot)
+ .zip(new_selection_deltas)
+ .map(|(selection, delta)| selection.map(|e| e + delta))
+ .collect::<Vec<_>>();
+
+ let mut i = 0;
+ for (position, delta, selection_id, pair) in new_autoclose_regions {
+ let position = position.to_offset(&snapshot) + delta;
+ let start = snapshot.anchor_before(position);
+ let end = snapshot.anchor_after(position);
+ while let Some(existing_state) = this.autoclose_regions.get(i) {
+ match existing_state.range.start.cmp(&start, &snapshot) {
+ Ordering::Less => i += 1,
+ Ordering::Greater => break,
+ Ordering::Equal => match end.cmp(&existing_state.range.end, &snapshot) {
+ Ordering::Less => i += 1,
+ Ordering::Equal => break,
+ Ordering::Greater => break,
+ },
+ }
+ }
+ this.autoclose_regions.insert(
+ i,
+ AutocloseRegion {
+ selection_id,
+ range: start..end,
+ pair,
+ },
+ );
+ }
+
+ drop(snapshot);
+ this.change_selections(None, cx, |s| s.select(new_selections));
+ this.trigger_completion_on_input(&text, cx);
+ });
}
pub fn newline(&mut self, _: &Newline, cx: &mut ViewContext<Self>) {
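
The rewritten text-insertion path above first classifies the typed text against the current language's bracket pairs: it is a potential autoclose trigger if it is a suffix of some pair's opening text, and a potential skip-over if it equals a pair's closing text. A small standalone sketch of that classification, with a simplified `BracketPair` struct standing in for the language configuration type:

```rust
#[derive(Clone, Debug, PartialEq)]
struct BracketPair {
    start: String,
    end: String,
}

/// Returns the matching pair and whether the typed text matched its opening side.
fn classify_input<'a>(text: &str, pairs: &'a [BracketPair]) -> Option<(&'a BracketPair, bool)> {
    for pair in pairs {
        if pair.start.ends_with(text) {
            // Typing the last character(s) of an opening bracket, e.g. `{` or the `*` of `/*`.
            return Some((pair, true));
        } else if pair.end == text {
            // Typing a closing bracket that may already have been auto-inserted.
            return Some((pair, false));
        }
    }
    None
}

fn main() {
    let pairs = [
        BracketPair { start: "{".into(), end: "}".into() },
        BracketPair { start: "/*".into(), end: "*/".into() },
    ];
    assert_eq!(classify_input("{", &pairs), Some((&pairs[0], true)));
    assert_eq!(classify_input("*", &pairs), Some((&pairs[1], true)));
    assert_eq!(classify_input("}", &pairs), Some((&pairs[0], false)));
    assert_eq!(classify_input("x", &pairs), None);
}
```
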
@@ -1874,7 +2023,7 @@ impl Editor {
let end = selection.end;
let mut insert_extra_newline = false;
- if let Some(language) = buffer.language() {
+ if let Some(language) = buffer.language_at(start) {
let leading_whitespace_len = buffer
.reversed_chars_at(start)
.take_while(|c| c.is_whitespace() && *c != '\n')
@@ -2027,232 +2176,89 @@ impl Editor {
}
}
- fn surround_with_bracket_pair(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
- let snapshot = self.buffer.read(cx).snapshot(cx);
- if let Some(pair) = snapshot
- .language()
- .and_then(|language| language.brackets().iter().find(|b| b.start == text))
- .cloned()
- {
- if self
- .selections
- .all::<usize>(cx)
- .iter()
- .any(|selection| selection.is_empty())
- {
- return false;
- }
-
- let mut selections = self.selections.disjoint_anchors().to_vec();
- for selection in &mut selections {
- selection.end = selection.end.bias_left(&snapshot);
- }
- drop(snapshot);
-
- self.buffer.update(cx, |buffer, cx| {
- let pair_start: Arc<str> = pair.start.clone().into();
- let pair_end: Arc<str> = pair.end.clone().into();
- buffer.edit(
- selections.iter().flat_map(|s| {
- [
- (s.start.clone()..s.start.clone(), pair_start.clone()),
- (s.end.clone()..s.end.clone(), pair_end.clone()),
- ]
- }),
- None,
- cx,
- );
- });
-
- let snapshot = self.buffer.read(cx).read(cx);
- for selection in &mut selections {
- selection.end = selection.end.bias_right(&snapshot);
- }
- drop(snapshot);
-
- self.change_selections(None, cx, |s| s.select_anchors(selections));
- true
- } else {
- false
- }
- }
-
- fn autoclose_bracket_pairs(&mut self, cx: &mut ViewContext<Self>) {
+    /// If any empty selection is touching the start of its innermost containing autoclose
+ /// region, expand it to select the brackets.
+ fn select_autoclose_pair(&mut self, cx: &mut ViewContext<Self>) {
let selections = self.selections.all::<usize>(cx);
- let mut bracket_pair_state = None;
- let mut new_selections = None;
- self.buffer.update(cx, |buffer, cx| {
- let mut snapshot = buffer.snapshot(cx);
- let left_biased_selections = selections
- .iter()
- .map(|selection| selection.map(|p| snapshot.anchor_before(p)))
- .collect::<Vec<_>>();
-
- let autoclose_pair = snapshot.language().and_then(|language| {
- let first_selection_start = selections.first().unwrap().start;
- let pair = language.brackets().iter().find(|pair| {
- pair.close
- && snapshot.contains_str_at(
- first_selection_start.saturating_sub(pair.start.len()),
- &pair.start,
- )
- });
- pair.and_then(|pair| {
- let should_autoclose = selections.iter().all(|selection| {
- // Ensure all selections are parked at the end of a pair start.
- if snapshot.contains_str_at(
- selection.start.saturating_sub(pair.start.len()),
- &pair.start,
- ) {
- snapshot
- .chars_at(selection.start)
- .next()
- .map_or(true, |c| language.should_autoclose_before(c))
- } else {
- false
+ let buffer = self.buffer.read(cx).read(cx);
+ let mut new_selections = Vec::new();
+ for (mut selection, region) in self.selections_with_autoclose_regions(selections, &buffer) {
+ if let (Some(region), true) = (region, selection.is_empty()) {
+ let mut range = region.range.to_offset(&buffer);
+ if selection.start == range.start {
+ if range.start >= region.pair.start.len() {
+ range.start -= region.pair.start.len();
+                        if buffer.contains_str_at(range.start, &region.pair.start) {
+                            if buffer.contains_str_at(range.end, &region.pair.end) {
+ range.end += region.pair.end.len();
+ selection.start = range.start;
+ selection.end = range.end;
+ }
}
- });
-
- if should_autoclose {
- Some(pair.clone())
- } else {
- None
}
- })
- });
-
- if let Some(pair) = autoclose_pair {
- let selection_ranges = selections
- .iter()
- .map(|selection| {
- let start = selection.start.to_offset(&snapshot);
- start..start
- })
- .collect::<SmallVec<[_; 32]>>();
-
- let pair_end: Arc<str> = pair.end.clone().into();
- buffer.edit(
- selection_ranges
- .iter()
- .map(|range| (range.clone(), pair_end.clone())),
- None,
- cx,
- );
- snapshot = buffer.snapshot(cx);
-
- new_selections = Some(
- resolve_multiple::<usize, _>(left_biased_selections.iter(), &snapshot)
- .collect::<Vec<_>>(),
- );
-
- if pair.end.len() == 1 {
- let mut delta = 0;
- bracket_pair_state = Some(BracketPairState {
- ranges: selections
- .iter()
- .map(move |selection| {
- let offset = selection.start + delta;
- delta += 1;
- snapshot.anchor_before(offset)..snapshot.anchor_after(offset)
- })
- .collect(),
- pair,
- });
}
}
- });
-
- if let Some(new_selections) = new_selections {
- self.change_selections(None, cx, |s| {
- s.select(new_selections);
- });
- }
- if let Some(bracket_pair_state) = bracket_pair_state {
- self.autoclose_stack.push(bracket_pair_state);
- }
- }
-
- fn skip_autoclose_end(&mut self, text: &str, cx: &mut ViewContext<Self>) -> bool {
- let buffer = self.buffer.read(cx).snapshot(cx);
- let old_selections = self.selections.all::<usize>(cx);
- let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
- autoclose_pair
- } else {
- return false;
- };
- if text != autoclose_pair.pair.end {
- return false;
+ new_selections.push(selection);
}
- debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
-
- if old_selections
- .iter()
- .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer)))
- .all(|(selection, autoclose_range)| {
- let autoclose_range_end = autoclose_range.end.to_offset(&buffer);
- selection.is_empty() && selection.start == autoclose_range_end
- })
- {
- let new_selections = old_selections
- .into_iter()
- .map(|selection| {
- let cursor = selection.start + 1;
- Selection {
- id: selection.id,
- start: cursor,
- end: cursor,
- reversed: false,
- goal: SelectionGoal::None,
- }
- })
- .collect();
- self.autoclose_stack.pop();
- self.change_selections(Some(Autoscroll::Fit), cx, |s| {
- s.select(new_selections);
- });
- true
- } else {
- false
- }
+ drop(buffer);
+ self.change_selections(None, cx, |selections| selections.select(new_selections));
}
- fn select_autoclose_pair(&mut self, cx: &mut ViewContext<Self>) -> bool {
- let buffer = self.buffer.read(cx).snapshot(cx);
- let old_selections = self.selections.all::<usize>(cx);
- let autoclose_pair = if let Some(autoclose_pair) = self.autoclose_stack.last() {
- autoclose_pair
- } else {
- return false;
- };
+ /// Iterate the given selections, and for each one, find the smallest surrounding
+ /// autoclose region. This uses the ordering of the selections and the autoclose
+ /// regions to avoid repeated comparisons.
+ fn selections_with_autoclose_regions<'a, D: ToOffset + Clone>(
+ &'a self,
+ selections: impl IntoIterator<Item = Selection<D>>,
+ buffer: &'a MultiBufferSnapshot,
+ ) -> impl Iterator<Item = (Selection<D>, Option<&'a AutocloseRegion>)> {
+ let mut i = 0;
+ let mut regions = self.autoclose_regions.as_slice();
+ selections.into_iter().map(move |selection| {
+ let range = selection.start.to_offset(buffer)..selection.end.to_offset(buffer);
+
+ let mut enclosing = None;
+ while let Some(pair_state) = regions.get(i) {
+ if pair_state.range.end.to_offset(buffer) < range.start {
+                    regions = &regions[i + 1..];
+ i = 0;
+ } else if pair_state.range.start.to_offset(buffer) > range.end {
+ break;
+                } else {
+                    if pair_state.selection_id == selection.id {
+                        enclosing = Some(pair_state);
+                    }
+                    i += 1;
+                }
+ }
- debug_assert_eq!(old_selections.len(), autoclose_pair.ranges.len());
+ (selection.clone(), enclosing)
+ })
+ }
- let mut new_selections = Vec::new();
- for (selection, autoclose_range) in old_selections
- .iter()
- .zip(autoclose_pair.ranges.iter().map(|r| r.to_offset(&buffer)))
- {
- if selection.is_empty()
- && autoclose_range.is_empty()
- && selection.start == autoclose_range.start
- {
- new_selections.push(Selection {
- id: selection.id,
- start: selection.start - autoclose_pair.pair.start.len(),
- end: selection.end + autoclose_pair.pair.end.len(),
- reversed: true,
- goal: selection.goal,
- });
- } else {
- return false;
+ /// Remove any autoclose regions that no longer contain their selection.
+ fn invalidate_autoclose_regions(
+ &mut self,
+ mut selections: &[Selection<Anchor>],
+ buffer: &MultiBufferSnapshot,
+ ) {
+ self.autoclose_regions.retain(|state| {
+ let mut i = 0;
+ while let Some(selection) = selections.get(i) {
+ if selection.end.cmp(&state.range.start, buffer).is_lt() {
+ selections = &selections[1..];
+ continue;
+ }
+ if selection.start.cmp(&state.range.end, buffer).is_gt() {
+ break;
+ }
+ if selection.id == state.selection_id {
+ return true;
+ } else {
+ i += 1;
+ }
}
- }
-
- self.change_selections(Some(Autoscroll::Fit), cx, |selections| {
- selections.select(new_selections)
+ false
});
- true
}
fn completion_query(buffer: &MultiBufferSnapshot, position: impl ToOffset) -> Option<String> {
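
The `selections_with_autoclose_regions` helper above relies on both the selections and the autoclose regions being kept in document order, so each selection only has to scan forward from where the previous one left off. Stripped of anchors and snapshots, the pairing logic looks roughly like this sketch over plain `Range<usize>` values; the names are illustrative, not the editor's actual types:

```rust
use std::ops::Range;

struct Region {
    selection_id: usize,
    range: Range<usize>,
}

/// For each selection (sorted by position), find a region owned by the same
/// selection id that overlaps it. Regions are also sorted, so the scan never
/// revisits regions that end before the current selection starts.
fn pair_selections_with_regions<'a>(
    selections: &[(usize, Range<usize>)],
    mut regions: &'a [Region],
) -> Vec<Option<&'a Region>> {
    let mut result = Vec::with_capacity(selections.len());
    for (selection_id, selection_range) in selections {
        let mut i = 0;
        let mut enclosing = None;
        while let Some(region) = regions.get(i) {
            if region.range.end < selection_range.start {
                // This region is entirely behind us; drop it for later selections too.
                regions = &regions[i + 1..];
                i = 0;
            } else if region.range.start > selection_range.end {
                break;
            } else {
                if region.selection_id == *selection_id {
                    enclosing = Some(region);
                }
                i += 1;
            }
        }
        result.push(enclosing);
    }
    result
}

fn main() {
    let regions = [
        Region { selection_id: 0, range: 2..4 },
        Region { selection_id: 1, range: 9..11 },
    ];
    let selections = [(0, 3..3), (1, 10..10)];
    let paired = pair_selections_with_regions(&selections, &regions);
    assert!(paired[0].is_some() && paired[1].is_some());
}
```
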
@@ -2907,51 +2913,49 @@ impl Editor {
pub fn backspace(&mut self, _: &Backspace, cx: &mut ViewContext<Self>) {
self.transact(cx, |this, cx| {
- if !this.select_autoclose_pair(cx) {
- let mut selections = this.selections.all::<Point>(cx);
- if !this.selections.line_mode {
- let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx));
- for selection in &mut selections {
- if selection.is_empty() {
- let old_head = selection.head();
- let mut new_head = movement::left(
- &display_map,
- old_head.to_display_point(&display_map),
- )
- .to_point(&display_map);
- if let Some((buffer, line_buffer_range)) = display_map
- .buffer_snapshot
- .buffer_line_for_row(old_head.row)
- {
- let indent_size =
- buffer.indent_size_for_line(line_buffer_range.start.row);
- let language_name =
- buffer.language().map(|language| language.name());
- let indent_len = match indent_size.kind {
- IndentKind::Space => {
- cx.global::<Settings>().tab_size(language_name.as_deref())
- }
- IndentKind::Tab => NonZeroU32::new(1).unwrap(),
- };
- if old_head.column <= indent_size.len && old_head.column > 0 {
- let indent_len = indent_len.get();
- new_head = cmp::min(
- new_head,
- Point::new(
- old_head.row,
- ((old_head.column - 1) / indent_len) * indent_len,
- ),
- );
+ this.select_autoclose_pair(cx);
+ let mut selections = this.selections.all::<Point>(cx);
+ if !this.selections.line_mode {
+ let display_map = this.display_map.update(cx, |map, cx| map.snapshot(cx));
+ for selection in &mut selections {
+ if selection.is_empty() {
+ let old_head = selection.head();
+ let mut new_head =
+ movement::left(&display_map, old_head.to_display_point(&display_map))
+ .to_point(&display_map);
+ if let Some((buffer, line_buffer_range)) = display_map
+ .buffer_snapshot
+ .buffer_line_for_row(old_head.row)
+ {
+ let indent_size =
+ buffer.indent_size_for_line(line_buffer_range.start.row);
+ let language_name = buffer
+ .language_at(line_buffer_range.start)
+ .map(|language| language.name());
+ let indent_len = match indent_size.kind {
+ IndentKind::Space => {
+ cx.global::<Settings>().tab_size(language_name.as_deref())
}
+ IndentKind::Tab => NonZeroU32::new(1).unwrap(),
+ };
+ if old_head.column <= indent_size.len && old_head.column > 0 {
+ let indent_len = indent_len.get();
+ new_head = cmp::min(
+ new_head,
+ Point::new(
+ old_head.row,
+ ((old_head.column - 1) / indent_len) * indent_len,
+ ),
+ );
}
-
- selection.set_head(new_head, SelectionGoal::None);
}
+
+ selection.set_head(new_head, SelectionGoal::None);
}
}
-
- this.change_selections(Some(Autoscroll::Fit), cx, |s| s.select(selections));
}
+
+ this.change_selections(Some(Autoscroll::Fit), cx, |s| s.select(selections));
this.insert("", cx);
});
}
@@ -3955,17 +3959,16 @@ impl Editor {
cx: &mut ViewContext<Self>,
) {
self.transact(cx, |this, cx| {
- if !this.select_autoclose_pair(cx) {
- this.change_selections(Some(Autoscroll::Fit), cx, |s| {
- let line_mode = s.line_mode;
- s.move_with(|map, selection| {
- if selection.is_empty() && !line_mode {
- let cursor = movement::previous_word_start(map, selection.head());
- selection.set_head(cursor, SelectionGoal::None);
- }
- });
+ this.select_autoclose_pair(cx);
+ this.change_selections(Some(Autoscroll::Fit), cx, |s| {
+ let line_mode = s.line_mode;
+ s.move_with(|map, selection| {
+ if selection.is_empty() && !line_mode {
+ let cursor = movement::previous_word_start(map, selection.head());
+ selection.set_head(cursor, SelectionGoal::None);
+ }
});
- }
+ });
this.insert("", cx);
});
}
@@ -3976,17 +3979,16 @@ impl Editor {
cx: &mut ViewContext<Self>,
) {
self.transact(cx, |this, cx| {
- if !this.select_autoclose_pair(cx) {
- this.change_selections(Some(Autoscroll::Fit), cx, |s| {
- let line_mode = s.line_mode;
- s.move_with(|map, selection| {
- if selection.is_empty() && !line_mode {
- let cursor = movement::previous_subword_start(map, selection.head());
- selection.set_head(cursor, SelectionGoal::None);
- }
- });
+ this.select_autoclose_pair(cx);
+ this.change_selections(Some(Autoscroll::Fit), cx, |s| {
+ let line_mode = s.line_mode;
+ s.move_with(|map, selection| {
+ if selection.is_empty() && !line_mode {
+ let cursor = movement::previous_subword_start(map, selection.head());
+ selection.set_head(cursor, SelectionGoal::None);
+ }
});
- }
+ });
this.insert("", cx);
});
}
@@ -4489,108 +4491,218 @@ impl Editor {
pub fn toggle_comments(&mut self, _: &ToggleComments, cx: &mut ViewContext<Self>) {
self.transact(cx, |this, cx| {
let mut selections = this.selections.all::<Point>(cx);
- let mut all_selection_lines_are_comments = true;
- let mut edit_ranges = Vec::new();
+ let mut edits = Vec::new();
+ let mut selection_edit_ranges = Vec::new();
let mut last_toggled_row = None;
- this.buffer.update(cx, |buffer, cx| {
- // TODO: Handle selections that cross excerpts
- for selection in &mut selections {
- // Get the line comment prefix. Split its trailing whitespace into a separate string,
- // as that portion won't be used for detecting if a line is a comment.
- let full_comment_prefix: Arc<str> = if let Some(prefix) = buffer
- .language_at(selection.start, cx)
- .and_then(|l| l.line_comment_prefix())
- {
- prefix.into()
+ let snapshot = this.buffer.read(cx).read(cx);
+ let empty_str: Arc<str> = "".into();
+ let mut suffixes_inserted = Vec::new();
+
+ fn comment_prefix_range(
+ snapshot: &MultiBufferSnapshot,
+ row: u32,
+ comment_prefix: &str,
+ comment_prefix_whitespace: &str,
+ ) -> Range<Point> {
+ let start = Point::new(row, snapshot.indent_size_for_line(row).len);
+
+ let mut line_bytes = snapshot
+ .bytes_in_range(start..snapshot.max_point())
+ .flatten()
+ .copied();
+
+ // If this line currently begins with the line comment prefix, then record
+ // the range containing the prefix.
+ if line_bytes
+ .by_ref()
+ .take(comment_prefix.len())
+ .eq(comment_prefix.bytes())
+ {
+ // Include any whitespace that matches the comment prefix.
+ let matching_whitespace_len = line_bytes
+ .zip(comment_prefix_whitespace.bytes())
+ .take_while(|(a, b)| a == b)
+ .count() as u32;
+ let end = Point::new(
+ start.row,
+ start.column + comment_prefix.len() as u32 + matching_whitespace_len,
+ );
+ start..end
+ } else {
+ start..start
+ }
+ }
+
+ fn comment_suffix_range(
+ snapshot: &MultiBufferSnapshot,
+ row: u32,
+ comment_suffix: &str,
+ comment_suffix_has_leading_space: bool,
+ ) -> Range<Point> {
+ let end = Point::new(row, snapshot.line_len(row));
+ let suffix_start_column = end.column.saturating_sub(comment_suffix.len() as u32);
+
+ let mut line_end_bytes = snapshot
+ .bytes_in_range(Point::new(end.row, suffix_start_column.saturating_sub(1))..end)
+ .flatten()
+ .copied();
+
+ let leading_space_len = if suffix_start_column > 0
+ && line_end_bytes.next() == Some(b' ')
+ && comment_suffix_has_leading_space
+ {
+ 1
+ } else {
+ 0
+ };
+
+            // If this line currently ends with the comment suffix, then record
+            // the range containing the suffix.
+ if line_end_bytes.by_ref().eq(comment_suffix.bytes()) {
+ let start = Point::new(end.row, suffix_start_column - leading_space_len);
+ start..end
+ } else {
+ end..end
+ }
+ }
+
+ // TODO: Handle selections that cross excerpts
+ for selection in &mut selections {
+ let language = if let Some(language) = snapshot.language_at(selection.start) {
+ language
+ } else {
+ continue;
+ };
+
+ selection_edit_ranges.clear();
+
+ // If multiple selections contain a given row, avoid processing that
+ // row more than once.
+ let mut start_row = selection.start.row;
+ if last_toggled_row == Some(start_row) {
+ start_row += 1;
+ }
+ let end_row =
+ if selection.end.row > selection.start.row && selection.end.column == 0 {
+ selection.end.row - 1
} else {
- return;
+ selection.end.row
};
+ last_toggled_row = Some(end_row);
+
+ if start_row > end_row {
+ continue;
+ }
+
+ // If the language has line comments, toggle those.
+ if let Some(full_comment_prefix) = language.line_comment_prefix() {
+ // Split the comment prefix's trailing whitespace into a separate string,
+ // as that portion won't be used for detecting if a line is a comment.
let comment_prefix = full_comment_prefix.trim_end_matches(' ');
let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..];
- edit_ranges.clear();
- let snapshot = buffer.snapshot(cx);
-
- let end_row =
- if selection.end.row > selection.start.row && selection.end.column == 0 {
- selection.end.row
- } else {
- selection.end.row + 1
- };
+ let mut all_selection_lines_are_comments = true;
- for row in selection.start.row..end_row {
- // If multiple selections contain a given row, avoid processing that
- // row more than once.
- if last_toggled_row == Some(row) {
+ for row in start_row..=end_row {
+ if snapshot.is_line_blank(row) {
continue;
- } else {
- last_toggled_row = Some(row);
}
- if snapshot.is_line_blank(row) {
- continue;
+ let prefix_range = comment_prefix_range(
+ snapshot.deref(),
+ row,
+ comment_prefix,
+ comment_prefix_whitespace,
+ );
+ if prefix_range.is_empty() {
+ all_selection_lines_are_comments = false;
}
+ selection_edit_ranges.push(prefix_range);
+ }
- let start = Point::new(row, snapshot.indent_size_for_line(row).len);
- let mut line_bytes = snapshot
- .bytes_in_range(start..snapshot.max_point())
- .flatten()
- .copied();
-
- // If this line currently begins with the line comment prefix, then record
- // the range containing the prefix.
- if all_selection_lines_are_comments
- && line_bytes
- .by_ref()
- .take(comment_prefix.len())
- .eq(comment_prefix.bytes())
- {
- // Include any whitespace that matches the comment prefix.
- let matching_whitespace_len = line_bytes
- .zip(comment_prefix_whitespace.bytes())
- .take_while(|(a, b)| a == b)
- .count()
- as u32;
- let end = Point::new(
- row,
- start.column
- + comment_prefix.len() as u32
- + matching_whitespace_len,
- );
- edit_ranges.push(start..end);
- }
- // If this line does not begin with the line comment prefix, then record
- // the position where the prefix should be inserted.
- else {
- all_selection_lines_are_comments = false;
- edit_ranges.push(start..start);
- }
+ if all_selection_lines_are_comments {
+ edits.extend(
+ selection_edit_ranges
+ .iter()
+ .cloned()
+ .map(|range| (range, empty_str.clone())),
+ );
+ } else {
+ let min_column = selection_edit_ranges
+ .iter()
+ .map(|r| r.start.column)
+ .min()
+ .unwrap_or(0);
+ edits.extend(selection_edit_ranges.iter().map(|range| {
+ let position = Point::new(range.start.row, min_column);
+ (position..position, full_comment_prefix.clone())
+ }));
}
+ } else if let Some((full_comment_prefix, comment_suffix)) =
+ language.block_comment_delimiters()
+ {
+ let comment_prefix = full_comment_prefix.trim_end_matches(' ');
+ let comment_prefix_whitespace = &full_comment_prefix[comment_prefix.len()..];
+ let prefix_range = comment_prefix_range(
+ snapshot.deref(),
+ start_row,
+ comment_prefix,
+ comment_prefix_whitespace,
+ );
+ let suffix_range = comment_suffix_range(
+ snapshot.deref(),
+ end_row,
+ comment_suffix.trim_start_matches(' '),
+ comment_suffix.starts_with(' '),
+ );
- if !edit_ranges.is_empty() {
- if all_selection_lines_are_comments {
- let empty_str: Arc<str> = "".into();
- buffer.edit(
- edit_ranges
- .iter()
- .cloned()
- .map(|range| (range, empty_str.clone())),
- None,
- cx,
- );
- } else {
- let min_column =
- edit_ranges.iter().map(|r| r.start.column).min().unwrap();
- let edits = edit_ranges.iter().map(|range| {
- let position = Point::new(range.start.row, min_column);
- (position..position, full_comment_prefix.clone())
- });
- buffer.edit(edits, None, cx);
- }
+ if prefix_range.is_empty() || suffix_range.is_empty() {
+ edits.push((
+ prefix_range.start..prefix_range.start,
+ full_comment_prefix.clone(),
+ ));
+ edits.push((suffix_range.end..suffix_range.end, comment_suffix.clone()));
+ suffixes_inserted.push((end_row, comment_suffix.len()));
+ } else {
+ edits.push((prefix_range, empty_str.clone()));
+ edits.push((suffix_range, empty_str.clone()));
}
+ } else {
+ continue;
}
+ }
+
+ drop(snapshot);
+ this.buffer.update(cx, |buffer, cx| {
+ buffer.edit(edits, None, cx);
});
- let selections = this.selections.all::<usize>(cx);
+ // Adjust selections so that they end before any comment suffixes that
+ // were inserted.
+ let mut suffixes_inserted = suffixes_inserted.into_iter().peekable();
+ let mut selections = this.selections.all::<Point>(cx);
+ let snapshot = this.buffer.read(cx).read(cx);
+ for selection in &mut selections {
+ while let Some((row, suffix_len)) = suffixes_inserted.peek().copied() {
+ match row.cmp(&selection.end.row) {
+ Ordering::Less => {
+ suffixes_inserted.next();
+ continue;
+ }
+ Ordering::Greater => break,
+ Ordering::Equal => {
+ if selection.end.column == snapshot.line_len(row) {
+ if selection.is_empty() {
+ selection.start.column -= suffix_len as u32;
+ }
+ selection.end.column -= suffix_len as u32;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+ drop(snapshot);
this.change_selections(Some(Autoscroll::Fit), cx, |s| s.select(selections));
});
}
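
The `comment_prefix_range` helper above decides, per row, whether the line already carries the comment prefix (in which case toggling removes it) or not (in which case the prefix is inserted at the common indentation column). On a plain string the same classification can be sketched as follows; this is a standalone illustration over `&str`, not the editor's snapshot API:

```rust
/// Return the byte range of an existing line-comment prefix (plus any
/// whitespace that matches the prefix's trailing whitespace), or an empty
/// range at the indentation point if the line is not commented.
fn comment_prefix_range(line: &str, comment_prefix: &str, prefix_whitespace: &str) -> std::ops::Range<usize> {
    let indent_len = line.len() - line.trim_start_matches(|c: char| c == ' ' || c == '\t').len();
    let rest = &line[indent_len..];
    if rest.starts_with(comment_prefix) {
        let after_prefix = &rest[comment_prefix.len()..];
        let matching_ws = after_prefix
            .bytes()
            .zip(prefix_whitespace.bytes())
            .take_while(|(a, b)| a == b)
            .count();
        indent_len..indent_len + comment_prefix.len() + matching_ws
    } else {
        indent_len..indent_len
    }
}

fn main() {
    // "// " is the full prefix; "//" is the trimmed prefix and " " its trailing whitespace.
    assert_eq!(comment_prefix_range("    // let x = 1;", "//", " "), 4..7);
    assert_eq!(comment_prefix_range("    let x = 1;", "//", " "), 4..4);
}
```
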
@@ -0,0 +1,4936 @@
+use super::*;
+use crate::test::{
+ assert_text_with_selections, build_editor, select_ranges, EditorLspTestContext,
+ EditorTestContext,
+};
+use futures::StreamExt;
+use gpui::{
+ geometry::rect::RectF,
+ platform::{WindowBounds, WindowOptions},
+};
+use indoc::indoc;
+use language::{FakeLspAdapter, LanguageConfig, LanguageRegistry};
+use project::FakeFs;
+use settings::EditorSettings;
+use std::{cell::RefCell, rc::Rc, time::Instant};
+use text::Point;
+use unindent::Unindent;
+use util::{
+ assert_set_eq,
+ test::{marked_text_ranges, marked_text_ranges_by, sample_text, TextRangeMarker},
+};
+use workspace::{FollowableItem, ItemHandle, NavigationEntry, Pane};
+
+#[gpui::test]
+fn test_edit_events(cx: &mut MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx));
+
+ let events = Rc::new(RefCell::new(Vec::new()));
+ let (_, editor1) = cx.add_window(Default::default(), {
+ let events = events.clone();
+ |cx| {
+ cx.subscribe(&cx.handle(), move |_, _, event, _| {
+ if matches!(
+ event,
+ Event::Edited | Event::BufferEdited | Event::DirtyChanged
+ ) {
+ events.borrow_mut().push(("editor1", *event));
+ }
+ })
+ .detach();
+ Editor::for_buffer(buffer.clone(), None, cx)
+ }
+ });
+ let (_, editor2) = cx.add_window(Default::default(), {
+ let events = events.clone();
+ |cx| {
+ cx.subscribe(&cx.handle(), move |_, _, event, _| {
+ if matches!(
+ event,
+ Event::Edited | Event::BufferEdited | Event::DirtyChanged
+ ) {
+ events.borrow_mut().push(("editor2", *event));
+ }
+ })
+ .detach();
+ Editor::for_buffer(buffer.clone(), None, cx)
+ }
+ });
+ assert_eq!(mem::take(&mut *events.borrow_mut()), []);
+
+ // Mutating editor 1 will emit an `Edited` event only for that editor.
+ editor1.update(cx, |editor, cx| editor.insert("X", cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor1", Event::Edited),
+ ("editor1", Event::BufferEdited),
+ ("editor2", Event::BufferEdited),
+ ("editor1", Event::DirtyChanged),
+ ("editor2", Event::DirtyChanged)
+ ]
+ );
+
+ // Mutating editor 2 will emit an `Edited` event only for that editor.
+ editor2.update(cx, |editor, cx| editor.delete(&Delete, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor2", Event::Edited),
+ ("editor1", Event::BufferEdited),
+ ("editor2", Event::BufferEdited),
+ ]
+ );
+
+ // Undoing on editor 1 will emit an `Edited` event only for that editor.
+ editor1.update(cx, |editor, cx| editor.undo(&Undo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor1", Event::Edited),
+ ("editor1", Event::BufferEdited),
+ ("editor2", Event::BufferEdited),
+ ("editor1", Event::DirtyChanged),
+ ("editor2", Event::DirtyChanged),
+ ]
+ );
+
+ // Redoing on editor 1 will emit an `Edited` event only for that editor.
+ editor1.update(cx, |editor, cx| editor.redo(&Redo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor1", Event::Edited),
+ ("editor1", Event::BufferEdited),
+ ("editor2", Event::BufferEdited),
+ ("editor1", Event::DirtyChanged),
+ ("editor2", Event::DirtyChanged),
+ ]
+ );
+
+ // Undoing on editor 2 will emit an `Edited` event only for that editor.
+ editor2.update(cx, |editor, cx| editor.undo(&Undo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor2", Event::Edited),
+ ("editor1", Event::BufferEdited),
+ ("editor2", Event::BufferEdited),
+ ("editor1", Event::DirtyChanged),
+ ("editor2", Event::DirtyChanged),
+ ]
+ );
+
+ // Redoing on editor 2 will emit an `Edited` event only for that editor.
+ editor2.update(cx, |editor, cx| editor.redo(&Redo, cx));
+ assert_eq!(
+ mem::take(&mut *events.borrow_mut()),
+ [
+ ("editor2", Event::Edited),
+ ("editor1", Event::BufferEdited),
+ ("editor2", Event::BufferEdited),
+ ("editor1", Event::DirtyChanged),
+ ("editor2", Event::DirtyChanged),
+ ]
+ );
+
+ // No event is emitted when the mutation is a no-op.
+ editor2.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |s| s.select_ranges([0..0]));
+
+ editor.backspace(&Backspace, cx);
+ });
+ assert_eq!(mem::take(&mut *events.borrow_mut()), []);
+}
+
+#[gpui::test]
+fn test_undo_redo_with_selection_restoration(cx: &mut MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let mut now = Instant::now();
+ let buffer = cx.add_model(|cx| language::Buffer::new(0, "123456", cx));
+ let group_interval = buffer.read(cx).transaction_group_interval();
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ editor.update(cx, |editor, cx| {
+ editor.start_transaction_at(now, cx);
+ editor.change_selections(None, cx, |s| s.select_ranges([2..4]));
+
+ editor.insert("cd", cx);
+ editor.end_transaction_at(now, cx);
+ assert_eq!(editor.text(cx), "12cd56");
+ assert_eq!(editor.selections.ranges(cx), vec![4..4]);
+
+ editor.start_transaction_at(now, cx);
+ editor.change_selections(None, cx, |s| s.select_ranges([4..5]));
+ editor.insert("e", cx);
+ editor.end_transaction_at(now, cx);
+ assert_eq!(editor.text(cx), "12cde6");
+ assert_eq!(editor.selections.ranges(cx), vec![5..5]);
+
+ now += group_interval + Duration::from_millis(1);
+ editor.change_selections(None, cx, |s| s.select_ranges([2..2]));
+
+ // Simulate an edit in another editor
+ buffer.update(cx, |buffer, cx| {
+ buffer.start_transaction_at(now, cx);
+ buffer.edit([(0..1, "a")], None, cx);
+ buffer.edit([(1..1, "b")], None, cx);
+ buffer.end_transaction_at(now, cx);
+ });
+
+ assert_eq!(editor.text(cx), "ab2cde6");
+ assert_eq!(editor.selections.ranges(cx), vec![3..3]);
+
+ // Last transaction happened past the group interval in a different editor.
+ // Undo it individually and don't restore selections.
+ editor.undo(&Undo, cx);
+ assert_eq!(editor.text(cx), "12cde6");
+ assert_eq!(editor.selections.ranges(cx), vec![2..2]);
+
+ // First two transactions happened within the group interval in this editor.
+ // Undo them together and restore selections.
+ editor.undo(&Undo, cx);
+ editor.undo(&Undo, cx); // Undo stack is empty here, so this is a no-op.
+ assert_eq!(editor.text(cx), "123456");
+ assert_eq!(editor.selections.ranges(cx), vec![0..0]);
+
+ // Redo the first two transactions together.
+ editor.redo(&Redo, cx);
+ assert_eq!(editor.text(cx), "12cde6");
+ assert_eq!(editor.selections.ranges(cx), vec![5..5]);
+
+ // Redo the last transaction on its own.
+ editor.redo(&Redo, cx);
+ assert_eq!(editor.text(cx), "ab2cde6");
+ assert_eq!(editor.selections.ranges(cx), vec![6..6]);
+
+ // Test empty transactions.
+ editor.start_transaction_at(now, cx);
+ editor.end_transaction_at(now, cx);
+ editor.undo(&Undo, cx);
+ assert_eq!(editor.text(cx), "12cde6");
+ });
+}
+
+#[gpui::test]
+fn test_ime_composition(cx: &mut MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = cx.add_model(|cx| {
+ let mut buffer = language::Buffer::new(0, "abcde", cx);
+ // Ensure automatic grouping doesn't occur.
+ buffer.set_group_interval(Duration::ZERO);
+ buffer
+ });
+
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ cx.add_window(Default::default(), |cx| {
+ let mut editor = build_editor(buffer.clone(), cx);
+
+ // Start a new IME composition.
+ editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx);
+ editor.replace_and_mark_text_in_range(Some(0..1), "á", None, cx);
+ editor.replace_and_mark_text_in_range(Some(0..1), "ä", None, cx);
+ assert_eq!(editor.text(cx), "äbcde");
+ assert_eq!(
+ editor.marked_text_ranges(cx),
+ Some(vec![OffsetUtf16(0)..OffsetUtf16(1)])
+ );
+
+ // Finalize IME composition.
+ editor.replace_text_in_range(None, "ā", cx);
+ assert_eq!(editor.text(cx), "ābcde");
+ assert_eq!(editor.marked_text_ranges(cx), None);
+
+ // IME composition edits are grouped and are undone/redone at once.
+ editor.undo(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "abcde");
+ assert_eq!(editor.marked_text_ranges(cx), None);
+ editor.redo(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "ābcde");
+ assert_eq!(editor.marked_text_ranges(cx), None);
+
+ // Start a new IME composition.
+ editor.replace_and_mark_text_in_range(Some(0..1), "à", None, cx);
+ assert_eq!(
+ editor.marked_text_ranges(cx),
+ Some(vec![OffsetUtf16(0)..OffsetUtf16(1)])
+ );
+
+ // Undoing during an IME composition cancels it.
+ editor.undo(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "ābcde");
+ assert_eq!(editor.marked_text_ranges(cx), None);
+
+ // Start a new IME composition with an invalid marked range, ensuring it gets clipped.
+ editor.replace_and_mark_text_in_range(Some(4..999), "è", None, cx);
+ assert_eq!(editor.text(cx), "ābcdè");
+ assert_eq!(
+ editor.marked_text_ranges(cx),
+ Some(vec![OffsetUtf16(4)..OffsetUtf16(5)])
+ );
+
+ // Finalize IME composition with an invalid replacement range, ensuring it gets clipped.
+ editor.replace_text_in_range(Some(4..999), "ę", cx);
+ assert_eq!(editor.text(cx), "ābcdę");
+ assert_eq!(editor.marked_text_ranges(cx), None);
+
+ // Start a new IME composition with multiple cursors.
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([
+ OffsetUtf16(1)..OffsetUtf16(1),
+ OffsetUtf16(3)..OffsetUtf16(3),
+ OffsetUtf16(5)..OffsetUtf16(5),
+ ])
+ });
+ editor.replace_and_mark_text_in_range(Some(4..5), "XYZ", None, cx);
+ assert_eq!(editor.text(cx), "XYZbXYZdXYZ");
+ assert_eq!(
+ editor.marked_text_ranges(cx),
+ Some(vec![
+ OffsetUtf16(0)..OffsetUtf16(3),
+ OffsetUtf16(4)..OffsetUtf16(7),
+ OffsetUtf16(8)..OffsetUtf16(11)
+ ])
+ );
+
+ // Ensure the newly-marked range gets treated as relative to the previously-marked ranges.
+ editor.replace_and_mark_text_in_range(Some(1..2), "1", None, cx);
+ assert_eq!(editor.text(cx), "X1ZbX1ZdX1Z");
+ assert_eq!(
+ editor.marked_text_ranges(cx),
+ Some(vec![
+ OffsetUtf16(1)..OffsetUtf16(2),
+ OffsetUtf16(5)..OffsetUtf16(6),
+ OffsetUtf16(9)..OffsetUtf16(10)
+ ])
+ );
+
+ // Finalize IME composition with multiple cursors.
+ editor.replace_text_in_range(Some(9..10), "2", cx);
+ assert_eq!(editor.text(cx), "X2ZbX2ZdX2Z");
+ assert_eq!(editor.marked_text_ranges(cx), None);
+
+ editor
+ });
+}
+
+#[gpui::test]
+fn test_selection_with_mouse(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+
+ let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\nddddddd\n", cx);
+ let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ editor.update(cx, |view, cx| {
+ view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx);
+ });
+ assert_eq!(
+ editor.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)]
+ );
+
+ editor.update(cx, |view, cx| {
+ view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx);
+ });
+
+ assert_eq!(
+ editor.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)]
+ );
+
+ editor.update(cx, |view, cx| {
+ view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx);
+ });
+
+ assert_eq!(
+ editor.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)]
+ );
+
+ editor.update(cx, |view, cx| {
+ view.end_selection(cx);
+ view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx);
+ });
+
+ assert_eq!(
+ editor.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1)]
+ );
+
+ editor.update(cx, |view, cx| {
+ view.begin_selection(DisplayPoint::new(3, 3), true, 1, cx);
+ view.update_selection(DisplayPoint::new(0, 0), 0, Vector2F::zero(), cx);
+ });
+
+ assert_eq!(
+ editor.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ [
+ DisplayPoint::new(2, 2)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0)
+ ]
+ );
+
+ editor.update(cx, |view, cx| {
+ view.end_selection(cx);
+ });
+
+ assert_eq!(
+ editor.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ [DisplayPoint::new(3, 3)..DisplayPoint::new(0, 0)]
+ );
+}
+
+#[gpui::test]
+fn test_canceling_pending_selection(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+
+ view.update(cx, |view, cx| {
+ view.begin_selection(DisplayPoint::new(2, 2), false, 1, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(2, 2)]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.update_selection(DisplayPoint::new(3, 3), 0, Vector2F::zero(), cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.cancel(&Cancel, cx);
+ view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3)]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_clone(cx: &mut gpui::MutableAppContext) {
+ let (text, selection_ranges) = marked_text_ranges(
+ indoc! {"
+ one
+ two
+ threeˇ
+ four
+ fiveˇ
+ "},
+ true,
+ );
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(&text, cx);
+
+ let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+
+ editor.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |s| s.select_ranges(selection_ranges.clone()));
+ editor.fold_ranges(
+ [
+ Point::new(1, 0)..Point::new(2, 0),
+ Point::new(3, 0)..Point::new(4, 0),
+ ],
+ cx,
+ );
+ });
+
+ let (_, cloned_editor) = editor.update(cx, |editor, cx| {
+ cx.add_window(Default::default(), |cx| editor.clone(cx))
+ });
+
+ let snapshot = editor.update(cx, |e, cx| e.snapshot(cx));
+ let cloned_snapshot = cloned_editor.update(cx, |e, cx| e.snapshot(cx));
+
+ assert_eq!(
+ cloned_editor.update(cx, |e, cx| e.display_text(cx)),
+ editor.update(cx, |e, cx| e.display_text(cx))
+ );
+ assert_eq!(
+ cloned_snapshot
+ .folds_in_range(0..text.len())
+ .collect::<Vec<_>>(),
+ snapshot.folds_in_range(0..text.len()).collect::<Vec<_>>(),
+ );
+ assert_set_eq!(
+ cloned_editor.read(cx).selections.ranges::<Point>(cx),
+ editor.read(cx).selections.ranges(cx)
+ );
+ assert_set_eq!(
+ cloned_editor.update(cx, |e, cx| e.selections.display_ranges(cx)),
+ editor.update(cx, |e, cx| e.selections.display_ranges(cx))
+ );
+}
+
+#[gpui::test]
+fn test_navigation_history(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ use workspace::Item;
+ let (_, pane) = cx.add_window(Default::default(), |cx| Pane::new(None, cx));
+ let buffer = MultiBuffer::build_simple(&sample_text(300, 5, 'a'), cx);
+
+ cx.add_view(&pane, |cx| {
+ let mut editor = build_editor(buffer.clone(), cx);
+ let handle = cx.handle();
+ editor.set_nav_history(Some(pane.read(cx).nav_history_for_item(&handle)));
+
+ fn pop_history(editor: &mut Editor, cx: &mut MutableAppContext) -> Option<NavigationEntry> {
+ editor.nav_history.as_mut().unwrap().pop_backward(cx)
+ }
+
+ // Move the cursor a small distance.
+ // Nothing is added to the navigation history.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)])
+ });
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)])
+ });
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Move the cursor a large distance.
+ // The history can jump back to the previous position.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(13, 0)..DisplayPoint::new(13, 3)])
+ });
+ let nav_entry = pop_history(&mut editor, cx).unwrap();
+ editor.navigate(nav_entry.data.unwrap(), cx);
+ assert_eq!(nav_entry.item.id(), cx.view_id());
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0)]
+ );
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Move the cursor a small distance via the mouse.
+ // Nothing is added to the navigation history.
+ editor.begin_selection(DisplayPoint::new(5, 0), false, 1, cx);
+ editor.end_selection(cx);
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)]
+ );
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Move the cursor a large distance via the mouse.
+ // The history can jump back to the previous position.
+ editor.begin_selection(DisplayPoint::new(15, 0), false, 1, cx);
+ editor.end_selection(cx);
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(15, 0)..DisplayPoint::new(15, 0)]
+ );
+ let nav_entry = pop_history(&mut editor, cx).unwrap();
+ editor.navigate(nav_entry.data.unwrap(), cx);
+ assert_eq!(nav_entry.item.id(), cx.view_id());
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[DisplayPoint::new(5, 0)..DisplayPoint::new(5, 0)]
+ );
+ assert!(pop_history(&mut editor, cx).is_none());
+
+ // Set scroll position to check later
+ editor.set_scroll_position(Vector2F::new(5.5, 5.5), cx);
+ let original_scroll_position = editor.scroll_position;
+ let original_scroll_top_anchor = editor.scroll_top_anchor.clone();
+
+ // Jump to the end of the document and adjust scroll
+ editor.move_to_end(&MoveToEnd, cx);
+ editor.set_scroll_position(Vector2F::new(-2.5, -0.5), cx);
+ assert_ne!(editor.scroll_position, original_scroll_position);
+ assert_ne!(editor.scroll_top_anchor, original_scroll_top_anchor);
+
+ let nav_entry = pop_history(&mut editor, cx).unwrap();
+ editor.navigate(nav_entry.data.unwrap(), cx);
+ assert_eq!(editor.scroll_position, original_scroll_position);
+ assert_eq!(editor.scroll_top_anchor, original_scroll_top_anchor);
+
+ // Ensure we don't panic when navigation data contains invalid anchors *and* points.
+ let mut invalid_anchor = editor.scroll_top_anchor.clone();
+ invalid_anchor.text_anchor.buffer_id = Some(999);
+ let invalid_point = Point::new(9999, 0);
+ editor.navigate(
+ Box::new(NavigationData {
+ cursor_anchor: invalid_anchor.clone(),
+ cursor_position: invalid_point,
+ scroll_top_anchor: invalid_anchor,
+ scroll_top_row: invalid_point.row,
+ scroll_position: Default::default(),
+ }),
+ cx,
+ );
+ assert_eq!(
+ editor.selections.display_ranges(cx),
+ &[editor.max_point(cx)..editor.max_point(cx)]
+ );
+ assert_eq!(
+ editor.scroll_position(cx),
+ vec2f(0., editor.max_point(cx).row() as f32)
+ );
+
+ editor
+ });
+}
+
+#[gpui::test]
+fn test_cancel(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("aaaaaa\nbbbbbb\ncccccc\ndddddd\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+
+ view.update(cx, |view, cx| {
+ view.begin_selection(DisplayPoint::new(3, 4), false, 1, cx);
+ view.update_selection(DisplayPoint::new(1, 1), 0, Vector2F::zero(), cx);
+ view.end_selection(cx);
+
+ view.begin_selection(DisplayPoint::new(0, 1), true, 1, cx);
+ view.update_selection(DisplayPoint::new(0, 3), 0, Vector2F::zero(), cx);
+ view.end_selection(cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.cancel(&Cancel, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [DisplayPoint::new(3, 4)..DisplayPoint::new(1, 1)]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.cancel(&Cancel, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1)]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_fold(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(
+ &"
+ impl Foo {
+ // Hello!
+
+ fn a() {
+ 1
+ }
+
+ fn b() {
+ 2
+ }
+
+ fn c() {
+ 3
+ }
+ }
+ "
+ .unindent(),
+ cx,
+ );
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(8, 0)..DisplayPoint::new(12, 0)]);
+ });
+ view.fold(&Fold, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "
+ impl Foo {
+ // Hello!
+
+ fn a() {
+ 1
+ }
+
+ fn b() {…
+ }
+
+ fn c() {…
+ }
+ }
+ "
+ .unindent(),
+ );
+
+ view.fold(&Fold, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "
+ impl Foo {…
+ }
+ "
+ .unindent(),
+ );
+
+ view.unfold_lines(&UnfoldLines, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "
+ impl Foo {
+ // Hello!
+
+ fn a() {
+ 1
+ }
+
+ fn b() {…
+ }
+
+ fn c() {…
+ }
+ }
+ "
+ .unindent(),
+ );
+
+ view.unfold_lines(&UnfoldLines, cx);
+ assert_eq!(view.display_text(cx), buffer.read(cx).read(cx).text());
+ });
+}
+
+#[gpui::test]
+fn test_move_cursor(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(&sample_text(6, 6, 'a'), cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(
+ vec![
+ (Point::new(1, 0)..Point::new(1, 0), "\t"),
+ (Point::new(1, 1)..Point::new(1, 1), "\t"),
+ ],
+ None,
+ cx,
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)]
+ );
+
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4)]
+ );
+
+ view.move_left(&MoveLeft, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0)]
+ );
+
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)]
+ );
+
+ view.move_to_end(&MoveToEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(5, 6)..DisplayPoint::new(5, 6)]
+ );
+
+ view.move_to_beginning(&MoveToBeginning, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0)]
+ );
+
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(0, 2)]);
+ });
+ view.select_to_beginning(&SelectToBeginning, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(0, 1)..DisplayPoint::new(0, 0)]
+ );
+
+ view.select_to_end(&SelectToEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(0, 1)..DisplayPoint::new(5, 6)]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_move_cursor_multibyte(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcde\nαβγδε\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ assert_eq!('ⓐ'.len_utf8(), 3);
+ assert_eq!('α'.len_utf8(), 2);
+
+ view.update(cx, |view, cx| {
+ view.fold_ranges(
+ vec![
+ Point::new(0, 6)..Point::new(0, 12),
+ Point::new(1, 2)..Point::new(1, 4),
+ Point::new(2, 4)..Point::new(2, 8),
+ ],
+ cx,
+ );
+ assert_eq!(view.display_text(cx), "ⓐⓑ…ⓔ\nab…e\nαβ…ε\n");
+
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐ".len())]
+ );
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐⓑ".len())]
+ );
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐⓑ…".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(1, "ab…".len())]
+ );
+ view.move_left(&MoveLeft, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(1, "ab".len())]
+ );
+ view.move_left(&MoveLeft, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(1, "a".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "α".len())]
+ );
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβ".len())]
+ );
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβ…".len())]
+ );
+ view.move_right(&MoveRight, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβ…ε".len())]
+ );
+
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(1, "ab…e".len())]
+ );
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐⓑ…ⓔ".len())]
+ );
+ view.move_left(&MoveLeft, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐⓑ…".len())]
+ );
+ view.move_left(&MoveLeft, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐⓑ".len())]
+ );
+ view.move_left(&MoveLeft, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(0, "ⓐ".len())]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_move_cursor_different_line_lengths(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("ⓐⓑⓒⓓⓔ\nabcd\nαβγ\nabcd\nⓐⓑⓒⓓⓔ\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([empty_range(0, "ⓐⓑⓒⓓⓔ".len())]);
+ });
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(1, "abcd".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβγ".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(3, "abcd".len())]
+ );
+
+ view.move_down(&MoveDown, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(4, "ⓐⓑⓒⓓⓔ".len())]
+ );
+
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(3, "abcd".len())]
+ );
+
+ view.move_up(&MoveUp, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[empty_range(2, "αβγ".len())]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_beginning_end_of_line(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("abc\n def", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4),
+ ]);
+ });
+ });
+
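+ // The first press stops at the first non-whitespace character of the indented line; subsequent presses toggle between column 0 and that indented start.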
+ view.update(cx, |view, cx| {
+ view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_to_beginning_of_line(&MoveToBeginningOfLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_to_end_of_line(&MoveToEndOfLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5),
+ ]
+ );
+ });
+
+ // Moving to the end of line again is a no-op.
+ view.update(cx, |view, cx| {
+ view.move_to_end_of_line(&MoveToEndOfLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_left(&MoveLeft, cx);
+ view.select_to_beginning_of_line(
+ &SelectToBeginningOfLine {
+ stop_at_soft_wraps: true,
+ },
+ cx,
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.select_to_beginning_of_line(
+ &SelectToBeginningOfLine {
+ stop_at_soft_wraps: true,
+ },
+ cx,
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 0),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.select_to_beginning_of_line(
+ &SelectToBeginningOfLine {
+ stop_at_soft_wraps: true,
+ },
+ cx,
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 2),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.select_to_end_of_line(
+ &SelectToEndOfLine {
+ stop_at_soft_wraps: true,
+ },
+ cx,
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 5),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.delete_to_end_of_line(&DeleteToEndOfLine, cx);
+ assert_eq!(view.display_text(cx), "ab\n de");
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 4),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx);
+ assert_eq!(view.display_text(cx), "\n");
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0),
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_prev_next_word_boundary(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("use std::str::{foo, bar}\n\n {baz.qux()}", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 11)..DisplayPoint::new(0, 11),
+ DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4),
+ ])
+ });
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx);
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_selection_ranges("use stdˇ::str::{foo, bar}\n\n ˇ{baz.qux()}", view, cx);
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_selection_ranges("use ˇstd::str::{foo, bar}\n\nˇ {baz.qux()}", view, cx);
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_selection_ranges("ˇuse std::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx);
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_selection_ranges("ˇuse std::str::{foo, barˇ}\n\n {baz.qux()}", view, cx);
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_selection_ranges("useˇ std::str::{foo, bar}ˇ\n\n {baz.qux()}", view, cx);
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_selection_ranges("use stdˇ::str::{foo, bar}\nˇ\n {baz.qux()}", view, cx);
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_selection_ranges("use std::ˇstr::{foo, bar}\n\n {ˇbaz.qux()}", view, cx);
+
+ view.move_right(&MoveRight, cx);
+ view.select_to_previous_word_start(&SelectToPreviousWordStart, cx);
+ assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx);
+
+ view.select_to_previous_word_start(&SelectToPreviousWordStart, cx);
+ assert_selection_ranges("use std«ˇ::s»tr::{foo, bar}\n\n «ˇ{b»az.qux()}", view, cx);
+
+ view.select_to_next_word_end(&SelectToNextWordEnd, cx);
+ assert_selection_ranges("use std::«ˇs»tr::{foo, bar}\n\n {«ˇb»az.qux()}", view, cx);
+ });
+}
+
+#[gpui::test]
+fn test_prev_next_word_bounds_with_soft_wrap(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("use one::{\n two::three::four::five\n};", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+
+ view.update(cx, |view, cx| {
+ view.set_wrap_width(Some(140.), cx);
+ assert_eq!(
+ view.display_text(cx),
+ "use one::{\n two::three::\n four::five\n};"
+ );
+
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(1, 7)..DisplayPoint::new(1, 7)]);
+ });
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 9)..DisplayPoint::new(1, 9)]
+ );
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)]
+ );
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)]
+ );
+
+ view.move_to_next_word_end(&MoveToNextWordEnd, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(2, 8)..DisplayPoint::new(2, 8)]
+ );
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(2, 4)..DisplayPoint::new(2, 4)]
+ );
+
+ view.move_to_previous_word_start(&MoveToPreviousWordStart, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(1, 14)..DisplayPoint::new(1, 14)]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_delete_to_beginning_of_line(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ cx.set_state("one «two threeˇ» four");
+ cx.update_editor(|editor, cx| {
+ editor.delete_to_beginning_of_line(&DeleteToBeginningOfLine, cx);
+ assert_eq!(editor.text(cx), " four");
+ });
+}
+
+#[gpui::test]
+fn test_delete_to_word_boundary(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("one two three four", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ // An empty selection: the preceding word fragment is deleted
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ // Characters selected: they are deleted

+ DisplayPoint::new(0, 9)..DisplayPoint::new(0, 12),
+ ])
+ });
+ view.delete_to_previous_word_start(&DeleteToPreviousWordStart, cx);
+ });
+
+ assert_eq!(buffer.read(cx).read(cx).text(), "e two te four");
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ // An empty selection: the following word fragment is deleted
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ // Characters selected: they are deleted
+ DisplayPoint::new(0, 9)..DisplayPoint::new(0, 10),
+ ])
+ });
+ view.delete_to_next_word_end(&DeleteToNextWordEnd, cx);
+ });
+
+ assert_eq!(buffer.read(cx).read(cx).text(), "e t te our");
+}
+
+#[gpui::test]
+fn test_newline(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("aaaa\n bbbb\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
+ DisplayPoint::new(1, 6)..DisplayPoint::new(1, 6),
+ ])
+ });
+
+ view.newline(&Newline, cx);
+ assert_eq!(view.text(cx), "aa\naa\n \n bb\n bb\n");
+ });
+}
+
+#[gpui::test]
+fn test_newline_with_old_selections(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(
+ "
+ a
+ b(
+ X
+ )
+ c(
+ X
+ )
+ "
+ .unindent()
+ .as_str(),
+ cx,
+ );
+
+ let (_, editor) = cx.add_window(Default::default(), |cx| {
+ let mut editor = build_editor(buffer.clone(), cx);
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([
+ Point::new(2, 4)..Point::new(2, 5),
+ Point::new(5, 4)..Point::new(5, 5),
+ ])
+ });
+ editor
+ });
+
+ // Edit the buffer directly, deleting ranges surrounding the editor's selections
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit(
+ [
+ (Point::new(1, 2)..Point::new(3, 0), ""),
+ (Point::new(4, 2)..Point::new(6, 0), ""),
+ ],
+ None,
+ cx,
+ );
+ assert_eq!(
+ buffer.read(cx).text(),
+ "
+ a
+ b()
+ c()
+ "
+ .unindent()
+ );
+ });
+
+ editor.update(cx, |editor, cx| {
+ assert_eq!(
+ editor.selections.ranges(cx),
+ &[
+ Point::new(1, 2)..Point::new(1, 2),
+ Point::new(2, 2)..Point::new(2, 2),
+ ],
+ );
+
+ editor.newline(&Newline, cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ a
+ b(
+ )
+ c(
+ )
+ "
+ .unindent()
+ );
+
+ // The selections are moved after the inserted newlines
+ assert_eq!(
+ editor.selections.ranges(cx),
+ &[
+ Point::new(2, 0)..Point::new(2, 0),
+ Point::new(4, 0)..Point::new(4, 0),
+ ],
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_newline_below(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ cx.update(|cx| {
+ cx.update_global::<Settings, _, _>(|settings, _| {
+ settings.editor_overrides.tab_size = Some(NonZeroU32::new(4).unwrap());
+ });
+ });
+
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig::default(),
+ Some(tree_sitter_rust::language()),
+ )
+ .with_indents_query(r#"(_ "(" ")" @end) @indent"#)
+ .unwrap(),
+ );
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+
+ cx.set_state(indoc! {"
+ const a: ˇA = (
+ (ˇ
+ «const_functionˇ»(ˇ),
+ so«mˇ»et«hˇ»ing_ˇelse,ˇ
+ )ˇ
+ ˇ);ˇ
+ "});
+ cx.update_editor(|e, cx| e.newline_below(&NewlineBelow, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: A = (
+ ˇ
+ (
+ ˇ
+ const_function(),
+ ˇ
+ ˇ
+ something_else,
+ ˇ
+ ˇ
+ ˇ
+ ˇ
+ )
+ ˇ
+ );
+ ˇ
+ ˇ
+ "});
+}
+
+#[gpui::test]
+fn test_insert_with_old_selections(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("a( X ), b( Y ), c( Z )", cx);
+ let (_, editor) = cx.add_window(Default::default(), |cx| {
+ let mut editor = build_editor(buffer.clone(), cx);
+ editor.change_selections(None, cx, |s| s.select_ranges([3..4, 11..12, 19..20]));
+ editor
+ });
+
+ // Edit the buffer directly, deleting ranges surrounding the editor's selections
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(2..5, ""), (10..13, ""), (18..21, "")], None, cx);
+ assert_eq!(buffer.read(cx).text(), "a(), b(), c()".unindent());
+ });
+
+ editor.update(cx, |editor, cx| {
+ assert_eq!(editor.selections.ranges(cx), &[2..2, 7..7, 12..12],);
+
+ editor.insert("Z", cx);
+ assert_eq!(editor.text(cx), "a(Z), b(Z), c(Z)");
+
+ // The selections are moved after the inserted characters
+ assert_eq!(editor.selections.ranges(cx), &[3..3, 9..9, 15..15],);
+ });
+}
+
+#[gpui::test]
+async fn test_tab(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ cx.update(|cx| {
+ cx.update_global::<Settings, _, _>(|settings, _| {
+ settings.editor_overrides.tab_size = Some(NonZeroU32::new(3).unwrap());
+ });
+ });
+ cx.set_state(indoc! {"
+ ˇabˇc
+ ˇ🏀ˇ🏀ˇefg
+ dˇ
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ ˇab ˇc
+ ˇ🏀 ˇ🏀 ˇefg
+ d ˇ
+ "});
+
+ cx.set_state(indoc! {"
+ a
+ «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ»
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ a
+ «🏀ˇ»🏀«🏀ˇ»🏀«🏀ˇ»
+ "});
+}
+
+#[gpui::test]
+async fn test_tab_on_blank_line_auto_indents(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig::default(),
+ Some(tree_sitter_rust::language()),
+ )
+ .with_indents_query(r#"(_ "(" ")" @end) @indent"#)
+ .unwrap(),
+ );
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+
+ // Cursors that are already at the suggested indent level insert
+ // a soft tab. Cursors that are to the left of the suggested indent
+ // auto-indent their line.
+ cx.set_state(indoc! {"
+ ˇ
+ const a: B = (
+ c(
+ d(
+ ˇ
+ )
+ ˇ
+ ˇ )
+ );
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ ˇ
+ const a: B = (
+ c(
+ d(
+ ˇ
+ )
+ ˇ
+ ˇ)
+ );
+ "});
+
+ // handle auto-indent when there are multiple cursors on the same line
+ cx.set_state(indoc! {"
+ const a: B = (
+ c(
+ ˇ ˇ
+ ˇ )
+ );
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: B = (
+ c(
+ ˇ
+ ˇ)
+ );
+ "});
+}
+
+#[gpui::test]
+async fn test_indent_outdent(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
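+ // Select two ranges on one line; Tab indents the shared line once and keeps both selections in place.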
+ cx.set_state(indoc! {"
+ «oneˇ» «twoˇ»
+ three
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ «oneˇ» «twoˇ»
+ three
+ four
+ "});
+
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ «oneˇ» «twoˇ»
+ three
+ four
+ "});
+
+ // select across line ending
+ cx.set_state(indoc! {"
+ one two
+ t«hree
+ ˇ» four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ t«hree
+ ˇ» four
+ "});
+
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ t«hree
+ ˇ» four
+ "});
+
+ // Ensure that indenting/outdenting works when the cursor is at column 0.
+ cx.set_state(indoc! {"
+ one two
+ ˇthree
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ ˇthree
+ four
+ "});
+
+ cx.set_state(indoc! {"
+ one two
+ ˇ three
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ ˇthree
+ four
+ "});
+}
+
+#[gpui::test]
+async fn test_indent_outdent_with_hard_tabs(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ cx.update(|cx| {
+ cx.update_global::<Settings, _, _>(|settings, _| {
+ settings.editor_overrides.hard_tabs = Some(true);
+ });
+ });
+
+ // select two ranges on one line
+ cx.set_state(indoc! {"
+ «oneˇ» «twoˇ»
+ three
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ \t«oneˇ» «twoˇ»
+ three
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ \t\t«oneˇ» «twoˇ»
+ three
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ \t«oneˇ» «twoˇ»
+ three
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ «oneˇ» «twoˇ»
+ three
+ four
+ "});
+
+ // select across a line ending
+ cx.set_state(indoc! {"
+ one two
+ t«hree
+ ˇ»four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ \tt«hree
+ ˇ»four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ \t\tt«hree
+ ˇ»four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ \tt«hree
+ ˇ»four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ t«hree
+ ˇ»four
+ "});
+
+ // Ensure that indenting/outdenting works when the cursor is at column 0.
+ cx.set_state(indoc! {"
+ one two
+ ˇthree
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ ˇthree
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab(&Tab, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ \tˇthree
+ four
+ "});
+ cx.update_editor(|e, cx| e.tab_prev(&TabPrev, cx));
+ cx.assert_editor_state(indoc! {"
+ one two
+ ˇthree
+ four
+ "});
+}
+
+#[gpui::test]
+fn test_indent_outdent_with_excerpts(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(
+ Settings::test(cx)
+ .with_language_defaults(
+ "TOML",
+ EditorSettings {
+ tab_size: Some(2.try_into().unwrap()),
+ ..Default::default()
+ },
+ )
+ .with_language_defaults(
+ "Rust",
+ EditorSettings {
+ tab_size: Some(4.try_into().unwrap()),
+ ..Default::default()
+ },
+ ),
+ );
+ let toml_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "TOML".into(),
+ ..Default::default()
+ },
+ None,
+ ));
+ let rust_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ ..Default::default()
+ },
+ None,
+ ));
+
+ let toml_buffer =
+ cx.add_model(|cx| Buffer::new(0, "a = 1\nb = 2\n", cx).with_language(toml_language, cx));
+ let rust_buffer = cx.add_model(|cx| {
+ Buffer::new(0, "const c: usize = 3;\n", cx).with_language(rust_language, cx)
+ });
+ let multibuffer = cx.add_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ multibuffer.push_excerpts(
+ toml_buffer.clone(),
+ [ExcerptRange {
+ context: Point::new(0, 0)..Point::new(2, 0),
+ primary: None,
+ }],
+ cx,
+ );
+ multibuffer.push_excerpts(
+ rust_buffer.clone(),
+ [ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 0),
+ primary: None,
+ }],
+ cx,
+ );
+ multibuffer
+ });
+
+ cx.add_window(Default::default(), |cx| {
+ let mut editor = build_editor(multibuffer, cx);
+
+ assert_eq!(
+ editor.text(cx),
+ indoc! {"
+ a = 1
+ b = 2
+
+ const c: usize = 3;
+ "}
+ );
+
+ select_ranges(
+ &mut editor,
+ indoc! {"
+ «aˇ» = 1
+ b = 2
+
+ «const c:ˇ» usize = 3;
+ "},
+ cx,
+ );
+
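+ // Tab should indent each excerpt using its own language's settings: a 2-space tab stop in the TOML excerpt and a 4-space tab stop in the Rust excerpt.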
+ editor.tab(&Tab, cx);
+ assert_text_with_selections(
+ &mut editor,
+ indoc! {"
+ «aˇ» = 1
+ b = 2
+
+ «const c:ˇ» usize = 3;
+ "},
+ cx,
+ );
+ editor.tab_prev(&TabPrev, cx);
+ assert_text_with_selections(
+ &mut editor,
+ indoc! {"
+ «aˇ» = 1
+ b = 2
+
+ «const c:ˇ» usize = 3;
+ "},
+ cx,
+ );
+
+ editor
+ });
+}
+
+#[gpui::test]
+async fn test_backspace(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
+ // Basic backspace
+ cx.set_state(indoc! {"
+ onˇe two three
+ fou«rˇ» five six
+ seven «ˇeight nine
+ »ten
+ "});
+ cx.update_editor(|e, cx| e.backspace(&Backspace, cx));
+ cx.assert_editor_state(indoc! {"
+ oˇe two three
+ fouˇ five six
+ seven ˇten
+ "});
+
+ // Test backspace inside and around indents
+ cx.set_state(indoc! {"
+ zero
+ ˇone
+ ˇtwo
+ ˇ ˇ ˇ three
+ ˇ ˇ four
+ "});
+ cx.update_editor(|e, cx| e.backspace(&Backspace, cx));
+ cx.assert_editor_state(indoc! {"
+ zero
+ ˇone
+ ˇtwo
+ ˇ threeˇ four
+ "});
+
+ // Test backspace with line_mode set to true
+ cx.update_editor(|e, _| e.selections.line_mode = true);
+ cx.set_state(indoc! {"
+ The ˇquick ˇbrown
+ fox jumps over
+ the lazy dog
+ ˇThe qu«ick bˇ»rown"});
+ cx.update_editor(|e, cx| e.backspace(&Backspace, cx));
+ cx.assert_editor_state(indoc! {"
+ ˇfox jumps over
+ the lazy dogˇ"});
+}
+
+#[gpui::test]
+async fn test_delete(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
+ cx.set_state(indoc! {"
+ onˇe two three
+ fou«rˇ» five six
+ seven «ˇeight nine
+ »ten
+ "});
+ cx.update_editor(|e, cx| e.delete(&Delete, cx));
+ cx.assert_editor_state(indoc! {"
+ onˇ two three
+ fouˇ five six
+ seven ˇten
+ "});
+
+ // Test backspace with line_mode set to true
+ cx.update_editor(|e, _| e.selections.line_mode = true);
+ cx.set_state(indoc! {"
+ The ˇquick ˇbrown
+ fox «ˇjum»ps over
+ the lazy dog
+ ˇThe qu«ick bˇ»rown"});
+ cx.update_editor(|e, cx| e.backspace(&Backspace, cx));
+ cx.assert_editor_state("ˇthe lazy dogˇ");
+}
+
+#[gpui::test]
+fn test_delete_line(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
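+ // delete_line removes every line touched by a selection, including lines that are only partially selected.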
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
+ ])
+ });
+ view.delete_line(&DeleteLine, cx);
+ assert_eq!(view.display_text(cx), "ghi");
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 0),
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1)
+ ]
+ );
+ });
+
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(0, 1)])
+ });
+ view.delete_line(&DeleteLine, cx);
+ assert_eq!(view.display_text(cx), "ghi\n");
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1)]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_duplicate_line(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0),
+ DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
+ ])
+ });
+ view.duplicate_line(&DuplicateLine, cx);
+ assert_eq!(view.display_text(cx), "abc\nabc\ndef\ndef\nghi\n\n");
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(1, 2)..DisplayPoint::new(1, 2),
+ DisplayPoint::new(3, 0)..DisplayPoint::new(3, 0),
+ DisplayPoint::new(6, 0)..DisplayPoint::new(6, 0),
+ ]
+ );
+ });
+
+ let buffer = MultiBuffer::build_simple("abc\ndef\nghi\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 1)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(1, 2)..DisplayPoint::new(2, 1),
+ ])
+ });
+ view.duplicate_line(&DuplicateLine, cx);
+ assert_eq!(view.display_text(cx), "abc\ndef\nghi\nabc\ndef\nghi\n");
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(3, 1)..DisplayPoint::new(4, 1),
+ DisplayPoint::new(4, 2)..DisplayPoint::new(5, 1),
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_move_line_up_down(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.fold_ranges(
+ vec![
+ Point::new(0, 2)..Point::new(1, 2),
+ Point::new(2, 3)..Point::new(4, 1),
+ Point::new(7, 0)..Point::new(8, 4),
+ ],
+ cx,
+ );
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1),
+ DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3),
+ DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2),
+ ])
+ });
+ assert_eq!(
+ view.display_text(cx),
+ "aa…bbb\nccc…eeee\nfffff\nggggg\n…i\njjjjj"
+ );
+
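+ // Folded regions are expected to move together with the lines they cover.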
+ view.move_line_up(&MoveLineUp, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "aa…bbb\nccc…eeee\nggggg\n…i\njjjjj\nfffff"
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1),
+ DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3),
+ DisplayPoint::new(4, 0)..DisplayPoint::new(4, 2)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_line_down(&MoveLineDown, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "ccc…eeee\naa…bbb\nfffff\nggggg\n…i\njjjjj"
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1),
+ DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3),
+ DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_line_down(&MoveLineDown, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "ccc…eeee\nfffff\naa…bbb\nggggg\n…i\njjjjj"
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1),
+ DisplayPoint::new(3, 1)..DisplayPoint::new(3, 1),
+ DisplayPoint::new(3, 2)..DisplayPoint::new(4, 3),
+ DisplayPoint::new(5, 0)..DisplayPoint::new(5, 2)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.move_line_up(&MoveLineUp, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "ccc…eeee\naa…bbb\nggggg\n…i\njjjjj\nfffff"
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(2, 1)..DisplayPoint::new(2, 1),
+ DisplayPoint::new(2, 2)..DisplayPoint::new(3, 3),
+ DisplayPoint::new(4, 0)..DisplayPoint::new(4, 2)
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_move_line_up_down_with_blocks(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(&sample_text(10, 5, 'a'), cx);
+ let snapshot = buffer.read(cx).snapshot(cx);
+ let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ editor.update(cx, |editor, cx| {
+ editor.insert_blocks(
+ [BlockProperties {
+ style: BlockStyle::Fixed,
+ position: snapshot.anchor_after(Point::new(2, 0)),
+ disposition: BlockDisposition::Below,
+ height: 1,
+ render: Arc::new(|_| Empty::new().boxed()),
+ }],
+ cx,
+ );
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([Point::new(2, 0)..Point::new(2, 0)])
+ });
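+ // Moving the line should succeed even though a block decoration is anchored directly below it; this test only verifies that the operation does not panic.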
+ editor.move_line_down(&MoveLineDown, cx);
+ });
+}
+
+#[gpui::test]
+fn test_transpose(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+
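+ // With a single cursor, transpose swaps the characters on either side of the cursor; at the end of a line it swaps the last two characters instead.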
+ _ = cx
+ .add_window(Default::default(), |cx| {
+ let mut editor = build_editor(MultiBuffer::build_simple("abc", cx), cx);
+
+ editor.change_selections(None, cx, |s| s.select_ranges([1..1]));
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bac");
+ assert_eq!(editor.selections.ranges(cx), [2..2]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bca");
+ assert_eq!(editor.selections.ranges(cx), [3..3]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bac");
+ assert_eq!(editor.selections.ranges(cx), [3..3]);
+
+ editor
+ })
+ .1;
+
+ _ = cx
+ .add_window(Default::default(), |cx| {
+ let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx);
+
+ editor.change_selections(None, cx, |s| s.select_ranges([3..3]));
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "acb\nde");
+ assert_eq!(editor.selections.ranges(cx), [3..3]);
+
+ editor.change_selections(None, cx, |s| s.select_ranges([4..4]));
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "acbd\ne");
+ assert_eq!(editor.selections.ranges(cx), [5..5]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "acbde\n");
+ assert_eq!(editor.selections.ranges(cx), [6..6]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "acbd\ne");
+ assert_eq!(editor.selections.ranges(cx), [6..6]);
+
+ editor
+ })
+ .1;
+
+ _ = cx
+ .add_window(Default::default(), |cx| {
+ let mut editor = build_editor(MultiBuffer::build_simple("abc\nde", cx), cx);
+
+ editor.change_selections(None, cx, |s| s.select_ranges([1..1, 2..2, 4..4]));
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bacd\ne");
+ assert_eq!(editor.selections.ranges(cx), [2..2, 3..3, 5..5]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bcade\n");
+ assert_eq!(editor.selections.ranges(cx), [3..3, 4..4, 6..6]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bcda\ne");
+ assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bcade\n");
+ assert_eq!(editor.selections.ranges(cx), [4..4, 6..6]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "bcaed\n");
+ assert_eq!(editor.selections.ranges(cx), [5..5, 6..6]);
+
+ editor
+ })
+ .1;
+
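+ // Transpose works on whole characters, so multi-byte emoji are swapped without being split.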
+ _ = cx
+ .add_window(Default::default(), |cx| {
+ let mut editor = build_editor(MultiBuffer::build_simple("🍐🏀✋", cx), cx);
+
+ editor.change_selections(None, cx, |s| s.select_ranges([4..4]));
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "🏀🍐✋");
+ assert_eq!(editor.selections.ranges(cx), [8..8]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "🏀✋🍐");
+ assert_eq!(editor.selections.ranges(cx), [11..11]);
+
+ editor.transpose(&Default::default(), cx);
+ assert_eq!(editor.text(cx), "🏀🍐✋");
+ assert_eq!(editor.selections.ranges(cx), [11..11]);
+
+ editor
+ })
+ .1;
+}
+
+#[gpui::test]
+async fn test_clipboard(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
+ cx.set_state("«one✅ ˇ»two «three ˇ»four «five ˇ»six ");
+ cx.update_editor(|e, cx| e.cut(&Cut, cx));
+ cx.assert_editor_state("ˇtwo ˇfour ˇsix ");
+
+ // Paste with three cursors. Each cursor pastes one slice of the clipboard text.
+ cx.set_state("two ˇfour ˇsix ˇ");
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state("two one✅ ˇfour three ˇsix five ˇ");
+
+ // Paste again but with only two cursors. Since the number of cursors doesn't
+ // match the number of slices in the clipboard, the entire clipboard text
+ // is pasted at each cursor.
+ cx.set_state("ˇtwo one✅ four three six five ˇ");
+ cx.update_editor(|e, cx| {
+ e.handle_input("( ", cx);
+ e.paste(&Paste, cx);
+ e.handle_input(") ", cx);
+ });
+ cx.assert_editor_state(indoc! {"
+ ( one✅
+ three
+ five ) ˇtwo one✅ four three six five ( one✅
+ three
+ five ) ˇ"});
+
+ // Cut with three selections, one of which is full-line.
+ cx.set_state(indoc! {"
+ 1«2ˇ»3
+ 4ˇ567
+ «8ˇ»9"});
+ cx.update_editor(|e, cx| e.cut(&Cut, cx));
+ cx.assert_editor_state(indoc! {"
+ 1ˇ3
+ ˇ9"});
+
+ // Paste with three selections; note how the copied selection that was full-line
+ // is inserted before the second cursor.
+ cx.set_state(indoc! {"
+ 1ˇ3
+ 9ˇ
+ «oˇ»ne"});
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state(indoc! {"
+ 12ˇ3
+ 4567
+ 9ˇ
+ 8ˇne"});
+
+ // Copy with a single cursor only, which writes the whole line into the clipboard.
+ cx.set_state(indoc! {"
+ The quick brown
+ fox juˇmps over
+ the lazy dog"});
+ cx.update_editor(|e, cx| e.copy(&Copy, cx));
+ cx.cx.assert_clipboard_content(Some("fox jumps over\n"));
+
+ // Paste with three selections; note how the copied full-line selection is inserted
+ // before the empty selections but replaces the non-empty selection.
+ cx.set_state(indoc! {"
+ Tˇhe quick brown
+ «foˇ»x jumps over
+ tˇhe lazy dog"});
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state(indoc! {"
+ fox jumps over
+ Tˇhe quick brown
+ fox jumps over
+ ˇx jumps over
+ fox jumps over
+ tˇhe lazy dog"});
+}
+
+#[gpui::test]
+async fn test_paste_multiline(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ let language = Arc::new(Language::new(
+ LanguageConfig::default(),
+ Some(tree_sitter_rust::language()),
+ ));
+ cx.update_buffer(|buffer, cx| buffer.set_language(Some(language), cx));
+
+ // Cut an indented block, without the leading whitespace.
+ cx.set_state(indoc! {"
+ const a: B = (
+ c(),
+ «d(
+ e,
+ f
+ )ˇ»
+ );
+ "});
+ cx.update_editor(|e, cx| e.cut(&Cut, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: B = (
+ c(),
+ ˇ
+ );
+ "});
+
+ // Paste it at the same position.
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: B = (
+ c(),
+ d(
+ e,
+ f
+ )ˇ
+ );
+ "});
+
+ // Paste it at a line with a lower indent level.
+ cx.set_state(indoc! {"
+ ˇ
+ const a: B = (
+ c(),
+ );
+ "});
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state(indoc! {"
+ d(
+ e,
+ f
+ )ˇ
+ const a: B = (
+ c(),
+ );
+ "});
+
+ // Cut an indented block, with the leading whitespace.
+ cx.set_state(indoc! {"
+ const a: B = (
+ c(),
+ « d(
+ e,
+ f
+ )
+ ˇ»);
+ "});
+ cx.update_editor(|e, cx| e.cut(&Cut, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: B = (
+ c(),
+ ˇ);
+ "});
+
+ // Paste it at the same position.
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: B = (
+ c(),
+ d(
+ e,
+ f
+ )
+ ˇ);
+ "});
+
+ // Paste it at a line with a higher indent level.
+ cx.set_state(indoc! {"
+ const a: B = (
+ c(),
+ d(
+ e,
+ fˇ
+ )
+ );
+ "});
+ cx.update_editor(|e, cx| e.paste(&Paste, cx));
+ cx.assert_editor_state(indoc! {"
+ const a: B = (
+ c(),
+ d(
+ e,
+ f d(
+ e,
+ f
+ )
+ ˇ
+ )
+ );
+ "});
+}
+
+#[gpui::test]
+fn test_select_all(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("abc\nde\nfgh", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.select_all(&SelectAll, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ &[DisplayPoint::new(0, 0)..DisplayPoint::new(2, 3)]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_select_line(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(&sample_text(6, 5, 'a'), cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0),
+ DisplayPoint::new(4, 2)..DisplayPoint::new(4, 2),
+ ])
+ });
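+ // select_line expands each selection to cover whole lines and merges selections that end up overlapping.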
+ view.select_line(&SelectLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 0)..DisplayPoint::new(2, 0),
+ DisplayPoint::new(4, 0)..DisplayPoint::new(5, 0),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.select_line(&SelectLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 0)..DisplayPoint::new(3, 0),
+ DisplayPoint::new(4, 0)..DisplayPoint::new(5, 5),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.select_line(&SelectLine, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![DisplayPoint::new(0, 0)..DisplayPoint::new(5, 5)]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_split_selection_into_lines(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple(&sample_text(9, 5, 'a'), cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+ view.update(cx, |view, cx| {
+ view.fold_ranges(
+ vec![
+ Point::new(0, 2)..Point::new(1, 2),
+ Point::new(2, 3)..Point::new(4, 1),
+ Point::new(7, 0)..Point::new(8, 4),
+ ],
+ cx,
+ );
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 0),
+ DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4),
+ ])
+ });
+ assert_eq!(view.display_text(cx), "aa…bbb\nccc…eeee\nfffff\nggggg\n…i");
+ });
+
+ view.update(cx, |view, cx| {
+ view.split_selection_into_lines(&SplitSelectionIntoLines, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "aaaaa\nbbbbb\nccc…eeee\nfffff\nggggg\n…i"
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 2),
+ DisplayPoint::new(2, 0)..DisplayPoint::new(2, 0),
+ DisplayPoint::new(5, 4)..DisplayPoint::new(5, 4)
+ ]
+ );
+ });
+
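+ // Splitting a multi-line selection leaves a cursor on every line it touches and expands any folds the selection crosses.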
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(5, 0)..DisplayPoint::new(0, 1)])
+ });
+ view.split_selection_into_lines(&SplitSelectionIntoLines, cx);
+ assert_eq!(
+ view.display_text(cx),
+ "aaaaa\nbbbbb\nccccc\nddddd\neeeee\nfffff\nggggg\nhhhhh\niiiii"
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [
+ DisplayPoint::new(0, 5)..DisplayPoint::new(0, 5),
+ DisplayPoint::new(1, 5)..DisplayPoint::new(1, 5),
+ DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5),
+ DisplayPoint::new(3, 5)..DisplayPoint::new(3, 5),
+ DisplayPoint::new(4, 5)..DisplayPoint::new(4, 5),
+ DisplayPoint::new(5, 5)..DisplayPoint::new(5, 5),
+ DisplayPoint::new(6, 5)..DisplayPoint::new(6, 5),
+ DisplayPoint::new(7, 0)..DisplayPoint::new(7, 0)
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_add_selection_above_below(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = MultiBuffer::build_simple("abc\ndefghi\n\njk\nlmno\n", cx);
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(buffer, cx));
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)])
+ });
+ });
+ view.update(cx, |view, cx| {
+ view.add_selection_above(&AddSelectionAbove, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_above(&AddSelectionAbove, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)]
+ );
+
+ view.undo_selection(&UndoSelection, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)
+ ]
+ );
+
+ view.redo_selection(&RedoSelection, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3)]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3),
+ DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 3),
+ DisplayPoint::new(4, 3)..DisplayPoint::new(4, 3)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)])
+ });
+ });
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3),
+ DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3),
+ DisplayPoint::new(4, 4)..DisplayPoint::new(4, 3)
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_above(&AddSelectionAbove, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_above(&AddSelectionAbove, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![DisplayPoint::new(1, 4)..DisplayPoint::new(1, 3)]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(0, 1)..DisplayPoint::new(1, 4)])
+ });
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4),
+ DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4),
+ DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2),
+ DisplayPoint::new(4, 1)..DisplayPoint::new(4, 4),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_above(&AddSelectionAbove, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 1)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(1, 1)..DisplayPoint::new(1, 4),
+ DisplayPoint::new(3, 1)..DisplayPoint::new(3, 2),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(4, 3)..DisplayPoint::new(1, 1)])
+ });
+ });
+ view.update(cx, |view, cx| {
+ view.add_selection_above(&AddSelectionAbove, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1),
+ DisplayPoint::new(4, 3)..DisplayPoint::new(4, 1),
+ ]
+ );
+ });
+
+ view.update(cx, |view, cx| {
+ view.add_selection_below(&AddSelectionBelow, cx);
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ vec![
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(3, 2)..DisplayPoint::new(3, 1),
+ DisplayPoint::new(4, 3)..DisplayPoint::new(4, 1),
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_select_next(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+ cx.set_state("abc\nˇabc abc\ndefabc\nabc");
+
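+ // select_next first selects the word under the cursor, then adds the next match on each repetition, skipping occurrences embedded in longer words like "defabc".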
+ cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx));
+ cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc");
+
+ cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx));
+ cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc");
+
+ cx.update_editor(|view, cx| view.undo_selection(&UndoSelection, cx));
+ cx.assert_editor_state("abc\n«abcˇ» abc\ndefabc\nabc");
+
+ cx.update_editor(|view, cx| view.redo_selection(&RedoSelection, cx));
+ cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\nabc");
+
+ cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx));
+ cx.assert_editor_state("abc\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»");
+
+ cx.update_editor(|e, cx| e.select_next(&SelectNext::default(), cx));
+ cx.assert_editor_state("«abcˇ»\n«abcˇ» «abcˇ»\ndefabc\n«abcˇ»");
+}
+
+#[gpui::test]
+async fn test_select_larger_smaller_syntax_node(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+ let language = Arc::new(Language::new(
+ LanguageConfig::default(),
+ Some(tree_sitter_rust::language()),
+ ));
+
+ let text = r#"
+ use mod1::mod2::{mod3, mod4};
+
+ fn fn_1(param1: bool, param2: &str) {
+ let var1 = "text";
+ }
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
+ view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+ .await;
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25),
+ DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12),
+ DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18),
+ ]);
+ });
+ view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| { view.selections.display_ranges(cx) }),
+ &[
+ DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27),
+ DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7),
+ DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21),
+ ]
+ );
+
+ view.update(cx, |view, cx| {
+ view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[
+ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28),
+ DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0),
+ ]
+ );
+
+ view.update(cx, |view, cx| {
+ view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)]
+ );
+
+ // Trying to expand the selected syntax node one more time has no effect.
+ view.update(cx, |view, cx| {
+ view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[DisplayPoint::new(5, 0)..DisplayPoint::new(0, 0)]
+ );
+
+ view.update(cx, |view, cx| {
+ view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[
+ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28),
+ DisplayPoint::new(4, 1)..DisplayPoint::new(2, 0),
+ ]
+ );
+
+ view.update(cx, |view, cx| {
+ view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[
+ DisplayPoint::new(0, 23)..DisplayPoint::new(0, 27),
+ DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7),
+ DisplayPoint::new(3, 15)..DisplayPoint::new(3, 21),
+ ]
+ );
+
+ view.update(cx, |view, cx| {
+ view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[
+ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25),
+ DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12),
+ DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18),
+ ]
+ );
+
+ // Trying to shrink the selected syntax node one more time has no effect.
+ view.update(cx, |view, cx| {
+ view.select_smaller_syntax_node(&SelectSmallerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[
+ DisplayPoint::new(0, 25)..DisplayPoint::new(0, 25),
+ DisplayPoint::new(2, 24)..DisplayPoint::new(2, 12),
+ DisplayPoint::new(3, 18)..DisplayPoint::new(3, 18),
+ ]
+ );
+
+ // Ensure that we keep expanding the selection if the larger selection starts or ends within
+ // a fold.
+ view.update(cx, |view, cx| {
+ view.fold_ranges(
+ vec![
+ Point::new(0, 21)..Point::new(0, 24),
+ Point::new(3, 20)..Point::new(3, 22),
+ ],
+ cx,
+ );
+ view.select_larger_syntax_node(&SelectLargerSyntaxNode, cx);
+ });
+ assert_eq!(
+ view.update(cx, |view, cx| view.selections.display_ranges(cx)),
+ &[
+ DisplayPoint::new(0, 16)..DisplayPoint::new(0, 28),
+ DisplayPoint::new(2, 35)..DisplayPoint::new(2, 7),
+ DisplayPoint::new(3, 4)..DisplayPoint::new(3, 23),
+ ]
+ );
+}
+
+#[gpui::test]
+async fn test_autoindent_selections(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ brackets: vec![
+ BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: false,
+ newline: true,
+ },
+ BracketPair {
+ start: "(".to_string(),
+ end: ")".to_string(),
+ close: false,
+ newline: true,
+ },
+ ],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_indents_query(
+ r#"
+ (_ "(" ")" @end) @indent
+ (_ "{" "}" @end) @indent
+ "#,
+ )
+ .unwrap(),
+ );
+
+ let text = "fn a() {}";
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+ editor
+ .condition(cx, |editor, cx| !editor.buffer.read(cx).is_parsing(cx))
+ .await;
+
+ editor.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |s| s.select_ranges([5..5, 8..8, 9..9]));
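+ // Each inserted newline is auto-indented according to the indents query for the enclosing brackets.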
+ editor.newline(&Newline, cx);
+ assert_eq!(editor.text(cx), "fn a(\n \n) {\n \n}\n");
+ assert_eq!(
+ editor.selections.ranges(cx),
+ &[
+ Point::new(1, 4)..Point::new(1, 4),
+ Point::new(3, 4)..Point::new(3, 4),
+ Point::new(5, 0)..Point::new(5, 0)
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_autoclose_pairs(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
+ let language = Arc::new(Language::new(
+ LanguageConfig {
+ brackets: vec![
+ BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: true,
+ newline: true,
+ },
+ BracketPair {
+ start: "/*".to_string(),
+ end: " */".to_string(),
+ close: true,
+ newline: true,
+ },
+ BracketPair {
+ start: "[".to_string(),
+ end: "]".to_string(),
+ close: false,
+ newline: true,
+ },
+ ],
+ autoclose_before: "})]".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ ));
+
+ let registry = Arc::new(LanguageRegistry::test());
+ registry.add(language.clone());
+ cx.update_buffer(|buffer, cx| {
+ buffer.set_language_registry(registry);
+ buffer.set_language(Some(language), cx);
+ });
+
+ cx.set_state(
+ &r#"
+ 🏀ˇ
+ εˇ
+ ❤️ˇ
+ "#
+ .unindent(),
+ );
+
+ // autoclose multiple nested brackets at multiple cursors
+ cx.update_editor(|view, cx| {
+ view.handle_input("{", cx);
+ view.handle_input("{", cx);
+ view.handle_input("{", cx);
+ });
+ cx.assert_editor_state(
+ &"
+ 🏀{{{ˇ}}}
+ ε{{{ˇ}}}
+ ❤️{{{ˇ}}}
+ "
+ .unindent(),
+ );
+
+ // skip over the auto-closed brackets when typing a closing bracket
+ cx.update_editor(|view, cx| {
+ view.move_right(&MoveRight, cx);
+ view.handle_input("}", cx);
+ view.handle_input("}", cx);
+ view.handle_input("}", cx);
+ });
+ cx.assert_editor_state(
+ &"
+ 🏀{{{}}}}ˇ
+ ε{{{}}}}ˇ
+ ❤️{{{}}}}ˇ
+ "
+ .unindent(),
+ );
+
+ // autoclose multi-character pairs
+ cx.set_state(
+ &"
+ ˇ
+ ˇ
+ "
+ .unindent(),
+ );
+ cx.update_editor(|view, cx| {
+ view.handle_input("/", cx);
+ view.handle_input("*", cx);
+ });
+ cx.assert_editor_state(
+ &"
+ /*ˇ */
+ /*ˇ */
+ "
+ .unindent(),
+ );
+
+ // one cursor autocloses a multi-character pair, one cursor
+ // does not autoclose.
+ cx.set_state(
+ &"
+ /ˇ
+ ˇ
+ "
+ .unindent(),
+ );
+ cx.update_editor(|view, cx| view.handle_input("*", cx));
+ cx.assert_editor_state(
+ &"
+ /*ˇ */
+ *ˇ
+ "
+ .unindent(),
+ );
+
+ // Don't autoclose if the next character isn't whitespace and isn't
+ // listed in the language's "autoclose_before" section.
+ cx.set_state("ˇa b");
+ cx.update_editor(|view, cx| view.handle_input("{", cx));
+ cx.assert_editor_state("{ˇa b");
+
+ // Surround with brackets if text is selected
+ cx.set_state("«aˇ» b");
+ cx.update_editor(|view, cx| view.handle_input("{", cx));
+ cx.assert_editor_state("{«aˇ»} b");
+}
+
+#[gpui::test]
+async fn test_autoclose_with_embedded_language(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
+ let html_language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "HTML".into(),
+ brackets: vec![
+ BracketPair {
+ start: "<".into(),
+ end: ">".into(),
+ ..Default::default()
+ },
+ BracketPair {
+ start: "{".into(),
+ end: "}".into(),
+ ..Default::default()
+ },
+ BracketPair {
+ start: "(".into(),
+ end: ")".into(),
+ ..Default::default()
+ },
+ ],
+ autoclose_before: "})]>".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_html::language()),
+ )
+ .with_injection_query(
+ r#"
+ (script_element
+ (raw_text) @content
+ (#set! "language" "javascript"))
+ "#,
+ )
+ .unwrap(),
+ );
+
+ let javascript_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ brackets: vec![
+ BracketPair {
+ start: "/*".into(),
+ end: " */".into(),
+ ..Default::default()
+ },
+ BracketPair {
+ start: "{".into(),
+ end: "}".into(),
+ ..Default::default()
+ },
+ BracketPair {
+ start: "(".into(),
+ end: ")".into(),
+ ..Default::default()
+ },
+ ],
+ autoclose_before: "})]>".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_javascript::language()),
+ ));
+
+ let registry = Arc::new(LanguageRegistry::test());
+ registry.add(html_language.clone());
+ registry.add(javascript_language.clone());
+
+ cx.update_buffer(|buffer, cx| {
+ buffer.set_language_registry(registry);
+ buffer.set_language(Some(html_language), cx);
+ });
+
+ cx.set_state(
+ &r#"
+ <body>ˇ
+ <script>
+ var x = 1;ˇ
+ </script>
+ </body>ˇ
+ "#
+ .unindent(),
+ );
+
+ // Precondition: different languages are active at different locations.
+ cx.update_editor(|editor, cx| {
+ let snapshot = editor.snapshot(cx);
+ let cursors = editor.selections.ranges::<usize>(cx);
+ let languages = cursors
+ .iter()
+ .map(|c| snapshot.language_at(c.start).unwrap().name())
+ .collect::<Vec<_>>();
+ assert_eq!(
+ languages,
+ &["HTML".into(), "JavaScript".into(), "HTML".into()]
+ );
+ });
+
+ // Angle brackets autoclose in HTML, but not JavaScript.
+ cx.update_editor(|editor, cx| {
+ editor.handle_input("<", cx);
+ editor.handle_input("a", cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body><aˇ>
+ <script>
+ var x = 1;<aˇ
+ </script>
+ </body><aˇ>
+ "#
+ .unindent(),
+ );
+
+ // Curly braces and parens autoclose in both HTML and JavaScript.
+ cx.update_editor(|editor, cx| {
+ editor.handle_input(" b=", cx);
+ editor.handle_input("{", cx);
+ editor.handle_input("c", cx);
+ editor.handle_input("(", cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body><a b={c(ˇ)}>
+ <script>
+ var x = 1;<a b={c(ˇ)}
+ </script>
+ </body><a b={c(ˇ)}>
+ "#
+ .unindent(),
+ );
+
+ // Brackets that were already autoclosed are skipped.
+ cx.update_editor(|editor, cx| {
+ editor.handle_input(")", cx);
+ editor.handle_input("d", cx);
+ editor.handle_input("}", cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body><a b={c()d}ˇ>
+ <script>
+ var x = 1;<a b={c()d}ˇ
+ </script>
+ </body><a b={c()d}ˇ>
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| {
+ editor.handle_input(">", cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body><a b={c()d}>ˇ
+ <script>
+ var x = 1;<a b={c()d}>ˇ
+ </script>
+ </body><a b={c()d}>ˇ
+ "#
+ .unindent(),
+ );
+
+ // Reset
+ cx.set_state(
+ &r#"
+ <body>ˇ
+ <script>
+ var x = 1;ˇ
+ </script>
+ </body>ˇ
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| {
+ editor.handle_input("<", cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body><ˇ>
+ <script>
+ var x = 1;<ˇ
+ </script>
+ </body><ˇ>
+ "#
+ .unindent(),
+ );
+
+ // When backspacing, the closing angle brackets are removed.
+ cx.update_editor(|editor, cx| {
+ editor.backspace(&Backspace, cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body>ˇ
+ <script>
+ var x = 1;ˇ
+ </script>
+ </body>ˇ
+ "#
+ .unindent(),
+ );
+
+ // Block comments autoclose in JavaScript, but not HTML.
+ cx.update_editor(|editor, cx| {
+ editor.handle_input("/", cx);
+ editor.handle_input("*", cx);
+ });
+ cx.assert_editor_state(
+ &r#"
+ <body>/*ˇ
+ <script>
+ var x = 1;/*ˇ */
+ </script>
+ </body>/*ˇ
+ "#
+ .unindent(),
+ );
+}
+
+#[gpui::test]
+async fn test_surround_with_pair(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+ let language = Arc::new(Language::new(
+ LanguageConfig {
+ brackets: vec![BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: true,
+ newline: true,
+ }],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ ));
+
+ let text = r#"
+ a
+ b
+ c
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
+ view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+ .await;
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(2, 0)..DisplayPoint::new(2, 1),
+ ])
+ });
+
+ view.handle_input("{", cx);
+ view.handle_input("{", cx);
+ view.handle_input("{", cx);
+ assert_eq!(
+ view.text(cx),
+ "
+ {{{a}}}
+ {{{b}}}
+ {{{c}}}
+ "
+ .unindent()
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [
+ DisplayPoint::new(0, 3)..DisplayPoint::new(0, 4),
+ DisplayPoint::new(1, 3)..DisplayPoint::new(1, 4),
+ DisplayPoint::new(2, 3)..DisplayPoint::new(2, 4)
+ ]
+ );
+
+ view.undo(&Undo, cx);
+ assert_eq!(
+ view.text(cx),
+ "
+ a
+ b
+ c
+ "
+ .unindent()
+ );
+ assert_eq!(
+ view.selections.display_ranges(cx),
+ [
+ DisplayPoint::new(0, 0)..DisplayPoint::new(0, 1),
+ DisplayPoint::new(1, 0)..DisplayPoint::new(1, 1),
+ DisplayPoint::new(2, 0)..DisplayPoint::new(2, 1)
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_delete_autoclose_pair(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+ let language = Arc::new(Language::new(
+ LanguageConfig {
+ brackets: vec![BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: true,
+ newline: true,
+ }],
+ autoclose_before: "}".to_string(),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ ));
+
+ let text = r#"
+ a
+ b
+ c
+ "#
+ .unindent();
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+ editor
+ .condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+ .await;
+
+ editor.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([
+ Point::new(0, 1)..Point::new(0, 1),
+ Point::new(1, 1)..Point::new(1, 1),
+ Point::new(2, 1)..Point::new(2, 1),
+ ])
+ });
+
+ editor.handle_input("{", cx);
+ editor.handle_input("{", cx);
+ editor.handle_input("_", cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ a{{_}}
+ b{{_}}
+ c{{_}}
+ "
+ .unindent()
+ );
+ assert_eq!(
+ editor.selections.ranges::<Point>(cx),
+ [
+ Point::new(0, 4)..Point::new(0, 4),
+ Point::new(1, 4)..Point::new(1, 4),
+ Point::new(2, 4)..Point::new(2, 4)
+ ]
+ );
+
+ editor.backspace(&Default::default(), cx);
+ editor.backspace(&Default::default(), cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ a{}
+ b{}
+ c{}
+ "
+ .unindent()
+ );
+ assert_eq!(
+ editor.selections.ranges::<Point>(cx),
+ [
+ Point::new(0, 2)..Point::new(0, 2),
+ Point::new(1, 2)..Point::new(1, 2),
+ Point::new(2, 2)..Point::new(2, 2)
+ ]
+ );
+
+ editor.delete_to_previous_word_start(&Default::default(), cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ a
+ b
+ c
+ "
+ .unindent()
+ );
+ assert_eq!(
+ editor.selections.ranges::<Point>(cx),
+ [
+ Point::new(0, 1)..Point::new(0, 1),
+ Point::new(1, 1)..Point::new(1, 1),
+ Point::new(2, 1)..Point::new(2, 1)
+ ]
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_snippets(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+
+ let (text, insertion_ranges) = marked_text_ranges(
+ indoc! {"
+ a.ˇ b
+ a.ˇ b
+ a.ˇ b
+ "},
+ false,
+ );
+
+ let buffer = cx.update(|cx| MultiBuffer::build_simple(&text, cx));
+ let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+
+ editor.update(cx, |editor, cx| {
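+        // `${1:one}` and `${1:three}` share tab stop 1, so both placeholders are
+        // selected together; `${2:two}` is tab stop 2 and `$0` marks the final
+        // cursor position once the snippet is exited.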
+ let snippet = Snippet::parse("f(${1:one}, ${2:two}, ${1:three})$0").unwrap();
+
+ editor
+ .insert_snippet(&insertion_ranges, snippet, cx)
+ .unwrap();
+
+ fn assert(editor: &mut Editor, cx: &mut ViewContext<Editor>, marked_text: &str) {
+ let (expected_text, selection_ranges) = marked_text_ranges(marked_text, false);
+ assert_eq!(editor.text(cx), expected_text);
+ assert_eq!(editor.selections.ranges::<usize>(cx), selection_ranges);
+ }
+
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(«one», two, «three») b
+ a.f(«one», two, «three») b
+ a.f(«one», two, «three») b
+ "},
+ );
+
+ // Can't move earlier than the first tab stop
+ assert!(!editor.move_to_prev_snippet_tabstop(cx));
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(«one», two, «three») b
+ a.f(«one», two, «three») b
+ a.f(«one», two, «three») b
+ "},
+ );
+
+ assert!(editor.move_to_next_snippet_tabstop(cx));
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(one, «two», three) b
+ a.f(one, «two», three) b
+ a.f(one, «two», three) b
+ "},
+ );
+
+ editor.move_to_prev_snippet_tabstop(cx);
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(«one», two, «three») b
+ a.f(«one», two, «three») b
+ a.f(«one», two, «three») b
+ "},
+ );
+
+ assert!(editor.move_to_next_snippet_tabstop(cx));
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(one, «two», three) b
+ a.f(one, «two», three) b
+ a.f(one, «two», three) b
+ "},
+ );
+ assert!(editor.move_to_next_snippet_tabstop(cx));
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(one, two, three)ˇ b
+ a.f(one, two, three)ˇ b
+ a.f(one, two, three)ˇ b
+ "},
+ );
+
+ // As soon as the last tab stop is reached, snippet state is gone
+ editor.move_to_prev_snippet_tabstop(cx);
+ assert(
+ editor,
+ cx,
+ indoc! {"
+ a.f(one, two, three)ˇ b
+ a.f(one, two, three)ˇ b
+ a.f(one, two, three)ˇ b
+ "},
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_document_format_during_save(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+
+ let mut language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ );
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ document_formatting_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
+ ..Default::default()
+ }))
+ .await;
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_file("/file.rs", Default::default()).await;
+
+ let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
+ project.update(cx, |project, _| project.languages().add(Arc::new(language)));
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
+ .await
+ .unwrap();
+
+ cx.foreground().start_waiting();
+ let fake_server = fake_servers.next().await.unwrap();
+
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+ editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
+ assert!(cx.read(|cx| editor.is_dirty(cx)));
+
+ let save = cx.update(|cx| editor.save(project.clone(), cx));
+ fake_server
+ .handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ assert_eq!(params.options.tab_size, 4);
+ Ok(Some(vec![lsp::TextEdit::new(
+ lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)),
+ ", ".to_string(),
+ )]))
+ })
+ .next()
+ .await;
+ cx.foreground().start_waiting();
+ save.await.unwrap();
+ assert_eq!(
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
+ "one, two\nthree\n"
+ );
+ assert!(!cx.read(|cx| editor.is_dirty(cx)));
+
+ editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
+ assert!(cx.read(|cx| editor.is_dirty(cx)));
+
+ // Ensure we can still save even if formatting hangs.
+ fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ futures::future::pending::<()>().await;
+ unreachable!()
+ });
+ let save = cx.update(|cx| editor.save(project.clone(), cx));
+ cx.foreground().advance_clock(super::FORMAT_TIMEOUT);
+ cx.foreground().start_waiting();
+ save.await.unwrap();
+ assert_eq!(
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
+ "one\ntwo\nthree\n"
+ );
+ assert!(!cx.read(|cx| editor.is_dirty(cx)));
+
+    // Set a Rust language override and assert that the overridden tab size is sent to the language server
+ cx.update(|cx| {
+ cx.update_global::<Settings, _, _>(|settings, _| {
+ settings.language_overrides.insert(
+ "Rust".into(),
+ EditorSettings {
+ tab_size: Some(8.try_into().unwrap()),
+ ..Default::default()
+ },
+ );
+ })
+ });
+
+ let save = cx.update(|cx| editor.save(project.clone(), cx));
+ fake_server
+ .handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ assert_eq!(params.options.tab_size, 8);
+ Ok(Some(vec![]))
+ })
+ .next()
+ .await;
+ cx.foreground().start_waiting();
+ save.await.unwrap();
+}
+
+#[gpui::test]
+async fn test_range_format_during_save(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+
+ let mut language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ );
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ document_range_formatting_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
+ ..Default::default()
+ }))
+ .await;
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_file("/file.rs", Default::default()).await;
+
+ let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
+ project.update(cx, |project, _| project.languages().add(Arc::new(language)));
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
+ .await
+ .unwrap();
+
+ cx.foreground().start_waiting();
+ let fake_server = fake_servers.next().await.unwrap();
+
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+ editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
+ assert!(cx.read(|cx| editor.is_dirty(cx)));
+
+ let save = cx.update(|cx| editor.save(project.clone(), cx));
+ fake_server
+ .handle_request::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ assert_eq!(params.options.tab_size, 4);
+ Ok(Some(vec![lsp::TextEdit::new(
+ lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)),
+ ", ".to_string(),
+ )]))
+ })
+ .next()
+ .await;
+ cx.foreground().start_waiting();
+ save.await.unwrap();
+ assert_eq!(
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
+ "one, two\nthree\n"
+ );
+ assert!(!cx.read(|cx| editor.is_dirty(cx)));
+
+ editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
+ assert!(cx.read(|cx| editor.is_dirty(cx)));
+
+ // Ensure we can still save even if formatting hangs.
+ fake_server.handle_request::<lsp::request::RangeFormatting, _, _>(
+ move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ futures::future::pending::<()>().await;
+ unreachable!()
+ },
+ );
+ let save = cx.update(|cx| editor.save(project.clone(), cx));
+ cx.foreground().advance_clock(super::FORMAT_TIMEOUT);
+ cx.foreground().start_waiting();
+ save.await.unwrap();
+ assert_eq!(
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
+ "one\ntwo\nthree\n"
+ );
+ assert!(!cx.read(|cx| editor.is_dirty(cx)));
+
+    // Set a Rust language override and assert that the overridden tab size is sent to the language server
+ cx.update(|cx| {
+ cx.update_global::<Settings, _, _>(|settings, _| {
+ settings.language_overrides.insert(
+ "Rust".into(),
+ EditorSettings {
+ tab_size: Some(8.try_into().unwrap()),
+ ..Default::default()
+ },
+ );
+ })
+ });
+
+ let save = cx.update(|cx| editor.save(project.clone(), cx));
+ fake_server
+ .handle_request::<lsp::request::RangeFormatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ assert_eq!(params.options.tab_size, 8);
+ Ok(Some(vec![]))
+ })
+ .next()
+ .await;
+ cx.foreground().start_waiting();
+ save.await.unwrap();
+}
+
+#[gpui::test]
+async fn test_document_format_manual_trigger(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+
+ let mut language = Language::new(
+ LanguageConfig {
+ name: "Rust".into(),
+ path_suffixes: vec!["rs".to_string()],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ );
+ let mut fake_servers = language
+ .set_fake_lsp_adapter(Arc::new(FakeLspAdapter {
+ capabilities: lsp::ServerCapabilities {
+ document_formatting_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
+ ..Default::default()
+ }))
+ .await;
+
+ let fs = FakeFs::new(cx.background());
+ fs.insert_file("/file.rs", Default::default()).await;
+
+ let project = Project::test(fs, ["/file.rs".as_ref()], cx).await;
+ project.update(cx, |project, _| project.languages().add(Arc::new(language)));
+ let buffer = project
+ .update(cx, |project, cx| project.open_local_buffer("/file.rs", cx))
+ .await
+ .unwrap();
+
+ cx.foreground().start_waiting();
+ let fake_server = fake_servers.next().await.unwrap();
+
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, editor) = cx.add_window(|cx| build_editor(buffer, cx));
+ editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
+
+ let format = editor.update(cx, |editor, cx| editor.perform_format(project.clone(), cx));
+ fake_server
+ .handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ assert_eq!(params.options.tab_size, 4);
+ Ok(Some(vec![lsp::TextEdit::new(
+ lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(1, 0)),
+ ", ".to_string(),
+ )]))
+ })
+ .next()
+ .await;
+ cx.foreground().start_waiting();
+ format.await.unwrap();
+ assert_eq!(
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
+ "one, two\nthree\n"
+ );
+
+ editor.update(cx, |editor, cx| editor.set_text("one\ntwo\nthree\n", cx));
+    // Ensure we don't lock up if formatting hangs.
+ fake_server.handle_request::<lsp::request::Formatting, _, _>(move |params, _| async move {
+ assert_eq!(
+ params.text_document.uri,
+ lsp::Url::from_file_path("/file.rs").unwrap()
+ );
+ futures::future::pending::<()>().await;
+ unreachable!()
+ });
+ let format = editor.update(cx, |editor, cx| editor.perform_format(project, cx));
+ cx.foreground().advance_clock(super::FORMAT_TIMEOUT);
+ cx.foreground().start_waiting();
+ format.await.unwrap();
+ assert_eq!(
+ editor.read_with(cx, |editor, cx| editor.text(cx)),
+ "one\ntwo\nthree\n"
+ );
+}
+
+#[gpui::test]
+async fn test_concurrent_format_requests(cx: &mut gpui::TestAppContext) {
+ cx.foreground().forbid_parking();
+
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ document_formatting_provider: Some(lsp::OneOf::Left(true)),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ cx.set_state(indoc! {"
+ one.twoˇ
+ "});
+
+ // The format request takes a long time. When it completes, it inserts
+ // a newline and an indent before the `.`
+ cx.lsp
+ .handle_request::<lsp::request::Formatting, _, _>(move |_, cx| {
+ let executor = cx.background();
+ async move {
+ executor.timer(Duration::from_millis(100)).await;
+ Ok(Some(vec![lsp::TextEdit {
+ range: lsp::Range::new(lsp::Position::new(0, 3), lsp::Position::new(0, 3)),
+ new_text: "\n ".into(),
+ }]))
+ }
+ });
+
+ // Submit a format request.
+ let format_1 = cx
+ .update_editor(|editor, cx| editor.format(&Format, cx))
+ .unwrap();
+ cx.foreground().run_until_parked();
+
+ // Submit a second format request.
+ let format_2 = cx
+ .update_editor(|editor, cx| editor.format(&Format, cx))
+ .unwrap();
+ cx.foreground().run_until_parked();
+
+ // Wait for both format requests to complete
+ cx.foreground().advance_clock(Duration::from_millis(200));
+ cx.foreground().start_waiting();
+ format_1.await.unwrap();
+ cx.foreground().start_waiting();
+ format_2.await.unwrap();
+
+    // The formatting edits are only applied once.
+ cx.assert_editor_state(indoc! {"
+ one
+ .twoˇ
+ "});
+}
+
+#[gpui::test]
+async fn test_completion(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorLspTestContext::new_rust(
+ lsp::ServerCapabilities {
+ completion_provider: Some(lsp::CompletionOptions {
+ trigger_characters: Some(vec![".".to_string(), ":".to_string()]),
+ ..Default::default()
+ }),
+ ..Default::default()
+ },
+ cx,
+ )
+ .await;
+
+ cx.set_state(indoc! {"
+ oneˇ
+ two
+ three
+ "});
+ cx.simulate_keystroke(".");
+ handle_completion_request(
+ &mut cx,
+ indoc! {"
+ one.|<>
+ two
+ three
+ "},
+ vec!["first_completion", "second_completion"],
+ )
+ .await;
+ cx.condition(|editor, _| editor.context_menu_visible())
+ .await;
+ let apply_additional_edits = cx.update_editor(|editor, cx| {
+ editor.move_down(&MoveDown, cx);
+ editor
+ .confirm_completion(&ConfirmCompletion::default(), cx)
+ .unwrap()
+ });
+ cx.assert_editor_state(indoc! {"
+ one.second_completionˇ
+ two
+ three
+ "});
+
+ handle_resolve_completion_request(
+ &mut cx,
+ Some((
+ indoc! {"
+ one.second_completion
+ two
+ threeˇ
+ "},
+ "\nadditional edit",
+ )),
+ )
+ .await;
+ apply_additional_edits.await.unwrap();
+ cx.assert_editor_state(indoc! {"
+ one.second_completionˇ
+ two
+ three
+ additional edit
+ "});
+
+ cx.set_state(indoc! {"
+ one.second_completion
+ twoˇ
+ threeˇ
+ additional edit
+ "});
+ cx.simulate_keystroke(" ");
+ assert!(cx.editor(|e, _| e.context_menu.is_none()));
+ cx.simulate_keystroke("s");
+ assert!(cx.editor(|e, _| e.context_menu.is_none()));
+
+ cx.assert_editor_state(indoc! {"
+ one.second_completion
+ two sˇ
+ three sˇ
+ additional edit
+ "});
+ handle_completion_request(
+ &mut cx,
+ indoc! {"
+ one.second_completion
+ two s
+ three <s|>
+ additional edit
+ "},
+ vec!["fourth_completion", "fifth_completion", "sixth_completion"],
+ )
+ .await;
+ cx.condition(|editor, _| editor.context_menu_visible())
+ .await;
+
+ cx.simulate_keystroke("i");
+
+ handle_completion_request(
+ &mut cx,
+ indoc! {"
+ one.second_completion
+ two si
+ three <si|>
+ additional edit
+ "},
+ vec!["fourth_completion", "fifth_completion", "sixth_completion"],
+ )
+ .await;
+ cx.condition(|editor, _| editor.context_menu_visible())
+ .await;
+
+ let apply_additional_edits = cx.update_editor(|editor, cx| {
+ editor
+ .confirm_completion(&ConfirmCompletion::default(), cx)
+ .unwrap()
+ });
+ cx.assert_editor_state(indoc! {"
+ one.second_completion
+ two sixth_completionˇ
+ three sixth_completionˇ
+ additional edit
+ "});
+
+ handle_resolve_completion_request(&mut cx, None).await;
+ apply_additional_edits.await.unwrap();
+
+ cx.update(|cx| {
+ cx.update_global::<Settings, _, _>(|settings, _| {
+ settings.show_completions_on_input = false;
+ })
+ });
+ cx.set_state("editorˇ");
+ cx.simulate_keystroke(".");
+ assert!(cx.editor(|e, _| e.context_menu.is_none()));
+ cx.simulate_keystroke("c");
+ cx.simulate_keystroke("l");
+ cx.simulate_keystroke("o");
+ cx.assert_editor_state("editor.cloˇ");
+ assert!(cx.editor(|e, _| e.context_menu.is_none()));
+ cx.update_editor(|editor, cx| {
+ editor.show_completions(&ShowCompletions, cx);
+ });
+ handle_completion_request(&mut cx, "editor.<clo|>", vec!["close", "clobber"]).await;
+ cx.condition(|editor, _| editor.context_menu_visible())
+ .await;
+ let apply_additional_edits = cx.update_editor(|editor, cx| {
+ editor
+ .confirm_completion(&ConfirmCompletion::default(), cx)
+ .unwrap()
+ });
+ cx.assert_editor_state("editor.closeˇ");
+ handle_resolve_completion_request(&mut cx, None).await;
+ apply_additional_edits.await.unwrap();
+
+    // Handles a completion request. In the marked string, '|' marks the position the
+    // completion is triggered from, and '<' and '>' delimit the range to be replaced;
+    // `completions` lists the completion items to return.
+ async fn handle_completion_request<'a>(
+ cx: &mut EditorLspTestContext<'a>,
+ marked_string: &str,
+ completions: Vec<&'static str>,
+ ) {
+ let complete_from_marker: TextRangeMarker = '|'.into();
+ let replace_range_marker: TextRangeMarker = ('<', '>').into();
+ let (_, mut marked_ranges) = marked_text_ranges_by(
+ marked_string,
+ vec![complete_from_marker.clone(), replace_range_marker.clone()],
+ );
+
+ let complete_from_position =
+ cx.to_lsp(marked_ranges.remove(&complete_from_marker).unwrap()[0].start);
+ let replace_range =
+ cx.to_lsp_range(marked_ranges.remove(&replace_range_marker).unwrap()[0].clone());
+
+ cx.handle_request::<lsp::request::Completion, _, _>(move |url, params, _| {
+ let completions = completions.clone();
+ async move {
+ assert_eq!(params.text_document_position.text_document.uri, url.clone());
+ assert_eq!(
+ params.text_document_position.position,
+ complete_from_position
+ );
+ Ok(Some(lsp::CompletionResponse::Array(
+ completions
+ .iter()
+ .map(|completion_text| lsp::CompletionItem {
+ label: completion_text.to_string(),
+ text_edit: Some(lsp::CompletionTextEdit::Edit(lsp::TextEdit {
+ range: replace_range,
+ new_text: completion_text.to_string(),
+ })),
+ ..Default::default()
+ })
+ .collect(),
+ )))
+ }
+ })
+ .next()
+ .await;
+ }
+
+ async fn handle_resolve_completion_request<'a>(
+ cx: &mut EditorLspTestContext<'a>,
+ edit: Option<(&'static str, &'static str)>,
+ ) {
+ let edit = edit.map(|(marked_string, new_text)| {
+ let (_, marked_ranges) = marked_text_ranges(marked_string, false);
+ let replace_range = cx.to_lsp_range(marked_ranges[0].clone());
+ vec![lsp::TextEdit::new(replace_range, new_text.to_string())]
+ });
+
+ cx.handle_request::<lsp::request::ResolveCompletionItem, _, _>(move |_, _, _| {
+ let edit = edit.clone();
+ async move {
+ Ok(lsp::CompletionItem {
+ additional_text_edits: edit,
+ ..Default::default()
+ })
+ }
+ })
+ .next()
+ .await;
+ }
+}
+
+#[gpui::test]
+async fn test_toggle_comment(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+ let language = Arc::new(Language::new(
+ LanguageConfig {
+ line_comment: Some("// ".into()),
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ ));
+
+ let text = "
+ fn a() {
+ //b();
+ // c();
+ // d();
+ }
+ "
+ .unindent();
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
+
+ view.update(cx, |editor, cx| {
+ // If multiple selections intersect a line, the line is only
+ // toggled once.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(1, 3)..DisplayPoint::new(2, 3),
+ DisplayPoint::new(3, 5)..DisplayPoint::new(3, 6),
+ ])
+ });
+ editor.toggle_comments(&ToggleComments, cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ fn a() {
+ b();
+ c();
+ d();
+ }
+ "
+ .unindent()
+ );
+
+ // The comment prefix is inserted at the same column for every line
+ // in a selection.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(1, 3)..DisplayPoint::new(3, 6)])
+ });
+ editor.toggle_comments(&ToggleComments, cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ fn a() {
+ // b();
+ // c();
+ // d();
+ }
+ "
+ .unindent()
+ );
+
+ // If a selection ends at the beginning of a line, that line is not toggled.
+ editor.change_selections(None, cx, |s| {
+ s.select_display_ranges([DisplayPoint::new(2, 0)..DisplayPoint::new(3, 0)])
+ });
+ editor.toggle_comments(&ToggleComments, cx);
+ assert_eq!(
+ editor.text(cx),
+ "
+ fn a() {
+ // b();
+ c();
+ // d();
+ }
+ "
+ .unindent()
+ );
+ });
+}
+
+#[gpui::test]
+async fn test_toggle_block_comment(cx: &mut gpui::TestAppContext) {
+ let mut cx = EditorTestContext::new(cx);
+
+ let html_language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "HTML".into(),
+ block_comment: Some(("<!-- ".into(), " -->".into())),
+ ..Default::default()
+ },
+ Some(tree_sitter_html::language()),
+ )
+ .with_injection_query(
+ r#"
+ (script_element
+ (raw_text) @content
+ (#set! "language" "javascript"))
+ "#,
+ )
+ .unwrap(),
+ );
+
+ let javascript_language = Arc::new(Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ line_comment: Some("// ".into()),
+ ..Default::default()
+ },
+ Some(tree_sitter_javascript::language()),
+ ));
+
+ let registry = Arc::new(LanguageRegistry::test());
+ registry.add(html_language.clone());
+ registry.add(javascript_language.clone());
+
+ cx.update_buffer(|buffer, cx| {
+ buffer.set_language_registry(registry);
+ buffer.set_language(Some(html_language), cx);
+ });
+
+ // Toggle comments for empty selections
+ cx.set_state(
+ &r#"
+ <p>A</p>ˇ
+ <p>B</p>ˇ
+ <p>C</p>ˇ
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
+ cx.assert_editor_state(
+ &r#"
+ <!-- <p>A</p>ˇ -->
+ <!-- <p>B</p>ˇ -->
+ <!-- <p>C</p>ˇ -->
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
+ cx.assert_editor_state(
+ &r#"
+ <p>A</p>ˇ
+ <p>B</p>ˇ
+ <p>C</p>ˇ
+ "#
+ .unindent(),
+ );
+
+ // Toggle comments for mixture of empty and non-empty selections, where
+ // multiple selections occupy a given line.
+ cx.set_state(
+ &r#"
+ <p>A«</p>
+ <p>ˇ»B</p>ˇ
+ <p>C«</p>
+ <p>ˇ»D</p>ˇ
+ "#
+ .unindent(),
+ );
+
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
+ cx.assert_editor_state(
+ &r#"
+ <!-- <p>A«</p>
+ <p>ˇ»B</p>ˇ -->
+ <!-- <p>C«</p>
+ <p>ˇ»D</p>ˇ -->
+ "#
+ .unindent(),
+ );
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
+ cx.assert_editor_state(
+ &r#"
+ <p>A«</p>
+ <p>ˇ»B</p>ˇ
+ <p>C«</p>
+ <p>ˇ»D</p>ˇ
+ "#
+ .unindent(),
+ );
+
+ // Toggle comments when different languages are active for different
+ // selections.
+ cx.set_state(
+ &r#"
+ ˇ<script>
+ ˇvar x = new Y();
+ ˇ</script>
+ "#
+ .unindent(),
+ );
+ cx.foreground().run_until_parked();
+ cx.update_editor(|editor, cx| editor.toggle_comments(&ToggleComments, cx));
+ cx.assert_editor_state(
+ &r#"
+ <!-- ˇ<script> -->
+ // ˇvar x = new Y();
+ <!-- ˇ</script> -->
+ "#
+ .unindent(),
+ );
+}
+
+#[gpui::test]
+fn test_editing_disjoint_excerpts(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
+ let multibuffer = cx.add_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ multibuffer.push_excerpts(
+ buffer.clone(),
+ [
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(0, 4),
+ primary: None,
+ },
+ ExcerptRange {
+ context: Point::new(1, 0)..Point::new(1, 4),
+ primary: None,
+ },
+ ],
+ cx,
+ );
+ multibuffer
+ });
+
+ assert_eq!(multibuffer.read(cx).read(cx).text(), "aaaa\nbbbb");
+
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(multibuffer, cx));
+ view.update(cx, |view, cx| {
+ assert_eq!(view.text(cx), "aaaa\nbbbb");
+ view.change_selections(None, cx, |s| {
+ s.select_ranges([
+ Point::new(0, 0)..Point::new(0, 0),
+ Point::new(1, 0)..Point::new(1, 0),
+ ])
+ });
+
+ view.handle_input("X", cx);
+ assert_eq!(view.text(cx), "Xaaaa\nXbbbb");
+ assert_eq!(
+ view.selections.ranges(cx),
+ [
+ Point::new(0, 1)..Point::new(0, 1),
+ Point::new(1, 1)..Point::new(1, 1),
+ ]
+ )
+ });
+}
+
+#[gpui::test]
+fn test_editing_overlapping_excerpts(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let markers = vec![('[', ']').into(), ('(', ')').into()];
+ let (initial_text, mut excerpt_ranges) = marked_text_ranges_by(
+ indoc! {"
+ [aaaa
+ (bbbb]
+ cccc)",
+ },
+ markers.clone(),
+ );
+ let excerpt_ranges = markers.into_iter().map(|marker| {
+ let context = excerpt_ranges.remove(&marker).unwrap()[0].clone();
+ ExcerptRange {
+ context,
+ primary: None,
+ }
+ });
+ let buffer = cx.add_model(|cx| Buffer::new(0, initial_text, cx));
+ let multibuffer = cx.add_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ multibuffer.push_excerpts(buffer, excerpt_ranges, cx);
+ multibuffer
+ });
+
+ let (_, view) = cx.add_window(Default::default(), |cx| build_editor(multibuffer, cx));
+ view.update(cx, |view, cx| {
+ let (expected_text, selection_ranges) = marked_text_ranges(
+ indoc! {"
+ aaaa
+ bˇbbb
+ bˇbbˇb
+ cccc"
+ },
+ true,
+ );
+ assert_eq!(view.text(cx), expected_text);
+ view.change_selections(None, cx, |s| s.select_ranges(selection_ranges));
+
+ view.handle_input("X", cx);
+
+ let (expected_text, expected_selections) = marked_text_ranges(
+ indoc! {"
+ aaaa
+ bXˇbbXb
+ bXˇbbXˇb
+ cccc"
+ },
+ false,
+ );
+ assert_eq!(view.text(cx), expected_text);
+ assert_eq!(view.selections.ranges(cx), expected_selections);
+
+ view.newline(&Newline, cx);
+ let (expected_text, expected_selections) = marked_text_ranges(
+ indoc! {"
+ aaaa
+ bX
+ ˇbbX
+ b
+ bX
+ ˇbbX
+ ˇb
+ cccc"
+ },
+ false,
+ );
+ assert_eq!(view.text(cx), expected_text);
+ assert_eq!(view.selections.ranges(cx), expected_selections);
+ });
+}
+
+#[gpui::test]
+fn test_refresh_selections(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
+ let mut excerpt1_id = None;
+ let multibuffer = cx.add_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ excerpt1_id = multibuffer
+ .push_excerpts(
+ buffer.clone(),
+ [
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 4),
+ primary: None,
+ },
+ ExcerptRange {
+ context: Point::new(1, 0)..Point::new(2, 4),
+ primary: None,
+ },
+ ],
+ cx,
+ )
+ .into_iter()
+ .next();
+ multibuffer
+ });
+ assert_eq!(
+ multibuffer.read(cx).read(cx).text(),
+ "aaaa\nbbbb\nbbbb\ncccc"
+ );
+ let (_, editor) = cx.add_window(Default::default(), |cx| {
+ let mut editor = build_editor(multibuffer.clone(), cx);
+ let snapshot = editor.snapshot(cx);
+ editor.change_selections(None, cx, |s| {
+ s.select_ranges([Point::new(1, 3)..Point::new(1, 3)])
+ });
+ editor.begin_selection(Point::new(2, 1).to_display_point(&snapshot), true, 1, cx);
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [
+ Point::new(1, 3)..Point::new(1, 3),
+ Point::new(2, 1)..Point::new(2, 1),
+ ]
+ );
+ editor
+ });
+
+ // Refreshing selections is a no-op when excerpts haven't changed.
+ editor.update(cx, |editor, cx| {
+ editor.change_selections(None, cx, |s| {
+ s.refresh();
+ });
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [
+ Point::new(1, 3)..Point::new(1, 3),
+ Point::new(2, 1)..Point::new(2, 1),
+ ]
+ );
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx);
+ });
+ editor.update(cx, |editor, cx| {
+ // Removing an excerpt causes the first selection to become degenerate.
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [
+ Point::new(0, 0)..Point::new(0, 0),
+ Point::new(0, 1)..Point::new(0, 1)
+ ]
+ );
+
+ // Refreshing selections will relocate the first selection to the original buffer
+ // location.
+ editor.change_selections(None, cx, |s| {
+ s.refresh();
+ });
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [
+ Point::new(0, 1)..Point::new(0, 1),
+ Point::new(0, 3)..Point::new(0, 3)
+ ]
+ );
+ assert!(editor.selections.pending_anchor().is_some());
+ });
+}
+
+#[gpui::test]
+fn test_refresh_selections_while_selecting_with_mouse(cx: &mut gpui::MutableAppContext) {
+ cx.set_global(Settings::test(cx));
+ let buffer = cx.add_model(|cx| Buffer::new(0, sample_text(3, 4, 'a'), cx));
+ let mut excerpt1_id = None;
+ let multibuffer = cx.add_model(|cx| {
+ let mut multibuffer = MultiBuffer::new(0);
+ excerpt1_id = multibuffer
+ .push_excerpts(
+ buffer.clone(),
+ [
+ ExcerptRange {
+ context: Point::new(0, 0)..Point::new(1, 4),
+ primary: None,
+ },
+ ExcerptRange {
+ context: Point::new(1, 0)..Point::new(2, 4),
+ primary: None,
+ },
+ ],
+ cx,
+ )
+ .into_iter()
+ .next();
+ multibuffer
+ });
+ assert_eq!(
+ multibuffer.read(cx).read(cx).text(),
+ "aaaa\nbbbb\nbbbb\ncccc"
+ );
+ let (_, editor) = cx.add_window(Default::default(), |cx| {
+ let mut editor = build_editor(multibuffer.clone(), cx);
+ let snapshot = editor.snapshot(cx);
+ editor.begin_selection(Point::new(1, 3).to_display_point(&snapshot), false, 1, cx);
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [Point::new(1, 3)..Point::new(1, 3)]
+ );
+ editor
+ });
+
+ multibuffer.update(cx, |multibuffer, cx| {
+ multibuffer.remove_excerpts([&excerpt1_id.unwrap()], cx);
+ });
+ editor.update(cx, |editor, cx| {
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [Point::new(0, 0)..Point::new(0, 0)]
+ );
+
+ // Ensure we don't panic when selections are refreshed and that the pending selection is finalized.
+ editor.change_selections(None, cx, |s| {
+ s.refresh();
+ });
+ assert_eq!(
+ editor.selections.ranges(cx),
+ [Point::new(0, 3)..Point::new(0, 3)]
+ );
+ assert!(editor.selections.pending_anchor().is_some());
+ });
+}
+
+#[gpui::test]
+async fn test_extra_newline_insertion(cx: &mut gpui::TestAppContext) {
+ cx.update(|cx| cx.set_global(Settings::test(cx)));
+ let language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ brackets: vec![
+ BracketPair {
+ start: "{".to_string(),
+ end: "}".to_string(),
+ close: true,
+ newline: true,
+ },
+ BracketPair {
+ start: "/* ".to_string(),
+ end: " */".to_string(),
+ close: true,
+ newline: true,
+ },
+ ],
+ ..Default::default()
+ },
+ Some(tree_sitter_rust::language()),
+ )
+ .with_indents_query("")
+ .unwrap(),
+ );
+
+ let text = concat!(
+ "{ }\n", //
+ " x\n", //
+ " /* */\n", //
+ "x\n", //
+ "{{} }\n", //
+ );
+
+ let buffer = cx.add_model(|cx| Buffer::new(0, text, cx).with_language(language, cx));
+ let buffer = cx.add_model(|cx| MultiBuffer::singleton(buffer, cx));
+ let (_, view) = cx.add_window(|cx| build_editor(buffer, cx));
+ view.condition(cx, |view, cx| !view.buffer.read(cx).is_parsing(cx))
+ .await;
+
+ view.update(cx, |view, cx| {
+ view.change_selections(None, cx, |s| {
+ s.select_display_ranges([
+ DisplayPoint::new(0, 2)..DisplayPoint::new(0, 3),
+ DisplayPoint::new(2, 5)..DisplayPoint::new(2, 5),
+ DisplayPoint::new(4, 4)..DisplayPoint::new(4, 4),
+ ])
+ });
+ view.newline(&Newline, cx);
+
+ assert_eq!(
+ view.buffer().read(cx).read(cx).text(),
+ concat!(
+ "{ \n", // Suppress rustfmt
+ "\n", //
+ "}\n", //
+ " x\n", //
+ " /* \n", //
+ " \n", //
+ " */\n", //
+ "x\n", //
+ "{{} \n", //
+ "}\n", //
+ )
+ );
+ });
+}
+
+#[gpui::test]
+fn test_highlighted_ranges(cx: &mut gpui::MutableAppContext) {
+ let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx);
+
+ cx.set_global(Settings::test(cx));
+ let (_, editor) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+
+ editor.update(cx, |editor, cx| {
+ struct Type1;
+ struct Type2;
+
+ let buffer = buffer.read(cx).snapshot(cx);
+
+ let anchor_range =
+ |range: Range<Point>| buffer.anchor_after(range.start)..buffer.anchor_after(range.end);
+
+ editor.highlight_background::<Type1>(
+ vec![
+ anchor_range(Point::new(2, 1)..Point::new(2, 3)),
+ anchor_range(Point::new(4, 2)..Point::new(4, 4)),
+ anchor_range(Point::new(6, 3)..Point::new(6, 5)),
+ anchor_range(Point::new(8, 4)..Point::new(8, 6)),
+ ],
+ |_| Color::red(),
+ cx,
+ );
+ editor.highlight_background::<Type2>(
+ vec![
+ anchor_range(Point::new(3, 2)..Point::new(3, 5)),
+ anchor_range(Point::new(5, 3)..Point::new(5, 6)),
+ anchor_range(Point::new(7, 4)..Point::new(7, 7)),
+ anchor_range(Point::new(9, 5)..Point::new(9, 8)),
+ ],
+ |_| Color::green(),
+ cx,
+ );
+
+ let snapshot = editor.snapshot(cx);
+ let mut highlighted_ranges = editor.background_highlights_in_range(
+ anchor_range(Point::new(3, 4)..Point::new(7, 4)),
+ &snapshot,
+ cx.global::<Settings>().theme.as_ref(),
+ );
+ // Enforce a consistent ordering based on color without relying on the ordering of the
+        // highlight's `TypeId`, which is non-deterministic.
+ highlighted_ranges.sort_unstable_by_key(|(_, color)| *color);
+ assert_eq!(
+ highlighted_ranges,
+ &[
+ (
+ DisplayPoint::new(3, 2)..DisplayPoint::new(3, 5),
+ Color::green(),
+ ),
+ (
+ DisplayPoint::new(5, 3)..DisplayPoint::new(5, 6),
+ Color::green(),
+ ),
+ (
+ DisplayPoint::new(4, 2)..DisplayPoint::new(4, 4),
+ Color::red(),
+ ),
+ (
+ DisplayPoint::new(6, 3)..DisplayPoint::new(6, 5),
+ Color::red(),
+ ),
+ ]
+ );
+ assert_eq!(
+ editor.background_highlights_in_range(
+ anchor_range(Point::new(5, 6)..Point::new(6, 4)),
+ &snapshot,
+ cx.global::<Settings>().theme.as_ref(),
+ ),
+ &[(
+ DisplayPoint::new(6, 3)..DisplayPoint::new(6, 5),
+ Color::red(),
+ )]
+ );
+ });
+}
+
+#[gpui::test]
+fn test_following(cx: &mut gpui::MutableAppContext) {
+ let buffer = MultiBuffer::build_simple(&sample_text(16, 8, 'a'), cx);
+
+ cx.set_global(Settings::test(cx));
+
+ let (_, leader) = cx.add_window(Default::default(), |cx| build_editor(buffer.clone(), cx));
+ let (_, follower) = cx.add_window(
+ WindowOptions {
+ bounds: WindowBounds::Fixed(RectF::from_points(vec2f(0., 0.), vec2f(10., 80.))),
+ ..Default::default()
+ },
+ |cx| build_editor(buffer.clone(), cx),
+ );
+
+ let pending_update = Rc::new(RefCell::new(None));
+ follower.update(cx, {
+ let update = pending_update.clone();
+ |_, cx| {
+ cx.subscribe(&leader, move |_, leader, event, cx| {
+ leader
+ .read(cx)
+ .add_event_to_update_proto(event, &mut *update.borrow_mut(), cx);
+ })
+ .detach();
+ }
+ });
+
+ // Update the selections only
+ leader.update(cx, |leader, cx| {
+ leader.change_selections(None, cx, |s| s.select_ranges([1..1]));
+ });
+ follower.update(cx, |follower, cx| {
+ follower
+ .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
+ .unwrap();
+ });
+ assert_eq!(follower.read(cx).selections.ranges(cx), vec![1..1]);
+
+ // Update the scroll position only
+ leader.update(cx, |leader, cx| {
+ leader.set_scroll_position(vec2f(1.5, 3.5), cx);
+ });
+ follower.update(cx, |follower, cx| {
+ follower
+ .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
+ .unwrap();
+ });
+ assert_eq!(
+ follower.update(cx, |follower, cx| follower.scroll_position(cx)),
+ vec2f(1.5, 3.5)
+ );
+
+ // Update the selections and scroll position
+ leader.update(cx, |leader, cx| {
+ leader.change_selections(None, cx, |s| s.select_ranges([0..0]));
+ leader.request_autoscroll(Autoscroll::Newest, cx);
+ leader.set_scroll_position(vec2f(1.5, 3.5), cx);
+ });
+ follower.update(cx, |follower, cx| {
+ let initial_scroll_position = follower.scroll_position(cx);
+ follower
+ .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
+ .unwrap();
+ assert_eq!(follower.scroll_position(cx), initial_scroll_position);
+ assert!(follower.autoscroll_request.is_some());
+ });
+ assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0]);
+
+ // Creating a pending selection that precedes another selection
+ leader.update(cx, |leader, cx| {
+ leader.change_selections(None, cx, |s| s.select_ranges([1..1]));
+ leader.begin_selection(DisplayPoint::new(0, 0), true, 1, cx);
+ });
+ follower.update(cx, |follower, cx| {
+ follower
+ .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
+ .unwrap();
+ });
+ assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..0, 1..1]);
+
+ // Extend the pending selection so that it surrounds another selection
+ leader.update(cx, |leader, cx| {
+ leader.extend_selection(DisplayPoint::new(0, 2), 1, cx);
+ });
+ follower.update(cx, |follower, cx| {
+ follower
+ .apply_update_proto(pending_update.borrow_mut().take().unwrap(), cx)
+ .unwrap();
+ });
+ assert_eq!(follower.read(cx).selections.ranges(cx), vec![0..2]);
+}
+
+#[test]
+fn test_combine_syntax_and_fuzzy_match_highlights() {
+ let string = "abcdefghijklmnop";
+ let syntax_ranges = [
+ (
+ 0..3,
+ HighlightStyle {
+ color: Some(Color::red()),
+ ..Default::default()
+ },
+ ),
+ (
+ 4..8,
+ HighlightStyle {
+ color: Some(Color::green()),
+ ..Default::default()
+ },
+ ),
+ ];
+ let match_indices = [4, 6, 7, 8];
+ assert_eq!(
+ combine_syntax_and_fuzzy_match_highlights(
+ string,
+ Default::default(),
+ syntax_ranges.into_iter(),
+ &match_indices,
+ ),
+ &[
+ (
+ 0..3,
+ HighlightStyle {
+ color: Some(Color::red()),
+ ..Default::default()
+ },
+ ),
+ (
+ 4..5,
+ HighlightStyle {
+ color: Some(Color::green()),
+ weight: Some(fonts::Weight::BOLD),
+ ..Default::default()
+ },
+ ),
+ (
+ 5..6,
+ HighlightStyle {
+ color: Some(Color::green()),
+ ..Default::default()
+ },
+ ),
+ (
+ 6..8,
+ HighlightStyle {
+ color: Some(Color::green()),
+ weight: Some(fonts::Weight::BOLD),
+ ..Default::default()
+ },
+ ),
+ (
+ 8..9,
+ HighlightStyle {
+ weight: Some(fonts::Weight::BOLD),
+ ..Default::default()
+ },
+ ),
+ ]
+ );
+}
+
+fn empty_range(row: usize, column: usize) -> Range<DisplayPoint> {
+ let point = DisplayPoint::new(row as u32, column as u32);
+ point..point
+}
+
+fn assert_selection_ranges(marked_text: &str, view: &mut Editor, cx: &mut ViewContext<Editor>) {
+ let (text, ranges) = marked_text_ranges(marked_text, true);
+ assert_eq!(view.text(cx), text);
+ assert_eq!(
+ view.selections.ranges(cx),
+ ranges,
+ "Assert selections are {}",
+ marked_text
+ );
+}
@@ -16,6 +16,7 @@ use crate::{
};
use clock::ReplicaId;
use collections::{BTreeMap, HashMap};
+use git::diff::{DiffHunk, DiffHunkStatus};
use gpui::{
color::Color,
elements::*,
@@ -36,7 +37,7 @@ use gpui::{
use json::json;
use language::{Bias, DiagnosticSeverity, OffsetUtf16, Selection};
use project::ProjectPath;
-use settings::Settings;
+use settings::{GitGutter, Settings};
use smallvec::SmallVec;
use std::{
cmp::{self, Ordering},
@@ -45,6 +46,7 @@ use std::{
ops::Range,
sync::Arc,
};
+use theme::DiffStyle;
struct SelectionLayout {
head: DisplayPoint,
@@ -524,30 +526,141 @@ impl EditorElement {
layout: &mut LayoutState,
cx: &mut PaintContext,
) {
- let scroll_top =
- layout.position_map.snapshot.scroll_position().y() * layout.position_map.line_height;
+ struct GutterLayout {
+ line_height: f32,
+ // scroll_position: Vector2F,
+ scroll_top: f32,
+ bounds: RectF,
+ }
+
+ struct DiffLayout<'a> {
+ buffer_row: u32,
+ last_diff: Option<&'a DiffHunk<u32>>,
+ }
+
+ fn diff_quad(
+ hunk: &DiffHunk<u32>,
+ gutter_layout: &GutterLayout,
+ diff_style: &DiffStyle,
+ ) -> Quad {
+ let color = match hunk.status() {
+ DiffHunkStatus::Added => diff_style.inserted,
+ DiffHunkStatus::Modified => diff_style.modified,
+
+ //TODO: This rendering is entirely a horrible hack
+ DiffHunkStatus::Removed => {
+ let row = hunk.buffer_range.start;
+
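+                // Removed hunks have an empty buffer range, so draw the deletion
+                // marker centered on the boundary between the surrounding rows
+                // (hence the half-line offset).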
+ let offset = gutter_layout.line_height / 2.;
+ let start_y =
+ row as f32 * gutter_layout.line_height + offset - gutter_layout.scroll_top;
+ let end_y = start_y + gutter_layout.line_height;
+
+ let width = diff_style.removed_width_em * gutter_layout.line_height;
+ let highlight_origin = gutter_layout.bounds.origin() + vec2f(-width, start_y);
+ let highlight_size = vec2f(width * 2., end_y - start_y);
+ let highlight_bounds = RectF::new(highlight_origin, highlight_size);
+
+ return Quad {
+ bounds: highlight_bounds,
+ background: Some(diff_style.deleted),
+ border: Border::new(0., Color::transparent_black()),
+ corner_radius: 1. * gutter_layout.line_height,
+ };
+ }
+ };
+
+ let start_row = hunk.buffer_range.start;
+ let end_row = hunk.buffer_range.end;
+
+ let start_y = start_row as f32 * gutter_layout.line_height - gutter_layout.scroll_top;
+ let end_y = end_row as f32 * gutter_layout.line_height - gutter_layout.scroll_top;
+
+ let width = diff_style.width_em * gutter_layout.line_height;
+ let highlight_origin = gutter_layout.bounds.origin() + vec2f(-width, start_y);
+ let highlight_size = vec2f(width * 2., end_y - start_y);
+ let highlight_bounds = RectF::new(highlight_origin, highlight_size);
+
+ Quad {
+ bounds: highlight_bounds,
+ background: Some(color),
+ border: Border::new(0., Color::transparent_black()),
+ corner_radius: diff_style.corner_radius * gutter_layout.line_height,
+ }
+ }
+
+ let scroll_position = layout.position_map.snapshot.scroll_position();
+ let gutter_layout = {
+ let line_height = layout.position_map.line_height;
+ GutterLayout {
+ scroll_top: scroll_position.y() * line_height,
+ line_height,
+ bounds,
+ }
+ };
+
+ let mut diff_layout = DiffLayout {
+ buffer_row: scroll_position.y() as u32,
+ last_diff: None,
+ };
+
+ let diff_style = &cx.global::<Settings>().theme.editor.diff.clone();
+ let show_gutter = matches!(
+ &cx.global::<Settings>()
+ .git_overrides
+ .git_gutter
+ .unwrap_or_default(),
+ GitGutter::TrackedFiles
+ );
+
+ // line is `None` when there's a line wrap
for (ix, line) in layout.line_number_layouts.iter().enumerate() {
if let Some(line) = line {
let line_origin = bounds.origin()
+ vec2f(
bounds.width() - line.width() - layout.gutter_padding,
- ix as f32 * layout.position_map.line_height
- - (scroll_top % layout.position_map.line_height),
+ ix as f32 * gutter_layout.line_height
+ - (gutter_layout.scroll_top % gutter_layout.line_height),
);
- line.paint(
- line_origin,
- visible_bounds,
- layout.position_map.line_height,
- cx,
- );
+
+ line.paint(line_origin, visible_bounds, gutter_layout.line_height, cx);
+
+ if show_gutter {
+                    // This laid-out line begins a new buffer line (it isn't a soft wrap),
+                    // so compute its diff state.
+ let new_hunk = get_hunk(diff_layout.buffer_row, &layout.diff_hunks);
+
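+                    // A hunk ends when this row maps to a different hunk (or none)
+                    // than the previous row, and a new hunk starts when this row
+                    // maps to a hunk that isn't already being drawn.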
+ let (is_ending, is_starting) = match (diff_layout.last_diff, new_hunk) {
+ (Some(old_hunk), Some(new_hunk)) if new_hunk == old_hunk => (false, false),
+ (a, b) => (a.is_some(), b.is_some()),
+ };
+
+ if is_ending {
+ let last_hunk = diff_layout.last_diff.take().unwrap();
+ cx.scene
+ .push_quad(diff_quad(last_hunk, &gutter_layout, diff_style));
+ }
+
+ if is_starting {
+ let new_hunk = new_hunk.unwrap();
+ diff_layout.last_diff = Some(new_hunk);
+ };
+
+ diff_layout.buffer_row += 1;
+ }
}
}
+ // If we ran out with a diff hunk still being prepped, paint it now
+ if let Some(last_hunk) = diff_layout.last_diff {
+ cx.scene
+ .push_quad(diff_quad(last_hunk, &gutter_layout, diff_style))
+ }
+
if let Some((row, indicator)) = layout.code_actions_indicator.as_mut() {
let mut x = bounds.width() - layout.gutter_padding;
- let mut y = *row as f32 * layout.position_map.line_height - scroll_top;
+ let mut y = *row as f32 * gutter_layout.line_height - gutter_layout.scroll_top;
x += ((layout.gutter_padding + layout.gutter_margin) - indicator.size().x()) / 2.;
- y += (layout.position_map.line_height - indicator.size().y()) / 2.;
+ y += (gutter_layout.line_height - indicator.size().y()) / 2.;
indicator.paint(bounds.origin() + vec2f(x, y), visible_bounds, cx);
}
}
@@ -1252,6 +1365,27 @@ impl EditorElement {
}
}
+/// Get the hunk that intersects `buffer_line`, including removed hunks anchored at that line.
+/// Returns `None` if no such hunk is found.
+fn get_hunk(buffer_line: u32, hunks: &[DiffHunk<u32>]) -> Option<&DiffHunk<u32>> {
+ for i in 0..hunks.len() {
+        // Indexing is in bounds because `i` ranges over `0..hunks.len()`.
+ let hunk = hunks.get(i).unwrap();
+ if hunk.buffer_range.contains(&(buffer_line as u32)) {
+ return Some(hunk);
+ } else if hunk.status() == DiffHunkStatus::Removed && buffer_line == hunk.buffer_range.start
+ {
+ return Some(hunk);
+ } else if hunk.buffer_range.start > buffer_line as u32 {
+ // If we've passed the buffer_line, just stop
+ return None;
+ }
+ }
+
+ // We reached the end of the array without finding a hunk, just return none.
+ return None;
+}
+
impl Element for EditorElement {
type LayoutState = LayoutState;
type PaintState = ();
@@ -1425,6 +1559,11 @@ impl Element for EditorElement {
let line_number_layouts =
self.layout_line_numbers(start_row..end_row, &active_rows, &snapshot, cx);
+ let diff_hunks = snapshot
+ .buffer_snapshot
+ .git_diff_hunks_in_range(start_row..end_row)
+ .collect();
+
let mut max_visible_line_width = 0.0;
let line_layouts = self.layout_lines(start_row..end_row, &snapshot, cx);
for line in &line_layouts {
@@ -1573,6 +1712,7 @@ impl Element for EditorElement {
highlighted_rows,
highlighted_ranges,
line_number_layouts,
+ diff_hunks,
blocks,
selections,
context_menu,
@@ -1710,6 +1850,7 @@ pub struct LayoutState {
highlighted_ranges: Vec<(Range<DisplayPoint>, Color)>,
selections: Vec<(ReplicaId, Vec<SelectionLayout>)>,
context_menu: Option<(DisplayPoint, ElementBox)>,
+ diff_hunks: Vec<DiffHunk<u32>>,
code_actions_indicator: Option<(u32, ElementBox)>,
hover_popovers: Option<(DisplayPoint, Vec<ElementBox>)>,
}
@@ -404,6 +404,8 @@ impl Item for Editor {
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>> {
+ self.report_event("save editor", cx);
+
let buffer = self.buffer().clone();
let buffers = buffer.read(cx).all_buffers();
let mut timeout = cx.background().timer(FORMAT_TIMEOUT).fuse();
@@ -476,6 +478,17 @@ impl Item for Editor {
})
}
+ fn git_diff_recalc(
+ &mut self,
+ _project: ModelHandle<Project>,
+ cx: &mut ViewContext<Self>,
+ ) -> Task<Result<()>> {
+ self.buffer().update(cx, |multibuffer, cx| {
+ multibuffer.git_diff_recalc(cx);
+ });
+ Task::ready(Ok(()))
+ }
+
fn to_item_events(event: &Self::Event) -> Vec<workspace::ItemEvent> {
let mut result = Vec::new();
match event {
@@ -4,6 +4,7 @@ pub use anchor::{Anchor, AnchorRangeExt};
use anyhow::Result;
use clock::ReplicaId;
use collections::{BTreeMap, Bound, HashMap, HashSet};
+use git::diff::DiffHunk;
use gpui::{AppContext, Entity, ModelContext, ModelHandle, Task};
pub use language::Completion;
use language::{
@@ -90,6 +91,7 @@ struct BufferState {
last_selections_update_count: usize,
last_diagnostics_update_count: usize,
last_file_update_count: usize,
+ last_git_diff_update_count: usize,
excerpts: Vec<ExcerptId>,
_subscriptions: [gpui::Subscription; 2],
}
@@ -101,6 +103,7 @@ pub struct MultiBufferSnapshot {
parse_count: usize,
diagnostics_update_count: usize,
trailing_excerpt_update_count: usize,
+ git_diff_update_count: usize,
edit_count: usize,
is_dirty: bool,
has_conflict: bool,
@@ -202,6 +205,7 @@ impl MultiBuffer {
last_selections_update_count: buffer_state.last_selections_update_count,
last_diagnostics_update_count: buffer_state.last_diagnostics_update_count,
last_file_update_count: buffer_state.last_file_update_count,
+ last_git_diff_update_count: buffer_state.last_git_diff_update_count,
excerpts: buffer_state.excerpts.clone(),
_subscriptions: [
new_cx.observe(&buffer_state.buffer, |_, _, cx| cx.notify()),
@@ -308,6 +312,17 @@ impl MultiBuffer {
self.read(cx).symbols_containing(offset, theme)
}
+ pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) {
+ let buffers = self.buffers.borrow();
+ for buffer_state in buffers.values() {
+ if buffer_state.buffer.read(cx).needs_git_diff_recalc() {
+ buffer_state
+ .buffer
+ .update(cx, |buffer, cx| buffer.git_diff_recalc(cx))
+ }
+ }
+ }
+
pub fn edit<I, S, T>(
&mut self,
edits: I,
@@ -827,6 +842,7 @@ impl MultiBuffer {
last_selections_update_count: buffer_snapshot.selections_update_count(),
last_diagnostics_update_count: buffer_snapshot.diagnostics_update_count(),
last_file_update_count: buffer_snapshot.file_update_count(),
+ last_git_diff_update_count: buffer_snapshot.git_diff_update_count(),
excerpts: Default::default(),
_subscriptions: [
cx.observe(&buffer, |_, _, cx| cx.notify()),
@@ -1212,9 +1228,9 @@ impl MultiBuffer {
&self,
point: T,
cx: &'a AppContext,
- ) -> Option<&'a Arc<Language>> {
+ ) -> Option<Arc<Language>> {
self.point_to_buffer_offset(point, cx)
- .and_then(|(buffer, _)| buffer.read(cx).language())
+ .and_then(|(buffer, offset)| buffer.read(cx).language_at(offset))
}
pub fn files<'a>(&'a self, cx: &'a AppContext) -> SmallVec<[&'a dyn File; 2]> {
@@ -1249,6 +1265,7 @@ impl MultiBuffer {
let mut excerpts_to_edit = Vec::new();
let mut reparsed = false;
let mut diagnostics_updated = false;
+ let mut git_diff_updated = false;
let mut is_dirty = false;
let mut has_conflict = false;
let mut edited = false;
@@ -1260,6 +1277,7 @@ impl MultiBuffer {
let selections_update_count = buffer.selections_update_count();
let diagnostics_update_count = buffer.diagnostics_update_count();
let file_update_count = buffer.file_update_count();
+ let git_diff_update_count = buffer.git_diff_update_count();
let buffer_edited = version.changed_since(&buffer_state.last_version);
let buffer_reparsed = parse_count > buffer_state.last_parse_count;
@@ -1268,17 +1286,21 @@ impl MultiBuffer {
let buffer_diagnostics_updated =
diagnostics_update_count > buffer_state.last_diagnostics_update_count;
let buffer_file_updated = file_update_count > buffer_state.last_file_update_count;
+ let buffer_git_diff_updated =
+ git_diff_update_count > buffer_state.last_git_diff_update_count;
if buffer_edited
|| buffer_reparsed
|| buffer_selections_updated
|| buffer_diagnostics_updated
|| buffer_file_updated
+ || buffer_git_diff_updated
{
buffer_state.last_version = version;
buffer_state.last_parse_count = parse_count;
buffer_state.last_selections_update_count = selections_update_count;
buffer_state.last_diagnostics_update_count = diagnostics_update_count;
buffer_state.last_file_update_count = file_update_count;
+ buffer_state.last_git_diff_update_count = git_diff_update_count;
excerpts_to_edit.extend(
buffer_state
.excerpts
@@ -1290,6 +1312,7 @@ impl MultiBuffer {
edited |= buffer_edited;
reparsed |= buffer_reparsed;
diagnostics_updated |= buffer_diagnostics_updated;
+ git_diff_updated |= buffer_git_diff_updated;
is_dirty |= buffer.is_dirty();
has_conflict |= buffer.has_conflict();
}
@@ -1302,6 +1325,9 @@ impl MultiBuffer {
if diagnostics_updated {
snapshot.diagnostics_update_count += 1;
}
+ if git_diff_updated {
+ snapshot.git_diff_update_count += 1;
+ }
snapshot.is_dirty = is_dirty;
snapshot.has_conflict = has_conflict;
@@ -1940,6 +1966,24 @@ impl MultiBufferSnapshot {
}
}
+ pub fn point_to_buffer_offset<T: ToOffset>(
+ &self,
+ point: T,
+ ) -> Option<(&BufferSnapshot, usize)> {
+ let offset = point.to_offset(&self);
+ let mut cursor = self.excerpts.cursor::<usize>();
+ cursor.seek(&offset, Bias::Right, &());
+ if cursor.item().is_none() {
+ cursor.prev(&());
+ }
+
+ cursor.item().map(|excerpt| {
+ let excerpt_start = excerpt.range.context.start.to_offset(&excerpt.buffer);
+ let buffer_point = excerpt_start + offset - *cursor.start();
+ (&excerpt.buffer, buffer_point)
+ })
+ }
+
pub fn suggested_indents(
&self,
rows: impl IntoIterator<Item = u32>,
@@ -1949,8 +1993,10 @@ impl MultiBufferSnapshot {
let mut rows_for_excerpt = Vec::new();
let mut cursor = self.excerpts.cursor::<Point>();
-
let mut rows = rows.into_iter().peekable();
+ let mut prev_row = u32::MAX;
+ let mut prev_language_indent_size = IndentSize::default();
+
while let Some(row) = rows.next() {
cursor.seek(&Point::new(row, 0), Bias::Right, &());
let excerpt = match cursor.item() {
@@ -1958,7 +2004,17 @@ impl MultiBufferSnapshot {
_ => continue,
};
- let single_indent_size = excerpt.buffer.single_indent_size(cx);
+ // Retrieve the language and indent size once for each disjoint region being indented.
+ let single_indent_size = if row.saturating_sub(1) == prev_row {
+ prev_language_indent_size
+ } else {
+ excerpt
+ .buffer
+ .language_indent_size_at(Point::new(row, 0), cx)
+ };
+ prev_language_indent_size = single_indent_size;
+ prev_row = row;
+
let start_buffer_row = excerpt.range.context.start.to_point(&excerpt.buffer).row;
let start_multibuffer_row = cursor.start().row;
@@ -2479,15 +2535,17 @@ impl MultiBufferSnapshot {
self.diagnostics_update_count
}
+ pub fn git_diff_update_count(&self) -> usize {
+ self.git_diff_update_count
+ }
+
pub fn trailing_excerpt_update_count(&self) -> usize {
self.trailing_excerpt_update_count
}
- pub fn language(&self) -> Option<&Arc<Language>> {
- self.excerpts
- .iter()
- .next()
- .and_then(|excerpt| excerpt.buffer.language())
+ pub fn language_at<'a, T: ToOffset>(&'a self, point: T) -> Option<&'a Arc<Language>> {
+ self.point_to_buffer_offset(point)
+ .and_then(|(buffer, offset)| buffer.language_at(offset))
}
pub fn is_dirty(&self) -> bool {
@@ -2529,6 +2587,15 @@ impl MultiBufferSnapshot {
})
}
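+ /// Returns the git diff hunks intersecting the given row range. For now, hunks are only
+ /// produced when the multibuffer is a singleton wrapping a single excerpted buffer.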
+ pub fn git_diff_hunks_in_range<'a>(
+ &'a self,
+ row_range: Range<u32>,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ self.as_singleton()
+ .into_iter()
+ .flat_map(move |(_, _, buffer)| buffer.git_diff_hunks_in_range(row_range.clone()))
+ }
+
pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
@@ -0,0 +1,28 @@
+[package]
+name = "git"
+version = "0.1.0"
+edition = "2021"
+
+[lib]
+path = "src/git.rs"
+
+[dependencies]
+anyhow = "1.0.38"
+clock = { path = "../clock" }
+git2 = { version = "0.15", default-features = false }
+lazy_static = "1.4.0"
+sum_tree = { path = "../sum_tree" }
+text = { path = "../text" }
+collections = { path = "../collections" }
+util = { path = "../util" }
+log = { version = "0.4.16", features = ["kv_unstable_serde"] }
+smol = "1.2"
+parking_lot = "0.11.1"
+async-trait = "0.1"
+futures = "0.3"
+
+[dev-dependencies]
+unindent = "0.1.7"
+
+[features]
+test-support = []
@@ -0,0 +1,362 @@
+use std::ops::Range;
+
+use sum_tree::SumTree;
+use text::{Anchor, BufferSnapshot, OffsetRangeExt, Point};
+
+pub use git2 as libgit;
+use libgit::{DiffLineType as GitDiffLineType, DiffOptions as GitOptions, Patch as GitPatch};
+
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum DiffHunkStatus {
+ Added,
+ Modified,
+ Removed,
+}
+
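+/// A single hunk of the diff between a buffer and its diff base: the affected range in the
+/// current buffer, plus the byte range of the corresponding text in the base ("head") version.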
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct DiffHunk<T> {
+ pub buffer_range: Range<T>,
+ pub head_byte_range: Range<usize>,
+}
+
+impl DiffHunk<u32> {
+ pub fn status(&self) -> DiffHunkStatus {
+ if self.head_byte_range.is_empty() {
+ DiffHunkStatus::Added
+ } else if self.buffer_range.is_empty() {
+ DiffHunkStatus::Removed
+ } else {
+ DiffHunkStatus::Modified
+ }
+ }
+}
+
+impl sum_tree::Item for DiffHunk<Anchor> {
+ type Summary = DiffHunkSummary;
+
+ fn summary(&self) -> Self::Summary {
+ DiffHunkSummary {
+ buffer_range: self.buffer_range.clone(),
+ }
+ }
+}
+
+#[derive(Debug, Default, Clone)]
+pub struct DiffHunkSummary {
+ buffer_range: Range<Anchor>,
+}
+
+impl sum_tree::Summary for DiffHunkSummary {
+ type Context = text::BufferSnapshot;
+
+ fn add_summary(&mut self, other: &Self, buffer: &Self::Context) {
+ self.buffer_range.start = self
+ .buffer_range
+ .start
+ .min(&other.buffer_range.start, buffer);
+ self.buffer_range.end = self.buffer_range.end.max(&other.buffer_range.end, buffer);
+ }
+}
+
+#[derive(Clone)]
+pub struct BufferDiff {
+ last_buffer_version: Option<clock::Global>,
+ tree: SumTree<DiffHunk<Anchor>>,
+}
+
+impl BufferDiff {
+ pub fn new() -> BufferDiff {
+ BufferDiff {
+ last_buffer_version: None,
+ tree: SumTree::new(),
+ }
+ }
+
+ pub fn hunks_in_range<'a>(
+ &'a self,
+ query_row_range: Range<u32>,
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ let start = buffer.anchor_before(Point::new(query_row_range.start, 0));
+ let end = buffer.anchor_after(Point::new(query_row_range.end, 0));
+
+ let mut cursor = self.tree.filter::<_, DiffHunkSummary>(move |summary| {
+ let before_start = summary.buffer_range.end.cmp(&start, buffer).is_lt();
+ let after_end = summary.buffer_range.start.cmp(&end, buffer).is_gt();
+ !before_start && !after_end
+ });
+
+ std::iter::from_fn(move || {
+ cursor.next(buffer);
+ let hunk = cursor.item()?;
+
+ let range = hunk.buffer_range.to_point(buffer);
+ let end_row = if range.end.column > 0 {
+ range.end.row + 1
+ } else {
+ range.end.row
+ };
+
+ Some(DiffHunk {
+ buffer_range: range.start.row..end_row,
+ head_byte_range: hunk.head_byte_range.clone(),
+ })
+ })
+ }
+
+ pub fn clear(&mut self, buffer: &text::BufferSnapshot) {
+ self.last_buffer_version = Some(buffer.version().clone());
+ self.tree = SumTree::new();
+ }
+
+ pub fn needs_update(&self, buffer: &text::BufferSnapshot) -> bool {
+ match &self.last_buffer_version {
+ Some(last) => buffer.version().changed_since(last),
+ None => true,
+ }
+ }
+
+ pub async fn update(&mut self, diff_base: &str, buffer: &text::BufferSnapshot) {
+ let mut tree = SumTree::new();
+
+ let buffer_text = buffer.as_rope().to_string();
+ let patch = Self::diff(&diff_base, &buffer_text);
+
+ if let Some(patch) = patch {
+ let mut divergence = 0;
+ for hunk_index in 0..patch.num_hunks() {
+ let hunk = Self::process_patch_hunk(&patch, hunk_index, buffer, &mut divergence);
+ tree.push(hunk, buffer);
+ }
+ }
+
+ self.tree = tree;
+ self.last_buffer_version = Some(buffer.version().clone());
+ }
+
+ #[cfg(test)]
+ fn hunks<'a>(&'a self, text: &'a BufferSnapshot) -> impl 'a + Iterator<Item = DiffHunk<u32>> {
+ self.hunks_in_range(0..u32::MAX, text)
+ }
+
+ fn diff<'a>(head: &'a str, current: &'a str) -> Option<GitPatch<'a>> {
+ let mut options = GitOptions::default();
+ options.context_lines(0);
+
+ let patch = GitPatch::from_buffers(
+ head.as_bytes(),
+ None,
+ current.as_bytes(),
+ None,
+ Some(&mut options),
+ );
+
+ match patch {
+ Ok(patch) => Some(patch),
+
+ Err(err) => {
+ log::error!("`GitPatch::from_buffers` failed: {}", err);
+ None
+ }
+ }
+ }
+
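+ // Converts one libgit2 patch hunk into a `DiffHunk` anchored in the buffer.
+ // `buffer_row_divergence` tracks how far buffer rows have drifted from diff-base rows so
+ // far, which lets deletion-only hunks be mapped into buffer coordinates.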
+ fn process_patch_hunk<'a>(
+ patch: &GitPatch<'a>,
+ hunk_index: usize,
+ buffer: &text::BufferSnapshot,
+ buffer_row_divergence: &mut i64,
+ ) -> DiffHunk<Anchor> {
+ let line_item_count = patch.num_lines_in_hunk(hunk_index).unwrap();
+ assert!(line_item_count > 0);
+
+ let mut first_deletion_buffer_row: Option<u32> = None;
+ let mut buffer_row_range: Option<Range<u32>> = None;
+ let mut head_byte_range: Option<Range<usize>> = None;
+
+ for line_index in 0..line_item_count {
+ let line = patch.line_in_hunk(hunk_index, line_index).unwrap();
+ let kind = line.origin_value();
+ let content_offset = line.content_offset() as isize;
+ let content_len = line.content().len() as isize;
+
+ if kind == GitDiffLineType::Addition {
+ *buffer_row_divergence += 1;
+ let row = line.new_lineno().unwrap().saturating_sub(1);
+
+ match &mut buffer_row_range {
+ Some(buffer_row_range) => buffer_row_range.end = row + 1,
+ None => buffer_row_range = Some(row..row + 1),
+ }
+ }
+
+ if kind == GitDiffLineType::Deletion {
+ *buffer_row_divergence -= 1;
+ let end = content_offset + content_len;
+
+ match &mut head_byte_range {
+ Some(head_byte_range) => head_byte_range.end = end as usize,
+ None => head_byte_range = Some(content_offset as usize..end as usize),
+ }
+
+ if first_deletion_buffer_row.is_none() {
+ let old_row = line.old_lineno().unwrap().saturating_sub(1);
+ let row = old_row as i64 + *buffer_row_divergence;
+ first_deletion_buffer_row = Some(row as u32);
+ }
+ }
+ }
+
+ // A hunk without added lines is a pure deletion; give it an empty buffer range anchored at the first deleted row.
+ let buffer_row_range = buffer_row_range.unwrap_or_else(|| {
+ // A hunk with no additions must contain at least one deletion, otherwise it would not exist.
+ let row = first_deletion_buffer_row.unwrap();
+ row..row
+ });
+
+ // A hunk without deleted lines is a pure addition; it has no corresponding bytes in the diff base.
+ let head_byte_range = head_byte_range.unwrap_or(0..0);
+
+ let start = Point::new(buffer_row_range.start, 0);
+ let end = Point::new(buffer_row_range.end, 0);
+ let buffer_range = buffer.anchor_before(start)..buffer.anchor_before(end);
+ DiffHunk {
+ buffer_range,
+ head_byte_range,
+ }
+ }
+}
+
+/// Asserts that `diff_hunks` matches `expected_hunks`, where each expected hunk is a
+/// `(buffer row range, old text, new text)` tuple.
+#[cfg(any(test, feature = "test-support"))]
+#[track_caller]
+pub fn assert_hunks<Iter>(
+ diff_hunks: Iter,
+ buffer: &BufferSnapshot,
+ diff_base: &str,
+ expected_hunks: &[(Range<u32>, &str, &str)],
+) where
+ Iter: Iterator<Item = DiffHunk<u32>>,
+{
+ let actual_hunks = diff_hunks
+ .map(|hunk| {
+ (
+ hunk.buffer_range.clone(),
+ &diff_base[hunk.head_byte_range],
+ buffer
+ .text_for_range(
+ Point::new(hunk.buffer_range.start, 0)
+ ..Point::new(hunk.buffer_range.end, 0),
+ )
+ .collect::<String>(),
+ )
+ })
+ .collect::<Vec<_>>();
+
+ let expected_hunks: Vec<_> = expected_hunks
+ .iter()
+ .map(|(r, s, h)| (r.clone(), *s, h.to_string()))
+ .collect();
+
+ assert_eq!(actual_hunks, expected_hunks);
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use text::Buffer;
+ use unindent::Unindent as _;
+
+ #[test]
+ fn test_buffer_diff_simple() {
+ let diff_base = "
+ one
+ two
+ three
+ "
+ .unindent();
+
+ let buffer_text = "
+ one
+ HELLO
+ three
+ "
+ .unindent();
+
+ let mut buffer = Buffer::new(0, 0, buffer_text);
+ let mut diff = BufferDiff::new();
+ smol::block_on(diff.update(&diff_base, &buffer));
+ assert_hunks(
+ diff.hunks(&buffer),
+ &buffer,
+ &diff_base,
+ &[(1..2, "two\n", "HELLO\n")],
+ );
+
+ buffer.edit([(0..0, "point five\n")]);
+ smol::block_on(diff.update(&diff_base, &buffer));
+ assert_hunks(
+ diff.hunks(&buffer),
+ &buffer,
+ &diff_base,
+ &[(0..1, "", "point five\n"), (2..3, "two\n", "HELLO\n")],
+ );
+
+ diff.clear(&buffer);
+ assert_hunks(diff.hunks(&buffer), &buffer, &diff_base, &[]);
+ }
+
+ #[test]
+ fn test_buffer_diff_range() {
+ let diff_base = "
+ one
+ two
+ three
+ four
+ five
+ six
+ seven
+ eight
+ nine
+ ten
+ "
+ .unindent();
+
+ let buffer_text = "
+ A
+ one
+ B
+ two
+ C
+ three
+ HELLO
+ four
+ five
+ SIXTEEN
+ seven
+ eight
+ WORLD
+ nine
+
+ ten
+
+ "
+ .unindent();
+
+ let buffer = Buffer::new(0, 0, buffer_text);
+ let mut diff = BufferDiff::new();
+ smol::block_on(diff.update(&diff_base, &buffer));
+ assert_eq!(diff.hunks(&buffer).count(), 8);
+
+ assert_hunks(
+ diff.hunks_in_range(7..12, &buffer),
+ &buffer,
+ &diff_base,
+ &[
+ (6..7, "", "HELLO\n"),
+ (9..10, "six\n", "SIXTEEN\n"),
+ (12..13, "", "WORLD\n"),
+ ],
+ );
+ }
+}
@@ -0,0 +1,12 @@
+use std::ffi::OsStr;
+
+pub use git2 as libgit;
+pub use lazy_static::lazy_static;
+
+pub mod diff;
+pub mod repository;
+
+lazy_static! {
+ pub static ref DOT_GIT: &'static OsStr = OsStr::new(".git");
+ pub static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
+}
@@ -0,0 +1,71 @@
+use anyhow::Result;
+use collections::HashMap;
+use parking_lot::Mutex;
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
+
+pub use git2::Repository as LibGitRepository;
+
+#[async_trait::async_trait]
+pub trait GitRepository: Send {
+ fn reload_index(&self);
+
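+ /// Returns the content of the file at `relative_file_path` as recorded in the git index,
+ /// if the file is tracked there.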
+ fn load_index_text(&self, relative_file_path: &Path) -> Option<String>;
+}
+
+#[async_trait::async_trait]
+impl GitRepository for LibGitRepository {
+ fn reload_index(&self) {
+ if let Ok(mut index) = self.index() {
+ _ = index.read(false);
+ }
+ }
+
+ fn load_index_text(&self, relative_file_path: &Path) -> Option<String> {
+ fn logic(repo: &LibGitRepository, relative_file_path: &Path) -> Result<Option<String>> {
+ const STAGE_NORMAL: i32 = 0;
+ let index = repo.index()?;
+ let oid = match index.get_path(relative_file_path, STAGE_NORMAL) {
+ Some(entry) => entry.id,
+ None => return Ok(None),
+ };
+
+ let content = repo.find_blob(oid)?.content().to_owned();
+ Ok(Some(String::from_utf8(content)?))
+ }
+
+ match logic(&self, relative_file_path) {
+ Ok(value) => return value,
+ Err(err) => log::error!("Error loading head text: {:?}", err),
+ }
+ None
+ }
+}
+
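+/// An in-memory stand-in for a git repository, used by the fake filesystem in tests.
+/// Its "index" is just a map from path to file contents.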
+#[derive(Debug, Clone, Default)]
+pub struct FakeGitRepository {
+ state: Arc<Mutex<FakeGitRepositoryState>>,
+}
+
+#[derive(Debug, Clone, Default)]
+pub struct FakeGitRepositoryState {
+ pub index_contents: HashMap<PathBuf, String>,
+}
+
+impl FakeGitRepository {
+ pub fn open(state: Arc<Mutex<FakeGitRepositoryState>>) -> Arc<Mutex<dyn GitRepository>> {
+ Arc::new(Mutex::new(FakeGitRepository { state }))
+ }
+}
+
+#[async_trait::async_trait]
+impl GitRepository for FakeGitRepository {
+ fn reload_index(&self) {}
+
+ fn load_index_text(&self, path: &Path) -> Option<String> {
+ let state = self.state.lock();
+ state.index_contents.get(path).cloned()
+ }
+}
@@ -325,7 +325,12 @@ impl Deterministic {
let mut state = self.state.lock();
let wakeup_at = state.now + duration;
let id = util::post_inc(&mut state.next_timer_id);
- state.pending_timers.push((id, wakeup_at, tx));
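+ // Keep `pending_timers` sorted by wakeup time so the executor can fire them in order.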
+ match state
+ .pending_timers
+ .binary_search_by_key(&wakeup_at, |e| e.1)
+ {
+ Ok(ix) | Err(ix) => state.pending_timers.insert(ix, (id, wakeup_at, tx)),
+ }
let state = self.state.clone();
Timer::Deterministic(DeterministicTimer { rx, id, state })
}
@@ -71,6 +71,8 @@ pub trait Platform: Send + Sync {
fn path_for_auxiliary_executable(&self, name: &str) -> Result<PathBuf>;
fn app_path(&self) -> Result<PathBuf>;
fn app_version(&self) -> Result<AppVersion>;
+ fn os_name(&self) -> &'static str;
+ fn os_version(&self) -> Result<AppVersion>;
}
pub(crate) trait ForegroundPlatform {
@@ -14,8 +14,10 @@ use core_graphics::{
event::{CGEvent, CGEventFlags, CGKeyCode},
event_source::{CGEventSource, CGEventSourceStateID},
};
+use ctor::ctor;
+use foreign_types::ForeignType;
use objc::{class, msg_send, sel, sel_impl};
-use std::{borrow::Cow, ffi::CStr, os::raw::c_char};
+use std::{borrow::Cow, ffi::CStr, mem, os::raw::c_char, ptr};
const BACKSPACE_KEY: u16 = 0x7f;
const SPACE_KEY: u16 = b' ' as u16;
@@ -25,6 +27,15 @@ const ESCAPE_KEY: u16 = 0x1b;
const TAB_KEY: u16 = 0x09;
const SHIFT_TAB_KEY: u16 = 0x19;
+static mut EVENT_SOURCE: core_graphics::sys::CGEventSourceRef = ptr::null_mut();
+
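+// Build a single CGEventSource at startup and reuse it across calls to
+// `chars_for_modified_key`, rather than constructing a new source for every key event.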
+#[ctor]
+unsafe fn build_event_source() {
+ let source = CGEventSource::new(CGEventSourceStateID::Private).unwrap();
+ EVENT_SOURCE = source.as_ptr();
+ mem::forget(source);
+}
+
pub fn key_to_native(key: &str) -> Cow<str> {
use cocoa::appkit::*;
let code = match key {
@@ -228,7 +239,8 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
let mut chars_ignoring_modifiers =
CStr::from_ptr(native_event.charactersIgnoringModifiers().UTF8String() as *mut c_char)
.to_str()
- .unwrap();
+ .unwrap()
+ .to_string();
let first_char = chars_ignoring_modifiers.chars().next().map(|ch| ch as u16);
let modifiers = native_event.modifierFlags();
@@ -243,31 +255,31 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
#[allow(non_upper_case_globals)]
let key = match first_char {
- Some(SPACE_KEY) => "space",
- Some(BACKSPACE_KEY) => "backspace",
- Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter",
- Some(ESCAPE_KEY) => "escape",
- Some(TAB_KEY) => "tab",
- Some(SHIFT_TAB_KEY) => "tab",
- Some(NSUpArrowFunctionKey) => "up",
- Some(NSDownArrowFunctionKey) => "down",
- Some(NSLeftArrowFunctionKey) => "left",
- Some(NSRightArrowFunctionKey) => "right",
- Some(NSPageUpFunctionKey) => "pageup",
- Some(NSPageDownFunctionKey) => "pagedown",
- Some(NSDeleteFunctionKey) => "delete",
- Some(NSF1FunctionKey) => "f1",
- Some(NSF2FunctionKey) => "f2",
- Some(NSF3FunctionKey) => "f3",
- Some(NSF4FunctionKey) => "f4",
- Some(NSF5FunctionKey) => "f5",
- Some(NSF6FunctionKey) => "f6",
- Some(NSF7FunctionKey) => "f7",
- Some(NSF8FunctionKey) => "f8",
- Some(NSF9FunctionKey) => "f9",
- Some(NSF10FunctionKey) => "f10",
- Some(NSF11FunctionKey) => "f11",
- Some(NSF12FunctionKey) => "f12",
+ Some(SPACE_KEY) => "space".to_string(),
+ Some(BACKSPACE_KEY) => "backspace".to_string(),
+ Some(ENTER_KEY) | Some(NUMPAD_ENTER_KEY) => "enter".to_string(),
+ Some(ESCAPE_KEY) => "escape".to_string(),
+ Some(TAB_KEY) => "tab".to_string(),
+ Some(SHIFT_TAB_KEY) => "tab".to_string(),
+ Some(NSUpArrowFunctionKey) => "up".to_string(),
+ Some(NSDownArrowFunctionKey) => "down".to_string(),
+ Some(NSLeftArrowFunctionKey) => "left".to_string(),
+ Some(NSRightArrowFunctionKey) => "right".to_string(),
+ Some(NSPageUpFunctionKey) => "pageup".to_string(),
+ Some(NSPageDownFunctionKey) => "pagedown".to_string(),
+ Some(NSDeleteFunctionKey) => "delete".to_string(),
+ Some(NSF1FunctionKey) => "f1".to_string(),
+ Some(NSF2FunctionKey) => "f2".to_string(),
+ Some(NSF3FunctionKey) => "f3".to_string(),
+ Some(NSF4FunctionKey) => "f4".to_string(),
+ Some(NSF5FunctionKey) => "f5".to_string(),
+ Some(NSF6FunctionKey) => "f6".to_string(),
+ Some(NSF7FunctionKey) => "f7".to_string(),
+ Some(NSF8FunctionKey) => "f8".to_string(),
+ Some(NSF9FunctionKey) => "f9".to_string(),
+ Some(NSF10FunctionKey) => "f10".to_string(),
+ Some(NSF11FunctionKey) => "f11".to_string(),
+ Some(NSF12FunctionKey) => "f12".to_string(),
_ => {
let mut chars_ignoring_modifiers_and_shift =
chars_for_modified_key(native_event.keyCode(), false, false);
@@ -303,21 +315,19 @@ unsafe fn parse_keystroke(native_event: id) -> Keystroke {
shift,
cmd,
function,
- key: key.into(),
+ key,
}
}
-fn chars_for_modified_key<'a>(code: CGKeyCode, cmd: bool, shift: bool) -> &'a str {
+fn chars_for_modified_key(code: CGKeyCode, cmd: bool, shift: bool) -> String {
// Ideally, we would use `[NSEvent charactersByApplyingModifiers]` but that
// always returns an empty string with certain keyboards, e.g. Japanese. Synthesizing
// an event with the given flags instead lets us access `characters`, which always
// returns a valid string.
- let event = CGEvent::new_keyboard_event(
- CGEventSource::new(CGEventSourceStateID::Private).unwrap(),
- code,
- true,
- )
- .unwrap();
+ let source = unsafe { core_graphics::event_source::CGEventSource::from_ptr(EVENT_SOURCE) };
+ let event = CGEvent::new_keyboard_event(source.clone(), code, true).unwrap();
+ mem::forget(source);
+
let mut flags = CGEventFlags::empty();
if cmd {
flags |= CGEventFlags::CGEventFlagCommand;
@@ -327,10 +337,11 @@ fn chars_for_modified_key<'a>(code: CGKeyCode, cmd: bool, shift: bool) -> &'a st
}
event.set_flags(flags);
- let event: id = unsafe { msg_send![class!(NSEvent), eventWithCGEvent: event] };
unsafe {
+ let event: id = msg_send![class!(NSEvent), eventWithCGEvent: &*event];
CStr::from_ptr(event.characters().UTF8String())
.to_str()
.unwrap()
+ .to_string()
}
}
@@ -6,7 +6,7 @@ use crate::{
geometry::vector::{vec2f, Vector2F},
keymap,
platform::{self, CursorStyle},
- Action, ClipboardItem, Event, Menu, MenuItem,
+ Action, AppVersion, ClipboardItem, Event, Menu, MenuItem,
};
use anyhow::{anyhow, Result};
use block::ConcreteBlock;
@@ -18,7 +18,8 @@ use cocoa::{
},
base::{id, nil, selector, YES},
foundation::{
- NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSString, NSUInteger, NSURL,
+ NSArray, NSAutoreleasePool, NSBundle, NSData, NSInteger, NSProcessInfo, NSString,
+ NSUInteger, NSURL,
},
};
use core_foundation::{
@@ -758,6 +759,22 @@ impl platform::Platform for MacPlatform {
}
}
}
+
+ fn os_name(&self) -> &'static str {
+ "macOS"
+ }
+
+ fn os_version(&self) -> Result<crate::AppVersion> {
+ unsafe {
+ let process_info = NSProcessInfo::processInfo(nil);
+ let version = process_info.operatingSystemVersion();
+ Ok(AppVersion {
+ major: version.majorVersion as usize,
+ minor: version.minorVersion as usize,
+ patch: version.patchVersion as usize,
+ })
+ }
+ }
}
unsafe fn path_from_objc(path: id) -> PathBuf {
@@ -200,6 +200,18 @@ impl super::Platform for Platform {
patch: 0,
})
}
+
+ fn os_name(&self) -> &'static str {
+ "test"
+ }
+
+ fn os_version(&self) -> Result<AppVersion> {
+ Ok(AppVersion {
+ major: 1,
+ minor: 0,
+ patch: 0,
+ })
+ }
}
impl Window {
@@ -25,6 +25,7 @@ client = { path = "../client" }
clock = { path = "../clock" }
collections = { path = "../collections" }
fuzzy = { path = "../fuzzy" }
+git = { path = "../git" }
gpui = { path = "../gpui" }
lsp = { path = "../lsp" }
rpc = { path = "../rpc" }
@@ -63,6 +64,8 @@ util = { path = "../util", features = ["test-support"] }
ctor = "0.1"
env_logger = "0.9"
rand = "0.8.3"
+tree-sitter-html = "*"
+tree-sitter-javascript = "*"
tree-sitter-json = "*"
tree-sitter-rust = "*"
tree-sitter-python = "*"
@@ -45,8 +45,16 @@ pub use {tree_sitter_rust, tree_sitter_typescript};
pub use lsp::DiagnosticSeverity;
+struct GitDiffStatus {
+ diff: git::diff::BufferDiff,
+ update_in_progress: bool,
+ update_requested: bool,
+}
+
pub struct Buffer {
text: TextBuffer,
+ diff_base: Option<String>,
+ git_diff_status: GitDiffStatus,
file: Option<Arc<dyn File>>,
saved_version: clock::Global,
saved_version_fingerprint: String,
@@ -66,6 +74,7 @@ pub struct Buffer {
diagnostics_update_count: usize,
diagnostics_timestamp: clock::Lamport,
file_update_count: usize,
+ git_diff_update_count: usize,
completion_triggers: Vec<String>,
completion_triggers_timestamp: clock::Lamport,
deferred_ops: OperationQueue<Operation>,
@@ -73,25 +82,28 @@ pub struct Buffer {
pub struct BufferSnapshot {
text: text::BufferSnapshot,
+ pub git_diff: git::diff::BufferDiff,
pub(crate) syntax: SyntaxSnapshot,
file: Option<Arc<dyn File>>,
diagnostics: DiagnosticSet,
diagnostics_update_count: usize,
file_update_count: usize,
+ git_diff_update_count: usize,
remote_selections: TreeMap<ReplicaId, SelectionSet>,
selections_update_count: usize,
language: Option<Arc<Language>>,
parse_count: usize,
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub struct IndentSize {
pub len: u32,
pub kind: IndentKind,
}
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Default)]
pub enum IndentKind {
+ #[default]
Space,
Tab,
}
@@ -236,7 +248,6 @@ pub enum AutoindentMode {
struct AutoindentRequest {
before_edit: BufferSnapshot,
entries: Vec<AutoindentRequestEntry>,
- indent_size: IndentSize,
is_block_mode: bool,
}
@@ -249,6 +260,7 @@ struct AutoindentRequestEntry {
/// only be adjusted if the suggested indentation level has *changed*
/// since the edit was made.
first_line_is_new: bool,
+ indent_size: IndentSize,
original_indent_column: Option<u32>,
}
@@ -288,10 +300,8 @@ pub struct Chunk<'a> {
pub struct Diff {
base_version: clock::Global,
- new_text: Arc<str>,
- changes: Vec<(ChangeTag, usize)>,
line_ending: LineEnding,
- start_offset: usize,
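+     // The diff expressed as edits against the text at `base_version`: each entry is a byte
+     // range to replace and its replacement text.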
+ edits: Vec<(Range<usize>, Arc<str>)>,
}
#[derive(Clone, Copy)]
@@ -328,17 +338,20 @@ impl Buffer {
Self::build(
TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
None,
+ None,
)
}
pub fn from_file<T: Into<String>>(
replica_id: ReplicaId,
base_text: T,
+ diff_base: Option<T>,
file: Arc<dyn File>,
cx: &mut ModelContext<Self>,
) -> Self {
Self::build(
TextBuffer::new(replica_id, cx.model_id() as u64, base_text.into()),
+ diff_base.map(|h| h.into().into_boxed_str().into()),
Some(file),
)
}
@@ -349,7 +362,11 @@ impl Buffer {
file: Option<Arc<dyn File>>,
) -> Result<Self> {
let buffer = TextBuffer::new(replica_id, message.id, message.base_text);
- let mut this = Self::build(buffer, file);
+ let mut this = Self::build(
+ buffer,
+ message.diff_base.map(|text| text.into_boxed_str().into()),
+ file,
+ );
this.text.set_line_ending(proto::deserialize_line_ending(
proto::LineEnding::from_i32(message.line_ending)
.ok_or_else(|| anyhow!("missing line_ending"))?,
@@ -362,6 +379,7 @@ impl Buffer {
id: self.remote_id(),
file: self.file.as_ref().map(|f| f.to_proto()),
base_text: self.base_text().to_string(),
+ diff_base: self.diff_base.as_ref().map(|h| h.to_string()),
line_ending: proto::serialize_line_ending(self.line_ending()) as i32,
}
}
@@ -404,7 +422,7 @@ impl Buffer {
self
}
- fn build(buffer: TextBuffer, file: Option<Arc<dyn File>>) -> Self {
+ fn build(buffer: TextBuffer, diff_base: Option<String>, file: Option<Arc<dyn File>>) -> Self {
let saved_mtime = if let Some(file) = file.as_ref() {
file.mtime()
} else {
@@ -418,6 +436,12 @@ impl Buffer {
transaction_depth: 0,
was_dirty_before_starting_transaction: None,
text: buffer,
+ diff_base,
+ git_diff_status: GitDiffStatus {
+ diff: git::diff::BufferDiff::new(),
+ update_in_progress: false,
+ update_requested: false,
+ },
file,
syntax_map: Mutex::new(SyntaxMap::new()),
parsing_in_background: false,
@@ -432,6 +456,7 @@ impl Buffer {
diagnostics_update_count: 0,
diagnostics_timestamp: Default::default(),
file_update_count: 0,
+ git_diff_update_count: 0,
completion_triggers: Default::default(),
completion_triggers_timestamp: Default::default(),
deferred_ops: OperationQueue::new(),
@@ -447,11 +472,13 @@ impl Buffer {
BufferSnapshot {
text,
syntax,
+ git_diff: self.git_diff_status.diff.clone(),
file: self.file.clone(),
remote_selections: self.remote_selections.clone(),
diagnostics: self.diagnostics.clone(),
diagnostics_update_count: self.diagnostics_update_count,
file_update_count: self.file_update_count,
+ git_diff_update_count: self.git_diff_update_count,
language: self.language.clone(),
parse_count: self.parse_count,
selections_update_count: self.selections_update_count,
@@ -584,6 +611,7 @@ impl Buffer {
cx,
);
}
+ self.git_diff_recalc(cx);
cx.emit(Event::Reloaded);
cx.notify();
}
@@ -633,6 +661,60 @@ impl Buffer {
task
}
+ #[cfg(any(test, feature = "test-support"))]
+ pub fn diff_base(&self) -> Option<&str> {
+ self.diff_base.as_deref()
+ }
+
+ pub fn update_diff_base(&mut self, diff_base: Option<String>, cx: &mut ModelContext<Self>) {
+ self.diff_base = diff_base;
+ self.git_diff_recalc(cx);
+ }
+
+ pub fn needs_git_diff_recalc(&self) -> bool {
+ self.git_diff_status.diff.needs_update(self)
+ }
+
+ pub fn git_diff_recalc(&mut self, cx: &mut ModelContext<Self>) {
+ if self.git_diff_status.update_in_progress {
+ self.git_diff_status.update_requested = true;
+ return;
+ }
+
+ if let Some(diff_base) = &self.diff_base {
+ let snapshot = self.snapshot();
+ let diff_base = diff_base.clone();
+
+ // Mark the update as in progress so overlapping requests are coalesced via `update_requested`.
+ self.git_diff_status.update_in_progress = true;
+ let mut diff = self.git_diff_status.diff.clone();
+ let diff = cx.background().spawn(async move {
+ diff.update(&diff_base, &snapshot).await;
+ diff
+ });
+
+ cx.spawn_weak(|this, mut cx| async move {
+ let buffer_diff = diff.await;
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ this.git_diff_status.diff = buffer_diff;
+ this.git_diff_update_count += 1;
+ cx.notify();
+
+ this.git_diff_status.update_in_progress = false;
+ if this.git_diff_status.update_requested {
+ this.git_diff_status.update_requested = false;
+ this.git_diff_recalc(cx);
+ }
+ })
+ }
+ })
+ .detach()
+ } else {
+ let snapshot = self.snapshot();
+ self.git_diff_status.diff.clear(&snapshot);
+ self.git_diff_update_count += 1;
+ cx.notify();
+ }
+ }
+
pub fn close(&mut self, cx: &mut ModelContext<Self>) {
cx.emit(Event::Closed);
}
@@ -641,6 +723,16 @@ impl Buffer {
self.language.as_ref()
}
+ pub fn language_at<D: ToOffset>(&self, position: D) -> Option<Arc<Language>> {
+ let offset = position.to_offset(self);
+ self.syntax_map
+ .lock()
+ .layers_for_range(offset..offset, &self.text)
+ .last()
+ .map(|info| info.language.clone())
+ .or_else(|| self.language.clone())
+ }
+
pub fn parse_count(&self) -> usize {
self.parse_count
}
@@ -657,6 +749,10 @@ impl Buffer {
self.file_update_count
}
+ pub fn git_diff_update_count(&self) -> usize {
+ self.git_diff_update_count
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn is_parsing(&self) -> bool {
self.parsing_in_background
@@ -784,10 +880,13 @@ impl Buffer {
// buffer before this batch of edits.
let mut row_ranges = Vec::new();
let mut old_to_new_rows = BTreeMap::new();
+ let mut language_indent_sizes_by_new_row = Vec::new();
for entry in &request.entries {
let position = entry.range.start;
let new_row = position.to_point(&snapshot).row;
let new_end_row = entry.range.end.to_point(&snapshot).row + 1;
+ language_indent_sizes_by_new_row.push((new_row, entry.indent_size));
+
if !entry.first_line_is_new {
let old_row = position.to_point(&request.before_edit).row;
old_to_new_rows.insert(old_row, new_row);
@@ -801,6 +900,8 @@ impl Buffer {
let mut old_suggestions = BTreeMap::<u32, IndentSize>::default();
let old_edited_ranges =
contiguous_ranges(old_to_new_rows.keys().copied(), max_rows_between_yields);
+ let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
+ let mut language_indent_size = IndentSize::default();
for old_edited_range in old_edited_ranges {
let suggestions = request
.before_edit
@@ -809,6 +910,17 @@ impl Buffer {
.flatten();
for (old_row, suggestion) in old_edited_range.zip(suggestions) {
if let Some(suggestion) = suggestion {
+ let new_row = *old_to_new_rows.get(&old_row).unwrap();
+
+ // Find the indent size based on the language for this row.
+ while let Some((row, size)) = language_indent_sizes.peek() {
+ if *row > new_row {
+ break;
+ }
+ language_indent_size = *size;
+ language_indent_sizes.next();
+ }
+
let suggested_indent = old_to_new_rows
.get(&suggestion.basis_row)
.and_then(|from_row| old_suggestions.get(from_row).copied())
@@ -817,9 +929,8 @@ impl Buffer {
.before_edit
.indent_size_for_line(suggestion.basis_row)
})
- .with_delta(suggestion.delta, request.indent_size);
- old_suggestions
- .insert(*old_to_new_rows.get(&old_row).unwrap(), suggested_indent);
+ .with_delta(suggestion.delta, language_indent_size);
+ old_suggestions.insert(new_row, suggested_indent);
}
}
yield_now().await;
@@ -840,6 +951,8 @@ impl Buffer {
// Compute new suggestions for each line, but only include them in the result
// if they differ from the old suggestion for that line.
+ let mut language_indent_sizes = language_indent_sizes_by_new_row.iter().peekable();
+ let mut language_indent_size = IndentSize::default();
for new_edited_row_range in new_edited_row_ranges {
let suggestions = snapshot
.suggest_autoindents(new_edited_row_range.clone())
@@ -847,13 +960,22 @@ impl Buffer {
.flatten();
for (new_row, suggestion) in new_edited_row_range.zip(suggestions) {
if let Some(suggestion) = suggestion {
+ // Find the indent size based on the language for this row.
+ while let Some((row, size)) = language_indent_sizes.peek() {
+ if *row > new_row {
+ break;
+ }
+ language_indent_size = *size;
+ language_indent_sizes.next();
+ }
+
let suggested_indent = indent_sizes
.get(&suggestion.basis_row)
.copied()
.unwrap_or_else(|| {
snapshot.indent_size_for_line(suggestion.basis_row)
})
- .with_delta(suggestion.delta, request.indent_size);
+ .with_delta(suggestion.delta, language_indent_size);
if old_suggestions
.get(&new_row)
.map_or(true, |old_indentation| {
@@ -965,16 +1087,30 @@ impl Buffer {
let old_text = old_text.to_string();
let line_ending = LineEnding::detect(&new_text);
LineEnding::normalize(&mut new_text);
- let changes = TextDiff::from_chars(old_text.as_str(), new_text.as_str())
- .iter_all_changes()
- .map(|c| (c.tag(), c.value().len()))
- .collect::<Vec<_>>();
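+ // Convert the character-level diff into buffer edits: deletions become empty
+ // replacements, and insertions become zero-width edits at the current old-text offset.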
+ let diff = TextDiff::from_chars(old_text.as_str(), new_text.as_str());
+ let mut edits = Vec::new();
+ let mut offset = 0;
+ let empty: Arc<str> = "".into();
+ for change in diff.iter_all_changes() {
+ let value = change.value();
+ let end_offset = offset + value.len();
+ match change.tag() {
+ ChangeTag::Equal => {
+ offset = end_offset;
+ }
+ ChangeTag::Delete => {
+ edits.push((offset..end_offset, empty.clone()));
+ offset = end_offset;
+ }
+ ChangeTag::Insert => {
+ edits.push((offset..offset, value.into()));
+ }
+ }
+ }
Diff {
base_version,
- new_text: new_text.into(),
- changes,
line_ending,
- start_offset: 0,
+ edits,
}
})
}
@@ -984,28 +1120,7 @@ impl Buffer {
self.finalize_last_transaction();
self.start_transaction();
self.text.set_line_ending(diff.line_ending);
- let mut offset = diff.start_offset;
- for (tag, len) in diff.changes {
- let range = offset..(offset + len);
- match tag {
- ChangeTag::Equal => offset += len,
- ChangeTag::Delete => {
- self.edit([(range, "")], None, cx);
- }
- ChangeTag::Insert => {
- self.edit(
- [(
- offset..offset,
- &diff.new_text[range.start - diff.start_offset
- ..range.end - diff.start_offset],
- )],
- None,
- cx,
- );
- offset += len;
- }
- }
- }
+ self.edit(diff.edits, None, cx);
if self.end_transaction(cx).is_some() {
self.finalize_last_transaction()
} else {
@@ -1184,7 +1299,6 @@ impl Buffer {
let edit_id = edit_operation.local_timestamp();
if let Some((before_edit, mode)) = autoindent_request {
- let indent_size = before_edit.single_indent_size(cx);
let (start_columns, is_block_mode) = match mode {
AutoindentMode::Block {
original_indent_columns: start_columns,
@@ -1233,6 +1347,7 @@ impl Buffer {
AutoindentRequestEntry {
first_line_is_new,
original_indent_column: start_column,
+ indent_size: before_edit.language_indent_size_at(range.start, cx),
range: self.anchor_before(new_start + range_of_insertion_to_indent.start)
..self.anchor_after(new_start + range_of_insertion_to_indent.end),
}
@@ -1242,7 +1357,6 @@ impl Buffer {
self.autoindent_requests.push(Arc::new(AutoindentRequest {
before_edit,
entries,
- indent_size,
is_block_mode,
}));
}
@@ -1560,8 +1674,8 @@ impl BufferSnapshot {
indent_size_for_line(self, row)
}
- pub fn single_indent_size(&self, cx: &AppContext) -> IndentSize {
- let language_name = self.language().map(|language| language.name());
+ pub fn language_indent_size_at<T: ToOffset>(&self, position: T, cx: &AppContext) -> IndentSize {
+ let language_name = self.language_at(position).map(|language| language.name());
let settings = cx.global::<Settings>();
if settings.hard_tabs(language_name.as_deref()) {
IndentSize::tab()
@@ -1631,6 +1745,8 @@ impl BufferSnapshot {
if capture.index == config.indent_capture_ix {
start.get_or_insert(Point::from_ts_point(capture.node.start_position()));
end.get_or_insert(Point::from_ts_point(capture.node.end_position()));
+ } else if Some(capture.index) == config.start_capture_ix {
+ start = Some(Point::from_ts_point(capture.node.end_position()));
} else if Some(capture.index) == config.end_capture_ix {
end = Some(Point::from_ts_point(capture.node.start_position()));
}
@@ -1820,8 +1936,14 @@ impl BufferSnapshot {
}
}
- pub fn language(&self) -> Option<&Arc<Language>> {
- self.language.as_ref()
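+ /// Returns the language of the deepest syntax layer (for example an injected language)
+ /// containing `position`, falling back to the buffer's primary language.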
+ pub fn language_at<D: ToOffset>(&self, position: D) -> Option<&Arc<Language>> {
+ let offset = position.to_offset(self);
+ self.syntax
+ .layers_for_range(offset..offset, &self.text)
+ .filter(|l| l.node.end_byte() > offset)
+ .last()
+ .map(|info| info.language)
+ .or(self.language.as_ref())
}
pub fn surrounding_word<T: ToOffset>(&self, start: T) -> (Range<usize>, Option<CharKind>) {
@@ -1856,8 +1978,8 @@ impl BufferSnapshot {
pub fn range_for_syntax_ancestor<T: ToOffset>(&self, range: Range<T>) -> Option<Range<usize>> {
let range = range.start.to_offset(self)..range.end.to_offset(self);
let mut result: Option<Range<usize>> = None;
- 'outer: for (_, _, node) in self.syntax.layers_for_range(range.clone(), &self.text) {
- let mut cursor = node.walk();
+ 'outer: for layer in self.syntax.layers_for_range(range.clone(), &self.text) {
+ let mut cursor = layer.node.walk();
// Descend to the first leaf that touches the start of the range,
// and if the range is non-empty, extends beyond the start.
@@ -2139,6 +2261,13 @@ impl BufferSnapshot {
})
}
+ pub fn git_diff_hunks_in_range<'a>(
+ &'a self,
+ query_row_range: Range<u32>,
+ ) -> impl 'a + Iterator<Item = git::diff::DiffHunk<u32>> {
+ self.git_diff.hunks_in_range(query_row_range, self)
+ }
+
pub fn diagnostics_in_range<'a, T, O>(
&'a self,
search_range: Range<T>,
@@ -2186,6 +2315,10 @@ impl BufferSnapshot {
pub fn file_update_count(&self) -> usize {
self.file_update_count
}
+
+ pub fn git_diff_update_count(&self) -> usize {
+ self.git_diff_update_count
+ }
}
pub fn indent_size_for_line(text: &text::BufferSnapshot, row: u32) -> IndentSize {
@@ -2212,6 +2345,7 @@ impl Clone for BufferSnapshot {
fn clone(&self) -> Self {
Self {
text: self.text.clone(),
+ git_diff: self.git_diff.clone(),
syntax: self.syntax.clone(),
file: self.file.clone(),
remote_selections: self.remote_selections.clone(),
@@ -2219,6 +2353,7 @@ impl Clone for BufferSnapshot {
selections_update_count: self.selections_update_count,
diagnostics_update_count: self.diagnostics_update_count,
file_update_count: self.file_update_count,
+ git_diff_update_count: self.git_diff_update_count,
language: self.language.clone(),
parse_count: self.parse_count,
}
@@ -14,7 +14,7 @@ use std::{
};
use text::network::Network;
use unindent::Unindent as _;
-use util::post_inc;
+use util::{post_inc, test::marked_text_ranges};
#[cfg(test)]
#[ctor::ctor]
@@ -1035,6 +1035,120 @@ fn test_autoindent_language_without_indents_query(cx: &mut MutableAppContext) {
});
}
+#[gpui::test]
+fn test_autoindent_with_injected_languages(cx: &mut MutableAppContext) {
+ cx.set_global({
+ let mut settings = Settings::test(cx);
+ settings.language_overrides.extend([
+ (
+ "HTML".into(),
+ settings::EditorSettings {
+ tab_size: Some(2.try_into().unwrap()),
+ ..Default::default()
+ },
+ ),
+ (
+ "JavaScript".into(),
+ settings::EditorSettings {
+ tab_size: Some(8.try_into().unwrap()),
+ ..Default::default()
+ },
+ ),
+ ]);
+ settings
+ });
+
+ let html_language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "HTML".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_html::language()),
+ )
+ .with_indents_query(
+ "
+ (element
+ (start_tag) @start
+ (end_tag)? @end) @indent
+ ",
+ )
+ .unwrap()
+ .with_injection_query(
+ r#"
+ (script_element
+ (raw_text) @content
+ (#set! "language" "javascript"))
+ "#,
+ )
+ .unwrap(),
+ );
+
+ let javascript_language = Arc::new(
+ Language::new(
+ LanguageConfig {
+ name: "JavaScript".into(),
+ ..Default::default()
+ },
+ Some(tree_sitter_javascript::language()),
+ )
+ .with_indents_query(
+ r#"
+ (object "}" @end) @indent
+ "#,
+ )
+ .unwrap(),
+ );
+
+ let language_registry = Arc::new(LanguageRegistry::test());
+ language_registry.add(html_language.clone());
+ language_registry.add(javascript_language.clone());
+
+ cx.add_model(|cx| {
+ let (text, ranges) = marked_text_ranges(
+ &"
+ <div>ˇ
+ </div>
+ <script>
+ init({ˇ
+ })
+ </script>
+ <span>ˇ
+ </span>
+ "
+ .unindent(),
+ false,
+ );
+
+ let mut buffer = Buffer::new(0, text, cx);
+ buffer.set_language_registry(language_registry);
+ buffer.set_language(Some(html_language), cx);
+ buffer.edit(
+ ranges.into_iter().map(|range| (range, "\na")),
+ Some(AutoindentMode::EachLine),
+ cx,
+ );
+ assert_eq!(
+ buffer.text(),
+ "
+ <div>
+ a
+ </div>
+ <script>
+ init({
+ a
+ })
+ </script>
+ <span>
+ a
+ </span>
+ "
+ .unindent()
+ );
+ buffer
+ });
+}
+
#[gpui::test]
fn test_serialization(cx: &mut gpui::MutableAppContext) {
let mut now = Instant::now();
@@ -1449,7 +1563,7 @@ fn get_tree_sexp(buffer: &ModelHandle<Buffer>, cx: &gpui::TestAppContext) -> Str
buffer.read_with(cx, |buffer, _| {
let snapshot = buffer.snapshot();
let layers = snapshot.syntax.layers(buffer.as_text_snapshot());
- layers[0].2.to_sexp()
+ layers[0].node.to_sexp()
})
}
@@ -4,8 +4,9 @@ mod highlight_map;
mod outline;
pub mod proto;
mod syntax_map;
+
#[cfg(test)]
-mod tests;
+mod buffer_tests;
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
@@ -26,6 +27,7 @@ use serde_json::Value;
use std::{
any::Any,
cell::RefCell,
+ fmt::Debug,
mem,
ops::Range,
path::{Path, PathBuf},
@@ -135,7 +137,7 @@ impl CachedLspAdapter {
pub async fn label_for_completion(
&self,
completion_item: &lsp::CompletionItem,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
self.adapter
.label_for_completion(completion_item, language)
@@ -146,7 +148,7 @@ impl CachedLspAdapter {
&self,
name: &str,
kind: lsp::SymbolKind,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
self.adapter.label_for_symbol(name, kind, language).await
}
@@ -175,7 +177,7 @@ pub trait LspAdapter: 'static + Send + Sync {
async fn label_for_completion(
&self,
_: &lsp::CompletionItem,
- _: &Language,
+ _: &Arc<Language>,
) -> Option<CodeLabel> {
None
}
@@ -184,7 +186,7 @@ pub trait LspAdapter: 'static + Send + Sync {
&self,
_: &str,
_: lsp::SymbolKind,
- _: &Language,
+ _: &Arc<Language>,
) -> Option<CodeLabel> {
None
}
@@ -230,7 +232,10 @@ pub struct LanguageConfig {
pub decrease_indent_pattern: Option<Regex>,
#[serde(default)]
pub autoclose_before: String,
- pub line_comment: Option<String>,
+ #[serde(default)]
+ pub line_comment: Option<Arc<str>>,
+ #[serde(default)]
+ pub block_comment: Option<(Arc<str>, Arc<str>)>,
}
impl Default for LanguageConfig {
@@ -244,6 +249,7 @@ impl Default for LanguageConfig {
decrease_indent_pattern: Default::default(),
autoclose_before: Default::default(),
line_comment: Default::default(),
+ block_comment: Default::default(),
}
}
}
@@ -270,7 +276,7 @@ pub struct FakeLspAdapter {
pub disk_based_diagnostics_sources: Vec<String>,
}
-#[derive(Clone, Debug, Deserialize)]
+#[derive(Clone, Debug, Default, Deserialize)]
pub struct BracketPair {
pub start: String,
pub end: String,
@@ -304,6 +310,7 @@ pub struct Grammar {
struct IndentConfig {
query: Query,
indent_capture_ix: u32,
+ start_capture_ix: Option<u32>,
end_capture_ix: Option<u32>,
}
@@ -661,11 +668,13 @@ impl Language {
let grammar = self.grammar_mut();
let query = Query::new(grammar.ts_language, source)?;
let mut indent_capture_ix = None;
+ let mut start_capture_ix = None;
let mut end_capture_ix = None;
get_capture_indices(
&query,
&mut [
("indent", &mut indent_capture_ix),
+ ("start", &mut start_capture_ix),
("end", &mut end_capture_ix),
],
);
@@ -673,6 +682,7 @@ impl Language {
grammar.indents_config = Some(IndentConfig {
query,
indent_capture_ix,
+ start_capture_ix,
end_capture_ix,
});
}
@@ -763,8 +773,15 @@ impl Language {
self.config.name.clone()
}
- pub fn line_comment_prefix(&self) -> Option<&str> {
- self.config.line_comment.as_deref()
+ pub fn line_comment_prefix(&self) -> Option<&Arc<str>> {
+ self.config.line_comment.as_ref()
+ }
+
+ pub fn block_comment_delimiters(&self) -> Option<(&Arc<str>, &Arc<str>)> {
+ self.config
+ .block_comment
+ .as_ref()
+ .map(|(start, end)| (start, end))
}
pub async fn disk_based_diagnostic_sources(&self) -> &[String] {
@@ -789,7 +806,7 @@ impl Language {
}
pub async fn label_for_completion(
- &self,
+ self: &Arc<Self>,
completion: &lsp::CompletionItem,
) -> Option<CodeLabel> {
self.adapter
@@ -798,7 +815,11 @@ impl Language {
.await
}
- pub async fn label_for_symbol(&self, name: &str, kind: lsp::SymbolKind) -> Option<CodeLabel> {
+ pub async fn label_for_symbol(
+ self: &Arc<Self>,
+ name: &str,
+ kind: lsp::SymbolKind,
+ ) -> Option<CodeLabel> {
self.adapter
.as_ref()?
.label_for_symbol(name, kind, self)
@@ -806,20 +827,17 @@ impl Language {
}
pub fn highlight_text<'a>(
- &'a self,
+ self: &'a Arc<Self>,
text: &'a Rope,
range: Range<usize>,
) -> Vec<(Range<usize>, HighlightId)> {
let mut result = Vec::new();
if let Some(grammar) = &self.grammar {
let tree = grammar.parse_text(text, None);
- let captures = SyntaxSnapshot::single_tree_captures(
- range.clone(),
- text,
- &tree,
- grammar,
- |grammar| grammar.highlights_query.as_ref(),
- );
+ let captures =
+ SyntaxSnapshot::single_tree_captures(range.clone(), text, &tree, self, |grammar| {
+ grammar.highlights_query.as_ref()
+ });
let highlight_maps = vec![grammar.highlight_map()];
let mut offset = 0;
for chunk in BufferChunks::new(text, range, Some((captures, highlight_maps)), vec![]) {
@@ -861,6 +879,14 @@ impl Language {
}
}
+impl Debug for Language {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("Language")
+ .field("name", &self.config.name)
+ .finish()
+ }
+}
+
impl Grammar {
pub fn id(&self) -> usize {
self.id
@@ -92,6 +92,13 @@ struct SyntaxLayer {
language: Arc<Language>,
}
+#[derive(Debug)]
+pub struct SyntaxLayerInfo<'a> {
+ pub depth: usize,
+ pub node: Node<'a>,
+ pub language: &'a Arc<Language>,
+}
+
#[derive(Debug, Clone)]
struct SyntaxLayerSummary {
min_depth: usize,
@@ -473,13 +480,18 @@ impl SyntaxSnapshot {
range: Range<usize>,
text: &'a Rope,
tree: &'a Tree,
- grammar: &'a Grammar,
+ language: &'a Arc<Language>,
query: fn(&Grammar) -> Option<&Query>,
) -> SyntaxMapCaptures<'a> {
SyntaxMapCaptures::new(
range.clone(),
text,
- [(grammar, 0, tree.root_node())].into_iter(),
+ [SyntaxLayerInfo {
+ language,
+ depth: 0,
+ node: tree.root_node(),
+ }]
+ .into_iter(),
query,
)
}
@@ -513,19 +525,19 @@ impl SyntaxSnapshot {
}
#[cfg(test)]
- pub fn layers(&self, buffer: &BufferSnapshot) -> Vec<(&Grammar, usize, Node)> {
- self.layers_for_range(0..buffer.len(), buffer)
+ pub fn layers<'a>(&'a self, buffer: &'a BufferSnapshot) -> Vec<SyntaxLayerInfo> {
+ self.layers_for_range(0..buffer.len(), buffer).collect()
}
pub fn layers_for_range<'a, T: ToOffset>(
- &self,
+ &'a self,
range: Range<T>,
- buffer: &BufferSnapshot,
- ) -> Vec<(&Grammar, usize, Node)> {
+ buffer: &'a BufferSnapshot,
+ ) -> impl 'a + Iterator<Item = SyntaxLayerInfo> {
let start = buffer.anchor_before(range.start.to_offset(buffer));
let end = buffer.anchor_after(range.end.to_offset(buffer));
- let mut cursor = self.layers.filter::<_, ()>(|summary| {
+ let mut cursor = self.layers.filter::<_, ()>(move |summary| {
if summary.max_depth > summary.min_depth {
true
} else {
@@ -535,23 +547,26 @@ impl SyntaxSnapshot {
}
});
- let mut result = Vec::new();
cursor.next(buffer);
- while let Some(layer) = cursor.item() {
- if let Some(grammar) = &layer.language.grammar {
- result.push((
- grammar.as_ref(),
- layer.depth,
- layer.tree.root_node_with_offset(
+ std::iter::from_fn(move || {
+ if let Some(layer) = cursor.item() {
+ let info = SyntaxLayerInfo {
+ language: &layer.language,
+ depth: layer.depth,
+ node: layer.tree.root_node_with_offset(
layer.range.start.to_offset(buffer),
layer.range.start.to_point(buffer).to_ts_point(),
),
- ));
+ };
+ cursor.next(buffer);
+ Some(info)
+ } else {
+ None
}
- cursor.next(buffer)
- }
+ })
- result
}
}
@@ -559,7 +574,7 @@ impl<'a> SyntaxMapCaptures<'a> {
fn new(
range: Range<usize>,
text: &'a Rope,
- layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
+ layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
query: fn(&Grammar) -> Option<&Query>,
) -> Self {
let mut result = Self {
@@ -567,11 +582,19 @@ impl<'a> SyntaxMapCaptures<'a> {
grammars: Vec::new(),
active_layer_count: 0,
};
- for (grammar, depth, node) in layers {
- let query = if let Some(query) = query(grammar) {
- query
- } else {
- continue;
+ for SyntaxLayerInfo {
+ language,
+ depth,
+ node,
+ } in layers
+ {
+ let grammar = match &language.grammar {
+ Some(grammar) => grammar,
+ None => continue,
+ };
+ let query = match query(&grammar) {
+ Some(query) => query,
+ None => continue,
};
let mut query_cursor = QueryCursorHandle::new();
@@ -678,15 +701,23 @@ impl<'a> SyntaxMapMatches<'a> {
fn new(
range: Range<usize>,
text: &'a Rope,
- layers: impl Iterator<Item = (&'a Grammar, usize, Node<'a>)>,
+ layers: impl Iterator<Item = SyntaxLayerInfo<'a>>,
query: fn(&Grammar) -> Option<&Query>,
) -> Self {
let mut result = Self::default();
- for (grammar, depth, node) in layers {
- let query = if let Some(query) = query(grammar) {
- query
- } else {
- continue;
+ for SyntaxLayerInfo {
+ language,
+ depth,
+ node,
+ } in layers
+ {
+ let grammar = match &language.grammar {
+ Some(grammar) => grammar,
+ None => continue,
+ };
+ let query = match query(&grammar) {
+ Some(query) => query,
+ None => continue,
};
let mut query_cursor = QueryCursorHandle::new();
@@ -1624,8 +1655,8 @@ mod tests {
let reference_layers = reference_syntax_map.layers(&buffer);
for (edited_layer, reference_layer) in layers.into_iter().zip(reference_layers.into_iter())
{
- assert_eq!(edited_layer.2.to_sexp(), reference_layer.2.to_sexp());
- assert_eq!(edited_layer.2.range(), reference_layer.2.range());
+ assert_eq!(edited_layer.node.to_sexp(), reference_layer.node.to_sexp());
+ assert_eq!(edited_layer.node.range(), reference_layer.node.range());
}
}
@@ -1770,13 +1801,13 @@ mod tests {
mutated_layers.into_iter().zip(reference_layers.into_iter())
{
assert_eq!(
- edited_layer.2.to_sexp(),
- reference_layer.2.to_sexp(),
+ edited_layer.node.to_sexp(),
+ reference_layer.node.to_sexp(),
"different layer at step {i}"
);
assert_eq!(
- edited_layer.2.range(),
- reference_layer.2.range(),
+ edited_layer.node.range(),
+ reference_layer.node.range(),
"different layer at step {i}"
);
}
@@ -1822,13 +1853,15 @@ mod tests {
range: Range<Point>,
expected_layers: &[&str],
) {
- let layers = syntax_map.layers_for_range(range, &buffer);
+ let layers = syntax_map
+ .layers_for_range(range, &buffer)
+ .collect::<Vec<_>>();
assert_eq!(
layers.len(),
expected_layers.len(),
"wrong number of layers"
);
- for (i, ((_, _, node), expected_s_exp)) in
+ for (i, (SyntaxLayerInfo { node, .. }, expected_s_exp)) in
layers.iter().zip(expected_layers.iter()).enumerate()
{
let actual_s_exp = node.to_sexp();
@@ -10,6 +10,7 @@ doctest = false
[features]
test-support = [
"client/test-support",
+ "db/test-support",
"language/test-support",
"settings/test-support",
"text/test-support",
@@ -20,8 +21,10 @@ text = { path = "../text" }
client = { path = "../client" }
clock = { path = "../clock" }
collections = { path = "../collections" }
+db = { path = "../db" }
fsevent = { path = "../fsevent" }
fuzzy = { path = "../fuzzy" }
+git = { path = "../git" }
gpui = { path = "../gpui" }
language = { path = "../language" }
lsp = { path = "../lsp" }
@@ -54,6 +57,7 @@ rocksdb = "0.18"
[dev-dependencies]
client = { path = "../client", features = ["test-support"] }
collections = { path = "../collections", features = ["test-support"] }
+db = { path = "../db", features = ["test-support"] }
gpui = { path = "../gpui", features = ["test-support"] }
language = { path = "../language", features = ["test-support"] }
lsp = { path = "../lsp", features = ["test-support"] }
@@ -1,8 +1,11 @@
use anyhow::{anyhow, Result};
use fsevent::EventStream;
use futures::{future::BoxFuture, Stream, StreamExt};
+use git::repository::{GitRepository, LibGitRepository};
use language::LineEnding;
+use parking_lot::Mutex as SyncMutex;
use smol::io::{AsyncReadExt, AsyncWriteExt};
+use std::sync::Arc;
use std::{
io,
os::unix::fs::MetadataExt,
@@ -11,13 +14,16 @@ use std::{
time::{Duration, SystemTime},
};
use text::Rope;
+use util::ResultExt;
#[cfg(any(test, feature = "test-support"))]
use collections::{btree_map, BTreeMap};
#[cfg(any(test, feature = "test-support"))]
use futures::lock::Mutex;
#[cfg(any(test, feature = "test-support"))]
-use std::sync::{Arc, Weak};
+use git::repository::FakeGitRepositoryState;
+#[cfg(any(test, feature = "test-support"))]
+use std::sync::Weak;
#[async_trait::async_trait]
pub trait Fs: Send + Sync {
@@ -42,6 +48,7 @@ pub trait Fs: Send + Sync {
path: &Path,
latency: Duration,
) -> Pin<Box<dyn Send + Stream<Item = Vec<fsevent::Event>>>>;
+ fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<SyncMutex<dyn GitRepository>>>;
fn is_fake(&self) -> bool;
#[cfg(any(test, feature = "test-support"))]
fn as_fake(&self) -> &FakeFs;
@@ -235,6 +242,14 @@ impl Fs for RealFs {
})))
}
+ fn open_repo(&self, dotgit_path: &Path) -> Option<Arc<SyncMutex<dyn GitRepository>>> {
+ LibGitRepository::open(&dotgit_path)
+ .log_err()
+ .and_then::<Arc<SyncMutex<dyn GitRepository>>, _>(|libgit_repository| {
+ Some(Arc::new(SyncMutex::new(libgit_repository)))
+ })
+ }
+
fn is_fake(&self) -> bool {
false
}
@@ -270,6 +285,7 @@ enum FakeFsEntry {
inode: u64,
mtime: SystemTime,
entries: BTreeMap<String, Arc<Mutex<FakeFsEntry>>>,
+ git_repo_state: Option<Arc<SyncMutex<git::repository::FakeGitRepositoryState>>>,
},
Symlink {
target: PathBuf,
@@ -384,6 +400,7 @@ impl FakeFs {
inode: 0,
mtime: SystemTime::now(),
entries: Default::default(),
+ git_repo_state: None,
})),
next_inode: 1,
event_txs: Default::default(),
@@ -473,6 +490,28 @@ impl FakeFs {
.boxed()
}
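+ // Test helper: replaces the fake repository's index contents for the repo at `dot_git`,
+ // then emits a filesystem event so observers pick up the change.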
+ pub async fn set_index_for_repo(&self, dot_git: &Path, head_state: &[(&Path, String)]) {
+ let mut state = self.state.lock().await;
+ let entry = state.read_path(dot_git).await.unwrap();
+ let mut entry = entry.lock().await;
+
+ if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
+ let repo_state = git_repo_state.get_or_insert_with(Default::default);
+ let mut repo_state = repo_state.lock();
+
+ repo_state.index_contents.clear();
+ repo_state.index_contents.extend(
+ head_state
+ .iter()
+ .map(|(path, content)| (path.to_path_buf(), content.clone())),
+ );
+
+ state.emit_event([dot_git]);
+ } else {
+ panic!("not a directory");
+ }
+ }
+
pub async fn files(&self) -> Vec<PathBuf> {
let mut result = Vec::new();
let mut queue = collections::VecDeque::new();
@@ -562,6 +601,7 @@ impl Fs for FakeFs {
inode,
mtime: SystemTime::now(),
entries: Default::default(),
+ git_repo_state: None,
}))
});
Ok(())
@@ -846,6 +886,24 @@ impl Fs for FakeFs {
}))
}
+ fn open_repo(&self, abs_dot_git: &Path) -> Option<Arc<SyncMutex<dyn GitRepository>>> {
+ smol::block_on(async move {
+ let state = self.state.lock().await;
+ let entry = state.read_path(abs_dot_git).await.unwrap();
+ let mut entry = entry.lock().await;
+ if let FakeFsEntry::Dir { git_repo_state, .. } = &mut *entry {
+ let state = git_repo_state
+ .get_or_insert_with(|| {
+ Arc::new(SyncMutex::new(FakeGitRepositoryState::default()))
+ })
+ .clone();
+ Some(git::repository::FakeGitRepository::open(state))
+ } else {
+ None
+ }
+ })
+ }
+
fn is_fake(&self) -> bool {
true
}
@@ -1,4 +1,3 @@
-mod db;
pub mod fs;
mod ignore;
mod lsp_command;
@@ -13,6 +12,7 @@ use client::{proto, Client, PeerId, TypedEnvelope, UserStore};
use clock::ReplicaId;
use collections::{hash_map, BTreeMap, HashMap, HashSet};
use futures::{future::Shared, AsyncWriteExt, Future, FutureExt, StreamExt, TryFutureExt};
+
use gpui::{
AnyModelHandle, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle,
MutableAppContext, Task, UpgradeModelHandle, WeakModelHandle,
@@ -62,7 +62,7 @@ use std::{
time::Instant,
};
use thiserror::Error;
-use util::{post_inc, ResultExt, TryFutureExt as _};
+use util::{defer, post_inc, ResultExt, TryFutureExt as _};
pub use db::Db;
pub use fs::*;
@@ -123,6 +123,7 @@ pub struct Project {
opened_buffers: HashMap<u64, OpenBuffer>,
incomplete_buffers: HashMap<u64, ModelHandle<Buffer>>,
buffer_snapshots: HashMap<u64, Vec<(i32, TextBufferSnapshot)>>,
+ buffers_being_formatted: HashSet<usize>,
nonce: u128,
_maintain_buffer_languages: Task<()>,
}
@@ -407,6 +408,7 @@ impl Project {
client.add_model_request_handler(Self::handle_open_buffer_by_id);
client.add_model_request_handler(Self::handle_open_buffer_by_path);
client.add_model_request_handler(Self::handle_save_buffer);
+ client.add_model_message_handler(Self::handle_update_diff_base);
}
pub fn local(
@@ -466,6 +468,7 @@ impl Project {
language_server_statuses: Default::default(),
last_workspace_edits_by_language_server: Default::default(),
language_server_settings: Default::default(),
+ buffers_being_formatted: Default::default(),
next_language_server_id: 0,
nonce: StdRng::from_entropy().gen(),
}
@@ -562,6 +565,7 @@ impl Project {
last_workspace_edits_by_language_server: Default::default(),
next_language_server_id: 0,
opened_buffers: Default::default(),
+ buffers_being_formatted: Default::default(),
buffer_snapshots: Default::default(),
nonce: StdRng::from_entropy().gen(),
};
@@ -604,7 +608,7 @@ impl Project {
let languages = Arc::new(LanguageRegistry::test());
let http_client = client::test::FakeHttpClient::with_404_response();
- let client = client::Client::new(http_client.clone());
+ let client = cx.update(|cx| client::Client::new(http_client.clone(), cx));
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
let project_store = cx.add_model(|_| ProjectStore::new());
let project =
@@ -2804,7 +2808,26 @@ impl Project {
.await?;
}
- for (buffer, buffer_abs_path, language_server) in local_buffers {
+ // Do not allow multiple concurrent formatting requests for the
+ // same buffer.
+ this.update(&mut cx, |this, _| {
+ local_buffers
+ .retain(|(buffer, _, _)| this.buffers_being_formatted.insert(buffer.id()));
+ });
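+ // Make sure the buffers are removed from `buffers_being_formatted` when this scope
+ // ends, even if formatting fails or the future is dropped early.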
+ let _cleanup = defer({
+ let this = this.clone();
+ let mut cx = cx.clone();
+ let local_buffers = &local_buffers;
+ move || {
+ this.update(&mut cx, |this, _| {
+ for (buffer, _, _) in local_buffers {
+ this.buffers_being_formatted.remove(&buffer.id());
+ }
+ });
+ }
+ });
+
+ for (buffer, buffer_abs_path, language_server) in &local_buffers {
let (format_on_save, formatter, tab_size) = buffer.read_with(&cx, |buffer, cx| {
let settings = cx.global::<Settings>();
let language_name = buffer.language().map(|language| language.name());
@@ -2856,7 +2879,7 @@ impl Project {
buffer.forget_transaction(transaction.id)
});
}
- project_transaction.0.insert(buffer, transaction);
+ project_transaction.0.insert(buffer.clone(), transaction);
}
}
@@ -4229,8 +4252,11 @@ impl Project {
fn add_worktree(&mut self, worktree: &ModelHandle<Worktree>, cx: &mut ModelContext<Self>) {
cx.observe(worktree, |_, _, cx| cx.notify()).detach();
if worktree.read(cx).is_local() {
- cx.subscribe(worktree, |this, worktree, _, cx| {
- this.update_local_worktree_buffers(worktree, cx);
+ cx.subscribe(worktree, |this, worktree, event, cx| match event {
+ worktree::Event::UpdatedEntries => this.update_local_worktree_buffers(worktree, cx),
+ worktree::Event::UpdatedGitRepositories(updated_repos) => {
+ this.update_local_worktree_buffers_git_repos(worktree, updated_repos, cx)
+ }
})
.detach();
}
@@ -4338,6 +4364,63 @@ impl Project {
}
}
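+ // A worktree's git repositories changed: reload the diff base for any open buffer that
+ // lives inside one of the updated repositories, and forward the new diff base to remote
+ // collaborators via `proto::UpdateDiffBase`.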
+ fn update_local_worktree_buffers_git_repos(
+ &mut self,
+ worktree: ModelHandle<Worktree>,
+ repos: &[GitRepositoryEntry],
+ cx: &mut ModelContext<Self>,
+ ) {
+ for (_, buffer) in &self.opened_buffers {
+ if let Some(buffer) = buffer.upgrade(cx) {
+ let file = match File::from_dyn(buffer.read(cx).file()) {
+ Some(file) => file,
+ None => continue,
+ };
+ if file.worktree != worktree {
+ continue;
+ }
+
+ let path = file.path().clone();
+
+ let repo = match repos.iter().find(|repo| repo.manages(&path)) {
+ Some(repo) => repo.clone(),
+ None => return,
+ };
+
+ let relative_repo = match path.strip_prefix(repo.content_path) {
+ Ok(relative_repo) => relative_repo.to_owned(),
+ Err(_) => return,
+ };
+
+ let remote_id = self.remote_id();
+ let client = self.client.clone();
+
+ cx.spawn(|_, mut cx| async move {
+ let diff_base = cx
+ .background()
+ .spawn(async move { repo.repo.lock().load_index_text(&relative_repo) })
+ .await;
+
+ let buffer_id = buffer.update(&mut cx, |buffer, cx| {
+ buffer.update_diff_base(diff_base.clone(), cx);
+ buffer.remote_id()
+ });
+
+ if let Some(project_id) = remote_id {
+ client
+ .send(proto::UpdateDiffBase {
+ project_id,
+ buffer_id: buffer_id as u64,
+ diff_base,
+ })
+ .log_err();
+ }
+ })
+ .detach();
+ }
+ }
+ }
+
pub fn set_active_path(&mut self, entry: Option<ProjectPath>, cx: &mut ModelContext<Self>) {
let new_active_entry = entry.and_then(|project_path| {
let worktree = self.worktree_for_id(project_path.worktree_id, cx)?;
@@ -4861,6 +4944,27 @@ impl Project {
})
}
+ async fn handle_update_diff_base(
+ this: ModelHandle<Self>,
+ envelope: TypedEnvelope<proto::UpdateDiffBase>,
+ _: Arc<Client>,
+ mut cx: AsyncAppContext,
+ ) -> Result<()> {
+ this.update(&mut cx, |this, cx| {
+ let buffer_id = envelope.payload.buffer_id;
+ let diff_base = envelope.payload.diff_base;
+ let buffer = this
+ .opened_buffers
+ .get_mut(&buffer_id)
+ .and_then(|b| b.upgrade(cx))
+ .ok_or_else(|| anyhow!("No such buffer {}", buffer_id))?;
+
+ buffer.update(cx, |buffer, cx| buffer.update_diff_base(diff_base, cx));
+
+ Ok(())
+ })
+ }
+
async fn handle_update_buffer_file(
this: ModelHandle<Self>,
envelope: TypedEnvelope<proto::UpdateBufferFile>,
@@ -5427,7 +5531,7 @@ impl Project {
cx: &mut ModelContext<Self>,
) -> Task<Result<ModelHandle<Buffer>>> {
let mut opened_buffer_rx = self.opened_buffer.1.clone();
- cx.spawn(|this, cx| async move {
+ cx.spawn(|this, mut cx| async move {
let buffer = loop {
let buffer = this.read_with(&cx, |this, cx| {
this.opened_buffers
@@ -5445,6 +5549,7 @@ impl Project {
.await
.ok_or_else(|| anyhow!("project dropped while waiting for buffer"))?;
};
+ buffer.update(&mut cx, |buffer, cx| buffer.git_diff_recalc(cx));
Ok(buffer)
})
}
@@ -1,10 +1,9 @@
-use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
-
use super::{
fs::{self, Fs},
ignore::IgnoreStack,
DiagnosticSummary,
};
+use crate::{copy_recursive, ProjectEntryId, RemoveOptions};
use ::ignore::gitignore::{Gitignore, GitignoreBuilder};
use anyhow::{anyhow, Context, Result};
use client::{proto, Client};
@@ -18,6 +17,8 @@ use futures::{
Stream, StreamExt,
};
use fuzzy::CharBag;
+use git::repository::GitRepository;
+use git::{DOT_GIT, GITIGNORE};
use gpui::{
executor, AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, MutableAppContext,
Task,
@@ -26,12 +27,12 @@ use language::{
proto::{deserialize_version, serialize_line_ending, serialize_version},
Buffer, DiagnosticEntry, LineEnding, PointUtf16, Rope,
};
-use lazy_static::lazy_static;
use parking_lot::Mutex;
use postage::{
prelude::{Sink as _, Stream as _},
watch,
};
+
use smol::channel::{self, Sender};
use std::{
any::Any,
@@ -40,6 +41,7 @@ use std::{
ffi::{OsStr, OsString},
fmt,
future::Future,
+ mem,
ops::{Deref, DerefMut},
os::unix::prelude::{OsStrExt, OsStringExt},
path::{Path, PathBuf},
@@ -50,10 +52,6 @@ use std::{
use sum_tree::{Bias, Edit, SeekTarget, SumTree, TreeMap, TreeSet};
use util::{ResultExt, TryFutureExt};
-lazy_static! {
- static ref GITIGNORE: &'static OsStr = OsStr::new(".gitignore");
-}
-
#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash, PartialOrd, Ord)]
pub struct WorktreeId(usize);
@@ -101,15 +99,51 @@ pub struct Snapshot {
}
#[derive(Clone)]
+pub struct GitRepositoryEntry {
+ pub(crate) repo: Arc<Mutex<dyn GitRepository>>,
+
+ pub(crate) scan_id: usize,
+ // Path to folder containing the .git file or directory
+ pub(crate) content_path: Arc<Path>,
+ // Path to the actual .git folder.
+ // Note: if .git is a file, this points to the folder indicated by the .git file
+ pub(crate) git_dir_path: Arc<Path>,
+}
+
+impl std::fmt::Debug for GitRepositoryEntry {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ f.debug_struct("GitRepositoryEntry")
+ .field("content_path", &self.content_path)
+ .field("git_dir_path", &self.git_dir_path)
+ .field("libgit_repository", &"LibGitRepository")
+ .finish()
+ }
+}
+
pub struct LocalSnapshot {
abs_path: Arc<Path>,
ignores_by_parent_abs_path: HashMap<Arc<Path>, (Arc<Gitignore>, usize)>,
+ git_repositories: Vec<GitRepositoryEntry>,
removed_entry_ids: HashMap<u64, ProjectEntryId>,
next_entry_id: Arc<AtomicUsize>,
snapshot: Snapshot,
extension_counts: HashMap<OsString, usize>,
}
+impl Clone for LocalSnapshot {
+ fn clone(&self) -> Self {
+ Self {
+ abs_path: self.abs_path.clone(),
+ ignores_by_parent_abs_path: self.ignores_by_parent_abs_path.clone(),
+ git_repositories: self.git_repositories.iter().cloned().collect(),
+ removed_entry_ids: self.removed_entry_ids.clone(),
+ next_entry_id: self.next_entry_id.clone(),
+ snapshot: self.snapshot.clone(),
+ extension_counts: self.extension_counts.clone(),
+ }
+ }
+}
+
impl Deref for LocalSnapshot {
type Target = Snapshot;
@@ -142,6 +176,7 @@ struct ShareState {
pub enum Event {
UpdatedEntries,
+ UpdatedGitRepositories(Vec<GitRepositoryEntry>),
}
impl Entity for Worktree {
@@ -372,6 +407,7 @@ impl LocalWorktree {
let mut snapshot = LocalSnapshot {
abs_path,
ignores_by_parent_abs_path: Default::default(),
+ git_repositories: Default::default(),
removed_entry_ids: Default::default(),
next_entry_id,
snapshot: Snapshot {
@@ -446,10 +482,14 @@ impl LocalWorktree {
) -> Task<Result<ModelHandle<Buffer>>> {
let path = Arc::from(path);
cx.spawn(move |this, mut cx| async move {
- let (file, contents) = this
+ let (file, contents, diff_base) = this
.update(&mut cx, |t, cx| t.as_local().unwrap().load(&path, cx))
.await?;
- Ok(cx.add_model(|cx| Buffer::from_file(0, contents, Arc::new(file), cx)))
+ Ok(cx.add_model(|cx| {
+ let mut buffer = Buffer::from_file(0, contents, diff_base, Arc::new(file), cx);
+ buffer.git_diff_recalc(cx);
+ buffer
+ }))
})
}
@@ -499,17 +539,37 @@ impl LocalWorktree {
fn poll_snapshot(&mut self, force: bool, cx: &mut ModelContext<Worktree>) {
self.poll_task.take();
+
match self.scan_state() {
ScanState::Idle => {
- self.snapshot = self.background_snapshot.lock().clone();
+ let new_snapshot = self.background_snapshot.lock().clone();
+ let updated_repos = Self::changed_repos(
+ &self.snapshot.git_repositories,
+ &new_snapshot.git_repositories,
+ );
+ self.snapshot = new_snapshot;
+
if let Some(share) = self.share.as_mut() {
*share.snapshots_tx.borrow_mut() = self.snapshot.clone();
}
+
cx.emit(Event::UpdatedEntries);
+
+ if !updated_repos.is_empty() {
+ cx.emit(Event::UpdatedGitRepositories(updated_repos));
+ }
}
+
ScanState::Initializing => {
let is_fake_fs = self.fs.is_fake();
- self.snapshot = self.background_snapshot.lock().clone();
+
+ let new_snapshot = self.background_snapshot.lock().clone();
+ let updated_repos = Self::changed_repos(
+ &self.snapshot.git_repositories,
+ &new_snapshot.git_repositories,
+ );
+ self.snapshot = new_snapshot;
+
self.poll_task = Some(cx.spawn_weak(|this, mut cx| async move {
if is_fake_fs {
#[cfg(any(test, feature = "test-support"))]
@@ -521,17 +581,52 @@ impl LocalWorktree {
this.update(&mut cx, |this, cx| this.poll_snapshot(cx));
}
}));
+
cx.emit(Event::UpdatedEntries);
+
+ if !updated_repos.is_empty() {
+ cx.emit(Event::UpdatedGitRepositories(updated_repos));
+ }
}
+
_ => {
if force {
self.snapshot = self.background_snapshot.lock().clone();
}
}
}
+
cx.notify();
}
+ fn changed_repos(
+ old_repos: &[GitRepositoryEntry],
+ new_repos: &[GitRepositoryEntry],
+ ) -> Vec<GitRepositoryEntry> {
+ fn diff<'a>(
+ a: &'a [GitRepositoryEntry],
+ b: &'a [GitRepositoryEntry],
+ updated: &mut HashMap<&'a Path, GitRepositoryEntry>,
+ ) {
+ for a_repo in a {
+ let matched = b.iter().find(|b_repo| {
+ a_repo.git_dir_path == b_repo.git_dir_path && a_repo.scan_id == b_repo.scan_id
+ });
+
+ if matched.is_none() {
+ updated.insert(a_repo.git_dir_path.as_ref(), a_repo.clone());
+ }
+ }
+ }
+
+ let mut updated = HashMap::<&Path, GitRepositoryEntry>::default();
+
+ diff(old_repos, new_repos, &mut updated);
+ diff(new_repos, old_repos, &mut updated);
+
+ updated.into_values().collect()
+ }
+
pub fn scan_complete(&self) -> impl Future<Output = ()> {
let mut scan_state_rx = self.last_scan_state_rx.clone();
async move {
@@ -558,13 +653,33 @@ impl LocalWorktree {
}
}
- fn load(&self, path: &Path, cx: &mut ModelContext<Worktree>) -> Task<Result<(File, String)>> {
+ fn load(
+ &self,
+ path: &Path,
+ cx: &mut ModelContext<Worktree>,
+ ) -> Task<Result<(File, String, Option<String>)>> {
let handle = cx.handle();
let path = Arc::from(path);
let abs_path = self.absolutize(&path);
let fs = self.fs.clone();
+ let snapshot = self.snapshot();
+
cx.spawn(|this, mut cx| async move {
let text = fs.load(&abs_path).await?;
+
+ let diff_base = if let Some(repo) = snapshot.repo_for(&path) {
+ if let Ok(repo_relative) = path.strip_prefix(repo.content_path) {
+ let repo_relative = repo_relative.to_owned();
+ cx.background()
+ .spawn(async move { repo.repo.lock().load_index_text(&repo_relative) })
+ .await
+ } else {
+ None
+ }
+ } else {
+ None
+ };
+
// Eagerly populate the snapshot with an updated entry for the loaded file
let entry = this
.update(&mut cx, |this, cx| {
@@ -573,6 +688,7 @@ impl LocalWorktree {
.refresh_entry(path, abs_path, None, cx)
})
.await?;
+
Ok((
File {
entry_id: Some(entry.id),
@@ -582,6 +698,7 @@ impl LocalWorktree {
is_local: true,
},
text,
+ diff_base,
))
})
}
@@ -1248,6 +1365,22 @@ impl LocalSnapshot {
&self.extension_counts
}
+ // Gives the most specific git repository for a given path
+ pub(crate) fn repo_for(&self, path: &Path) -> Option<GitRepositoryEntry> {
+ self.git_repositories
+ .iter()
+ .rev() // git_repositories is kept in lexicographic order
+ .find(|repo| repo.manages(path))
+ .cloned()
+ }
+
+ pub(crate) fn in_dot_git(&mut self, path: &Path) -> Option<&mut GitRepositoryEntry> {
+ // Git repositories cannot be nested, so we don't need to reverse the order
+ self.git_repositories
+ .iter_mut()
+ .find(|repo| repo.in_dot_git(path))
+ }
+
#[cfg(test)]
pub(crate) fn build_initial_update(&self, project_id: u64) -> proto::UpdateWorktree {
let root_name = self.root_name.clone();
@@ -1330,7 +1463,7 @@ impl LocalSnapshot {
}
fn insert_entry(&mut self, mut entry: Entry, fs: &dyn Fs) -> Entry {
- if !entry.is_dir() && entry.path.file_name() == Some(&GITIGNORE) {
+ if entry.is_file() && entry.path.file_name() == Some(&GITIGNORE) {
let abs_path = self.abs_path.join(&entry.path);
match smol::block_on(build_gitignore(&abs_path, fs)) {
Ok(ignore) => {
@@ -1384,6 +1517,7 @@ impl LocalSnapshot {
parent_path: Arc<Path>,
entries: impl IntoIterator<Item = Entry>,
ignore: Option<Arc<Gitignore>>,
+ fs: &dyn Fs,
) {
let mut parent_entry = if let Some(parent_entry) =
self.entries_by_path.get(&PathKey(parent_path.clone()), &())
@@ -1409,6 +1543,27 @@ impl LocalSnapshot {
unreachable!();
}
+ if parent_path.file_name() == Some(&DOT_GIT) {
+ let abs_path = self.abs_path.join(&parent_path);
+ let content_path: Arc<Path> = parent_path.parent().unwrap().into();
+ if let Err(ix) = self
+ .git_repositories
+ .binary_search_by_key(&&content_path, |repo| &repo.content_path)
+ {
+ if let Some(repo) = fs.open_repo(abs_path.as_path()) {
+ self.git_repositories.insert(
+ ix,
+ GitRepositoryEntry {
+ repo,
+ scan_id: 0,
+ content_path,
+ git_dir_path: parent_path,
+ },
+ );
+ }
+ }
+ }
+
let mut entries_by_path_edits = vec![Edit::Insert(parent_entry)];
let mut entries_by_id_edits = Vec::new();
@@ -1493,6 +1648,14 @@ impl LocalSnapshot {
{
*scan_id = self.snapshot.scan_id;
}
+ } else if path.file_name() == Some(&DOT_GIT) {
+ let parent_path = path.parent().unwrap();
+ if let Ok(ix) = self
+ .git_repositories
+ .binary_search_by_key(&parent_path, |repo| repo.git_dir_path.as_ref())
+ {
+ self.git_repositories[ix].scan_id = self.snapshot.scan_id;
+ }
}
}
@@ -1532,6 +1695,22 @@ impl LocalSnapshot {
ignore_stack
}
+
+ pub fn git_repo_entries(&self) -> &[GitRepositoryEntry] {
+ &self.git_repositories
+ }
+}
+
+impl GitRepositoryEntry {
+ // Note that these paths should be relative to the worktree root.
+ pub(crate) fn manages(&self, path: &Path) -> bool {
+ path.starts_with(self.content_path.as_ref())
+ }
+
+ // Note that this path should be relative to the worktree root.
+ pub(crate) fn in_dot_git(&self, path: &Path) -> bool {
+ path.starts_with(self.git_dir_path.as_ref())
+ }
}
async fn build_gitignore(abs_path: &Path, fs: &dyn Fs) -> Result<Gitignore> {
@@ -2244,9 +2423,12 @@ impl BackgroundScanner {
new_entries.push(child_entry);
}
- self.snapshot
- .lock()
- .populate_dir(job.path.clone(), new_entries, new_ignore);
+ self.snapshot.lock().populate_dir(
+ job.path.clone(),
+ new_entries,
+ new_ignore,
+ self.fs.as_ref(),
+ );
for new_job in new_jobs {
job.scan_queue.send(new_job).await.unwrap();
}
@@ -2321,6 +2503,12 @@ impl BackgroundScanner {
fs_entry.is_ignored = ignore_stack.is_all();
snapshot.insert_entry(fs_entry, self.fs.as_ref());
+ let scan_id = snapshot.scan_id;
+ if let Some(repo) = snapshot.in_dot_git(&path) {
+ repo.repo.lock().reload_index();
+ repo.scan_id = scan_id;
+ }
+
let mut ancestor_inodes = snapshot.ancestor_inodes_for_path(&path);
if metadata.is_dir && !ancestor_inodes.contains(&metadata.inode) {
ancestor_inodes.insert(metadata.inode);
@@ -2367,6 +2555,7 @@ impl BackgroundScanner {
self.snapshot.lock().removed_entry_ids.clear();
self.update_ignore_statuses().await;
+ self.update_git_repositories();
true
}
@@ -2432,6 +2621,13 @@ impl BackgroundScanner {
.await;
}
+ fn update_git_repositories(&self) {
+ let mut snapshot = self.snapshot.lock();
+ let mut git_repositories = mem::take(&mut snapshot.git_repositories);
+ git_repositories.retain(|repo| snapshot.entry_for_path(&repo.git_dir_path).is_some());
+ snapshot.git_repositories = git_repositories;
+ }
+
async fn update_ignore_status(&self, job: UpdateIgnoreStatusJob, snapshot: &LocalSnapshot) {
let mut ignore_stack = job.ignore_stack;
if let Some((ignore, _)) = snapshot.ignores_by_parent_abs_path.get(&job.abs_path) {
@@ -2778,6 +2974,7 @@ mod tests {
use anyhow::Result;
use client::test::FakeHttpClient;
use fs::RealFs;
+ use git::repository::FakeGitRepository;
use gpui::{executor::Deterministic, TestAppContext};
use rand::prelude::*;
use serde_json::json;
@@ -2786,6 +2983,7 @@ mod tests {
fmt::Write,
time::{SystemTime, UNIX_EPOCH},
};
+
use util::test::temp_tree;
#[gpui::test]
@@ -2804,7 +3002,7 @@ mod tests {
.await;
let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client);
+ let client = cx.read(|cx| Client::new(http_client, cx));
let tree = Worktree::local(
client,
@@ -2866,8 +3064,7 @@ mod tests {
fs.insert_symlink("/root/lib/a/lib", "..".into()).await;
fs.insert_symlink("/root/lib/b/lib", "..".into()).await;
- let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client);
+ let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let tree = Worktree::local(
client,
Arc::from(Path::new("/root")),
@@ -2945,8 +3142,7 @@ mod tests {
}));
let dir = parent_dir.path().join("tree");
- let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client.clone());
+ let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let tree = Worktree::local(
client,
@@ -3007,6 +3203,135 @@ mod tests {
});
}
+ #[gpui::test]
+ async fn test_git_repository_for_path(cx: &mut TestAppContext) {
+ let root = temp_tree(json!({
+ "dir1": {
+ ".git": {},
+ "deps": {
+ "dep1": {
+ ".git": {},
+ "src": {
+ "a.txt": ""
+ }
+ }
+ },
+ "src": {
+ "b.txt": ""
+ }
+ },
+ "c.txt": "",
+
+ }));
+
+ let http_client = FakeHttpClient::with_404_response();
+ let client = cx.read(|cx| Client::new(http_client, cx));
+ let tree = Worktree::local(
+ client,
+ root.path(),
+ true,
+ Arc::new(RealFs),
+ Default::default(),
+ &mut cx.to_async(),
+ )
+ .await
+ .unwrap();
+
+ cx.read(|cx| tree.read(cx).as_local().unwrap().scan_complete())
+ .await;
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+
+ assert!(tree.repo_for("c.txt".as_ref()).is_none());
+
+ let repo = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap();
+ assert_eq!(repo.content_path.as_ref(), Path::new("dir1"));
+ assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/.git"));
+
+ let repo = tree.repo_for("dir1/deps/dep1/src/a.txt".as_ref()).unwrap();
+ assert_eq!(repo.content_path.as_ref(), Path::new("dir1/deps/dep1"));
+ assert_eq!(repo.git_dir_path.as_ref(), Path::new("dir1/deps/dep1/.git"),);
+ });
+
+ let original_scan_id = tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+ tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id
+ });
+
+ std::fs::write(root.path().join("dir1/.git/random_new_file"), "hello").unwrap();
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+ let new_scan_id = tree.repo_for("dir1/src/b.txt".as_ref()).unwrap().scan_id;
+ assert_ne!(
+ original_scan_id, new_scan_id,
+ "original {original_scan_id}, new {new_scan_id}"
+ );
+ });
+
+ std::fs::remove_dir_all(root.path().join("dir1/.git")).unwrap();
+ tree.flush_fs_events(cx).await;
+
+ tree.read_with(cx, |tree, _cx| {
+ let tree = tree.as_local().unwrap();
+
+ assert!(tree.repo_for("dir1/src/b.txt".as_ref()).is_none());
+ });
+ }
+
+ #[test]
+ fn test_changed_repos() {
+ fn fake_entry(git_dir_path: impl AsRef<Path>, scan_id: usize) -> GitRepositoryEntry {
+ GitRepositoryEntry {
+ repo: Arc::new(Mutex::new(FakeGitRepository::default())),
+ scan_id,
+ content_path: git_dir_path.as_ref().parent().unwrap().into(),
+ git_dir_path: git_dir_path.as_ref().into(),
+ }
+ }
+
+ let prev_repos: Vec<GitRepositoryEntry> = vec![
+ fake_entry("/.git", 0),
+ fake_entry("/a/.git", 0),
+ fake_entry("/a/b/.git", 0),
+ ];
+
+ let new_repos: Vec<GitRepositoryEntry> = vec![
+ fake_entry("/a/.git", 1),
+ fake_entry("/a/b/.git", 0),
+ fake_entry("/a/c/.git", 0),
+ ];
+
+ let res = LocalWorktree::changed_repos(&prev_repos, &new_repos);
+
+ // Deletion retained
+ assert!(res
+ .iter()
+ .find(|repo| repo.git_dir_path.as_ref() == Path::new("/.git") && repo.scan_id == 0)
+ .is_some());
+
+ // Update retained
+ assert!(res
+ .iter()
+ .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/.git") && repo.scan_id == 1)
+ .is_some());
+
+ // Addition retained
+ assert!(res
+ .iter()
+ .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/c/.git") && repo.scan_id == 0)
+ .is_some());
+
+ // No change: not retained
+ assert!(res
+ .iter()
+ .find(|repo| repo.git_dir_path.as_ref() == Path::new("/a/b/.git") && repo.scan_id == 0)
+ .is_none());
+ }
+
#[gpui::test]
async fn test_write_file(cx: &mut TestAppContext) {
let dir = temp_tree(json!({
@@ -3016,8 +3341,7 @@ mod tests {
"ignored-dir": {}
}));
- let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client.clone());
+ let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let tree = Worktree::local(
client,
@@ -3064,8 +3388,7 @@ mod tests {
#[gpui::test(iterations = 30)]
async fn test_create_directory(cx: &mut TestAppContext) {
- let http_client = FakeHttpClient::with_404_response();
- let client = Client::new(http_client.clone());
+ let client = cx.read(|cx| Client::new(FakeHttpClient::with_404_response(), cx));
let fs = FakeFs::new(cx.background());
fs.insert_tree(
@@ -3127,6 +3450,7 @@ mod tests {
abs_path: root_dir.path().into(),
removed_entry_ids: Default::default(),
ignores_by_parent_abs_path: Default::default(),
+ git_repositories: Default::default(),
next_entry_id: next_entry_id.clone(),
snapshot: Snapshot {
id: WorktreeId::from_usize(0),
@@ -15,108 +15,111 @@ message Envelope {
CreateRoomResponse create_room_response = 9;
JoinRoom join_room = 10;
JoinRoomResponse join_room_response = 11;
- LeaveRoom leave_room = 1002;
- Call call = 12;
- IncomingCall incoming_call = 1000;
- CallCanceled call_canceled = 1001;
- CancelCall cancel_call = 1004;
- DeclineCall decline_call = 13;
- UpdateParticipantLocation update_participant_location = 1003;
- RoomUpdated room_updated = 14;
-
- ShareProject share_project = 15;
- ShareProjectResponse share_project_response = 16;
- UnshareProject unshare_project = 17;
- JoinProject join_project = 21;
- JoinProjectResponse join_project_response = 22;
- LeaveProject leave_project = 23;
- AddProjectCollaborator add_project_collaborator = 24;
- RemoveProjectCollaborator remove_project_collaborator = 25;
-
- GetDefinition get_definition = 27;
- GetDefinitionResponse get_definition_response = 28;
- GetTypeDefinition get_type_definition = 29;
- GetTypeDefinitionResponse get_type_definition_response = 30;
- GetReferences get_references = 31;
- GetReferencesResponse get_references_response = 32;
- GetDocumentHighlights get_document_highlights = 33;
- GetDocumentHighlightsResponse get_document_highlights_response = 34;
- GetProjectSymbols get_project_symbols = 35;
- GetProjectSymbolsResponse get_project_symbols_response = 36;
- OpenBufferForSymbol open_buffer_for_symbol = 37;
- OpenBufferForSymbolResponse open_buffer_for_symbol_response = 38;
-
- UpdateProject update_project = 39;
- RegisterProjectActivity register_project_activity = 40;
- UpdateWorktree update_worktree = 41;
- UpdateWorktreeExtensions update_worktree_extensions = 42;
-
- CreateProjectEntry create_project_entry = 43;
- RenameProjectEntry rename_project_entry = 44;
- CopyProjectEntry copy_project_entry = 45;
- DeleteProjectEntry delete_project_entry = 46;
- ProjectEntryResponse project_entry_response = 47;
-
- UpdateDiagnosticSummary update_diagnostic_summary = 48;
- StartLanguageServer start_language_server = 49;
- UpdateLanguageServer update_language_server = 50;
-
- OpenBufferById open_buffer_by_id = 51;
- OpenBufferByPath open_buffer_by_path = 52;
- OpenBufferResponse open_buffer_response = 53;
- CreateBufferForPeer create_buffer_for_peer = 54;
- UpdateBuffer update_buffer = 55;
- UpdateBufferFile update_buffer_file = 56;
- SaveBuffer save_buffer = 57;
- BufferSaved buffer_saved = 58;
- BufferReloaded buffer_reloaded = 59;
- ReloadBuffers reload_buffers = 60;
- ReloadBuffersResponse reload_buffers_response = 61;
- FormatBuffers format_buffers = 62;
- FormatBuffersResponse format_buffers_response = 63;
- GetCompletions get_completions = 64;
- GetCompletionsResponse get_completions_response = 65;
- ApplyCompletionAdditionalEdits apply_completion_additional_edits = 66;
- ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 67;
- GetCodeActions get_code_actions = 68;
- GetCodeActionsResponse get_code_actions_response = 69;
- GetHover get_hover = 70;
- GetHoverResponse get_hover_response = 71;
- ApplyCodeAction apply_code_action = 72;
- ApplyCodeActionResponse apply_code_action_response = 73;
- PrepareRename prepare_rename = 74;
- PrepareRenameResponse prepare_rename_response = 75;
- PerformRename perform_rename = 76;
- PerformRenameResponse perform_rename_response = 77;
- SearchProject search_project = 78;
- SearchProjectResponse search_project_response = 79;
-
- GetChannels get_channels = 80;
- GetChannelsResponse get_channels_response = 81;
- JoinChannel join_channel = 82;
- JoinChannelResponse join_channel_response = 83;
- LeaveChannel leave_channel = 84;
- SendChannelMessage send_channel_message = 85;
- SendChannelMessageResponse send_channel_message_response = 86;
- ChannelMessageSent channel_message_sent = 87;
- GetChannelMessages get_channel_messages = 88;
- GetChannelMessagesResponse get_channel_messages_response = 89;
-
- UpdateContacts update_contacts = 90;
- UpdateInviteInfo update_invite_info = 91;
- ShowContacts show_contacts = 92;
-
- GetUsers get_users = 93;
- FuzzySearchUsers fuzzy_search_users = 94;
- UsersResponse users_response = 95;
- RequestContact request_contact = 96;
- RespondToContactRequest respond_to_contact_request = 97;
- RemoveContact remove_contact = 98;
-
- Follow follow = 99;
- FollowResponse follow_response = 100;
- UpdateFollowers update_followers = 101;
- Unfollow unfollow = 102;
+ LeaveRoom leave_room = 12;
+ Call call = 13;
+ IncomingCall incoming_call = 14;
+ CallCanceled call_canceled = 15;
+ CancelCall cancel_call = 16;
+ DeclineCall decline_call = 17;
+ UpdateParticipantLocation update_participant_location = 18;
+ RoomUpdated room_updated = 19;
+
+ ShareProject share_project = 20;
+ ShareProjectResponse share_project_response = 21;
+ UnshareProject unshare_project = 22;
+ JoinProject join_project = 23;
+ JoinProjectResponse join_project_response = 24;
+ LeaveProject leave_project = 25;
+ AddProjectCollaborator add_project_collaborator = 26;
+ RemoveProjectCollaborator remove_project_collaborator = 27;
+
+ GetDefinition get_definition = 28;
+ GetDefinitionResponse get_definition_response = 29;
+ GetTypeDefinition get_type_definition = 30;
+ GetTypeDefinitionResponse get_type_definition_response = 31;
+ GetReferences get_references = 32;
+ GetReferencesResponse get_references_response = 33;
+ GetDocumentHighlights get_document_highlights = 34;
+ GetDocumentHighlightsResponse get_document_highlights_response = 35;
+ GetProjectSymbols get_project_symbols = 36;
+ GetProjectSymbolsResponse get_project_symbols_response = 37;
+ OpenBufferForSymbol open_buffer_for_symbol = 38;
+ OpenBufferForSymbolResponse open_buffer_for_symbol_response = 39;
+
+ UpdateProject update_project = 40;
+ RegisterProjectActivity register_project_activity = 41;
+ UpdateWorktree update_worktree = 42;
+ UpdateWorktreeExtensions update_worktree_extensions = 43;
+
+ CreateProjectEntry create_project_entry = 44;
+ RenameProjectEntry rename_project_entry = 45;
+ CopyProjectEntry copy_project_entry = 46;
+ DeleteProjectEntry delete_project_entry = 47;
+ ProjectEntryResponse project_entry_response = 48;
+
+ UpdateDiagnosticSummary update_diagnostic_summary = 49;
+ StartLanguageServer start_language_server = 50;
+ UpdateLanguageServer update_language_server = 51;
+
+ OpenBufferById open_buffer_by_id = 52;
+ OpenBufferByPath open_buffer_by_path = 53;
+ OpenBufferResponse open_buffer_response = 54;
+ CreateBufferForPeer create_buffer_for_peer = 55;
+ UpdateBuffer update_buffer = 56;
+ UpdateBufferFile update_buffer_file = 57;
+ SaveBuffer save_buffer = 58;
+ BufferSaved buffer_saved = 59;
+ BufferReloaded buffer_reloaded = 60;
+ ReloadBuffers reload_buffers = 61;
+ ReloadBuffersResponse reload_buffers_response = 62;
+ FormatBuffers format_buffers = 63;
+ FormatBuffersResponse format_buffers_response = 64;
+ GetCompletions get_completions = 65;
+ GetCompletionsResponse get_completions_response = 66;
+ ApplyCompletionAdditionalEdits apply_completion_additional_edits = 67;
+ ApplyCompletionAdditionalEditsResponse apply_completion_additional_edits_response = 68;
+ GetCodeActions get_code_actions = 69;
+ GetCodeActionsResponse get_code_actions_response = 70;
+ GetHover get_hover = 71;
+ GetHoverResponse get_hover_response = 72;
+ ApplyCodeAction apply_code_action = 73;
+ ApplyCodeActionResponse apply_code_action_response = 74;
+ PrepareRename prepare_rename = 75;
+ PrepareRenameResponse prepare_rename_response = 76;
+ PerformRename perform_rename = 77;
+ PerformRenameResponse perform_rename_response = 78;
+ SearchProject search_project = 79;
+ SearchProjectResponse search_project_response = 80;
+
+ GetChannels get_channels = 81;
+ GetChannelsResponse get_channels_response = 82;
+ JoinChannel join_channel = 83;
+ JoinChannelResponse join_channel_response = 84;
+ LeaveChannel leave_channel = 85;
+ SendChannelMessage send_channel_message = 86;
+ SendChannelMessageResponse send_channel_message_response = 87;
+ ChannelMessageSent channel_message_sent = 88;
+ GetChannelMessages get_channel_messages = 89;
+ GetChannelMessagesResponse get_channel_messages_response = 90;
+
+ UpdateContacts update_contacts = 91;
+ UpdateInviteInfo update_invite_info = 92;
+ ShowContacts show_contacts = 93;
+
+ GetUsers get_users = 94;
+ FuzzySearchUsers fuzzy_search_users = 95;
+ UsersResponse users_response = 96;
+ RequestContact request_contact = 97;
+ RespondToContactRequest respond_to_contact_request = 98;
+ RemoveContact remove_contact = 99;
+
+ Follow follow = 100;
+ FollowResponse follow_response = 101;
+ UpdateFollowers update_followers = 102;
+ Unfollow unfollow = 103;
+ GetPrivateUserInfo get_private_user_info = 104;
+ GetPrivateUserInfoResponse get_private_user_info_response = 105;
+ UpdateDiffBase update_diff_base = 106;
}
}
@@ -795,6 +798,13 @@ message Unfollow {
uint32 leader_id = 2;
}
+message GetPrivateUserInfo {}
+
+message GetPrivateUserInfoResponse {
+ string metrics_id = 1;
+ bool staff = 2;
+}
+
// Entities
message UpdateActiveView {
@@ -868,7 +878,8 @@ message BufferState {
uint64 id = 1;
optional File file = 2;
string base_text = 3;
- LineEnding line_ending = 4;
+ optional string diff_base = 4;
+ LineEnding line_ending = 5;
}
message BufferChunk {
@@ -1032,3 +1043,9 @@ message WorktreeMetadata {
string root_name = 2;
bool visible = 3;
}
+
+message UpdateDiffBase {
+ uint64 project_id = 1;
+ uint64 buffer_id = 2;
+ optional string diff_base = 3;
+}
@@ -175,6 +175,9 @@ messages!(
(UpdateProject, Foreground),
(UpdateWorktree, Foreground),
(UpdateWorktreeExtensions, Background),
+ (UpdateDiffBase, Background),
+ (GetPrivateUserInfo, Foreground),
+ (GetPrivateUserInfoResponse, Foreground),
);
request_messages!(
@@ -201,6 +204,7 @@ request_messages!(
(GetTypeDefinition, GetTypeDefinitionResponse),
(GetDocumentHighlights, GetDocumentHighlightsResponse),
(GetReferences, GetReferencesResponse),
+ (GetPrivateUserInfo, GetPrivateUserInfoResponse),
(GetProjectSymbols, GetProjectSymbolsResponse),
(FuzzySearchUsers, UsersResponse),
(GetUsers, UsersResponse),
@@ -274,6 +278,7 @@ entity_messages!(
UpdateProject,
UpdateWorktree,
UpdateWorktreeExtensions,
+ UpdateDiffBase
);
entity_messages!(channel_id, ChannelMessageSent);
@@ -6,4 +6,4 @@ pub use conn::Connection;
pub use peer::*;
mod macros;
-pub const PROTOCOL_VERSION: u32 = 32;
+pub const PROTOCOL_VERSION: u32 = 35;
@@ -32,6 +32,8 @@ pub struct Settings {
pub default_dock_anchor: DockAnchor,
pub editor_defaults: EditorSettings,
pub editor_overrides: EditorSettings,
+ pub git: GitSettings,
+ pub git_overrides: GitSettings,
pub terminal_defaults: TerminalSettings,
pub terminal_overrides: TerminalSettings,
pub language_defaults: HashMap<Arc<str>, EditorSettings>,
@@ -52,6 +54,22 @@ impl FeatureFlags {
}
}
+#[derive(Copy, Clone, Debug, Default, Deserialize, JsonSchema)]
+pub struct GitSettings {
+ pub git_gutter: Option<GitGutter>,
+ pub gutter_debounce: Option<u64>,
+}
+
+#[derive(Clone, Copy, Debug, Default, Deserialize, JsonSchema)]
+#[serde(rename_all = "snake_case")]
+pub enum GitGutter {
+ #[default]
+ TrackedFiles,
+ Hide,
+}
+
+pub struct GitGutterConfig {}
+
#[derive(Clone, Debug, Default, Deserialize, JsonSchema)]
pub struct EditorSettings {
pub tab_size: Option<NonZeroU32>,
@@ -196,6 +214,8 @@ pub struct SettingsFileContent {
#[serde(default)]
pub terminal: TerminalSettings,
#[serde(default)]
+ pub git: Option<GitSettings>,
+ #[serde(default)]
#[serde(alias = "language_overrides")]
pub languages: HashMap<Arc<str>, EditorSettings>,
#[serde(default)]
@@ -252,6 +272,8 @@ impl Settings {
enable_language_server: required(defaults.editor.enable_language_server),
},
editor_overrides: Default::default(),
+ git: defaults.git.unwrap(),
+ git_overrides: Default::default(),
terminal_defaults: Default::default(),
terminal_overrides: Default::default(),
language_defaults: defaults.languages,
@@ -303,6 +325,7 @@ impl Settings {
}
self.editor_overrides = data.editor;
+ self.git_overrides = data.git.unwrap_or_default();
self.terminal_defaults.font_size = data.terminal.font_size;
self.terminal_overrides = data.terminal;
self.language_overrides = data.languages;
@@ -358,6 +381,14 @@ impl Settings {
.expect("missing default")
}
+ pub fn git_gutter(&self) -> GitGutter {
+ self.git_overrides.git_gutter.unwrap_or_else(|| {
+ self.git
+ .git_gutter
+ .expect("git_gutter should be some by setting setup")
+ })
+ }
+
#[cfg(any(test, feature = "test-support"))]
pub fn test(cx: &gpui::AppContext) -> Settings {
Settings {
@@ -382,6 +413,8 @@ impl Settings {
editor_overrides: Default::default(),
terminal_defaults: Default::default(),
terminal_overrides: Default::default(),
+ git: Default::default(),
+ git_overrides: Default::default(),
language_defaults: Default::default(),
language_overrides: Default::default(),
lsp: Default::default(),
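
As a rough illustration (not part of the diff), the new `git` section of a user settings file would be expected to look something like the following, given the snake_case serde names on `GitSettings` and `GitGutter` above; the concrete values are assumptions:

```rust
// Sketch only: builds the assumed JSON shape of the new "git" settings section.
use serde_json::json;

fn example_git_settings() -> serde_json::Value {
    json!({
        "git": {
            // GitGutter::TrackedFiles (the default) or GitGutter::Hide
            "git_gutter": "tracked_files",
            // Debounce for gutter recalculation, in milliseconds; the
            // workspace code clamps user-provided values to a 50ms minimum.
            "gutter_debounce": 300
        }
    })
}
```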
@@ -101,6 +101,12 @@ pub enum Bias {
Right,
}
+impl Default for Bias {
+ fn default() -> Self {
+ Bias::Left
+ }
+}
+
impl PartialOrd for Bias {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
@@ -618,8 +618,34 @@ impl Terminal {
term.resize(new_size);
}
InternalEvent::Clear => {
- self.write_to_pty("\x0c".to_string());
+ // Clear back buffer
term.clear_screen(ClearMode::Saved);
+
+ let cursor = term.grid().cursor.point;
+
+ // Clear the lines above
+ term.grid_mut().reset_region(..cursor.line);
+
+ // Copy the current line up
+ let line = term.grid()[cursor.line][..cursor.column]
+ .iter()
+ .cloned()
+ .enumerate()
+ .collect::<Vec<(usize, Cell)>>();
+
+ for (i, cell) in line {
+ term.grid_mut()[Line(0)][Column(i)] = cell;
+ }
+
+ // Reset the cursor
+ term.grid_mut().cursor.point =
+ Point::new(Line(0), term.grid_mut().cursor.point.column);
+ let new_cursor = term.grid().cursor.point;
+
+ // Clear the lines below the new cursor
+ if (new_cursor.line.0 as usize) < term.screen_lines() - 1 {
+ term.grid_mut().reset_region((new_cursor.line + 1)..);
+ }
}
InternalEvent::Scroll(scroll) => {
term.scroll_display(*scroll);
@@ -680,12 +680,12 @@ impl Element for TerminalElement {
let focused = self.focused;
TerminalElement::shape_cursor(cursor_point, dimensions, &cursor_text).map(
move |(cursor_position, block_width)| {
- let shape = match cursor.shape {
- AlacCursorShape::Block if !focused => CursorShape::Hollow,
- AlacCursorShape::Block => CursorShape::Block,
- AlacCursorShape::Underline => CursorShape::Underscore,
- AlacCursorShape::Beam => CursorShape::Bar,
- AlacCursorShape::HollowBlock => CursorShape::Hollow,
+ let (shape, text) = match cursor.shape {
+ AlacCursorShape::Block if !focused => (CursorShape::Hollow, None),
+ AlacCursorShape::Block => (CursorShape::Block, Some(cursor_text)),
+ AlacCursorShape::Underline => (CursorShape::Underscore, None),
+ AlacCursorShape::Beam => (CursorShape::Bar, None),
+ AlacCursorShape::HollowBlock => (CursorShape::Hollow, None),
//This case is handled in the if wrapping the whole cursor layout
AlacCursorShape::Hidden => unreachable!(),
};
@@ -696,7 +696,7 @@ impl Element for TerminalElement {
dimensions.line_height,
terminal_theme.colors.cursor,
shape,
- Some(cursor_text),
+ text,
)
},
)
@@ -4,7 +4,7 @@ use anyhow::Result;
use std::{cmp::Ordering, fmt::Debug, ops::Range};
use sum_tree::Bias;
-#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash)]
+#[derive(Copy, Clone, Eq, PartialEq, Debug, Hash, Default)]
pub struct Anchor {
pub timestamp: clock::Local,
pub offset: usize,
@@ -54,6 +54,13 @@ impl Rope {
cursor.slice(range.end)
}
+ pub fn slice_rows(&self, range: Range<u32>) -> Rope {
+ //This would be more efficient with a forward advance after the first, but it's fine
+ let start = self.point_to_offset(Point::new(range.start, 0));
+ let end = self.point_to_offset(Point::new(range.end, 0));
+ self.slice(start..end)
+ }
+
pub fn push(&mut self, text: &str) {
let mut new_chunks = SmallVec::<[_; 16]>::new();
let mut new_chunk = ArrayString::new();
@@ -510,8 +510,7 @@ pub struct Editor {
pub rename_fade: f32,
pub document_highlight_read_background: Color,
pub document_highlight_write_background: Color,
- pub diff_background_deleted: Color,
- pub diff_background_inserted: Color,
+ pub diff: DiffStyle,
pub line_number: Color,
pub line_number_active: Color,
pub guest_selections: Vec<SelectionStyle>,
@@ -595,6 +594,16 @@ pub struct CodeActions {
pub vertical_scale: f32,
}
+#[derive(Clone, Deserialize, Default)]
+pub struct DiffStyle {
+ pub inserted: Color,
+ pub modified: Color,
+ pub deleted: Color,
+ pub removed_width_em: f32,
+ pub width_em: f32,
+ pub corner_radius: f32,
+}
+
#[derive(Debug, Default, Clone, Copy)]
pub struct Interactive<T> {
pub default: T,
@@ -7,17 +7,21 @@ edition = "2021"
doctest = false
[features]
-test-support = ["rand", "serde_json", "tempdir"]
+test-support = ["rand", "serde_json", "tempdir", "git2"]
[dependencies]
anyhow = "1.0.38"
futures = "0.3"
log = { version = "0.4.16", features = ["kv_unstable_serde"] }
+lazy_static = "1.4.0"
rand = { version = "0.8", optional = true }
tempdir = { version = "0.3.7", optional = true }
serde_json = { version = "1.0", features = ["preserve_order"], optional = true }
+git2 = { version = "0.15", default-features = false, optional = true }
+
[dev-dependencies]
rand = { version = "0.8" }
tempdir = { version = "0.3.7" }
serde_json = { version = "1.0", features = ["preserve_order"] }
+git2 = { version = "0.15", default-features = false }
@@ -1,7 +1,11 @@
mod assertions;
mod marked_text;
-use std::path::{Path, PathBuf};
+use git2;
+use std::{
+ ffi::OsStr,
+ path::{Path, PathBuf},
+};
use tempdir::TempDir;
pub use assertions::*;
@@ -24,6 +28,11 @@ fn write_tree(path: &Path, tree: serde_json::Value) {
match contents {
Value::Object(_) => {
fs::create_dir(&path).unwrap();
+
+ if path.file_name() == Some(&OsStr::new(".git")) {
+ git2::Repository::init(&path.parent().unwrap()).unwrap();
+ }
+
write_tree(&path, contents);
}
Value::Null => {
@@ -46,7 +46,6 @@ use std::{
cell::RefCell,
fmt,
future::Future,
- mem,
path::{Path, PathBuf},
rc::Rc,
sync::{
@@ -295,7 +294,23 @@ pub trait Item: View {
project: ModelHandle<Project>,
cx: &mut ViewContext<Self>,
) -> Task<Result<()>>;
+ fn git_diff_recalc(
+ &mut self,
+ _project: ModelHandle<Project>,
+ _cx: &mut ViewContext<Self>,
+ ) -> Task<Result<()>> {
+ Task::ready(Ok(()))
+ }
fn to_item_events(event: &Self::Event) -> Vec<ItemEvent>;
+ fn should_close_item_on_event(_: &Self::Event) -> bool {
+ false
+ }
+ fn should_update_tab_on_event(_: &Self::Event) -> bool {
+ false
+ }
+ fn is_edit_event(_: &Self::Event) -> bool {
+ false
+ }
fn act_as_type(
&self,
type_id: TypeId,
@@ -412,6 +427,57 @@ impl<T: FollowableItem> FollowableItemHandle for ViewHandle<T> {
}
}
+struct DelayedDebouncedEditAction {
+ task: Option<Task<()>>,
+ cancel_channel: Option<oneshot::Sender<()>>,
+}
+
+impl DelayedDebouncedEditAction {
+ fn new() -> DelayedDebouncedEditAction {
+ DelayedDebouncedEditAction {
+ task: None,
+ cancel_channel: None,
+ }
+ }
+
+ fn fire_new<F, Fut>(
+ &mut self,
+ delay: Duration,
+ workspace: &Workspace,
+ cx: &mut ViewContext<Workspace>,
+ f: F,
+ ) where
+ F: FnOnce(ModelHandle<Project>, AsyncAppContext) -> Fut + 'static,
+ Fut: 'static + Future<Output = ()>,
+ {
+ if let Some(channel) = self.cancel_channel.take() {
+ _ = channel.send(());
+ }
+
+ let project = workspace.project().downgrade();
+
+ let (sender, mut receiver) = oneshot::channel::<()>();
+ self.cancel_channel = Some(sender);
+
+ let previous_task = self.task.take();
+ self.task = Some(cx.spawn_weak(|_, cx| async move {
+ let mut timer = cx.background().timer(delay).fuse();
+ if let Some(previous_task) = previous_task {
+ previous_task.await;
+ }
+
+ futures::select_biased! {
+ _ = receiver => return,
+ _ = timer => {}
+ }
+
+ if let Some(project) = project.upgrade(&cx) {
+ (f)(project, cx).await;
+ }
+ }));
+ }
+}
+
pub trait ItemHandle: 'static + fmt::Debug {
fn subscribe_to_item_events(
&self,
@@ -450,6 +516,11 @@ pub trait ItemHandle: 'static + fmt::Debug {
) -> Task<Result<()>>;
fn reload(&self, project: ModelHandle<Project>, cx: &mut MutableAppContext)
-> Task<Result<()>>;
+ fn git_diff_recalc(
+ &self,
+ project: ModelHandle<Project>,
+ cx: &mut MutableAppContext,
+ ) -> Task<Result<()>>;
fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option<AnyViewHandle>;
fn to_followable_item_handle(&self, cx: &AppContext) -> Option<Box<dyn FollowableItemHandle>>;
fn on_release(
@@ -555,8 +626,8 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
.insert(self.id(), pane.downgrade())
.is_none()
{
- let mut pending_autosave = None;
- let mut cancel_pending_autosave = oneshot::channel::<()>().0;
+ let mut pending_autosave = DelayedDebouncedEditAction::new();
+ let mut pending_git_update = DelayedDebouncedEditAction::new();
let pending_update = Rc::new(RefCell::new(None));
let pending_update_scheduled = Rc::new(AtomicBool::new(false));
@@ -614,45 +685,66 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
.detach_and_log_err(cx);
return;
}
+
ItemEvent::UpdateTab => {
pane.update(cx, |_, cx| {
cx.emit(pane::Event::ChangeItemTitle);
cx.notify();
});
}
+
ItemEvent::Edit => {
if let Autosave::AfterDelay { milliseconds } =
cx.global::<Settings>().autosave
{
- let prev_autosave = pending_autosave
- .take()
- .unwrap_or_else(|| Task::ready(Some(())));
- let (cancel_tx, mut cancel_rx) = oneshot::channel::<()>();
- let prev_cancel_tx =
- mem::replace(&mut cancel_pending_autosave, cancel_tx);
- let project = workspace.project.downgrade();
- let _ = prev_cancel_tx.send(());
+ let delay = Duration::from_millis(milliseconds);
let item = item.clone();
- pending_autosave =
- Some(cx.spawn_weak(|_, mut cx| async move {
- let mut timer = cx
- .background()
- .timer(Duration::from_millis(milliseconds))
- .fuse();
- prev_autosave.await;
- futures::select_biased! {
- _ = cancel_rx => return None,
- _ = timer => {}
- }
-
- let project = project.upgrade(&cx)?;
+ pending_autosave.fire_new(
+ delay,
+ workspace,
+ cx,
+ |project, mut cx| async move {
cx.update(|cx| Pane::autosave_item(&item, project, cx))
.await
.log_err();
- None
- }));
+ },
+ );
+ }
+
+ let settings = cx.global::<Settings>();
+ let debounce_delay = settings.git_overrides.gutter_debounce;
+
+ let item = item.clone();
+
+ if let Some(delay) = debounce_delay {
+ const MIN_GIT_DELAY: u64 = 50;
+
+ let delay = delay.max(MIN_GIT_DELAY);
+ let duration = Duration::from_millis(delay);
+
+ pending_git_update.fire_new(
+ duration,
+ workspace,
+ cx,
+ |project, mut cx| async move {
+ cx.update(|cx| item.git_diff_recalc(project, cx))
+ .await
+ .log_err();
+ },
+ );
+ } else {
+ let project = workspace.project().downgrade();
+ cx.spawn_weak(|_, mut cx| async move {
+ if let Some(project) = project.upgrade(&cx) {
+ cx.update(|cx| item.git_diff_recalc(project, cx))
+ .await
+ .log_err();
+ }
+ })
+ .detach();
}
}
+
_ => {}
}
}
@@ -732,6 +824,14 @@ impl<T: Item> ItemHandle for ViewHandle<T> {
self.update(cx, |item, cx| item.reload(project, cx))
}
+ fn git_diff_recalc(
+ &self,
+ project: ModelHandle<Project>,
+ cx: &mut MutableAppContext,
+ ) -> Task<Result<()>> {
+ self.update(cx, |item, cx| item.git_diff_recalc(project, cx))
+ }
+
fn act_as_type(&self, type_id: TypeId, cx: &AppContext) -> Option<AnyViewHandle> {
self.read(cx).act_as_type(type_id, self, cx)
}
@@ -833,7 +933,7 @@ impl AppState {
let fs = project::FakeFs::new(cx.background().clone());
let languages = Arc::new(LanguageRegistry::test());
let http_client = client::test::FakeHttpClient::with_404_response();
- let client = Client::new(http_client.clone());
+ let client = Client::new(http_client.clone(), cx);
let project_store = cx.add_model(|_| ProjectStore::new());
let user_store = cx.add_model(|cx| UserStore::new(client.clone(), http_client, cx));
let themes = ThemeRegistry::new((), cx.font_cache().clone());
@@ -3,7 +3,7 @@ authors = ["Nathan Sobo <nathansobo@gmail.com>"]
description = "The fast, collaborative code editor."
edition = "2021"
name = "zed"
-version = "0.55.0"
+version = "0.59.0"
[lib]
name = "zed"
@@ -92,6 +92,7 @@ toml = "0.5"
tree-sitter = "0.20"
tree-sitter-c = "0.20.1"
tree-sitter-cpp = "0.20.0"
+tree-sitter-css = { git = "https://github.com/tree-sitter/tree-sitter-css", rev = "769203d0f9abe1a9a691ac2b9fe4bb4397a73c51" }
tree-sitter-elixir = { git = "https://github.com/elixir-lang/tree-sitter-elixir", rev = "05e3631c6a0701c1fa518b0fee7be95a2ceef5e2" }
tree-sitter-go = { git = "https://github.com/tree-sitter/tree-sitter-go", rev = "aeb2f33b366fd78d5789ff104956ce23508b85db" }
tree-sitter-json = { git = "https://github.com/tree-sitter/tree-sitter-json", rev = "137e1ce6a02698fc246cdb9c6b886ed1de9a1ed8" }
@@ -100,6 +101,7 @@ tree-sitter-markdown = { git = "https://github.com/MDeiml/tree-sitter-markdown",
tree-sitter-python = "0.20.2"
tree-sitter-toml = { git = "https://github.com/tree-sitter/tree-sitter-toml", rev = "342d9be207c2dba869b9967124c679b5e6fd0ebe" }
tree-sitter-typescript = "0.20.1"
+tree-sitter-html = "0.19.0"
url = "2.2"
[dev-dependencies]
@@ -3,6 +3,10 @@ use std::process::Command;
fn main() {
println!("cargo:rustc-env=MACOSX_DEPLOYMENT_TARGET=10.14");
+ if let Ok(api_key) = std::env::var("ZED_AMPLITUDE_API_KEY") {
+ println!("cargo:rustc-env=ZED_AMPLITUDE_API_KEY={api_key}");
+ }
+
let output = Command::new("npm")
.current_dir("../../styles")
.args(["install", "--no-save"])
@@ -7,6 +7,7 @@ use std::{borrow::Cow, str, sync::Arc};
mod c;
mod elixir;
mod go;
+mod html;
mod installation;
mod json;
mod language_plugin;
@@ -46,6 +47,11 @@ pub async fn init(languages: Arc<LanguageRegistry>, _executor: Arc<Background>)
tree_sitter_cpp::language(),
Some(CachedLspAdapter::new(c::CLspAdapter).await),
),
+ (
+ "css",
+ tree_sitter_css::language(),
+ None, //
+ ),
(
"elixir",
tree_sitter_elixir::language(),
@@ -96,8 +102,13 @@ pub async fn init(languages: Arc<LanguageRegistry>, _executor: Arc<Background>)
tree_sitter_typescript::language_tsx(),
Some(CachedLspAdapter::new(typescript::TypeScriptLspAdapter).await),
),
+ (
+ "html",
+ tree_sitter_html::language(),
+ Some(CachedLspAdapter::new(html::HtmlLspAdapter).await),
+ ),
] {
- languages.add(Arc::new(language(name, grammar, lsp_adapter)));
+ languages.add(language(name, grammar, lsp_adapter));
}
}
@@ -105,7 +116,7 @@ pub(crate) fn language(
name: &str,
grammar: tree_sitter::Language,
lsp_adapter: Option<Arc<CachedLspAdapter>>,
-) -> Language {
+) -> Arc<Language> {
let config = toml::from_slice(
&LanguageDir::get(&format!("{}/config.toml", name))
.unwrap()
@@ -142,7 +153,7 @@ pub(crate) fn language(
if let Some(lsp_adapter) = lsp_adapter {
language = language.with_lsp_adapter(lsp_adapter)
}
- language
+ Arc::new(language)
}
fn load_query(name: &str, filename_prefix: &str) -> Option<Cow<'static, str>> {
@@ -112,7 +112,7 @@ impl super::LspAdapter for CLspAdapter {
async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
let label = completion
.label
@@ -190,7 +190,7 @@ impl super::LspAdapter for CLspAdapter {
&self,
name: &str,
kind: lsp::SymbolKind,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@@ -251,7 +251,6 @@ mod tests {
use gpui::MutableAppContext;
use language::{AutoindentMode, Buffer};
use settings::Settings;
- use std::sync::Arc;
#[gpui::test]
fn test_c_autoindent(cx: &mut MutableAppContext) {
@@ -262,7 +261,7 @@ mod tests {
let language = crate::languages::language("c", tree_sitter_c::language(), None);
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx);
+ let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
// empty function
buffer.edit([(0..0, "int main() {}")], None, cx);
@@ -86,7 +86,7 @@
(identifier) @variable
((identifier) @constant
- (#match? @constant "^[A-Z][A-Z\\d_]*$"))
+ (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
(call_expression
function: (identifier) @function)
@@ -37,11 +37,11 @@
(type_identifier) @type
((identifier) @constant
- (#match? @constant "^[A-Z][A-Z\\d_]*$"))
+ (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
(field_identifier) @property
(statement_identifier) @label
-(this) @variable.builtin
+(this) @variable.special
[
"break"
@@ -0,0 +1,3 @@
+("(" @open ")" @close)
+("[" @open "]" @close)
+("{" @open "}" @close)
@@ -0,0 +1,9 @@
+name = "CSS"
+path_suffixes = ["css"]
+autoclose_before = ";:.,=}])>"
+brackets = [
+ { start = "{", end = "}", close = true, newline = true },
+ { start = "[", end = "]", close = true, newline = true },
+ { start = "(", end = ")", close = true, newline = true },
+ { start = "\"", end = "\"", close = true, newline = false }
+]
@@ -0,0 +1,78 @@
+(comment) @comment
+
+[
+ (tag_name)
+ (nesting_selector)
+ (universal_selector)
+] @tag
+
+[
+ "~"
+ ">"
+ "+"
+ "-"
+ "*"
+ "/"
+ "="
+ "^="
+ "|="
+ "~="
+ "$="
+ "*="
+ "and"
+ "or"
+ "not"
+ "only"
+] @operator
+
+(attribute_selector (plain_value) @string)
+
+(attribute_name) @attribute
+(pseudo_element_selector (tag_name) @attribute)
+(pseudo_class_selector (class_name) @attribute)
+
+[
+ (class_name)
+ (id_name)
+ (namespace_name)
+ (property_name)
+ (feature_name)
+] @property
+
+(function_name) @function
+
+(
+ [
+ (property_name)
+ (plain_value)
+ ] @variable.special
+ (#match? @variable.special "^--")
+)
+
+[
+ "@media"
+ "@import"
+ "@charset"
+ "@namespace"
+ "@supports"
+ "@keyframes"
+ (at_keyword)
+ (to)
+ (from)
+ (important)
+] @keyword
+
+(string_value) @string
+(color_value) @string.special
+
+[
+ (integer_value)
+ (float_value)
+] @number
+
+(unit) @type
+
+[
+ ","
+ ":"
+] @punctuation.delimiter
@@ -0,0 +1 @@
+(_ "{" "}" @end) @indent
@@ -113,7 +113,7 @@ impl LspAdapter for ElixirLspAdapter {
async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
match completion.kind.zip(completion.detail.as_ref()) {
Some((_, detail)) if detail.starts_with("(function)") => {
@@ -168,7 +168,7 @@ impl LspAdapter for ElixirLspAdapter {
&self,
name: &str,
kind: SymbolKind,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind {
SymbolKind::METHOD | SymbolKind::FUNCTION => {
@@ -134,7 +134,7 @@ impl super::LspAdapter for GoLspAdapter {
async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
let label = &completion.label;
@@ -235,7 +235,7 @@ impl super::LspAdapter for GoLspAdapter {
&self,
name: &str,
kind: lsp::SymbolKind,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@@ -0,0 +1,101 @@
+use super::installation::{npm_install_packages, npm_package_latest_version};
+use anyhow::{anyhow, Context, Result};
+use async_trait::async_trait;
+use client::http::HttpClient;
+use futures::StreamExt;
+use language::{LanguageServerName, LspAdapter};
+use serde_json::json;
+use smol::fs;
+use std::{any::Any, path::PathBuf, sync::Arc};
+use util::ResultExt;
+
+pub struct HtmlLspAdapter;
+
+impl HtmlLspAdapter {
+ const BIN_PATH: &'static str =
+ "node_modules/vscode-langservers-extracted/bin/vscode-html-language-server";
+}
+
+#[async_trait]
+impl LspAdapter for HtmlLspAdapter {
+ async fn name(&self) -> LanguageServerName {
+ LanguageServerName("vscode-html-language-server".into())
+ }
+
+ async fn server_args(&self) -> Vec<String> {
+ vec!["--stdio".into()]
+ }
+
+ async fn fetch_latest_server_version(
+ &self,
+ _: Arc<dyn HttpClient>,
+ ) -> Result<Box<dyn 'static + Any + Send>> {
+ Ok(Box::new(npm_package_latest_version("vscode-langservers-extracted").await?) as Box<_>)
+ }
+
+ async fn fetch_server_binary(
+ &self,
+ version: Box<dyn 'static + Send + Any>,
+ _: Arc<dyn HttpClient>,
+ container_dir: PathBuf,
+ ) -> Result<PathBuf> {
+ let version = version.downcast::<String>().unwrap();
+ let version_dir = container_dir.join(version.as_str());
+ fs::create_dir_all(&version_dir)
+ .await
+ .context("failed to create version directory")?;
+ let binary_path = version_dir.join(Self::BIN_PATH);
+
+ if fs::metadata(&binary_path).await.is_err() {
+ npm_install_packages(
+ [("vscode-langservers-extracted", version.as_str())],
+ &version_dir,
+ )
+ .await?;
+
+ if let Some(mut entries) = fs::read_dir(&container_dir).await.log_err() {
+ while let Some(entry) = entries.next().await {
+ if let Some(entry) = entry.log_err() {
+ let entry_path = entry.path();
+ if entry_path.as_path() != version_dir {
+ fs::remove_dir_all(&entry_path).await.log_err();
+ }
+ }
+ }
+ }
+ }
+
+ Ok(binary_path)
+ }
+
+ async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<PathBuf> {
+ (|| async move {
+ let mut last_version_dir = None;
+ let mut entries = fs::read_dir(&container_dir).await?;
+ while let Some(entry) = entries.next().await {
+ let entry = entry?;
+ if entry.file_type().await?.is_dir() {
+ last_version_dir = Some(entry.path());
+ }
+ }
+ let last_version_dir = last_version_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let bin_path = last_version_dir.join(Self::BIN_PATH);
+ if bin_path.exists() {
+ Ok(bin_path)
+ } else {
+ Err(anyhow!(
+ "missing executable in directory {:?}",
+ last_version_dir
+ ))
+ }
+ })()
+ .await
+ .log_err()
+ }
+
+ async fn initialization_options(&self) -> Option<serde_json::Value> {
+ Some(json!({
+ "provideFormatter": true
+ }))
+ }
+}
@@ -0,0 +1,2 @@
+("<" @open ">" @close)
+("\"" @open "\"" @close)
@@ -0,0 +1,12 @@
+name = "HTML"
+path_suffixes = ["html"]
+autoclose_before = ">})"
+brackets = [
+ { start = "<", end = ">", close = true, newline = true },
+ { start = "{", end = "}", close = true, newline = true },
+ { start = "(", end = ")", close = true, newline = true },
+ { start = "\"", end = "\"", close = true, newline = false },
+ { start = "!--", end = " --", close = true, newline = false },
+]
+
+block_comment = ["<!-- ", " -->"]
@@ -0,0 +1,15 @@
+(tag_name) @keyword
+(erroneous_end_tag_name) @keyword
+(doctype) @constant
+(attribute_name) @property
+(attribute_value) @string
+(comment) @comment
+
+"=" @operator
+
+[
+ "<"
+ ">"
+ "</"
+ "/>"
+] @punctuation.bracket
@@ -0,0 +1,6 @@
+(start_tag ">" @end) @indent
+(self_closing_tag "/>" @end) @indent
+
+(element
+ (start_tag) @start
+ (end_tag)? @end) @indent
@@ -0,0 +1,7 @@
+(script_element
+ (raw_text) @content
+ (#set! "language" "javascript"))
+
+(style_element
+ (raw_text) @content
+ (#set! "language" "css"))
@@ -51,12 +51,12 @@
(shorthand_property_identifier)
(shorthand_property_identifier_pattern)
] @constant
- (#match? @constant "^[A-Z_][A-Z\\d_]+$"))
+ (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
; Literals
-(this) @variable.builtin
-(super) @variable.builtin
+(this) @variable.special
+(super) @variable.special
[
(true)
@@ -90,7 +90,7 @@ impl LspAdapter for PythonLspAdapter {
async fn label_for_completion(
&self,
item: &lsp::CompletionItem,
- language: &language::Language,
+ language: &Arc<language::Language>,
) -> Option<language::CodeLabel> {
let label = &item.label;
let grammar = language.grammar()?;
@@ -112,7 +112,7 @@ impl LspAdapter for PythonLspAdapter {
&self,
name: &str,
kind: lsp::SymbolKind,
- language: &language::Language,
+ language: &Arc<language::Language>,
) -> Option<language::CodeLabel> {
let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@@ -149,7 +149,6 @@ mod tests {
use gpui::{ModelContext, MutableAppContext};
use language::{AutoindentMode, Buffer};
use settings::Settings;
- use std::sync::Arc;
#[gpui::test]
fn test_python_autoindent(cx: &mut MutableAppContext) {
@@ -160,7 +159,7 @@ mod tests {
cx.set_global(settings);
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx);
+ let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
let append = |buffer: &mut Buffer, text: &str, cx: &mut ModelContext<Buffer>| {
let ix = buffer.len();
buffer.edit([(ix..ix, text)], Some(AutoindentMode::EachLine), cx);
@@ -21,7 +21,7 @@
(#match? @type "^[A-Z]"))
((identifier) @constant
- (#match? @constant "^[A-Z][A-Z_]*$"))
+ (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
; Builtin functions
@@ -119,7 +119,7 @@ impl LspAdapter for RustLspAdapter {
async fn label_for_completion(
&self,
completion: &lsp::CompletionItem,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
match completion.kind {
Some(lsp::CompletionItemKind::FIELD) if completion.detail.is_some() => {
@@ -196,7 +196,7 @@ impl LspAdapter for RustLspAdapter {
&self,
name: &str,
kind: lsp::SymbolKind,
- language: &Language,
+ language: &Arc<Language>,
) -> Option<CodeLabel> {
let (text, filter_range, display_range) = match kind {
lsp::SymbolKind::METHOD | lsp::SymbolKind::FUNCTION => {
@@ -439,7 +439,7 @@ mod tests {
cx.set_global(settings);
cx.add_model(|cx| {
- let mut buffer = Buffer::new(0, "", cx).with_language(Arc::new(language), cx);
+ let mut buffer = Buffer::new(0, "", cx).with_language(language, cx);
// indent between braces
buffer.set_text("fn a() {}", cx);
@@ -1,6 +1,6 @@
(type_identifier) @type
(primitive_type) @type.builtin
-(self) @variable.builtin
+(self) @variable.special
(field_identifier) @property
(call_expression
@@ -27,22 +27,13 @@
; Identifier conventions
-; Assume uppercase names are enum constructors
-((identifier) @variant
- (#match? @variant "^[A-Z]"))
-
-; Assume that uppercase names in paths are types
-((scoped_identifier
- path: (identifier) @type)
- (#match? @type "^[A-Z]"))
-((scoped_identifier
- path: (scoped_identifier
- name: (identifier) @type))
+; Assume uppercase names are types/enum-constructors
+((identifier) @type
(#match? @type "^[A-Z]"))
; Assume all-caps names are constants
((identifier) @constant
- (#match? @constant "^[A-Z][A-Z\\d_]+$"))
+ (#match? @constant "^_*[A-Z][A-Z\\d_]*$"))
[
"("
@@ -115,7 +115,7 @@ impl LspAdapter for TypeScriptLspAdapter {
async fn label_for_completion(
&self,
item: &lsp::CompletionItem,
- language: &language::Language,
+ language: &Arc<language::Language>,
) -> Option<language::CodeLabel> {
use lsp::CompletionItemKind as Kind;
let len = item.label.len();
@@ -144,7 +144,6 @@ impl LspAdapter for TypeScriptLspAdapter {
#[cfg(test)]
mod tests {
- use std::sync::Arc;
use gpui::MutableAppContext;
use unindent::Unindent;
@@ -172,9 +171,8 @@ mod tests {
"#
.unindent();
- let buffer = cx.add_model(|cx| {
- language::Buffer::new(0, text, cx).with_language(Arc::new(language), cx)
- });
+ let buffer =
+ cx.add_model(|cx| language::Buffer::new(0, text, cx).with_language(language, cx));
let outline = buffer.read(cx).snapshot().outline(None).unwrap();
assert_eq!(
outline
@@ -51,12 +51,12 @@
(shorthand_property_identifier)
(shorthand_property_identifier_pattern)
] @constant
- (#match? @constant "^[A-Z_][A-Z\\d_]+$"))
+ (#match? @constant "^_*[A-Z_][A-Z\\d_]*$"))
; Literals
-(this) @variable.builtin
-(super) @variable.builtin
+(this) @variable.special
+(super) @variable.special
[
(true)
@@ -20,7 +20,7 @@ use futures::{
FutureExt, SinkExt, StreamExt,
};
use gpui::{executor::Background, App, AssetSource, AsyncAppContext, Task, ViewContext};
-use isahc::{config::Configurable, AsyncBody, Request};
+use isahc::{config::Configurable, Request};
use language::LanguageRegistry;
use log::LevelFilter;
use parking_lot::Mutex;
@@ -88,7 +88,7 @@ fn main() {
});
app.run(move |cx| {
- let client = client::Client::new(http.clone());
+ let client = client::Client::new(http.clone(), cx);
let mut languages = LanguageRegistry::new(login_shell_env_loaded);
languages.set_language_server_download_dir(zed::paths::LANGUAGES_DIR.clone());
let languages = Arc::new(languages);
@@ -120,7 +120,6 @@ fn main() {
vim::init(cx);
terminal::init(cx);
- let db = cx.background().block(db);
cx.spawn(|cx| watch_themes(fs.clone(), themes.clone(), cx))
.detach();
@@ -139,6 +138,10 @@ fn main() {
.detach();
let project_store = cx.add_model(|_| ProjectStore::new());
+ let db = cx.background().block(db);
+ client.start_telemetry(db.clone());
+ client.report_event("start app", Default::default());
+
let app_state = Arc::new(AppState {
languages,
themes,
@@ -280,12 +283,10 @@ fn init_panic_hook(app_version: String, http: Arc<dyn HttpClient>, background: A
"token": ZED_SECRET_CLIENT_TOKEN,
}))
.unwrap();
- let request = Request::builder()
- .uri(&panic_report_url)
- .method(http::Method::POST)
+ let request = Request::post(&panic_report_url)
.redirect_policy(isahc::config::RedirectPolicy::Follow)
.header("Content-Type", "application/json")
- .body(AsyncBody::from(body))?;
+ .body(body.into())?;
let response = http.send(request).await.context("error sending panic")?;
if response.status().is_success() {
fs::remove_file(child_path)
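The panic-report request switches from the long-form builder to the `http` crate's `Request::post` shorthand, and the explicit `AsyncBody` import is dropped because the body type is inferred from the request's type. A minimal standalone sketch of the same shape, assuming isahc 1.x (the URL in the usage example is a placeholder):

```rust
// Sketch of the shortened builder used above: Request::post comes from the
// re-exported http crate, redirect_policy from isahc's Configurable extension
// trait, and `body.into()` converts the String into an AsyncBody.
use isahc::{
    config::{Configurable, RedirectPolicy},
    AsyncBody, Request,
};

fn build_panic_report(url: &str, body: String) -> Result<Request<AsyncBody>, isahc::http::Error> {
    Request::post(url)
        .redirect_policy(RedirectPolicy::Follow)
        .header("Content-Type", "application/json")
        .body(body.into())
}

fn main() {
    let request = build_panic_report("https://example.com/api/panic", "{}".to_string()).unwrap();
    println!("{} {}", request.method(), request.uri());
}
```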
@@ -328,6 +328,11 @@ pub fn menus() -> Vec<Menu<'static>> {
action: Box::new(command_palette::Toggle),
},
MenuItem::Separator,
+ MenuItem::Action {
+ name: "View Telemetry Log",
+ action: Box::new(crate::OpenTelemetryLog),
+ },
+ MenuItem::Separator,
MenuItem::Action {
name: "Documentation",
action: Box::new(crate::OpenBrowser {
@@ -55,6 +55,7 @@ actions!(
DebugElements,
OpenSettings,
OpenLog,
+ OpenTelemetryLog,
OpenKeymap,
OpenDefaultSettings,
OpenDefaultKeymap,
@@ -145,6 +146,12 @@ pub fn init(app_state: &Arc<AppState>, cx: &mut gpui::MutableAppContext) {
open_log_file(workspace, app_state.clone(), cx);
}
});
+ cx.add_action({
+ let app_state = app_state.clone();
+ move |workspace: &mut Workspace, _: &OpenTelemetryLog, cx: &mut ViewContext<Workspace>| {
+ open_telemetry_log_file(workspace, app_state.clone(), cx);
+ }
+ });
cx.add_action({
let app_state = app_state.clone();
move |_: &mut Workspace, _: &OpenKeymap, cx: &mut ViewContext<Workspace>| {
@@ -485,6 +492,62 @@ fn open_log_file(
});
}
+fn open_telemetry_log_file(
+ workspace: &mut Workspace,
+ app_state: Arc<AppState>,
+ cx: &mut ViewContext<Workspace>,
+) {
+ workspace.with_local_workspace(cx, app_state.clone(), |_, cx| {
+ cx.spawn_weak(|workspace, mut cx| async move {
+ let workspace = workspace.upgrade(&cx)?;
+ let path = app_state.client.telemetry_log_file_path()?;
+ let log = app_state.fs.load(&path).await.log_err()?;
+
+ const MAX_TELEMETRY_LOG_LEN: usize = 5 * 1024 * 1024;
+ let mut start_offset = log.len().saturating_sub(MAX_TELEMETRY_LOG_LEN);
+ if let Some(newline_offset) = log[start_offset..].find('\n') {
+ start_offset += newline_offset + 1;
+ }
+ let log_suffix = &log[start_offset..];
+
+ workspace.update(&mut cx, |workspace, cx| {
+ let project = workspace.project().clone();
+ let buffer = project
+ .update(cx, |project, cx| project.create_buffer("", None, cx))
+ .expect("creating buffers on a local workspace always succeeds");
+ buffer.update(cx, |buffer, cx| {
+ buffer.set_language(app_state.languages.get_language("JSON"), cx);
+ buffer.edit(
+ [(
+ 0..0,
+ concat!(
+ "// Zed collects anonymous usage data to help us understand how people are using the app.\n",
+ "// After the beta release, we'll provide the ability to opt out of this telemetry.\n",
+ "// Here is the data that has been reported for the current session:\n",
+ "\n"
+ ),
+ )],
+ None,
+ cx,
+ );
+ buffer.edit([(buffer.len()..buffer.len(), log_suffix)], None, cx);
+ });
+
+ let buffer = cx.add_model(|cx| {
+ MultiBuffer::singleton(buffer, cx).with_title("Telemetry Log".into())
+ });
+ workspace.add_item(
+ Box::new(cx.add_view(|cx| Editor::for_multibuffer(buffer, Some(project), cx))),
+ cx,
+ );
+ });
+
+ Some(())
+ })
+ .detach();
+ });
+}
+
fn open_bundled_config_file(
workspace: &mut Workspace,
app_state: Arc<AppState>,
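`open_telemetry_log_file` above only keeps the tail of the log: it clamps the excerpt to `MAX_TELEMETRY_LOG_LEN` bytes and then skips ahead to the next newline so the buffer starts on a complete line. The same logic as a standalone sketch, exercised against made-up newline-delimited entries:

```rust
// Standalone version of the truncation above: take at most `max_len` bytes
// from the end of the log, then advance past the next newline so the excerpt
// begins at a line boundary.
fn tail_on_line_boundary(log: &str, max_len: usize) -> &str {
    let mut start_offset = log.len().saturating_sub(max_len);
    if let Some(newline_offset) = log[start_offset..].find('\n') {
        start_offset += newline_offset + 1;
    }
    &log[start_offset..]
}

fn main() {
    // Made-up entries standing in for the telemetry log's contents.
    let log = "{\"event\":\"a\"}\n{\"event\":\"b\"}\n{\"event\":\"c\"}\n";
    // With a 20-byte budget, only the last complete line survives.
    assert_eq!(tail_on_line_boundary(log, 20), "{\"event\":\"c\"}\n");
    println!("{}", tail_on_line_boundary(log, 20));
}
```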
@@ -1051,7 +1114,7 @@ mod tests {
assert!(!editor.is_dirty(cx));
assert_eq!(editor.title(cx), "untitled");
assert!(Arc::ptr_eq(
- editor.language_at(0, cx).unwrap(),
+ &editor.language_at(0, cx).unwrap(),
&languages::PLAIN_TEXT
));
editor.handle_input("hi", cx);
@@ -1138,7 +1201,7 @@ mod tests {
editor.update(cx, |editor, cx| {
assert!(Arc::ptr_eq(
- editor.language_at(0, cx).unwrap(),
+ &editor.language_at(0, cx).unwrap(),
&languages::PLAIN_TEXT
));
editor.handle_input("hi", cx);
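Both test assertions now borrow the result of `language_at`, which suggests it returns an owned `Arc<Language>` rather than a reference; `Arc::ptr_eq` takes `&Arc<T>` on both sides and compares allocations, not values. A small std-only illustration:

```rust
// Arc::ptr_eq compares the pointed-to allocation and takes &Arc<T>, so
// borrowing an owned Arc returned from an accessor is enough.
use std::sync::Arc;

fn main() {
    let plain_text: Arc<str> = Arc::from("Plain Text");
    // Stand-in for what a by-value accessor like `language_at` would return.
    let returned = Arc::clone(&plain_text);
    assert!(Arc::ptr_eq(&returned, &plain_text));
    println!("same allocation: {}", Arc::ptr_eq(&returned, &plain_text));
}
```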
@@ -7,6 +7,7 @@ import {
player,
popoverShadow,
text,
+ textColor,
TextColor,
} from "./components";
import hoverPopover from "./hoverPopover";
@@ -59,8 +60,14 @@ export default function editor(theme: Theme) {
indicator: iconColor(theme, "secondary"),
verticalScale: 0.618
},
- diffBackgroundDeleted: backgroundColor(theme, "error"),
- diffBackgroundInserted: backgroundColor(theme, "ok"),
+ diff: {
+ deleted: theme.iconColor.error,
+ inserted: theme.iconColor.ok,
+ modified: theme.iconColor.warning,
+ removedWidthEm: 0.275,
+ widthEm: 0.16,
+ cornerRadius: 0.05,
+ },
documentHighlightReadBackground: theme.editor.highlight.occurrence,
documentHighlightWriteBackground: theme.editor.highlight.activeOccurrence,
errorColor: theme.textColor.error,
@@ -113,6 +113,11 @@ export function createTheme(
hovered: sample(ramps.blue, 0.1),
active: sample(ramps.blue, 0.15),
},
+ on500Ok: {
+ base: sample(ramps.green, 0.05),
+ hovered: sample(ramps.green, 0.1),
+ active: sample(ramps.green, 0.15)
+ }
};
const borderColor = {
@@ -180,6 +185,10 @@ export function createTheme(
color: sample(ramps.neutral, 7),
weight: fontWeights.normal,
},
+ "variable.special": {
+ color: sample(ramps.blue, 0.80),
+ weight: fontWeights.normal,
+ },
comment: {
color: sample(ramps.neutral, 5),
weight: fontWeights.normal,
@@ -205,15 +214,11 @@ export function createTheme(
weight: fontWeights.normal,
},
constructor: {
- color: sample(ramps.blue, 0.5),
- weight: fontWeights.normal,
- },
- variant: {
- color: sample(ramps.blue, 0.5),
+ color: sample(ramps.cyan, 0.5),
weight: fontWeights.normal,
},
property: {
- color: sample(ramps.blue, 0.5),
+ color: sample(ramps.blue, 0.6),
weight: fontWeights.normal,
},
enum: {
@@ -43,7 +43,7 @@ export interface Syntax {
keyword: SyntaxHighlightStyle;
function: SyntaxHighlightStyle;
type: SyntaxHighlightStyle;
- variant: SyntaxHighlightStyle;
+ constructor: SyntaxHighlightStyle;
property: SyntaxHighlightStyle;
enum: SyntaxHighlightStyle;
operator: SyntaxHighlightStyle;
@@ -78,6 +78,7 @@ export default interface Theme {
// Hacks for elements on top of the editor
on500: BackgroundColorSet;
ok: BackgroundColorSet;
+ on500Ok: BackgroundColorSet;
error: BackgroundColorSet;
on500Error: BackgroundColorSet;
warning: BackgroundColorSet;