diff --git a/.gitignore b/.gitignore index 5a4d2ff25e19e7406f79c1b627510f4c94d58212..30d0fcbf1c8e5786d4c6145c5ae6d7043d6d5590 100644 --- a/.gitignore +++ b/.gitignore @@ -18,4 +18,5 @@ DerivedData/ .swiftpm/config/registries.json .swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata .netrc +.swiftpm **/*.db diff --git a/Cargo.lock b/Cargo.lock index 24fd67f90ec17ecc7d0440fa9d6dc52996a939b2..a4b12223e5a8fe6770464280bf652e08346a26ba 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -114,6 +114,7 @@ dependencies = [ "serde", "serde_json", "settings", + "smol", "theme", "tiktoken-rs", "util", @@ -593,7 +594,7 @@ dependencies = [ "http", "http-body", "hyper", - "itoa", + "itoa 1.0.6", "matchit", "memchr", "mime", @@ -3011,7 +3012,7 @@ checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ "bytes 1.4.0", "fnv", - "itoa", + "itoa 1.0.6", ] [[package]] @@ -3070,7 +3071,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa", + "itoa 1.0.6", "pin-project-lite 0.2.9", "socket2", "tokio", @@ -3336,6 +3337,12 @@ dependencies = [ "either", ] +[[package]] +name = "itoa" +version = "0.4.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" + [[package]] name = "itoa" version = "1.0.6" @@ -3396,12 +3403,6 @@ dependencies = [ "wasm-bindgen", ] -[[package]] -name = "json_comments" -version = "0.2.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41ee439ee368ba4a77ac70d04f14015415af8600d6c894dc1f11bd79758c57d5" - [[package]] name = "jwt" version = "0.16.0" @@ -5667,7 +5668,7 @@ dependencies = [ "bitflags", "errno 0.2.8", "io-lifetimes 0.5.3", - "itoa", + "itoa 1.0.6", "libc", "linux-raw-sys 0.0.42", "once_cell", @@ -6099,7 +6100,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1" dependencies = [ "indexmap", - 
"itoa", + "itoa 1.0.6", + "ryu", + "serde", +] + +[[package]] +name = "serde_json_lenient" +version = "0.1.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7d7b9ce5b0a63c6269b9623ed828b39259545a6ec0d8a35d6135ad6af6232add" +dependencies = [ + "indexmap", + "itoa 0.4.8", "ryu", "serde", ] @@ -6122,7 +6135,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa", + "itoa 1.0.6", "ryu", "serde", ] @@ -6148,7 +6161,7 @@ dependencies = [ "fs", "futures 0.3.28", "gpui", - "json_comments", + "indoc", "lazy_static", "postage", "pretty_assertions", @@ -6157,6 +6170,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", + "serde_json_lenient", "smallvec", "sqlez", "staff_mode", @@ -6507,7 +6521,7 @@ dependencies = [ "hkdf", "hmac 0.12.1", "indexmap", - "itoa", + "itoa 1.0.6", "libc", "libsqlite3-sys", "log", @@ -6993,7 +7007,7 @@ version = "0.3.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc" dependencies = [ - "itoa", + "itoa 1.0.6", "serde", "time-core", "time-macros", diff --git a/assets/keymaps/atom.json b/assets/keymaps/atom.json index 25143914cc4828d14e36af9e4b77b5a41cc87d4b..60acf5ea6f17563c4d27ea42728f45509dc582ed 100644 --- a/assets/keymaps/atom.json +++ b/assets/keymaps/atom.json @@ -55,7 +55,40 @@ "context": "Pane", "bindings": { "alt-cmd-/": "search::ToggleRegex", - "ctrl-0": "project_panel::ToggleFocus" + "ctrl-0": "project_panel::ToggleFocus", + "cmd-1": [ + "pane::ActivateItem", + 0 + ], + "cmd-2": [ + "pane::ActivateItem", + 1 + ], + "cmd-3": [ + "pane::ActivateItem", + 2 + ], + "cmd-4": [ + "pane::ActivateItem", + 3 + ], + "cmd-5": [ + "pane::ActivateItem", + 4 + ], + "cmd-6": [ + "pane::ActivateItem", + 5 + ], + "cmd-7": [ + "pane::ActivateItem", + 6 + ], + "cmd-8": [ + 
"pane::ActivateItem", + 7 + ], + "cmd-9": "pane::ActivateLastItem" } }, { diff --git a/crates/ai/Cargo.toml b/crates/ai/Cargo.toml index 9d67cbd108e79145db2bae2c709ee4d7c0b61660..7f8954bb21ea88a0b14f7fd5bacf26743de3c6be 100644 --- a/crates/ai/Cargo.toml +++ b/crates/ai/Cargo.toml @@ -28,6 +28,7 @@ isahc.workspace = true schemars.workspace = true serde.workspace = true serde_json.workspace = true +smol.workspace = true tiktoken-rs = "0.4" [dev-dependencies] diff --git a/crates/ai/src/ai.rs b/crates/ai/src/ai.rs index a46076831f441ac143fcaa494533425a9b6fd4af..f6bcb670adc5ddb64f5d519e8f40146baf9734de 100644 --- a/crates/ai/src/ai.rs +++ b/crates/ai/src/ai.rs @@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize}; use std::fmt::{self, Display}; // Data types for chat completion requests -#[derive(Serialize)] +#[derive(Debug, Serialize)] struct OpenAIRequest { model: String, messages: Vec, diff --git a/crates/ai/src/assistant.rs b/crates/ai/src/assistant.rs index a84666d3bf367611d14600f4023584f92882a226..114e23c9ff259a578b76aeb92c08e94a2161fc5b 100644 --- a/crates/ai/src/assistant.rs +++ b/crates/ai/src/assistant.rs @@ -8,7 +8,7 @@ use collections::{HashMap, HashSet}; use editor::{ display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint}, scroll::autoscroll::{Autoscroll, AutoscrollStrategy}, - Anchor, Editor, + Anchor, Editor, ToOffset, }; use fs::Fs; use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt}; @@ -483,7 +483,7 @@ impl Assistant { language_registry: Arc, cx: &mut ModelContext, ) -> Self { - let model = "gpt-3.5-turbo"; + let model = "gpt-3.5-turbo-0613"; let markdown = language_registry.language_for_name("Markdown"); let buffer = cx.add_model(|cx| { let mut buffer = Buffer::new(0, "", cx); @@ -528,7 +528,7 @@ impl Assistant { MessageMetadata { role: Role::User, sent_at: Local::now(), - error: None, + status: MessageStatus::Done, }, ); @@ -553,7 +553,7 @@ impl Assistant { fn count_remaining_tokens(&mut self, 
cx: &mut ModelContext) { let messages = self - .open_ai_request_messages(cx) + .messages(cx) .into_iter() .filter_map(|message| { Some(tiktoken_rs::ChatCompletionRequestMessage { @@ -562,7 +562,7 @@ impl Assistant { Role::Assistant => "assistant".into(), Role::System => "system".into(), }, - content: message.content, + content: self.buffer.read(cx).text_for_range(message.range).collect(), name: None, }) }) @@ -599,97 +599,169 @@ impl Assistant { cx.notify(); } - fn assist(&mut self, cx: &mut ModelContext) -> Option<(MessageAnchor, MessageAnchor)> { - let request = OpenAIRequest { - model: self.model.clone(), - messages: self.open_ai_request_messages(cx), - stream: true, - }; + fn assist( + &mut self, + selected_messages: HashSet, + cx: &mut ModelContext, + ) -> Vec { + let mut user_messages = Vec::new(); + let mut tasks = Vec::new(); + for selected_message_id in selected_messages { + let selected_message_role = + if let Some(metadata) = self.messages_metadata.get(&selected_message_id) { + metadata.role + } else { + continue; + }; + + if selected_message_role == Role::Assistant { + if let Some(user_message) = self.insert_message_after( + selected_message_id, + Role::User, + MessageStatus::Done, + cx, + ) { + user_messages.push(user_message); + } else { + continue; + } + } else { + let request = OpenAIRequest { + model: self.model.clone(), + messages: self + .messages(cx) + .filter(|message| matches!(message.status, MessageStatus::Done)) + .flat_map(|message| { + let mut system_message = None; + if message.id == selected_message_id { + system_message = Some(RequestMessage { + role: Role::System, + content: concat!( + "Treat the following messages as additional knowledge you have learned about, ", + "but act as if they were not part of this conversation. That is, treat them ", + "as if the user didn't see them and couldn't possibly inquire about them." 
+ ).into() + }); + } + + Some(message.to_open_ai_message(self.buffer.read(cx))).into_iter().chain(system_message) + }) + .chain(Some(RequestMessage { + role: Role::System, + content: format!( + "Direct your reply to message with id {}. Do not include a [Message X] header.", + selected_message_id.0 + ), + })) + .collect(), + stream: true, + }; + + let Some(api_key) = self.api_key.borrow().clone() else { continue }; + let stream = stream_completion(api_key, cx.background().clone(), request); + let assistant_message = self + .insert_message_after( + selected_message_id, + Role::Assistant, + MessageStatus::Pending, + cx, + ) + .unwrap(); + + tasks.push(cx.spawn_weak({ + |this, mut cx| async move { + let assistant_message_id = assistant_message.id; + let stream_completion = async { + let mut messages = stream.await?; + + while let Some(message) = messages.next().await { + let mut message = message?; + if let Some(choice) = message.choices.pop() { + this.upgrade(&cx) + .ok_or_else(|| anyhow!("assistant was dropped"))? + .update(&mut cx, |this, cx| { + let text: Arc = choice.delta.content?.into(); + let message_ix = this.message_anchors.iter().position( + |message| message.id == assistant_message_id, + )?; + this.buffer.update(cx, |buffer, cx| { + let offset = this.message_anchors[message_ix + 1..] 
+ .iter() + .find(|message| message.start.is_valid(buffer)) + .map_or(buffer.len(), |message| { + message + .start + .to_offset(buffer) + .saturating_sub(1) + }); + buffer.edit([(offset..offset, text)], None, cx); + }); + cx.emit(AssistantEvent::StreamedCompletion); + + Some(()) + }); + } + smol::future::yield_now().await; + } - let api_key = self.api_key.borrow().clone()?; - let stream = stream_completion(api_key, cx.background().clone(), request); - let assistant_message = - self.insert_message_after(self.message_anchors.last()?.id, Role::Assistant, cx)?; - let user_message = self.insert_message_after(assistant_message.id, Role::User, cx)?; - let task = cx.spawn_weak({ - |this, mut cx| async move { - let assistant_message_id = assistant_message.id; - let stream_completion = async { - let mut messages = stream.await?; - - while let Some(message) = messages.next().await { - let mut message = message?; - if let Some(choice) = message.choices.pop() { this.upgrade(&cx) .ok_or_else(|| anyhow!("assistant was dropped"))? .update(&mut cx, |this, cx| { - let text: Arc = choice.delta.content?.into(); - let message_ix = this - .message_anchors - .iter() - .position(|message| message.id == assistant_message_id)?; - this.buffer.update(cx, |buffer, cx| { - let offset = if message_ix + 1 == this.message_anchors.len() - { - buffer.len() - } else { - this.message_anchors[message_ix + 1] - .start - .to_offset(buffer) - .saturating_sub(1) - }; - buffer.edit([(offset..offset, text)], None, cx); + this.pending_completions.retain(|completion| { + completion.id != this.completion_count }); - cx.emit(AssistantEvent::StreamedCompletion); - - Some(()) + this.summarize(cx); }); - } - } - this.upgrade(&cx) - .ok_or_else(|| anyhow!("assistant was dropped"))? 
- .update(&mut cx, |this, cx| { - this.pending_completions - .retain(|completion| completion.id != this.completion_count); - this.summarize(cx); - }); - - anyhow::Ok(()) - }; - - let result = stream_completion.await; - if let Some(this) = this.upgrade(&cx) { - this.update(&mut cx, |this, cx| { - if let Err(error) = result { - if let Some(metadata) = - this.messages_metadata.get_mut(&assistant_message.id) - { - metadata.error = Some(error.to_string().trim().into()); - cx.notify(); - } + anyhow::Ok(()) + }; + + let result = stream_completion.await; + if let Some(this) = this.upgrade(&cx) { + this.update(&mut cx, |this, cx| { + if let Some(metadata) = + this.messages_metadata.get_mut(&assistant_message.id) + { + match result { + Ok(_) => { + metadata.status = MessageStatus::Done; + } + Err(error) => { + metadata.status = MessageStatus::Error( + error.to_string().trim().into(), + ); + } + } + cx.notify(); + } + }); } - }); - } + } + })); } - }); + } - self.pending_completions.push(PendingCompletion { - id: post_inc(&mut self.completion_count), - _task: task, - }); - Some((assistant_message, user_message)) + if !tasks.is_empty() { + self.pending_completions.push(PendingCompletion { + id: post_inc(&mut self.completion_count), + _tasks: tasks, + }); + } + + user_messages } fn cancel_last_assist(&mut self) -> bool { self.pending_completions.pop().is_some() } - fn cycle_message_role(&mut self, id: MessageId, cx: &mut ModelContext) { - if let Some(metadata) = self.messages_metadata.get_mut(&id) { - metadata.role.cycle(); - cx.emit(AssistantEvent::MessagesEdited); - cx.notify(); + fn cycle_message_roles(&mut self, ids: HashSet, cx: &mut ModelContext) { + for id in ids { + if let Some(metadata) = self.messages_metadata.get_mut(&id) { + metadata.role.cycle(); + cx.emit(AssistantEvent::MessagesEdited); + cx.notify(); + } } } @@ -697,6 +769,7 @@ impl Assistant { &mut self, message_id: MessageId, role: Role, + status: MessageStatus, cx: &mut ModelContext, ) -> Option { if let 
Some(prev_message_ix) = self @@ -723,7 +796,7 @@ impl Assistant { MessageMetadata { role, sent_at: Local::now(), - error: None, + status, }, ); cx.emit(AssistantEvent::MessagesEdited); @@ -782,7 +855,7 @@ impl Assistant { MessageMetadata { role, sent_at: Local::now(), - error: None, + status: MessageStatus::Done, }, ); @@ -824,7 +897,7 @@ impl Assistant { MessageMetadata { role, sent_at: Local::now(), - error: None, + status: MessageStatus::Done, }, ); (Some(selection), Some(suffix)) @@ -843,16 +916,19 @@ impl Assistant { if self.message_anchors.len() >= 2 && self.summary.is_none() { let api_key = self.api_key.borrow().clone(); if let Some(api_key) = api_key { - let mut messages = self.open_ai_request_messages(cx); - messages.truncate(2); - messages.push(RequestMessage { - role: Role::User, - content: "Summarize the conversation into a short title without punctuation" - .into(), - }); + let messages = self + .messages(cx) + .take(2) + .map(|message| message.to_open_ai_message(self.buffer.read(cx))) + .chain(Some(RequestMessage { + role: Role::User, + content: + "Summarize the conversation into a short title without punctuation" + .into(), + })); let request = OpenAIRequest { model: self.model.clone(), - messages, + messages: messages.collect(), stream: true, }; @@ -880,24 +956,39 @@ impl Assistant { } } - fn open_ai_request_messages(&self, cx: &AppContext) -> Vec { - let buffer = self.buffer.read(cx); - self.messages(cx) - .map(|message| RequestMessage { - role: message.role, - content: buffer.text_for_range(message.range).collect(), - }) - .collect() + fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option { + self.messages_for_offsets([offset], cx).pop() } - fn message_for_offset<'a>(&'a self, offset: usize, cx: &'a AppContext) -> Option { + fn messages_for_offsets( + &self, + offsets: impl IntoIterator, + cx: &AppContext, + ) -> Vec { + let mut result = Vec::new(); + + let buffer_len = self.buffer.read(cx).len(); let mut messages = 
self.messages(cx).peekable(); - while let Some(message) = messages.next() { - if message.range.contains(&offset) || messages.peek().is_none() { - return Some(message); + let mut offsets = offsets.into_iter().peekable(); + while let Some(offset) = offsets.next() { + // Skip messages that start after the offset. + while messages.peek().map_or(false, |message| { + message.range.end < offset || (message.range.end == offset && offset < buffer_len) + }) { + messages.next(); + } + let Some(message) = messages.peek() else { continue }; + + // Skip offsets that are in the same message. + while offsets.peek().map_or(false, |offset| { + message.range.contains(offset) || message.range.end == buffer_len + }) { + offsets.next(); } + + result.push(message.clone()); } - None + result } fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator { @@ -926,7 +1017,7 @@ impl Assistant { anchor: message_anchor.start, role: metadata.role, sent_at: metadata.sent_at, - error: metadata.error.clone(), + status: metadata.status.clone(), }); } None @@ -963,7 +1054,7 @@ impl Assistant { struct PendingCompletion { id: usize, - _task: Task<()>, + _tasks: Vec>, } enum AssistantEditorEvent { @@ -1019,20 +1110,31 @@ impl AssistantEditor { } fn assist(&mut self, _: &Assist, cx: &mut ViewContext) { - let user_message = self.assistant.update(cx, |assistant, cx| { - let (_, user_message) = assistant.assist(cx)?; - Some(user_message) + let cursors = self.cursors(cx); + + let user_messages = self.assistant.update(cx, |assistant, cx| { + let selected_messages = assistant + .messages_for_offsets(cursors, cx) + .into_iter() + .map(|message| message.id) + .collect(); + assistant.assist(selected_messages, cx) }); - - if let Some(user_message) = user_message { - let cursor = user_message - .start - .to_offset(&self.assistant.read(cx).buffer.read(cx)); + let new_selections = user_messages + .iter() + .map(|message| { + let cursor = message + .start + .to_offset(self.assistant.read(cx).buffer.read(cx)); 
+ cursor..cursor + }) + .collect::>(); + if !new_selections.is_empty() { self.editor.update(cx, |editor, cx| { editor.change_selections( Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)), cx, - |selections| selections.select_ranges([cursor..cursor]), + |selections| selections.select_ranges(new_selections), ); }); } @@ -1048,14 +1150,25 @@ impl AssistantEditor { } fn cycle_message_role(&mut self, _: &CycleMessageRole, cx: &mut ViewContext) { - let cursor_offset = self.editor.read(cx).selections.newest(cx).head(); + let cursors = self.cursors(cx); self.assistant.update(cx, |assistant, cx| { - if let Some(message) = assistant.message_for_offset(cursor_offset, cx) { - assistant.cycle_message_role(message.id, cx); - } + let messages = assistant + .messages_for_offsets(cursors, cx) + .into_iter() + .map(|message| message.id) + .collect(); + assistant.cycle_message_roles(messages, cx) }); } + fn cursors(&self, cx: &AppContext) -> Vec { + let selections = self.editor.read(cx).selections.all::(cx); + selections + .into_iter() + .map(|selection| selection.head()) + .collect() + } + fn handle_assistant_event( &mut self, _: ModelHandle, @@ -1184,7 +1297,10 @@ impl AssistantEditor { let assistant = assistant.clone(); move |_, _, cx| { assistant.update(cx, |assistant, cx| { - assistant.cycle_message_role(message_id, cx) + assistant.cycle_message_roles( + HashSet::from_iter(Some(message_id)), + cx, + ) }) } }); @@ -1200,22 +1316,28 @@ impl AssistantEditor { .with_style(style.sent_at.container) .aligned(), ) - .with_children(message.error.as_ref().map(|error| { - Svg::new("icons/circle_x_mark_12.svg") - .with_color(style.error_icon.color) - .constrained() - .with_width(style.error_icon.width) - .contained() - .with_style(style.error_icon.container) - .with_tooltip::( - message_id.0, - error.to_string(), - None, - theme.tooltip.clone(), - cx, + .with_children( + if let MessageStatus::Error(error) = &message.status { + Some( + Svg::new("icons/circle_x_mark_12.svg") + 
.with_color(style.error_icon.color) + .constrained() + .with_width(style.error_icon.width) + .contained() + .with_style(style.error_icon.container) + .with_tooltip::( + message_id.0, + error.to_string(), + None, + theme.tooltip.clone(), + cx, + ) + .aligned(), ) - .aligned() - })) + } else { + None + }, + ) .aligned() .left() .contained() @@ -1334,8 +1456,14 @@ impl AssistantEditor { fn split(&mut self, _: &Split, cx: &mut ViewContext) { self.assistant.update(cx, |assistant, cx| { - let range = self.editor.read(cx).selections.newest::(cx).range(); - assistant.split_message(range, cx); + let selections = self.editor.read(cx).selections.disjoint_anchors(); + for selection in selections.into_iter() { + let buffer = self.editor.read(cx).buffer().read(cx).snapshot(cx); + let range = selection + .map(|endpoint| endpoint.to_offset(&buffer)) + .range(); + assistant.split_message(range, cx); + } }); } @@ -1348,8 +1476,8 @@ impl AssistantEditor { fn cycle_model(&mut self, cx: &mut ViewContext) { self.assistant.update(cx, |assistant, cx| { let new_model = match assistant.model.as_str() { - "gpt-4" => "gpt-3.5-turbo", - _ => "gpt-4", + "gpt-4-0613" => "gpt-3.5-turbo-0613", + _ => "gpt-4-0613", }; assistant.set_model(new_model.into(), cx); }); @@ -1463,7 +1591,14 @@ struct MessageAnchor { struct MessageMetadata { role: Role, sent_at: DateTime, - error: Option>, + status: MessageStatus, +} + +#[derive(Clone, Debug)] +enum MessageStatus { + Pending, + Done, + Error(Arc), } #[derive(Clone, Debug)] @@ -1474,7 +1609,18 @@ pub struct Message { anchor: language::Anchor, role: Role, sent_at: DateTime, - error: Option>, + status: MessageStatus, +} + +impl Message { + fn to_open_ai_message(&self, buffer: &Buffer) -> RequestMessage { + let mut content = format!("[Message {}]\n", self.id.0).to_string(); + content.extend(buffer.text_for_range(self.range.clone())); + RequestMessage { + role: self.role, + content, + } + } } async fn stream_completion( @@ -1582,7 +1728,7 @@ mod tests { let 
message_2 = assistant.update(cx, |assistant, cx| { assistant - .insert_message_after(message_1.id, Role::Assistant, cx) + .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx) .unwrap() }); assert_eq!( @@ -1606,7 +1752,7 @@ mod tests { let message_3 = assistant.update(cx, |assistant, cx| { assistant - .insert_message_after(message_2.id, Role::User, cx) + .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) .unwrap() }); assert_eq!( @@ -1620,7 +1766,7 @@ mod tests { let message_4 = assistant.update(cx, |assistant, cx| { assistant - .insert_message_after(message_2.id, Role::User, cx) + .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) .unwrap() }); assert_eq!( @@ -1681,7 +1827,7 @@ mod tests { // Ensure we can still insert after a merged message. let message_5 = assistant.update(cx, |assistant, cx| { assistant - .insert_message_after(message_1.id, Role::System, cx) + .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx) .unwrap() }); assert_eq!( @@ -1787,6 +1933,66 @@ mod tests { ); } + #[gpui::test] + fn test_messages_for_offsets(cx: &mut AppContext) { + let registry = Arc::new(LanguageRegistry::test()); + let assistant = cx.add_model(|cx| Assistant::new(Default::default(), registry, cx)); + let buffer = assistant.read(cx).buffer.clone(); + + let message_1 = assistant.read(cx).message_anchors[0].clone(); + assert_eq!( + messages(&assistant, cx), + vec![(message_1.id, Role::User, 0..0)] + ); + + buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx)); + let message_2 = assistant + .update(cx, |assistant, cx| { + assistant.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx) + }) + .unwrap(); + buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbb")], None, cx)); + + let message_3 = assistant + .update(cx, |assistant, cx| { + assistant.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx) + }) + .unwrap(); + buffer.update(cx, 
|buffer, cx| buffer.edit([(8..8, "ccc")], None, cx)); + + assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc"); + assert_eq!( + messages(&assistant, cx), + vec![ + (message_1.id, Role::User, 0..4), + (message_2.id, Role::User, 4..8), + (message_3.id, Role::User, 8..11) + ] + ); + + assert_eq!( + message_ids_for_offsets(&assistant, &[0, 4, 9], cx), + [message_1.id, message_2.id, message_3.id] + ); + assert_eq!( + message_ids_for_offsets(&assistant, &[0, 1, 11], cx), + [message_1.id, message_3.id] + ); + + fn message_ids_for_offsets( + assistant: &ModelHandle, + offsets: &[usize], + cx: &AppContext, + ) -> Vec { + assistant + .read(cx) + .messages_for_offsets(offsets.iter().copied(), cx) + .into_iter() + .map(|message| message.id) + .collect() + } + } + fn messages( assistant: &ModelHandle, cx: &AppContext, diff --git a/crates/collab/src/tests/integration_tests.rs b/crates/collab/src/tests/integration_tests.rs index ce4ede8684831fd78d0d6a16cc49905581fadc1a..92b63478cbf1d20bf0597857a96ef88e0590efe2 100644 --- a/crates/collab/src/tests/integration_tests.rs +++ b/crates/collab/src/tests/integration_tests.rs @@ -39,7 +39,12 @@ use std::{ }, }; use unindent::Unindent as _; -use workspace::{item::ItemHandle as _, shared_screen::SharedScreen, SplitDirection, Workspace}; +use workspace::{ + dock::{test::TestPanel, DockPosition}, + item::{test::TestItem, ItemHandle as _}, + shared_screen::SharedScreen, + SplitDirection, Workspace, +}; #[ctor::ctor] fn init_logger() { @@ -6847,12 +6852,43 @@ async fn test_basic_following( ) }); - // Client B activates an external window again, and the previously-opened screen-sharing item - // gets activated. - active_call_b - .update(cx_b, |call, cx| call.set_location(None, cx)) - .await - .unwrap(); + // Client B activates a panel, and the previously-opened screen-sharing item gets activated. 
+ let panel = cx_b.add_view(workspace_b.window_id(), |_| { + TestPanel::new(DockPosition::Left) + }); + workspace_b.update(cx_b, |workspace, cx| { + workspace.add_panel(panel, cx); + workspace.toggle_panel_focus::(cx); + }); + deterministic.run_until_parked(); + assert_eq!( + workspace_a.read_with(cx_a, |workspace, cx| workspace + .active_item(cx) + .unwrap() + .id()), + shared_screen.id() + ); + + // Toggling the focus back to the pane causes client A to return to the multibuffer. + workspace_b.update(cx_b, |workspace, cx| { + workspace.toggle_panel_focus::(cx); + }); + deterministic.run_until_parked(); + workspace_a.read_with(cx_a, |workspace, cx| { + assert_eq!( + workspace.active_item(cx).unwrap().id(), + multibuffer_editor_a.id() + ) + }); + + // Client B activates an item that doesn't implement following, + // so the previously-opened screen-sharing item gets activated. + let unfollowable_item = cx_b.add_view(workspace_b.window_id(), |_| TestItem::new()); + workspace_b.update(cx_b, |workspace, cx| { + workspace.active_pane().update(cx, |pane, cx| { + pane.add_item(Box::new(unfollowable_item), true, true, None, cx) + }) + }); deterministic.run_until_parked(); assert_eq!( workspace_a.read_with(cx_a, |workspace, cx| workspace diff --git a/crates/editor/src/display_map/block_map.rs b/crates/editor/src/display_map/block_map.rs index 05ff9886f1d33827776fa55795a4b0b6b26efd64..0cf5bacc65c39b704b41c13e4e3e922e926a9daf 100644 --- a/crates/editor/src/display_map/block_map.rs +++ b/crates/editor/src/display_map/block_map.rs @@ -243,7 +243,7 @@ impl BlockMap { // Preserve any old transforms that precede this edit. 
let old_start = WrapRow(edit.old.start); let new_start = WrapRow(edit.new.start); - new_transforms.push_tree(cursor.slice(&old_start, Bias::Left, &()), &()); + new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &()); if let Some(transform) = cursor.item() { if transform.is_isomorphic() && old_start == cursor.end(&()) { new_transforms.push(transform.clone(), &()); @@ -425,7 +425,7 @@ impl BlockMap { push_isomorphic(&mut new_transforms, extent_after_edit); } - new_transforms.push_tree(cursor.suffix(&()), &()); + new_transforms.append(cursor.suffix(&()), &()); debug_assert_eq!( new_transforms.summary().input_rows, wrap_snapshot.max_point().row() + 1 diff --git a/crates/editor/src/display_map/fold_map.rs b/crates/editor/src/display_map/fold_map.rs index 6ef1ebce1da844d1cd23185d5d246524716e082f..36cfeba4a13d0ab4d8f0aba67a3a4505bd8e8136 100644 --- a/crates/editor/src/display_map/fold_map.rs +++ b/crates/editor/src/display_map/fold_map.rs @@ -115,10 +115,10 @@ impl<'a> FoldMapWriter<'a> { let mut new_tree = SumTree::new(); let mut cursor = self.0.folds.cursor::(); for fold in folds { - new_tree.push_tree(cursor.slice(&fold, Bias::Right, &buffer), &buffer); + new_tree.append(cursor.slice(&fold, Bias::Right, &buffer), &buffer); new_tree.push(fold, &buffer); } - new_tree.push_tree(cursor.suffix(&buffer), &buffer); + new_tree.append(cursor.suffix(&buffer), &buffer); new_tree }; @@ -165,10 +165,10 @@ impl<'a> FoldMapWriter<'a> { let mut cursor = self.0.folds.cursor::(); let mut folds = SumTree::new(); for fold_ix in fold_ixs_to_delete { - folds.push_tree(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer); + folds.append(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer); cursor.next(&buffer); } - folds.push_tree(cursor.suffix(&buffer), &buffer); + folds.append(cursor.suffix(&buffer), &buffer); folds }; @@ -302,7 +302,7 @@ impl FoldMap { cursor.seek(&0, Bias::Right, &()); while let Some(mut edit) = buffer_edits_iter.next() { - 
new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &()); + new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &()); edit.new.start -= edit.old.start - cursor.start(); edit.old.start = *cursor.start(); @@ -412,7 +412,7 @@ impl FoldMap { } } - new_transforms.push_tree(cursor.suffix(&()), &()); + new_transforms.append(cursor.suffix(&()), &()); if new_transforms.is_empty() { let text_summary = new_buffer.text_summary(); new_transforms.push( diff --git a/crates/editor/src/display_map/wrap_map.rs b/crates/editor/src/display_map/wrap_map.rs index 478eaf4c7e1a16f56e55286b8e438cda9d78b4b1..dad264d57d3ec1de1f4f7ce41537e588ab16933e 100644 --- a/crates/editor/src/display_map/wrap_map.rs +++ b/crates/editor/src/display_map/wrap_map.rs @@ -353,7 +353,7 @@ impl WrapSnapshot { } old_cursor.next(&()); - new_transforms.push_tree( + new_transforms.append( old_cursor.slice(&next_edit.old.start, Bias::Right, &()), &(), ); @@ -366,7 +366,7 @@ impl WrapSnapshot { new_transforms.push_or_extend(Transform::isomorphic(summary)); } old_cursor.next(&()); - new_transforms.push_tree(old_cursor.suffix(&()), &()); + new_transforms.append(old_cursor.suffix(&()), &()); } } } @@ -500,7 +500,7 @@ impl WrapSnapshot { new_transforms.push_or_extend(Transform::isomorphic(summary)); } old_cursor.next(&()); - new_transforms.push_tree( + new_transforms.append( old_cursor.slice( &TabPoint::new(next_edit.old_rows.start, 0), Bias::Right, @@ -517,7 +517,7 @@ impl WrapSnapshot { new_transforms.push_or_extend(Transform::isomorphic(summary)); } old_cursor.next(&()); - new_transforms.push_tree(old_cursor.suffix(&()), &()); + new_transforms.append(old_cursor.suffix(&()), &()); } } } diff --git a/crates/editor/src/multi_buffer.rs b/crates/editor/src/multi_buffer.rs index 955902da1263f875c6e3c3779b6cf39d84ccc191..31af03f768ea549d04a8802623bbc364acd762a4 100644 --- a/crates/editor/src/multi_buffer.rs +++ b/crates/editor/src/multi_buffer.rs @@ -1010,7 +1010,7 @@ impl MultiBuffer { 
let suffix = cursor.suffix(&()); let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.push_tree(suffix, &()); + new_excerpts.append(suffix, &()); drop(cursor); snapshot.excerpts = new_excerpts; snapshot.excerpt_ids = new_excerpt_ids; @@ -1193,7 +1193,7 @@ impl MultiBuffer { while let Some(excerpt_id) = excerpt_ids.next() { // Seek to the next excerpt to remove, preserving any preceding excerpts. let locator = snapshot.excerpt_locator_for_id(excerpt_id); - new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &()); + new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); if let Some(mut excerpt) = cursor.item() { if excerpt.id != excerpt_id { @@ -1245,7 +1245,7 @@ impl MultiBuffer { } let suffix = cursor.suffix(&()); let changed_trailing_excerpt = suffix.is_empty(); - new_excerpts.push_tree(suffix, &()); + new_excerpts.append(suffix, &()); drop(cursor); snapshot.excerpts = new_excerpts; @@ -1509,7 +1509,7 @@ impl MultiBuffer { let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>(); for (locator, buffer, buffer_edited) in excerpts_to_edit { - new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &()); + new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &()); let old_excerpt = cursor.item().unwrap(); let buffer = buffer.read(cx); let buffer_id = buffer.remote_id(); @@ -1549,7 +1549,7 @@ impl MultiBuffer { new_excerpts.push(new_excerpt, &()); cursor.next(&()); } - new_excerpts.push_tree(cursor.suffix(&()), &()); + new_excerpts.append(cursor.suffix(&()), &()); drop(cursor); snapshot.excerpts = new_excerpts; diff --git a/crates/gpui/src/app.rs b/crates/gpui/src/app.rs index 882800f128abe8d41dd48e83a694b48d0020778b..41684f32267b5273e2795435950039422ea9b0ac 100644 --- a/crates/gpui/src/app.rs +++ b/crates/gpui/src/app.rs @@ -445,7 +445,7 @@ type WindowBoundsCallback = Box>, &mut WindowContext) -> bool>; type ActiveLabeledTasksCallback = Box bool>; -type 
DeserializeActionCallback = fn(json: &str) -> anyhow::Result>; +type DeserializeActionCallback = fn(json: serde_json::Value) -> anyhow::Result>; type WindowShouldCloseSubscriptionCallback = Box bool>; pub struct AppContext { @@ -624,14 +624,14 @@ impl AppContext { pub fn deserialize_action( &self, name: &str, - argument: Option<&str>, + argument: Option, ) -> Result> { let callback = self .action_deserializers .get(name) .ok_or_else(|| anyhow!("unknown action {}", name))? .1; - callback(argument.unwrap_or("{}")) + callback(argument.unwrap_or_else(|| serde_json::Value::Object(Default::default()))) .with_context(|| format!("invalid data for action {}", name)) } @@ -5573,7 +5573,7 @@ mod tests { let action1 = cx .deserialize_action( "test::something::ComplexAction", - Some(r#"{"arg": "a", "count": 5}"#), + Some(serde_json::from_str(r#"{"arg": "a", "count": 5}"#).unwrap()), ) .unwrap(); let action2 = cx diff --git a/crates/gpui/src/app/action.rs b/crates/gpui/src/app/action.rs index 5b4df68a651cfa63c9a1b1976a076994d9971747..c6b43e489ba55e2fa259b9d3a2d1fc8ed9c2a564 100644 --- a/crates/gpui/src/app/action.rs +++ b/crates/gpui/src/app/action.rs @@ -11,7 +11,7 @@ pub trait Action: 'static { fn qualified_name() -> &'static str where Self: Sized; - fn from_json_str(json: &str) -> anyhow::Result> + fn from_json_str(json: serde_json::Value) -> anyhow::Result> where Self: Sized; } @@ -38,7 +38,7 @@ macro_rules! actions { $crate::__impl_action! { $namespace, $name, - fn from_json_str(_: &str) -> $crate::anyhow::Result> { + fn from_json_str(_: $crate::serde_json::Value) -> $crate::anyhow::Result> { Ok(Box::new(Self)) } } @@ -58,8 +58,8 @@ macro_rules! impl_actions { $crate::__impl_action! 
{ $namespace, $name, - fn from_json_str(json: &str) -> $crate::anyhow::Result> { - Ok(Box::new($crate::serde_json::from_str::(json)?)) + fn from_json_str(json: $crate::serde_json::Value) -> $crate::anyhow::Result> { + Ok(Box::new($crate::serde_json::from_value::(json)?)) } } )* diff --git a/crates/gpui/src/app/window.rs b/crates/gpui/src/app/window.rs index cfcef626dfa2a5baf36a230eebcd1e5ae2d23495..cffce6c3a6c646094f7247683def5d148c550138 100644 --- a/crates/gpui/src/app/window.rs +++ b/crates/gpui/src/app/window.rs @@ -394,7 +394,7 @@ impl<'a> WindowContext<'a> { .iter() .filter_map(move |(name, (type_id, deserialize))| { if let Some(action_depth) = handler_depths_by_action_type.get(type_id).copied() { - let action = deserialize("{}").ok()?; + let action = deserialize(serde_json::Value::Object(Default::default())).ok()?; let bindings = self .keystroke_matcher .bindings_for_action_type(*type_id) diff --git a/crates/gpui/src/elements/list.rs b/crates/gpui/src/elements/list.rs index 1cf8cc986f08afe5325b3fe8f756192969487e99..4c6298d8f55a28abeb6cec468af63a94eec21759 100644 --- a/crates/gpui/src/elements/list.rs +++ b/crates/gpui/src/elements/list.rs @@ -211,7 +211,7 @@ impl Element for List { let mut cursor = old_items.cursor::(); if state.rendered_range.start < new_rendered_range.start { - new_items.push_tree( + new_items.append( cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()), &(), ); @@ -221,7 +221,7 @@ impl Element for List { cursor.next(&()); } } - new_items.push_tree( + new_items.append( cursor.slice(&Count(new_rendered_range.start), Bias::Right, &()), &(), ); @@ -230,7 +230,7 @@ impl Element for List { cursor.seek(&Count(new_rendered_range.end), Bias::Right, &()); if new_rendered_range.end < state.rendered_range.start { - new_items.push_tree( + new_items.append( cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()), &(), ); @@ -240,7 +240,7 @@ impl Element for List { cursor.next(&()); } - new_items.push_tree(cursor.suffix(&()), 
&()); + new_items.append(cursor.suffix(&()), &()); state.items = new_items; state.rendered_range = new_rendered_range; @@ -413,7 +413,7 @@ impl ListState { old_heights.seek_forward(&Count(old_range.end), Bias::Right, &()); new_heights.extend((0..count).map(|_| ListItem::Unrendered), &()); - new_heights.push_tree(old_heights.suffix(&()), &()); + new_heights.append(old_heights.suffix(&()), &()); drop(old_heights); state.items = new_heights; } diff --git a/crates/gpui/src/platform/mac/platform.rs b/crates/gpui/src/platform/mac/platform.rs index 8b5b801adaff3bd337754ba8e59bf6d12db2e7d3..ea415cc6a66e987052800c3c78e64826e1445355 100644 --- a/crates/gpui/src/platform/mac/platform.rs +++ b/crates/gpui/src/platform/mac/platform.rs @@ -786,7 +786,7 @@ impl platform::Platform for MacPlatform { fn set_cursor_style(&self, style: CursorStyle) { unsafe { - let cursor: id = match style { + let new_cursor: id = match style { CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor], CursorStyle::ResizeLeftRight => { msg_send![class!(NSCursor), resizeLeftRightCursor] @@ -795,7 +795,11 @@ impl platform::Platform for MacPlatform { CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor], CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor], }; - let _: () = msg_send![cursor, set]; + + let old_cursor: id = msg_send![class!(NSCursor), currentCursor]; + if new_cursor != old_cursor { + let _: () = msg_send![new_cursor, set]; + } } } diff --git a/crates/language/src/language.rs b/crates/language/src/language.rs index e91d5770cfba80485f3cbe6272257a81c48c4fc3..5a4d604ce349e107c282b8ab2e981b07e94261b9 100644 --- a/crates/language/src/language.rs +++ b/crates/language/src/language.rs @@ -17,7 +17,7 @@ use futures::{ future::{BoxFuture, Shared}, FutureExt, TryFutureExt as _, }; -use gpui::{executor::Background, AppContext, Task}; +use gpui::{executor::Background, AppContext, AsyncAppContext, Task}; use highlight_map::HighlightMap; use lazy_static::lazy_static; 
use lsp::CodeActionKind; @@ -125,27 +125,46 @@ impl CachedLspAdapter { pub async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { - self.adapter.fetch_latest_server_version(http).await + self.adapter.fetch_latest_server_version(delegate).await + } + + pub fn will_fetch_server( + &self, + delegate: &Arc, + cx: &mut AsyncAppContext, + ) -> Option>> { + self.adapter.will_fetch_server(delegate, cx) + } + + pub fn will_start_server( + &self, + delegate: &Arc, + cx: &mut AsyncAppContext, + ) -> Option>> { + self.adapter.will_start_server(delegate, cx) } pub async fn fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { self.adapter - .fetch_server_binary(version, http, container_dir) + .fetch_server_binary(version, container_dir, delegate) .await } pub async fn cached_server_binary( &self, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Option { - self.adapter.cached_server_binary(container_dir).await + self.adapter + .cached_server_binary(container_dir, delegate) + .await } pub fn code_action_kinds(&self) -> Option> { @@ -187,23 +206,48 @@ impl CachedLspAdapter { } } +pub trait LspAdapterDelegate: Send + Sync { + fn show_notification(&self, message: &str, cx: &mut AppContext); + fn http_client(&self) -> Arc; +} + #[async_trait] pub trait LspAdapter: 'static + Send + Sync { async fn name(&self) -> LanguageServerName; async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result>; + fn will_fetch_server( + &self, + _: &Arc, + _: &mut AsyncAppContext, + ) -> Option>> { + None + } + + fn will_start_server( + &self, + _: &Arc, + _: &mut AsyncAppContext, + ) -> Option>> { + None + } + async fn fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result; - async fn cached_server_binary(&self, container_dir: PathBuf) -> 
Option; + async fn cached_server_binary( + &self, + container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, + ) -> Option; async fn process_diagnostics(&self, _: &mut lsp::PublishDiagnosticsParams) {} @@ -513,10 +557,7 @@ pub struct LanguageRegistry { login_shell_env_loaded: Shared>, #[allow(clippy::type_complexity)] lsp_binary_paths: Mutex< - HashMap< - LanguageServerName, - Shared>>>, - >, + HashMap>>>>, >, executor: Option>, } @@ -812,7 +853,7 @@ impl LanguageRegistry { language: Arc, adapter: Arc, root_path: Arc, - http_client: Arc, + delegate: Arc, cx: &mut AppContext, ) -> Option { let server_id = self.state.write().next_language_server_id(); @@ -860,35 +901,40 @@ impl LanguageRegistry { .log_err()?; let this = self.clone(); let language = language.clone(); - let http_client = http_client.clone(); let download_dir = download_dir.clone(); let root_path = root_path.clone(); let adapter = adapter.clone(); let lsp_binary_statuses = self.lsp_binary_statuses_tx.clone(); let login_shell_env_loaded = self.login_shell_env_loaded.clone(); - let task = cx.spawn(|cx| async move { + let task = cx.spawn(|mut cx| async move { login_shell_env_loaded.await; - let mut lock = this.lsp_binary_paths.lock(); - let entry = lock + let entry = this + .lsp_binary_paths + .lock() .entry(adapter.name.clone()) .or_insert_with(|| { - get_binary( - adapter.clone(), - language.clone(), - http_client, - download_dir, - lsp_binary_statuses, - ) - .map_err(Arc::new) - .boxed() + cx.spawn(|cx| { + get_binary( + adapter.clone(), + language.clone(), + delegate.clone(), + download_dir, + lsp_binary_statuses, + cx, + ) + .map_err(Arc::new) + }) .shared() }) .clone(); - drop(lock); let binary = entry.clone().map_err(|e| anyhow!(e)).await?; + if let Some(task) = adapter.will_start_server(&delegate, &mut cx) { + task.await?; + } + let server = lsp::LanguageServer::new( server_id, &binary.path, @@ -958,9 +1004,10 @@ impl Default for LanguageRegistry { async fn get_binary( adapter: Arc, language: 
Arc, - http_client: Arc, + delegate: Arc, download_dir: Arc, statuses: async_broadcast::Sender<(Arc, LanguageServerBinaryStatus)>, + mut cx: AsyncAppContext, ) -> Result { let container_dir = download_dir.join(adapter.name.0.as_ref()); if !container_dir.exists() { @@ -969,17 +1016,24 @@ async fn get_binary( .context("failed to create container directory")?; } + if let Some(task) = adapter.will_fetch_server(&delegate, &mut cx) { + task.await?; + } + let binary = fetch_latest_binary( adapter.clone(), language.clone(), - http_client, + delegate.as_ref(), &container_dir, statuses.clone(), ) .await; if let Err(error) = binary.as_ref() { - if let Some(cached) = adapter.cached_server_binary(container_dir).await { + if let Some(cached) = adapter + .cached_server_binary(container_dir, delegate.as_ref()) + .await + { statuses .broadcast((language.clone(), LanguageServerBinaryStatus::Cached)) .await?; @@ -1001,7 +1055,7 @@ async fn get_binary( async fn fetch_latest_binary( adapter: Arc, language: Arc, - http_client: Arc, + delegate: &dyn LspAdapterDelegate, container_dir: &Path, lsp_binary_statuses_tx: async_broadcast::Sender<(Arc, LanguageServerBinaryStatus)>, ) -> Result { @@ -1012,14 +1066,12 @@ async fn fetch_latest_binary( LanguageServerBinaryStatus::CheckingForUpdate, )) .await?; - let version_info = adapter - .fetch_latest_server_version(http_client.clone()) - .await?; + let version_info = adapter.fetch_latest_server_version(delegate).await?; lsp_binary_statuses_tx .broadcast((language.clone(), LanguageServerBinaryStatus::Downloading)) .await?; let binary = adapter - .fetch_server_binary(version_info, http_client, container_dir.to_path_buf()) + .fetch_server_binary(version_info, container_dir.to_path_buf(), delegate) .await?; lsp_binary_statuses_tx .broadcast((language.clone(), LanguageServerBinaryStatus::Downloaded)) @@ -1543,7 +1595,7 @@ impl LspAdapter for Arc { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { 
unreachable!(); } @@ -1551,13 +1603,17 @@ impl LspAdapter for Arc { async fn fetch_server_binary( &self, _: Box, - _: Arc, _: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { unreachable!(); } - async fn cached_server_binary(&self, _: PathBuf) -> Option { + async fn cached_server_binary( + &self, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { unreachable!(); } diff --git a/crates/language/src/syntax_map.rs b/crates/language/src/syntax_map.rs index 7e5664c1bd68d85fa83ebac497acb15e43d0eed4..c589e37d8a70277d2d3e2d7372e7b3a037862785 100644 --- a/crates/language/src/syntax_map.rs +++ b/crates/language/src/syntax_map.rs @@ -288,7 +288,7 @@ impl SyntaxSnapshot { }; if target.cmp(&cursor.start(), text).is_gt() { let slice = cursor.slice(&target, Bias::Left, text); - layers.push_tree(slice, text); + layers.append(slice, text); } } // If this layer follows all of the edits, then preserve it and any @@ -303,7 +303,7 @@ impl SyntaxSnapshot { Bias::Left, text, ); - layers.push_tree(slice, text); + layers.append(slice, text); continue; }; @@ -369,7 +369,7 @@ impl SyntaxSnapshot { cursor.next(text); } - layers.push_tree(cursor.suffix(&text), &text); + layers.append(cursor.suffix(&text), &text); drop(cursor); self.layers = layers; } @@ -478,7 +478,7 @@ impl SyntaxSnapshot { if bounded_position.cmp(&cursor.start(), &text).is_gt() { let slice = cursor.slice(&bounded_position, Bias::Left, text); if !slice.is_empty() { - layers.push_tree(slice, &text); + layers.append(slice, &text); if changed_regions.prune(cursor.end(text), text) { done = false; } diff --git a/crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift b/crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift index a0326b24a1c480d55bc307b178f6562cba1b3d07..40c1319e8f9111dac6139e723699ba76fd8d6111 100644 --- a/crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift +++ 
b/crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift @@ -6,17 +6,23 @@ import ScreenCaptureKit class LKRoomDelegate: RoomDelegate { var data: UnsafeRawPointer var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void + var onDidSubscribeToRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void + var onDidUnsubscribeFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void init( data: UnsafeRawPointer, onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void, + onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void, + onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void, onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void, onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void) { self.data = data self.onDidDisconnect = onDidDisconnect + self.onDidSubscribeToRemoteAudioTrack = onDidSubscribeToRemoteAudioTrack + self.onDidUnsubscribeFromRemoteAudioTrack = onDidUnsubscribeFromRemoteAudioTrack self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack } @@ -30,12 +36,16 @@ class LKRoomDelegate: RoomDelegate { func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) { if track.kind == .video { self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! 
as CFString, Unmanaged.passUnretained(track).toOpaque()) + } else if track.kind == .audio { + self.onDidSubscribeToRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque()) } } func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) { if track.kind == .video { self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString) + } else if track.kind == .audio { + self.onDidUnsubscribeFromRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString) } } } @@ -77,12 +87,16 @@ class LKVideoRenderer: NSObject, VideoRenderer { public func LKRoomDelegateCreate( data: UnsafeRawPointer, onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void, + onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void, + onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void, onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void, onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void ) -> UnsafeMutableRawPointer { let delegate = LKRoomDelegate( data: data, onDidDisconnect: onDidDisconnect, + onDidSubscribeToRemoteAudioTrack: onDidSubscribeToRemoteAudioTrack, + onDidUnsubscribeFromRemoteAudioTrack: onDidUnsubscribeFromRemoteAudioTrack, onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack, onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack ) @@ -123,6 +137,18 @@ public func LKRoomPublishVideoTrack(room: UnsafeRawPointer, track: UnsafeRawPoin } } +@_cdecl("LKRoomPublishAudioTrack") +public func LKRoomPublishAudioTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) 
(UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) { + let room = Unmanaged.fromOpaque(room).takeUnretainedValue() + let track = Unmanaged.fromOpaque(track).takeUnretainedValue() + room.localParticipant?.publishAudioTrack(track: track).then { publication in + callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil) + }.catch { error in + callback(callback_data, nil, error.localizedDescription as CFString) + } +} + + @_cdecl("LKRoomUnpublishTrack") public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) { let room = Unmanaged.fromOpaque(room).takeUnretainedValue() @@ -130,6 +156,20 @@ public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawP let _ = room.localParticipant?.unpublish(publication: publication) } +@_cdecl("LKRoomAudioTracksForRemoteParticipant") +public func LKRoomAudioTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? { + let room = Unmanaged.fromOpaque(room).takeUnretainedValue() + + for (_, participant) in room.remoteParticipants { + if participant.identity == participantId as String { + return participant.audioTracks.compactMap { $0.track as? RemoteAudioTrack } as CFArray? + } + } + + return nil; +} + + @_cdecl("LKRoomVideoTracksForRemoteParticipant") public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? 
{ let room = Unmanaged.fromOpaque(room).takeUnretainedValue() @@ -143,6 +183,17 @@ public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, partic return nil; } +@_cdecl("LKLocalAudioTrackCreateTrack") +public func LKLocalAudioTrackCreateTrack() -> UnsafeMutableRawPointer { + let track = LocalAudioTrack.createTrack(options: AudioCaptureOptions( + echoCancellation: true, + noiseSuppression: true + )) + + return Unmanaged.passRetained(track).toOpaque() +} + + @_cdecl("LKCreateScreenShareTrackForDisplay") public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer { let display = Unmanaged.fromOpaque(display).takeUnretainedValue() @@ -150,6 +201,19 @@ public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) return Unmanaged.passRetained(track).toOpaque() } +@_cdecl("LKRemoteAudioTrackStart") +public func LKRemoteAudioTrackStart(track: UnsafeRawPointer, onStart: @escaping @convention(c) (UnsafeRawPointer, Bool) -> Void, callbackData: UnsafeRawPointer) { + let track = Unmanaged.fromOpaque(track).takeUnretainedValue() as! RemoteAudioTrack + + track.start().then { success in + onStart(callbackData, success) + } + .catch { _ in + onStart(callbackData, false) + } +} + + @_cdecl("LKVideoRendererCreate") public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer { Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque() @@ -169,6 +233,12 @@ public func LKRemoteVideoTrackGetSid(track: UnsafeRawPointer) -> CFString { return track.sid! as CFString } +@_cdecl("LKRemoteAudioTrackGetSid") +public func LKRemoteAudioTrackGetSid(track: UnsafeRawPointer) -> CFString { + let track = Unmanaged.fromOpaque(track).takeUnretainedValue() + return track.sid! 
as CFString +} + @_cdecl("LKDisplaySources") public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) { MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in diff --git a/crates/live_kit_client/examples/test_app.rs b/crates/live_kit_client/examples/test_app.rs index 96480e92bc53095e68d80c5d86e6b12c5e7de15b..faf1b54798f3722fe335fdf0e27748b94e9aba9e 100644 --- a/crates/live_kit_client/examples/test_app.rs +++ b/crates/live_kit_client/examples/test_app.rs @@ -1,6 +1,10 @@ +use std::time::Duration; + use futures::StreamExt; use gpui::{actions, keymap_matcher::Binding, Menu, MenuItem}; -use live_kit_client::{LocalVideoTrack, RemoteVideoTrackUpdate, Room}; +use live_kit_client::{ + LocalAudioTrack, LocalVideoTrack, RemoteAudioTrackUpdate, RemoteVideoTrackUpdate, Room, +}; use live_kit_server::token::{self, VideoGrant}; use log::LevelFilter; use simplelog::SimpleLogger; @@ -11,6 +15,12 @@ fn main() { SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger"); gpui::App::new(()).unwrap().run(|cx| { + #[cfg(any(test, feature = "test-support"))] + println!("USING TEST LIVEKIT"); + + #[cfg(not(any(test, feature = "test-support")))] + println!("USING REAL LIVEKIT"); + cx.platform().activate(true); cx.add_global_action(quit); @@ -49,35 +59,75 @@ fn main() { let room_b = Room::new(); room_b.connect(&live_kit_url, &user2_token).await.unwrap(); - let mut track_changes = room_b.remote_video_track_updates(); + let mut audio_track_updates = room_b.remote_audio_track_updates(); + let audio_track = LocalAudioTrack::create(); + let audio_track_publication = room_a.publish_audio_track(&audio_track).await.unwrap(); + + if let RemoteAudioTrackUpdate::Subscribed(track) = + audio_track_updates.next().await.unwrap() + { + let remote_tracks = room_b.remote_audio_tracks("test-participant-1"); + 
assert_eq!(remote_tracks.len(), 1); + assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1"); + assert_eq!(track.publisher_id(), "test-participant-1"); + } else { + panic!("unexpected message"); + } + + println!("Pausing for 5 seconds to test audio, make some noise!"); + let timer = cx.background().timer(Duration::from_secs(5)); + timer.await; + + let remote_audio_track = room_b + .remote_audio_tracks("test-participant-1") + .pop() + .unwrap(); + room_a.unpublish_track(audio_track_publication); + if let RemoteAudioTrackUpdate::Unsubscribed { + publisher_id, + track_id, + } = audio_track_updates.next().await.unwrap() + { + assert_eq!(publisher_id, "test-participant-1"); + assert_eq!(remote_audio_track.sid(), track_id); + assert_eq!(room_b.remote_audio_tracks("test-participant-1").len(), 0); + } else { + panic!("unexpected message"); + } + let mut video_track_updates = room_b.remote_video_track_updates(); let displays = room_a.display_sources().await.unwrap(); let display = displays.into_iter().next().unwrap(); - let track_a = LocalVideoTrack::screen_share_for_display(&display); - let track_a_publication = room_a.publish_video_track(&track_a).await.unwrap(); + let local_video_track = LocalVideoTrack::screen_share_for_display(&display); + let local_video_track_publication = room_a + .publish_video_track(&local_video_track) + .await + .unwrap(); - if let RemoteVideoTrackUpdate::Subscribed(track) = track_changes.next().await.unwrap() { - let remote_tracks = room_b.remote_video_tracks("test-participant-1"); - assert_eq!(remote_tracks.len(), 1); - assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1"); + if let RemoteVideoTrackUpdate::Subscribed(track) = + video_track_updates.next().await.unwrap() + { + let remote_video_tracks = room_b.remote_video_tracks("test-participant-1"); + assert_eq!(remote_video_tracks.len(), 1); + assert_eq!(remote_video_tracks[0].publisher_id(), "test-participant-1"); assert_eq!(track.publisher_id(), "test-participant-1"); 
} else { panic!("unexpected message"); } - let remote_track = room_b + let remote_video_track = room_b .remote_video_tracks("test-participant-1") .pop() .unwrap(); - room_a.unpublish_track(track_a_publication); + room_a.unpublish_track(local_video_track_publication); if let RemoteVideoTrackUpdate::Unsubscribed { publisher_id, track_id, - } = track_changes.next().await.unwrap() + } = video_track_updates.next().await.unwrap() { assert_eq!(publisher_id, "test-participant-1"); - assert_eq!(remote_track.sid(), track_id); + assert_eq!(remote_video_track.sid(), track_id); assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0); } else { panic!("unexpected message"); diff --git a/crates/live_kit_client/src/live_kit_client.rs b/crates/live_kit_client/src/live_kit_client.rs index 2ded57082801428e35ff5ce87d1b8740db0fffb7..0467018c009a9b36f00908d6628311d580fdec91 100644 --- a/crates/live_kit_client/src/live_kit_client.rs +++ b/crates/live_kit_client/src/live_kit_client.rs @@ -4,7 +4,7 @@ pub mod prod; pub use prod::*; #[cfg(any(test, feature = "test-support"))] -mod test; +pub mod test; #[cfg(any(test, feature = "test-support"))] pub use test::*; diff --git a/crates/live_kit_client/src/prod.rs b/crates/live_kit_client/src/prod.rs index f45667e3c3e4b4d00f264b51ff4782e8471e1ebd..6ae493d306136f66f3a6212ccf23ec93b2f50f93 100644 --- a/crates/live_kit_client/src/prod.rs +++ b/crates/live_kit_client/src/prod.rs @@ -21,6 +21,17 @@ extern "C" { fn LKRoomDelegateCreate( callback_data: *mut c_void, on_did_disconnect: extern "C" fn(callback_data: *mut c_void), + on_did_subscribe_to_remote_audio_track: extern "C" fn( + callback_data: *mut c_void, + publisher_id: CFStringRef, + track_id: CFStringRef, + remote_track: *const c_void, + ), + on_did_unsubscribe_from_remote_audio_track: extern "C" fn( + callback_data: *mut c_void, + publisher_id: CFStringRef, + track_id: CFStringRef, + ), on_did_subscribe_to_remote_video_track: extern "C" fn( callback_data: *mut c_void, 
publisher_id: CFStringRef, @@ -49,7 +60,18 @@ extern "C" { callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef), callback_data: *mut c_void, ); + fn LKRoomPublishAudioTrack( + room: *const c_void, + track: *const c_void, + callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef), + callback_data: *mut c_void, + ); fn LKRoomUnpublishTrack(room: *const c_void, publication: *const c_void); + fn LKRoomAudioTracksForRemoteParticipant( + room: *const c_void, + participant_id: CFStringRef, + ) -> CFArrayRef; + fn LKRoomVideoTracksForRemoteParticipant( room: *const c_void, participant_id: CFStringRef, @@ -61,6 +83,13 @@ extern "C" { on_drop: extern "C" fn(callback_data: *mut c_void), ) -> *const c_void; + fn LKRemoteAudioTrackGetSid(track: *const c_void) -> CFStringRef; + // fn LKRemoteAudioTrackStart( + // track: *const c_void, + // callback: extern "C" fn(*mut c_void, bool), + // callback_data: *mut c_void + // ); + fn LKVideoTrackAddRenderer(track: *const c_void, renderer: *const c_void); fn LKRemoteVideoTrackGetSid(track: *const c_void) -> CFStringRef; @@ -73,6 +102,7 @@ extern "C" { ), ); fn LKCreateScreenShareTrackForDisplay(display: *const c_void) -> *const c_void; + fn LKLocalAudioTrackCreateTrack() -> *const c_void; } pub type Sid = String; @@ -89,6 +119,7 @@ pub struct Room { watch::Sender, watch::Receiver, )>, + remote_audio_track_subscribers: Mutex>>, remote_video_track_subscribers: Mutex>>, _delegate: RoomDelegate, } @@ -100,6 +131,7 @@ impl Room { Self { native_room: unsafe { LKRoomCreate(delegate.native_delegate) }, connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)), + remote_audio_track_subscribers: Default::default(), remote_video_track_subscribers: Default::default(), _delegate: delegate, } @@ -191,6 +223,32 @@ impl Room { async { rx.await.unwrap().context("error publishing video track") } } + pub fn publish_audio_track( + self: &Arc, + track: &LocalAudioTrack, + ) -> impl Future> { + let (tx, rx) = 
oneshot::channel::>(); + extern "C" fn callback(tx: *mut c_void, publication: *mut c_void, error: CFStringRef) { + let tx = + unsafe { Box::from_raw(tx as *mut oneshot::Sender>) }; + if error.is_null() { + let _ = tx.send(Ok(LocalTrackPublication(publication))); + } else { + let error = unsafe { CFString::wrap_under_get_rule(error).to_string() }; + let _ = tx.send(Err(anyhow!(error))); + } + } + unsafe { + LKRoomPublishAudioTrack( + self.native_room, + track.0, + callback, + Box::into_raw(Box::new(tx)) as *mut c_void, + ); + } + async { rx.await.unwrap().context("error publishing video track") } + } + pub fn unpublish_track(&self, publication: LocalTrackPublication) { unsafe { LKRoomUnpublishTrack(self.native_room, publication.0); @@ -226,12 +284,65 @@ impl Room { } } + pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec> { + unsafe { + let tracks = LKRoomAudioTracksForRemoteParticipant( + self.native_room, + CFString::new(participant_id).as_concrete_TypeRef(), + ); + + if tracks.is_null() { + Vec::new() + } else { + let tracks = CFArray::wrap_under_get_rule(tracks); + tracks + .into_iter() + .map(|native_track| { + let native_track = *native_track; + let id = + CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track)) + .to_string(); + Arc::new(RemoteAudioTrack::new( + native_track, + id, + participant_id.into(), + )) + }) + .collect() + } + } + } + + pub fn remote_audio_track_updates(&self) -> mpsc::UnboundedReceiver { + let (tx, rx) = mpsc::unbounded(); + self.remote_audio_track_subscribers.lock().push(tx); + rx + } + pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver { let (tx, rx) = mpsc::unbounded(); self.remote_video_track_subscribers.lock().push(tx); rx } + fn did_subscribe_to_remote_audio_track(&self, track: RemoteAudioTrack) { + let track = Arc::new(track); + self.remote_audio_track_subscribers.lock().retain(|tx| { + tx.unbounded_send(RemoteAudioTrackUpdate::Subscribed(track.clone())) + .is_ok() + }); + } + + fn 
did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) { + self.remote_audio_track_subscribers.lock().retain(|tx| { + tx.unbounded_send(RemoteAudioTrackUpdate::Unsubscribed { + publisher_id: publisher_id.clone(), + track_id: track_id.clone(), + }) + .is_ok() + }); + } + fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) { let track = Arc::new(track); self.remote_video_track_subscribers.lock().retain(|tx| { @@ -294,6 +405,8 @@ impl RoomDelegate { LKRoomDelegateCreate( weak_room as *mut c_void, Self::on_did_disconnect, + Self::on_did_subscribe_to_remote_audio_track, + Self::on_did_unsubscribe_from_remote_audio_track, Self::on_did_subscribe_to_remote_video_track, Self::on_did_unsubscribe_from_remote_video_track, ) @@ -312,6 +425,36 @@ impl RoomDelegate { let _ = Weak::into_raw(room); } + extern "C" fn on_did_subscribe_to_remote_audio_track( + room: *mut c_void, + publisher_id: CFStringRef, + track_id: CFStringRef, + track: *const c_void, + ) { + let room = unsafe { Weak::from_raw(room as *mut Room) }; + let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() }; + let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() }; + let track = RemoteAudioTrack::new(track, track_id, publisher_id); + if let Some(room) = room.upgrade() { + room.did_subscribe_to_remote_audio_track(track); + } + let _ = Weak::into_raw(room); + } + + extern "C" fn on_did_unsubscribe_from_remote_audio_track( + room: *mut c_void, + publisher_id: CFStringRef, + track_id: CFStringRef, + ) { + let room = unsafe { Weak::from_raw(room as *mut Room) }; + let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() }; + let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() }; + if let Some(room) = room.upgrade() { + room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id); + } + let _ = Weak::into_raw(room); + } + extern "C" fn 
on_did_subscribe_to_remote_video_track( room: *mut c_void, publisher_id: CFStringRef, @@ -352,6 +495,20 @@ impl Drop for RoomDelegate { } } +pub struct LocalAudioTrack(*const c_void); + +impl LocalAudioTrack { + pub fn create() -> Self { + Self(unsafe { LKLocalAudioTrackCreateTrack() }) + } +} + +impl Drop for LocalAudioTrack { + fn drop(&mut self) { + unsafe { CFRelease(self.0) } + } +} + pub struct LocalVideoTrack(*const c_void); impl LocalVideoTrack { @@ -374,6 +531,34 @@ impl Drop for LocalTrackPublication { } } +#[derive(Debug)] +pub struct RemoteAudioTrack { + _native_track: *const c_void, + sid: Sid, + publisher_id: String, +} + +impl RemoteAudioTrack { + fn new(native_track: *const c_void, sid: Sid, publisher_id: String) -> Self { + unsafe { + CFRetain(native_track); + } + Self { + _native_track: native_track, + sid, + publisher_id, + } + } + + pub fn sid(&self) -> &str { + &self.sid + } + + pub fn publisher_id(&self) -> &str { + &self.publisher_id + } +} + #[derive(Debug)] pub struct RemoteVideoTrack { native_track: *const c_void, @@ -453,6 +638,11 @@ pub enum RemoteVideoTrackUpdate { Unsubscribed { publisher_id: Sid, track_id: Sid }, } +pub enum RemoteAudioTrackUpdate { + Subscribed(Arc), + Unsubscribed { publisher_id: Sid, track_id: Sid }, +} + pub struct MacOSDisplay(*const c_void); impl MacOSDisplay { diff --git a/crates/live_kit_client/src/test.rs b/crates/live_kit_client/src/test.rs index 8d1e4fa16abca12af30878779b7cf7d403f74250..2f89617363db7a9e9f51251ebf70b5e8b0eea482 100644 --- a/crates/live_kit_client/src/test.rs +++ b/crates/live_kit_client/src/test.rs @@ -67,7 +67,7 @@ impl TestServer { } } - async fn create_room(&self, room: String) -> Result<()> { + pub async fn create_room(&self, room: String) -> Result<()> { self.background.simulate_random_delay().await; let mut server_rooms = self.rooms.lock(); if server_rooms.contains_key(&room) { @@ -104,7 +104,7 @@ impl TestServer { room_name )) } else { - for track in &room.tracks { + for track in 
&room.video_tracks { client_room .0 .lock() @@ -182,7 +182,7 @@ impl TestServer { frames_rx: local_track.frames_rx.clone(), }); - room.tracks.push(track.clone()); + room.video_tracks.push(track.clone()); for (id, client_room) in &room.client_rooms { if *id != identity { @@ -199,6 +199,43 @@ impl TestServer { Ok(()) } + async fn publish_audio_track( + &self, + token: String, + _local_track: &LocalAudioTrack, + ) -> Result<()> { + self.background.simulate_random_delay().await; + let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let identity = claims.sub.unwrap().to_string(); + let room_name = claims.video.room.unwrap(); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + + let track = Arc::new(RemoteAudioTrack { + sid: nanoid::nanoid!(17), + publisher_id: identity.clone(), + }); + + room.audio_tracks.push(track.clone()); + + for (id, client_room) in &room.client_rooms { + if *id != identity { + let _ = client_room + .0 + .lock() + .audio_track_updates + .0 + .try_broadcast(RemoteAudioTrackUpdate::Subscribed(track.clone())) + .unwrap(); + } + } + + Ok(()) + } + fn video_tracks(&self, token: String) -> Result>> { let claims = live_kit_server::token::validate(&token, &self.secret_key)?; let room_name = claims.video.room.unwrap(); @@ -207,14 +244,26 @@ impl TestServer { let room = server_rooms .get_mut(&*room_name) .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; - Ok(room.tracks.clone()) + Ok(room.video_tracks.clone()) + } + + fn audio_tracks(&self, token: String) -> Result>> { + let claims = live_kit_server::token::validate(&token, &self.secret_key)?; + let room_name = claims.video.room.unwrap(); + + let mut server_rooms = self.rooms.lock(); + let room = server_rooms + .get_mut(&*room_name) + .ok_or_else(|| anyhow!("room {} does not exist", room_name))?; + Ok(room.audio_tracks.clone()) } } #[derive(Default)] struct 
TestServerRoom { client_rooms: HashMap>, - tracks: Vec>, + video_tracks: Vec>, + audio_tracks: Vec>, } impl TestServerRoom {} @@ -266,6 +315,10 @@ struct RoomState { watch::Receiver, ), display_sources: Vec, + audio_track_updates: ( + async_broadcast::Sender, + async_broadcast::Receiver, + ), video_track_updates: ( async_broadcast::Sender, async_broadcast::Receiver, @@ -286,6 +339,7 @@ impl Room { connection: watch::channel_with(ConnectionState::Disconnected), display_sources: Default::default(), video_track_updates: async_broadcast::broadcast(128), + audio_track_updates: async_broadcast::broadcast(128), }))) } @@ -327,8 +381,34 @@ impl Room { Ok(LocalTrackPublication) } } + pub fn publish_audio_track( + self: &Arc, + track: &LocalAudioTrack, + ) -> impl Future> { + let this = self.clone(); + let track = track.clone(); + async move { + this.test_server() + .publish_audio_track(this.token(), &track) + .await?; + Ok(LocalTrackPublication) + } + } - pub fn unpublish_track(&self, _: LocalTrackPublication) {} + pub fn unpublish_track(&self, _publication: LocalTrackPublication) {} + + pub fn remote_audio_tracks(&self, publisher_id: &str) -> Vec> { + if !self.is_connected() { + return Vec::new(); + } + + self.test_server() + .audio_tracks(self.token()) + .unwrap() + .into_iter() + .filter(|track| track.publisher_id() == publisher_id) + .collect() + } pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec> { if !self.is_connected() { @@ -343,6 +423,10 @@ impl Room { .collect() } + pub fn remote_audio_track_updates(&self) -> impl Stream { + self.0.lock().audio_track_updates.1.clone() + } + pub fn remote_video_track_updates(&self) -> impl Stream { self.0.lock().video_track_updates.1.clone() } @@ -404,6 +488,15 @@ impl LocalVideoTrack { } } +#[derive(Clone)] +pub struct LocalAudioTrack; + +impl LocalAudioTrack { + pub fn create() -> Self { + Self + } +} + pub struct RemoteVideoTrack { sid: Sid, publisher_id: Sid, @@ -424,12 +517,33 @@ impl RemoteVideoTrack { } } +pub 
struct RemoteAudioTrack { + sid: Sid, + publisher_id: Sid, +} + +impl RemoteAudioTrack { + pub fn sid(&self) -> &str { + &self.sid + } + + pub fn publisher_id(&self) -> &str { + &self.publisher_id + } +} + #[derive(Clone)] pub enum RemoteVideoTrackUpdate { Subscribed(Arc), Unsubscribed { publisher_id: Sid, track_id: Sid }, } +#[derive(Clone)] +pub enum RemoteAudioTrackUpdate { + Subscribed(Arc), + Unsubscribed { publisher_id: Sid, track_id: Sid }, +} + #[derive(Clone)] pub struct MacOSDisplay { frames: ( diff --git a/crates/lsp/src/lsp.rs b/crates/lsp/src/lsp.rs index 96d43820758ee0ad17dd8bb11861fc57573f3f17..af73a16ec27a38fb18aa7840d92ef2541fec163e 100644 --- a/crates/lsp/src/lsp.rs +++ b/crates/lsp/src/lsp.rs @@ -103,14 +103,14 @@ struct Notification<'a, T> { params: T, } -#[derive(Deserialize)] +#[derive(Debug, Clone, Deserialize)] struct AnyNotification<'a> { #[serde(default)] id: Option, #[serde(borrow)] method: &'a str, - #[serde(borrow)] - params: &'a RawValue, + #[serde(borrow, default)] + params: Option<&'a RawValue>, } #[derive(Debug, Serialize, Deserialize)] @@ -157,9 +157,12 @@ impl LanguageServer { "unhandled notification {}:\n{}", notification.method, serde_json::to_string_pretty( - &Value::from_str(notification.params.get()).unwrap() + ¬ification + .params + .and_then(|params| Value::from_str(params.get()).ok()) + .unwrap_or(Value::Null) ) - .unwrap() + .unwrap(), ); }, ); @@ -279,7 +282,11 @@ impl LanguageServer { if let Ok(msg) = serde_json::from_slice::(&buffer) { if let Some(handler) = notification_handlers.lock().get_mut(msg.method) { - handler(msg.id, msg.params.get(), cx.clone()); + handler( + msg.id, + &msg.params.map(|params| params.get()).unwrap_or("null"), + cx.clone(), + ); } else { on_unhandled_notification(msg); } @@ -828,7 +835,13 @@ impl LanguageServer { cx, move |msg| { notifications_tx - .try_send((msg.method.to_string(), msg.params.get().to_string())) + .try_send(( + msg.method.to_string(), + msg.params + .map(|raw_value| 
raw_value.get()) + .unwrap_or("null") + .to_string(), + )) .ok(); }, )), diff --git a/crates/project/src/project.rs b/crates/project/src/project.rs index 5069c805b729698b95a0988f56d15417cb2cc0c6..27d424879f1d566d1bf3d551912a00de89435840 100644 --- a/crates/project/src/project.rs +++ b/crates/project/src/project.rs @@ -38,9 +38,9 @@ use language::{ }, range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel, Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _, - Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch, - PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction, - Unclipped, + Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt, + Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, + ToPointUtf16, Transaction, Unclipped, }; use log::error; use lsp::{ @@ -75,8 +75,8 @@ use std::{ }; use terminals::Terminals; use util::{ - debug_panic, defer, merge_json_value_into, paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, - ResultExt, TryFutureExt as _, + debug_panic, defer, http::HttpClient, merge_json_value_into, + paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _, }; pub use fs::*; @@ -252,6 +252,7 @@ pub enum Event { LanguageServerAdded(LanguageServerId), LanguageServerRemoved(LanguageServerId), LanguageServerLog(LanguageServerId, String), + Notification(String), ActiveEntryChanged(Option), WorktreeAdded, WorktreeRemoved(WorktreeId), @@ -435,6 +436,11 @@ pub enum FormatTrigger { Manual, } +struct ProjectLspAdapterDelegate { + project: ModelHandle, + http_client: Arc, +} + impl FormatTrigger { fn from_proto(value: i32) -> FormatTrigger { match value { @@ -2407,7 +2413,7 @@ impl Project { language.clone(), adapter.clone(), worktree_path.clone(), - self.client.http_client(), + ProjectLspAdapterDelegate::new(self, cx), cx, 
) { Some(pending_server) => pending_server, @@ -7188,6 +7194,26 @@ impl> From<(WorktreeId, P)> for ProjectPath { } } +impl ProjectLspAdapterDelegate { + fn new(project: &Project, cx: &ModelContext) -> Arc { + Arc::new(Self { + project: cx.handle(), + http_client: project.client.http_client(), + }) + } +} + +impl LspAdapterDelegate for ProjectLspAdapterDelegate { + fn show_notification(&self, message: &str, cx: &mut AppContext) { + self.project + .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned()))); + } + + fn http_client(&self) -> Arc { + self.http_client.clone() + } +} + fn split_operations( mut operations: Vec, ) -> impl Iterator> { diff --git a/crates/project/src/worktree.rs b/crates/project/src/worktree.rs index 561da2c292842e83fc9f10be8a90db21bc6349f2..2b0ba3d5218ea86d69dfdac2381bf2a7d62290be 100644 --- a/crates/project/src/worktree.rs +++ b/crates/project/src/worktree.rs @@ -1470,7 +1470,7 @@ impl Snapshot { break; } } - new_entries_by_path.push_tree(cursor.suffix(&()), &()); + new_entries_by_path.append(cursor.suffix(&()), &()); new_entries_by_path }; @@ -2259,7 +2259,7 @@ impl BackgroundScannerState { let mut cursor = self.snapshot.entries_by_path.cursor::(); new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &()); removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &()); - new_entries.push_tree(cursor.suffix(&()), &()); + new_entries.append(cursor.suffix(&()), &()); } self.snapshot.entries_by_path = new_entries; diff --git a/crates/rope/src/rope.rs b/crates/rope/src/rope.rs index 6b6f364fdb0b7395be9db6d516a46f1b40d042bd..0b76dba319fce58db77a4ea8a0c1e6c1921ecb95 100644 --- a/crates/rope/src/rope.rs +++ b/crates/rope/src/rope.rs @@ -53,7 +53,7 @@ impl Rope { } } - self.chunks.push_tree(chunks.suffix(&()), &()); + self.chunks.append(chunks.suffix(&()), &()); self.check_invariants(); } diff --git a/crates/settings/Cargo.toml b/crates/settings/Cargo.toml index 
dab4b919923678bf73ec588e5affb7cf48d8bda4..b5dc301a5c00cef3e58f3ae12702cdc222db1ee9 100644 --- a/crates/settings/Cargo.toml +++ b/crates/settings/Cargo.toml @@ -21,7 +21,7 @@ util = { path = "../util" } anyhow.workspace = true futures.workspace = true -json_comments = "0.2" +serde_json_lenient = {version = "0.1", features = ["preserve_order", "raw_value"]} lazy_static.workspace = true postage.workspace = true rust-embed.workspace = true @@ -37,6 +37,6 @@ tree-sitter-json = "*" [dev-dependencies] gpui = { path = "../gpui", features = ["test-support"] } fs = { path = "../fs", features = ["test-support"] } - +indoc.workspace = true pretty_assertions = "1.3.0" unindent.workspace = true diff --git a/crates/settings/src/keymap_file.rs b/crates/settings/src/keymap_file.rs index e607a254bd70d066be2cae4efcb78d82d222a861..d2e656ebe3cd1231f665a69e7284b1121c218776 100644 --- a/crates/settings/src/keymap_file.rs +++ b/crates/settings/src/keymap_file.rs @@ -1,5 +1,5 @@ use crate::{settings_store::parse_json_with_comments, SettingsAssets}; -use anyhow::{Context, Result}; +use anyhow::{anyhow, Context, Result}; use collections::BTreeMap; use gpui::{keymap_matcher::Binding, AppContext}; use schemars::{ @@ -8,7 +8,7 @@ use schemars::{ JsonSchema, }; use serde::Deserialize; -use serde_json::{value::RawValue, Value}; +use serde_json::Value; use util::{asset_str, ResultExt}; #[derive(Deserialize, Default, Clone, JsonSchema)] @@ -24,7 +24,7 @@ pub struct KeymapBlock { #[derive(Deserialize, Default, Clone)] #[serde(transparent)] -pub struct KeymapAction(Box); +pub struct KeymapAction(Value); impl JsonSchema for KeymapAction { fn schema_name() -> String { @@ -37,11 +37,12 @@ impl JsonSchema for KeymapAction { } #[derive(Deserialize)] -struct ActionWithData(Box, Box); +struct ActionWithData(Box, Value); impl KeymapFile { pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> { let content = asset_str::(asset_path); + Self::parse(content.as_ref())?.add_to_cx(cx) } @@ -54,18 
+55,27 @@ impl KeymapFile { let bindings = bindings .into_iter() .filter_map(|(keystroke, action)| { - let action = action.0.get(); + let action = action.0; // This is a workaround for a limitation in serde: serde-rs/json#497 // We want to deserialize the action data as a `RawValue` so that we can // deserialize the action itself dynamically directly from the JSON // string. But `RawValue` currently does not work inside of an untagged enum. - if action.starts_with('[') { - let ActionWithData(name, data) = serde_json::from_str(action).log_err()?; - cx.deserialize_action(&name, Some(data.get())) + if let Value::Array(items) = action { + let Ok([name, data]): Result<[serde_json::Value; 2], _> = items.try_into() else { + return Some(Err(anyhow!("Expected array of length 2"))); + }; + let serde_json::Value::String(name) = name else { + return Some(Err(anyhow!("Expected first item in array to be a string."))) + }; + cx.deserialize_action( + &name, + Some(data), + ) + } else if let Value::String(name) = action { + cx.deserialize_action(&name, None) } else { - let name = serde_json::from_str(action).log_err()?; - cx.deserialize_action(name, None) + return Some(Err(anyhow!("Expected two-element array, got {:?}", action))); } .with_context(|| { format!( @@ -118,3 +128,24 @@ impl KeymapFile { serde_json::to_value(root_schema).unwrap() } } + +#[cfg(test)] +mod tests { + use crate::KeymapFile; + + #[test] + fn can_deserialize_keymap_with_trailing_comma() { + let json = indoc::indoc! 
{"[ + // Standard macOS bindings + { + \"bindings\": { + \"up\": \"menu::SelectPrev\", + }, + }, + ] + " + + }; + KeymapFile::parse(json).unwrap(); + } +} diff --git a/crates/settings/src/settings_store.rs b/crates/settings/src/settings_store.rs index 39c6a2c122e567155d76245d883e2fc6e4ab205a..1188018cd892143788c0f6629aa72425eee2dc85 100644 --- a/crates/settings/src/settings_store.rs +++ b/crates/settings/src/settings_store.rs @@ -834,11 +834,8 @@ fn to_pretty_json(value: &impl Serialize, indent_size: usize, indent_prefix_len: } pub fn parse_json_with_comments(content: &str) -> Result { - Ok(serde_json::from_reader( - json_comments::CommentSettings::c_style().strip_comments(content.as_bytes()), - )?) + Ok(serde_json_lenient::from_str(content)?) } - #[cfg(test)] mod tests { use super::*; diff --git a/crates/sum_tree/src/cursor.rs b/crates/sum_tree/src/cursor.rs index 88412f60598e8e5eaafbb52515089b580e2613a2..b78484e6ded5b69c09b081abc7a17d8084fbcdf3 100644 --- a/crates/sum_tree/src/cursor.rs +++ b/crates/sum_tree/src/cursor.rs @@ -669,7 +669,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for () { impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate { fn begin_leaf(&mut self) {} fn end_leaf(&mut self, cx: &::Context) { - self.tree.push_tree( + self.tree.append( SumTree(Arc::new(Node::Leaf { summary: mem::take(&mut self.leaf_summary), items: mem::take(&mut self.leaf_items), @@ -689,7 +689,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate { _: &T::Summary, cx: &::Context, ) { - self.tree.push_tree(tree.clone(), cx); + self.tree.append(tree.clone(), cx); } } diff --git a/crates/sum_tree/src/sum_tree.rs b/crates/sum_tree/src/sum_tree.rs index 6c6150aa3ab45f9b7467a30ea060fbfb0fb27e69..fa467886f03aa6109df1ef3670c4ce79a6f41466 100644 --- a/crates/sum_tree/src/sum_tree.rs +++ b/crates/sum_tree/src/sum_tree.rs @@ -268,7 +268,7 @@ impl SumTree { for item in iter { if leaf.is_some() && leaf.as_ref().unwrap().items().len() == 2 * TREE_BASE { - 
self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx); + self.append(SumTree(Arc::new(leaf.take().unwrap())), cx); } if leaf.is_none() { @@ -295,13 +295,13 @@ impl SumTree { } if leaf.is_some() { - self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx); + self.append(SumTree(Arc::new(leaf.take().unwrap())), cx); } } pub fn push(&mut self, item: T, cx: &::Context) { let summary = item.summary(); - self.push_tree( + self.append( SumTree(Arc::new(Node::Leaf { summary: summary.clone(), items: ArrayVec::from_iter(Some(item)), @@ -311,11 +311,11 @@ impl SumTree { ); } - pub fn push_tree(&mut self, other: Self, cx: &::Context) { + pub fn append(&mut self, other: Self, cx: &::Context) { if !other.0.is_leaf() || !other.0.items().is_empty() { if self.0.height() < other.0.height() { for tree in other.0.child_trees() { - self.push_tree(tree.clone(), cx); + self.append(tree.clone(), cx); } } else if let Some(split_tree) = self.push_tree_recursive(other, cx) { *self = Self::from_child_trees(self.clone(), split_tree, cx); @@ -512,7 +512,7 @@ impl SumTree { } } new_tree.push(item, cx); - new_tree.push_tree(cursor.suffix(cx), cx); + new_tree.append(cursor.suffix(cx), cx); new_tree }; replaced @@ -529,7 +529,7 @@ impl SumTree { cursor.next(cx); } } - new_tree.push_tree(cursor.suffix(cx), cx); + new_tree.append(cursor.suffix(cx), cx); new_tree }; removed @@ -563,7 +563,7 @@ impl SumTree { { new_tree.extend(buffered_items.drain(..), cx); let slice = cursor.slice(&new_key, Bias::Left, cx); - new_tree.push_tree(slice, cx); + new_tree.append(slice, cx); old_item = cursor.item(); } @@ -583,7 +583,7 @@ impl SumTree { } new_tree.extend(buffered_items, cx); - new_tree.push_tree(cursor.suffix(cx), cx); + new_tree.append(cursor.suffix(cx), cx); new_tree }; @@ -719,7 +719,7 @@ mod tests { let mut tree2 = SumTree::new(); tree2.extend(50..100, &()); - tree1.push_tree(tree2, &()); + tree1.append(tree2, &()); assert_eq!( tree1.items(&()), (0..20).chain(50..100).collect::>() @@ -766,7 
+766,7 @@ mod tests { let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &()); new_tree.extend(new_items, &()); cursor.seek(&Count(splice_end), Bias::Right, &()); - new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &()); + new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &()); new_tree }; diff --git a/crates/sum_tree/src/tree_map.rs b/crates/sum_tree/src/tree_map.rs index ea69fb0dca9bfbd5a87bdd22d86c1c22c22af2ca..4bb98d2ac8668cb96afbcf31c717f4bb87dbe16f 100644 --- a/crates/sum_tree/src/tree_map.rs +++ b/crates/sum_tree/src/tree_map.rs @@ -67,7 +67,7 @@ impl TreeMap { removed = Some(cursor.item().unwrap().value.clone()); cursor.next(&()); } - new_tree.push_tree(cursor.suffix(&()), &()); + new_tree.append(cursor.suffix(&()), &()); drop(cursor); self.0 = new_tree; removed @@ -79,7 +79,7 @@ impl TreeMap { let mut cursor = self.0.cursor::>(); let mut new_tree = cursor.slice(&start, Bias::Left, &()); cursor.seek(&end, Bias::Left, &()); - new_tree.push_tree(cursor.suffix(&()), &()); + new_tree.append(cursor.suffix(&()), &()); drop(cursor); self.0 = new_tree; } @@ -117,7 +117,7 @@ impl TreeMap { new_tree.push(updated, &()); cursor.next(&()); } - new_tree.push_tree(cursor.suffix(&()), &()); + new_tree.append(cursor.suffix(&()), &()); drop(cursor); self.0 = new_tree; result diff --git a/crates/text/src/text.rs b/crates/text/src/text.rs index 2693add8ed0ac0d9d032ad5bd3f5e540138cf2fe..28c5125de246e15ea0a75cfe47fdb13a3a0db5f0 100644 --- a/crates/text/src/text.rs +++ b/crates/text/src/text.rs @@ -600,7 +600,7 @@ impl Buffer { let mut old_fragments = self.fragments.cursor::(); let mut new_fragments = old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None); - new_ropes.push_tree(new_fragments.summary().text); + new_ropes.append(new_fragments.summary().text); let mut fragment_start = old_fragments.start().visible; for (range, new_text) in edits { @@ -625,8 +625,8 @@ impl Buffer { } let slice = old_fragments.slice(&range.start, 
Bias::Right, &None); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); + new_ropes.append(slice.summary().text); + new_fragments.append(slice, &None); fragment_start = old_fragments.start().visible; } @@ -728,8 +728,8 @@ impl Buffer { } let suffix = old_fragments.suffix(&None); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); + new_ropes.append(suffix.summary().text); + new_fragments.append(suffix, &None); let (visible_text, deleted_text) = new_ropes.finish(); drop(old_fragments); @@ -828,7 +828,7 @@ impl Buffer { Bias::Left, &cx, ); - new_ropes.push_tree(new_fragments.summary().text); + new_ropes.append(new_fragments.summary().text); let mut fragment_start = old_fragments.start().0.full_offset(); for (range, new_text) in edits { @@ -854,8 +854,8 @@ impl Buffer { let slice = old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx); - new_ropes.push_tree(slice.summary().text); - new_fragments.push_tree(slice, &None); + new_ropes.append(slice.summary().text); + new_fragments.append(slice, &None); fragment_start = old_fragments.start().0.full_offset(); } @@ -986,8 +986,8 @@ impl Buffer { } let suffix = old_fragments.suffix(&cx); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); + new_ropes.append(suffix.summary().text); + new_fragments.append(suffix, &None); let (visible_text, deleted_text) = new_ropes.finish(); drop(old_fragments); @@ -1056,8 +1056,8 @@ impl Buffer { for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) { let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None); - new_ropes.push_tree(preceding_fragments.summary().text); - new_fragments.push_tree(preceding_fragments, &None); + new_ropes.append(preceding_fragments.summary().text); + new_fragments.append(preceding_fragments, &None); if let Some(fragment) = old_fragments.item() { let mut fragment = fragment.clone(); @@ -1087,8 
+1087,8 @@ impl Buffer { } let suffix = old_fragments.suffix(&None); - new_ropes.push_tree(suffix.summary().text); - new_fragments.push_tree(suffix, &None); + new_ropes.append(suffix.summary().text); + new_fragments.append(suffix, &None); drop(old_fragments); let (visible_text, deleted_text) = new_ropes.finish(); @@ -2070,7 +2070,7 @@ impl<'a> RopeBuilder<'a> { } } - fn push_tree(&mut self, len: FragmentTextSummary) { + fn append(&mut self, len: FragmentTextSummary) { self.push(len.visible, true, true); self.push(len.deleted, false, false); } diff --git a/crates/workspace/src/dock.rs b/crates/workspace/src/dock.rs index 48f486381dd223340becb30eb1c594e5a1f55bb4..c174b8d3a5682b16c2b0b049389f5101afdde458 100644 --- a/crates/workspace/src/dock.rs +++ b/crates/workspace/src/dock.rs @@ -598,8 +598,8 @@ impl StatusItemView for PanelButtons { } } -#[cfg(test)] -pub(crate) mod test { +#[cfg(any(test, feature = "test-support"))] +pub mod test { use super::*; use gpui::{ViewContext, WindowContext}; diff --git a/crates/workspace/src/item.rs b/crates/workspace/src/item.rs index 9a3fb5e475dd0c612fecccfefdecab95606bb776..a3e3ab92999fe42e3f8e6a29ae82745883c26df2 100644 --- a/crates/workspace/src/item.rs +++ b/crates/workspace/src/item.rs @@ -710,8 +710,8 @@ impl FollowableItemHandle for ViewHandle { } } -#[cfg(test)] -pub(crate) mod test { +#[cfg(any(test, feature = "test-support"))] +pub mod test { use super::{Item, ItemEvent}; use crate::{ItemId, ItemNavHistory, Pane, Workspace, WorkspaceId}; use gpui::{ diff --git a/crates/workspace/src/persistence.rs b/crates/workspace/src/persistence.rs index d27818d2028eec866d17cc687bb393f37e9cb6e5..dd2aa5a818968c77e94f8ccb1ad0bcaf284f0ecb 100644 --- a/crates/workspace/src/persistence.rs +++ b/crates/workspace/src/persistence.rs @@ -162,6 +162,12 @@ define_connection! 
{ ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT; ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT; + ), + // Add panel zoom persistence + sql!( + ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool + ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool )]; } @@ -196,10 +202,13 @@ impl WorkspaceDb { display, left_dock_visible, left_dock_active_panel, + left_dock_zoom, right_dock_visible, right_dock_active_panel, + right_dock_zoom, bottom_dock_visible, - bottom_dock_active_panel + bottom_dock_active_panel, + bottom_dock_zoom FROM workspaces WHERE workspace_location = ? }) @@ -244,22 +253,28 @@ impl WorkspaceDb { workspace_location, left_dock_visible, left_dock_active_panel, + left_dock_zoom, right_dock_visible, right_dock_active_panel, + right_dock_zoom, bottom_dock_visible, bottom_dock_active_panel, + bottom_dock_zoom, timestamp ) - VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, CURRENT_TIMESTAMP) + VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP) ON CONFLICT DO UPDATE SET workspace_location = ?2, left_dock_visible = ?3, left_dock_active_panel = ?4, - right_dock_visible = ?5, - right_dock_active_panel = ?6, - bottom_dock_visible = ?7, - bottom_dock_active_panel = ?8, + left_dock_zoom = ?5, + right_dock_visible = ?6, + right_dock_active_panel = ?7, + right_dock_zoom = ?8, + bottom_dock_visible = ?9, + bottom_dock_active_panel = ?10, + bottom_dock_zoom = ?11, timestamp = CURRENT_TIMESTAMP ))?((workspace.id, &workspace.location, workspace.docks)) .context("Updating workspace")?; diff --git a/crates/workspace/src/persistence/model.rs b/crates/workspace/src/persistence/model.rs index 9e3c4012cdcb77e082befb553488512f121569af..10750618530642465d73eb3d4016e1d42095c85a 100644 --- a/crates/workspace/src/persistence/model.rs +++ 
b/crates/workspace/src/persistence/model.rs @@ -100,16 +100,19 @@ impl Bind for DockStructure { pub struct DockData { pub(crate) visible: bool, pub(crate) active_panel: Option, + pub(crate) zoom: bool, } impl Column for DockData { fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> { let (visible, next_index) = Option::::column(statement, start_index)?; let (active_panel, next_index) = Option::::column(statement, next_index)?; + let (zoom, next_index) = Option::::column(statement, next_index)?; Ok(( DockData { visible: visible.unwrap_or(false), active_panel, + zoom: zoom.unwrap_or(false), }, next_index, )) @@ -119,7 +122,8 @@ impl Column for DockData { impl Bind for DockData { fn bind(&self, statement: &Statement, start_index: i32) -> Result { let next_index = statement.bind(&self.visible, start_index)?; - statement.bind(&self.active_panel, next_index) + let next_index = statement.bind(&self.active_panel, next_index)?; + statement.bind(&self.zoom, next_index) } } diff --git a/crates/workspace/src/workspace.rs b/crates/workspace/src/workspace.rs index ef8cde78a64aa1260d6be8174b24eb418762ed4d..43ca41ab1d79171808aa7bef8281002e5c081a55 100644 --- a/crates/workspace/src/workspace.rs +++ b/crates/workspace/src/workspace.rs @@ -553,6 +553,10 @@ impl Workspace { } } + project::Event::Notification(message) => this.show_notification(0, cx, |cx| { + cx.add_view(|_| MessageNotification::new(message.clone())) + }), + _ => {} } cx.notify() @@ -919,6 +923,7 @@ impl Workspace { this.zoomed = None; this.zoomed_position = None; } + this.update_active_view_for_followers(cx); cx.notify(); } } @@ -1598,9 +1603,7 @@ impl Workspace { focus_center = true; } } else { - if active_panel.is_zoomed(cx) { - cx.focus(active_panel.as_any()); - } + cx.focus(active_panel.as_any()); reveal_dock = true; } } @@ -1946,18 +1949,7 @@ impl Workspace { self.zoomed = None; } self.zoomed_position = None; - - self.update_followers( - 
proto::update_followers::Variant::UpdateActiveView(proto::UpdateActiveView { - id: self.active_item(cx).and_then(|item| { - item.to_followable_item_handle(cx)? - .remote_id(&self.app_state.client, cx) - .map(|id| id.to_proto()) - }), - leader_id: self.leader_for_pane(&pane), - }), - cx, - ); + self.update_active_view_for_followers(cx); cx.notify(); } @@ -2646,6 +2638,30 @@ impl Workspace { Ok(()) } + fn update_active_view_for_followers(&self, cx: &AppContext) { + if self.active_pane.read(cx).has_focus() { + self.update_followers( + proto::update_followers::Variant::UpdateActiveView(proto::UpdateActiveView { + id: self.active_item(cx).and_then(|item| { + item.to_followable_item_handle(cx)? + .remote_id(&self.app_state.client, cx) + .map(|id| id.to_proto()) + }), + leader_id: self.leader_for_pane(&self.active_pane), + }), + cx, + ); + } else { + self.update_followers( + proto::update_followers::Variant::UpdateActiveView(proto::UpdateActiveView { + id: None, + leader_id: None, + }), + cx, + ); + } + } + fn update_followers( &self, update: proto::update_followers::Variant, @@ -2693,12 +2709,10 @@ impl Workspace { .and_then(|id| state.items_by_leader_view_id.get(&id)) { items_to_activate.push((pane.clone(), item.boxed_clone())); - } else { - if let Some(shared_screen) = - self.shared_screen_for_peer(leader_id, pane, cx) - { - items_to_activate.push((pane.clone(), Box::new(shared_screen))); - } + } else if let Some(shared_screen) = + self.shared_screen_for_peer(leader_id, pane, cx) + { + items_to_activate.push((pane.clone(), Box::new(shared_screen))); } } } @@ -2838,7 +2852,7 @@ impl Workspace { cx.notify(); } - fn serialize_workspace(&self, cx: &AppContext) { + fn serialize_workspace(&self, cx: &ViewContext) { fn serialize_pane_handle( pane_handle: &ViewHandle, cx: &AppContext, @@ -2881,7 +2895,7 @@ impl Workspace { } } - fn build_serialized_docks(this: &Workspace, cx: &AppContext) -> DockStructure { + fn build_serialized_docks(this: &Workspace, cx: &ViewContext) -> 
DockStructure { let left_dock = this.left_dock.read(cx); let left_visible = left_dock.is_open(); let left_active_panel = left_dock.visible_panel().and_then(|panel| { @@ -2890,6 +2904,10 @@ impl Workspace { .to_string(), ) }); + let left_dock_zoom = left_dock + .visible_panel() + .map(|panel| panel.is_zoomed(cx)) + .unwrap_or(false); let right_dock = this.right_dock.read(cx); let right_visible = right_dock.is_open(); @@ -2899,6 +2917,10 @@ impl Workspace { .to_string(), ) }); + let right_dock_zoom = right_dock + .visible_panel() + .map(|panel| panel.is_zoomed(cx)) + .unwrap_or(false); let bottom_dock = this.bottom_dock.read(cx); let bottom_visible = bottom_dock.is_open(); @@ -2908,19 +2930,26 @@ impl Workspace { .to_string(), ) }); + let bottom_dock_zoom = bottom_dock + .visible_panel() + .map(|panel| panel.is_zoomed(cx)) + .unwrap_or(false); DockStructure { left: DockData { visible: left_visible, active_panel: left_active_panel, + zoom: left_dock_zoom, }, right: DockData { visible: right_visible, active_panel: right_active_panel, + zoom: right_dock_zoom, }, bottom: DockData { visible: bottom_visible, active_panel: bottom_active_panel, + zoom: bottom_dock_zoom, }, } } @@ -3033,14 +3062,31 @@ impl Workspace { dock.activate_panel(ix, cx); } } + dock.active_panel() + .map(|panel| { + panel.set_zoomed(docks.left.zoom, cx) + }); + if docks.left.visible && docks.left.zoom { + cx.focus_self() + } }); + // TODO: I think the bug is that setting zoom or active undoes the bottom zoom or something workspace.right_dock.update(cx, |dock, cx| { dock.set_open(docks.right.visible, cx); if let Some(active_panel) = docks.right.active_panel { if let Some(ix) = dock.panel_index_for_ui_name(&active_panel, cx) { dock.activate_panel(ix, cx); + } } + dock.active_panel() + .map(|panel| { + panel.set_zoomed(docks.right.zoom, cx) + }); + + if docks.right.visible && docks.right.zoom { + cx.focus_self() + } }); workspace.bottom_dock.update(cx, |dock, cx| { dock.set_open(docks.bottom.visible, 
cx); @@ -3049,8 +3095,18 @@ impl Workspace { dock.activate_panel(ix, cx); } } + + dock.active_panel() + .map(|panel| { + panel.set_zoomed(docks.bottom.zoom, cx) + }); + + if docks.bottom.visible && docks.bottom.zoom { + cx.focus_self() + } }); + cx.notify(); })?; @@ -4413,7 +4469,7 @@ mod tests { workspace.read_with(cx, |workspace, cx| { assert!(workspace.right_dock().read(cx).is_open()); assert!(!panel.is_zoomed(cx)); - assert!(!panel.has_focus(cx)); + assert!(panel.has_focus(cx)); }); // Focus and zoom panel @@ -4488,7 +4544,7 @@ mod tests { workspace.read_with(cx, |workspace, cx| { let pane = pane.read(cx); assert!(!pane.is_zoomed()); - assert!(pane.has_focus()); + assert!(!pane.has_focus()); assert!(workspace.right_dock().read(cx).is_open()); assert!(workspace.zoomed.is_none()); }); diff --git a/crates/zed/src/languages/c.rs b/crates/zed/src/languages/c.rs index 7e4ddcef19189cb0a5704d43ca78a6164186d1de..241b11b47c1091fafe26177317eeb4433e47bfd6 100644 --- a/crates/zed/src/languages/c.rs +++ b/crates/zed/src/languages/c.rs @@ -4,12 +4,11 @@ use futures::StreamExt; pub use language::*; use smol::fs::{self, File}; use std::{any::Any, path::PathBuf, sync::Arc}; -use util::fs::remove_matching; -use util::github::latest_github_release; -use util::http::HttpClient; -use util::ResultExt; - -use util::github::GitHubLspBinaryVersion; +use util::{ + fs::remove_matching, + github::{latest_github_release, GitHubLspBinaryVersion}, + ResultExt, +}; pub struct CLspAdapter; @@ -21,9 +20,9 @@ impl super::LspAdapter for CLspAdapter { async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { - let release = latest_github_release("clangd/clangd", false, http).await?; + let release = latest_github_release("clangd/clangd", false, delegate.http_client()).await?; let asset_name = format!("clangd-mac-{}.zip", release.name); let asset = release .assets @@ -40,8 +39,8 @@ impl super::LspAdapter for CLspAdapter { async fn 
fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let zip_path = container_dir.join(format!("clangd_{}.zip", version.name)); @@ -49,7 +48,8 @@ impl super::LspAdapter for CLspAdapter { let binary_path = version_dir.join("bin/clangd"); if fs::metadata(&binary_path).await.is_err() { - let mut response = http + let mut response = delegate + .http_client() .get(&version.url, Default::default(), true) .await .context("error downloading release")?; @@ -81,7 +81,11 @@ impl super::LspAdapter for CLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last_clangd_dir = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/elixir.rs b/crates/zed/src/languages/elixir.rs index 2939a0fa5f942b9631b2ecd9e13d6c1e5c4de17e..22aaedc0695b9e38d2873c86c978fcc704764a6e 100644 --- a/crates/zed/src/languages/elixir.rs +++ b/crates/zed/src/languages/elixir.rs @@ -1,16 +1,23 @@ use anyhow::{anyhow, Context, Result}; use async_trait::async_trait; use futures::StreamExt; +use gpui::{AsyncAppContext, Task}; pub use language::*; use lsp::{CompletionItemKind, SymbolKind}; use smol::fs::{self, File}; -use std::{any::Any, path::PathBuf, sync::Arc}; -use util::fs::remove_matching; -use util::github::latest_github_release; -use util::http::HttpClient; -use util::ResultExt; - -use util::github::GitHubLspBinaryVersion; +use std::{ + any::Any, + path::PathBuf, + sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, + }, +}; +use util::{ + fs::remove_matching, + github::{latest_github_release, GitHubLspBinaryVersion}, + ResultExt, +}; pub struct ElixirLspAdapter; @@ -20,11 +27,43 @@ impl LspAdapter for ElixirLspAdapter { LanguageServerName("elixir-ls".into()) } 
+ fn will_start_server( + &self, + delegate: &Arc, + cx: &mut AsyncAppContext, + ) -> Option>> { + static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false); + + const NOTIFICATION_MESSAGE: &str = "Could not run the elixir language server, `elixir-ls`, because `elixir` was not found."; + + let delegate = delegate.clone(); + Some(cx.spawn(|mut cx| async move { + let elixir_output = smol::process::Command::new("elixir") + .args(["--version"]) + .output() + .await; + if elixir_output.is_err() { + if DID_SHOW_NOTIFICATION + .compare_exchange(false, true, SeqCst, SeqCst) + .is_ok() + { + cx.update(|cx| { + delegate.show_notification(NOTIFICATION_MESSAGE, cx); + }) + } + return Err(anyhow!("cannot run elixir-ls")); + } + + Ok(()) + })) + } + async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { - let release = latest_github_release("elixir-lsp/elixir-ls", false, http).await?; + let release = + latest_github_release("elixir-lsp/elixir-ls", false, delegate.http_client()).await?; let asset_name = "elixir-ls.zip"; let asset = release .assets @@ -41,8 +80,8 @@ impl LspAdapter for ElixirLspAdapter { async fn fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let zip_path = container_dir.join(format!("elixir-ls_{}.zip", version.name)); @@ -50,7 +89,8 @@ impl LspAdapter for ElixirLspAdapter { let binary_path = version_dir.join("language_server.sh"); if fs::metadata(&binary_path).await.is_err() { - let mut response = http + let mut response = delegate + .http_client() .get(&version.url, Default::default(), true) .await .context("error downloading release")?; @@ -88,7 +128,11 @@ impl LspAdapter for ElixirLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) 
-> Option { (|| async move { let mut last = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/elixir/highlights.scm b/crates/zed/src/languages/elixir/highlights.scm index deea51c436386eb36b1ed41d61cb5d21a787ad20..0e779d195c5e6e03404c783d9675fc223232c84d 100644 --- a/crates/zed/src/languages/elixir/highlights.scm +++ b/crates/zed/src/languages/elixir/highlights.scm @@ -36,8 +36,6 @@ (char) @constant -(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded - (escape_sequence) @string.escape [ @@ -146,3 +144,10 @@ "<<" ">>" ] @punctuation.bracket + +(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded + +((sigil + (sigil_name) @_sigil_name + (quoted_content) @embedded) + (#eq? @_sigil_name "H")) diff --git a/crates/zed/src/languages/go.rs b/crates/zed/src/languages/go.rs index ed24abb45c40b6b2cc2c2336a746557c03505745..34364d0b7f3d498573cfbafce62eae26e674ba91 100644 --- a/crates/zed/src/languages/go.rs +++ b/crates/zed/src/languages/go.rs @@ -1,16 +1,23 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use futures::StreamExt; +use gpui::{AsyncAppContext, Task}; pub use language::*; use lazy_static::lazy_static; use regex::Regex; use smol::{fs, process}; -use std::ffi::{OsStr, OsString}; -use std::{any::Any, ops::Range, path::PathBuf, str, sync::Arc}; -use util::fs::remove_matching; -use util::github::latest_github_release; -use util::http::HttpClient; -use util::ResultExt; +use std::{ + any::Any, + ffi::{OsStr, OsString}, + ops::Range, + path::PathBuf, + str, + sync::{ + atomic::{AtomicBool, Ordering::SeqCst}, + Arc, + }, +}; +use util::{fs::remove_matching, github::latest_github_release, ResultExt}; fn server_binary_arguments() -> Vec { vec!["-mode=stdio".into()] @@ -31,9 +38,9 @@ impl super::LspAdapter for GoLspAdapter { async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { - let release = 
latest_github_release("golang/tools", false, http).await?; + let release = latest_github_release("golang/tools", false, delegate.http_client()).await?; let version: Option = release.name.strip_prefix("gopls/v").map(str::to_string); if version.is_none() { log::warn!( @@ -44,11 +51,39 @@ impl super::LspAdapter for GoLspAdapter { Ok(Box::new(version) as Box<_>) } + fn will_fetch_server( + &self, + delegate: &Arc, + cx: &mut AsyncAppContext, + ) -> Option>> { + static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false); + + const NOTIFICATION_MESSAGE: &str = + "Could not install the Go language server `gopls`, because `go` was not found."; + + let delegate = delegate.clone(); + Some(cx.spawn(|mut cx| async move { + let install_output = process::Command::new("go").args(["version"]).output().await; + if install_output.is_err() { + if DID_SHOW_NOTIFICATION + .compare_exchange(false, true, SeqCst, SeqCst) + .is_ok() + { + cx.update(|cx| { + delegate.show_notification(NOTIFICATION_MESSAGE, cx); + }) + } + return Err(anyhow!("cannot install gopls")); + } + Ok(()) + })) + } + async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::>().unwrap(); let this = *self; @@ -68,7 +103,10 @@ impl super::LspAdapter for GoLspAdapter { }); } } - } else if let Some(path) = this.cached_server_binary(container_dir.clone()).await { + } else if let Some(path) = this + .cached_server_binary(container_dir.clone(), delegate) + .await + { return Ok(path); } @@ -105,7 +143,11 @@ impl super::LspAdapter for GoLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last_binary_path = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/heex/highlights.scm 
b/crates/zed/src/languages/heex/highlights.scm index fa88acd4d99239f97d4f8a2b26c098e0322d49bc..8728110d5826959b893b352dffd8c8e01a4d29ac 100644 --- a/crates/zed/src/languages/heex/highlights.scm +++ b/crates/zed/src/languages/heex/highlights.scm @@ -1,17 +1,11 @@ ; HEEx delimiters [ - "%>" "--%>" "-->" "/>" "" +] @keyword + ; HEEx operators are highlighted as such "=" @operator diff --git a/crates/zed/src/languages/heex/injections.scm b/crates/zed/src/languages/heex/injections.scm index 0d4977b28a4749de6851120e60bceb450d526bf1..41b65db8eedb5ffa8ca9b559f03c775ef4c6cf34 100644 --- a/crates/zed/src/languages/heex/injections.scm +++ b/crates/zed/src/languages/heex/injections.scm @@ -1,11 +1,13 @@ -((directive (partial_expression_value) @content) - (#set! language "elixir") - (#set! include-children) - (#set! combined)) - -; Regular expression_values do not need to be combined -((directive (expression_value) @content) - (#set! language "elixir")) +( + (directive + [ + (partial_expression_value) + (expression_value) + (ending_expression_value) + ] @content) + (#set! language "elixir") + (#set! 
combined) +) ; expressions live within HTML tags, and do not need to be combined ; diff --git a/crates/zed/src/languages/html.rs b/crates/zed/src/languages/html.rs index 68f780c3af73b3febd1fdcbeaba1a2a95bb36b37..d038ef80718961a48a317db9eeb68da592b5fae7 100644 --- a/crates/zed/src/languages/html.rs +++ b/crates/zed/src/languages/html.rs @@ -1,14 +1,16 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use futures::StreamExt; -use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; +use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate}; use node_runtime::NodeRuntime; use serde_json::json; use smol::fs; -use std::ffi::OsString; -use std::path::Path; -use std::{any::Any, path::PathBuf, sync::Arc}; -use util::http::HttpClient; +use std::{ + any::Any, + ffi::OsString, + path::{Path, PathBuf}, + sync::Arc, +}; use util::ResultExt; fn server_binary_arguments(server_path: &Path) -> Vec { @@ -36,7 +38,7 @@ impl LspAdapter for HtmlLspAdapter { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { Ok(Box::new( self.node @@ -48,8 +50,8 @@ impl LspAdapter for HtmlLspAdapter { async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let server_path = container_dir.join(Self::SERVER_PATH); @@ -69,7 +71,11 @@ impl LspAdapter for HtmlLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last_version_dir = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/json.rs b/crates/zed/src/languages/json.rs index e1f3da9e0238f0a91179447e7c15ce2df5239ec1..ced733fa1b17131f3f5a246e908ee4dbb1cd8524 100644 --- a/crates/zed/src/languages/json.rs +++ 
b/crates/zed/src/languages/json.rs @@ -3,7 +3,9 @@ use async_trait::async_trait; use collections::HashMap; use futures::{future::BoxFuture, FutureExt, StreamExt}; use gpui::AppContext; -use language::{LanguageRegistry, LanguageServerBinary, LanguageServerName, LspAdapter}; +use language::{ + LanguageRegistry, LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate, +}; use node_runtime::NodeRuntime; use serde_json::json; use settings::{KeymapFile, SettingsJsonSchemaParams, SettingsStore}; @@ -16,7 +18,6 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::http::HttpClient; use util::{paths, ResultExt}; const SERVER_PATH: &'static str = @@ -45,7 +46,7 @@ impl LspAdapter for JsonLspAdapter { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { Ok(Box::new( self.node @@ -57,8 +58,8 @@ impl LspAdapter for JsonLspAdapter { async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let server_path = container_dir.join(SERVER_PATH); @@ -78,7 +79,11 @@ impl LspAdapter for JsonLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last_version_dir = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/language_plugin.rs b/crates/zed/src/languages/language_plugin.rs index 9b82713d082d5d12a0243ebaf674b9a16550c3d4..52091024546a22add29529652a910b5ef0f822b9 100644 --- a/crates/zed/src/languages/language_plugin.rs +++ b/crates/zed/src/languages/language_plugin.rs @@ -3,10 +3,9 @@ use async_trait::async_trait; use collections::HashMap; use futures::lock::Mutex; use gpui::executor::Background; -use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; +use 
language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate}; use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn}; use std::{any::Any, path::PathBuf, sync::Arc}; -use util::http::HttpClient; use util::ResultExt; #[allow(dead_code)] @@ -72,7 +71,7 @@ impl LspAdapter for PluginLspAdapter { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { let runtime = self.runtime.clone(); let function = self.fetch_latest_server_version; @@ -92,8 +91,8 @@ impl LspAdapter for PluginLspAdapter { async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { let version = *version.downcast::().unwrap(); let runtime = self.runtime.clone(); @@ -110,7 +109,11 @@ impl LspAdapter for PluginLspAdapter { .await } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { let runtime = self.runtime.clone(); let function = self.cached_server_binary; diff --git a/crates/zed/src/languages/lua.rs b/crates/zed/src/languages/lua.rs index f204eb2555f5327d0e70df60963db771acd772ab..7f63a1fae28296936b85c97f1ad55bf7437a2ed4 100644 --- a/crates/zed/src/languages/lua.rs +++ b/crates/zed/src/languages/lua.rs @@ -3,12 +3,14 @@ use async_compression::futures::bufread::GzipDecoder; use async_tar::Archive; use async_trait::async_trait; use futures::{io::BufReader, StreamExt}; -use language::{LanguageServerBinary, LanguageServerName}; +use language::{LanguageServerBinary, LanguageServerName, LspAdapterDelegate}; use smol::fs; -use std::{any::Any, env::consts, ffi::OsString, path::PathBuf, sync::Arc}; -use util::{async_iife, github::latest_github_release, http::HttpClient, ResultExt}; - -use util::github::GitHubLspBinaryVersion; +use std::{any::Any, env::consts, ffi::OsString, path::PathBuf}; +use util::{ + async_iife, + 
github::{latest_github_release, GitHubLspBinaryVersion}, + ResultExt, +}; #[derive(Copy, Clone)] pub struct LuaLspAdapter; @@ -28,9 +30,11 @@ impl super::LspAdapter for LuaLspAdapter { async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { - let release = latest_github_release("LuaLS/lua-language-server", false, http).await?; + let release = + latest_github_release("LuaLS/lua-language-server", false, delegate.http_client()) + .await?; let version = release.name.clone(); let platform = match consts::ARCH { "x86_64" => "x64", @@ -53,15 +57,16 @@ impl super::LspAdapter for LuaLspAdapter { async fn fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let binary_path = container_dir.join("bin/lua-language-server"); if fs::metadata(&binary_path).await.is_err() { - let mut response = http + let mut response = delegate + .http_client() .get(&version.url, Default::default(), true) .await .map_err(|err| anyhow!("error downloading release: {}", err))?; @@ -81,7 +86,11 @@ impl super::LspAdapter for LuaLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { async_iife!({ let mut last_binary_path = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/python.rs b/crates/zed/src/languages/python.rs index 7aaddf5fe8a72d5731322e911b5a5c87473dfdbe..b56a0d61751e372ac38339c540de39b4c49da06b 100644 --- a/crates/zed/src/languages/python.rs +++ b/crates/zed/src/languages/python.rs @@ -1,7 +1,7 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; use futures::StreamExt; -use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; +use language::{LanguageServerBinary, LanguageServerName, LspAdapter, 
LspAdapterDelegate}; use node_runtime::NodeRuntime; use smol::fs; use std::{ @@ -10,7 +10,6 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::http::HttpClient; use util::ResultExt; fn server_binary_arguments(server_path: &Path) -> Vec { @@ -37,7 +36,7 @@ impl LspAdapter for PythonLspAdapter { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { Ok(Box::new(self.node.npm_package_latest_version("pyright").await?) as Box<_>) } @@ -45,8 +44,8 @@ impl LspAdapter for PythonLspAdapter { async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let server_path = container_dir.join(Self::SERVER_PATH); @@ -63,7 +62,11 @@ impl LspAdapter for PythonLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last_version_dir = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/ruby.rs b/crates/zed/src/languages/ruby.rs index d387f815f0cd345c4173f522370ab17495989592..18756e3b77725519ec8edd120fecb1889bf6c6cc 100644 --- a/crates/zed/src/languages/ruby.rs +++ b/crates/zed/src/languages/ruby.rs @@ -1,8 +1,7 @@ use anyhow::{anyhow, Result}; use async_trait::async_trait; -use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; +use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate}; use std::{any::Any, path::PathBuf, sync::Arc}; -use util::http::HttpClient; pub struct RubyLanguageServer; @@ -14,7 +13,7 @@ impl LspAdapter for RubyLanguageServer { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { Ok(Box::new(())) } @@ -22,13 +21,17 @@ impl LspAdapter for RubyLanguageServer { async fn 
fetch_server_binary( &self, _version: Box, - _: Arc, _container_dir: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { Err(anyhow!("solargraph must be installed manually")) } - async fn cached_server_binary(&self, _container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + _: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { Some(LanguageServerBinary { path: "solargraph".into(), arguments: vec!["stdio".into()], diff --git a/crates/zed/src/languages/rust.rs b/crates/zed/src/languages/rust.rs index 15700ec80a1bc798d44210ed47d600cc319a1241..e60846b70a472743c4f299a37f285bc49cb6fe02 100644 --- a/crates/zed/src/languages/rust.rs +++ b/crates/zed/src/languages/rust.rs @@ -7,10 +7,11 @@ use lazy_static::lazy_static; use regex::Regex; use smol::fs::{self, File}; use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc}; -use util::fs::remove_matching; -use util::github::{latest_github_release, GitHubLspBinaryVersion}; -use util::http::HttpClient; -use util::ResultExt; +use util::{ + fs::remove_matching, + github::{latest_github_release, GitHubLspBinaryVersion}, + ResultExt, +}; pub struct RustLspAdapter; @@ -22,9 +23,11 @@ impl LspAdapter for RustLspAdapter { async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { - let release = latest_github_release("rust-analyzer/rust-analyzer", false, http).await?; + let release = + latest_github_release("rust-analyzer/rust-analyzer", false, delegate.http_client()) + .await?; let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH); let asset = release .assets @@ -40,14 +43,15 @@ impl LspAdapter for RustLspAdapter { async fn fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name)); if 
fs::metadata(&destination_path).await.is_err() { - let mut response = http + let mut response = delegate + .http_client() .get(&version.url, Default::default(), true) .await .map_err(|err| anyhow!("error downloading release: {}", err))?; @@ -69,7 +73,11 @@ impl LspAdapter for RustLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/typescript.rs b/crates/zed/src/languages/typescript.rs index 7d2d580857780a714496a5f8c2c850de36986d26..662e73ea33cc02aa9289df0a3237f9b79bb39190 100644 --- a/crates/zed/src/languages/typescript.rs +++ b/crates/zed/src/languages/typescript.rs @@ -4,7 +4,7 @@ use async_tar::Archive; use async_trait::async_trait; use futures::{future::BoxFuture, FutureExt}; use gpui::AppContext; -use language::{LanguageServerBinary, LanguageServerName, LspAdapter}; +use language::{LanguageServerBinary, LanguageServerName, LspAdapter, LspAdapterDelegate}; use lsp::CodeActionKind; use node_runtime::NodeRuntime; use serde_json::{json, Value}; @@ -16,7 +16,7 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::{fs::remove_matching, github::latest_github_release, http::HttpClient}; +use util::{fs::remove_matching, github::latest_github_release}; use util::{github::GitHubLspBinaryVersion, ResultExt}; fn typescript_server_binary_arguments(server_path: &Path) -> Vec { @@ -58,7 +58,7 @@ impl LspAdapter for TypeScriptLspAdapter { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { Ok(Box::new(TypeScriptVersions { typescript_version: self.node.npm_package_latest_version("typescript").await?, @@ -72,8 +72,8 @@ impl LspAdapter for TypeScriptLspAdapter { async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: 
PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let server_path = container_dir.join(Self::NEW_SERVER_PATH); @@ -99,7 +99,11 @@ impl LspAdapter for TypeScriptLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let old_server_path = container_dir.join(Self::OLD_SERVER_PATH); let new_server_path = container_dir.join(Self::NEW_SERVER_PATH); @@ -204,12 +208,13 @@ impl LspAdapter for EsLintLspAdapter { async fn fetch_latest_server_version( &self, - http: Arc, + delegate: &dyn LspAdapterDelegate, ) -> Result> { // At the time of writing the latest vscode-eslint release was released in 2020 and requires // special custom LSP protocol extensions be handled to fully initialize. Download the latest // prerelease instead to sidestep this issue - let release = latest_github_release("microsoft/vscode-eslint", true, http).await?; + let release = + latest_github_release("microsoft/vscode-eslint", true, delegate.http_client()).await?; Ok(Box::new(GitHubLspBinaryVersion { name: release.name, url: release.tarball_url, @@ -219,8 +224,8 @@ impl LspAdapter for EsLintLspAdapter { async fn fetch_server_binary( &self, version: Box, - http: Arc, container_dir: PathBuf, + delegate: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name)); @@ -229,7 +234,8 @@ impl LspAdapter for EsLintLspAdapter { if fs::metadata(&server_path).await.is_err() { remove_matching(&container_dir, |entry| entry != destination_path).await; - let mut response = http + let mut response = delegate + .http_client() .get(&version.url, Default::default(), true) .await .map_err(|err| anyhow!("error downloading release: {}", err))?; @@ -257,7 +263,11 @@ impl LspAdapter for EsLintLspAdapter { }) 
} - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { // This is unfortunate but we don't know what the version is to build a path directly let mut dir = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/languages/yaml.rs b/crates/zed/src/languages/yaml.rs index 7f87a7caedb764588a698b5e863fbd418c3859ed..99c226bba7218bf0547c26485d7dc76612ac8f5c 100644 --- a/crates/zed/src/languages/yaml.rs +++ b/crates/zed/src/languages/yaml.rs @@ -4,6 +4,7 @@ use futures::{future::BoxFuture, FutureExt, StreamExt}; use gpui::AppContext; use language::{ language_settings::all_language_settings, LanguageServerBinary, LanguageServerName, LspAdapter, + LspAdapterDelegate, }; use node_runtime::NodeRuntime; use serde_json::Value; @@ -15,7 +16,6 @@ use std::{ path::{Path, PathBuf}, sync::Arc, }; -use util::http::HttpClient; use util::ResultExt; fn server_binary_arguments(server_path: &Path) -> Vec { @@ -42,7 +42,7 @@ impl LspAdapter for YamlLspAdapter { async fn fetch_latest_server_version( &self, - _: Arc, + _: &dyn LspAdapterDelegate, ) -> Result> { Ok(Box::new( self.node @@ -54,8 +54,8 @@ impl LspAdapter for YamlLspAdapter { async fn fetch_server_binary( &self, version: Box, - _: Arc, container_dir: PathBuf, + _: &dyn LspAdapterDelegate, ) -> Result { let version = version.downcast::().unwrap(); let server_path = container_dir.join(Self::SERVER_PATH); @@ -72,7 +72,11 @@ impl LspAdapter for YamlLspAdapter { }) } - async fn cached_server_binary(&self, container_dir: PathBuf) -> Option { + async fn cached_server_binary( + &self, + container_dir: PathBuf, + _: &dyn LspAdapterDelegate, + ) -> Option { (|| async move { let mut last_version_dir = None; let mut entries = fs::read_dir(&container_dir).await?; diff --git a/crates/zed/src/main.rs b/crates/zed/src/main.rs index 
73a3346a9adc75b61af3ab4e5f50b4484129bb1b..dcdf5c1ea5f542339cfac3ef7edeac61c5ab34c4 100644 --- a/crates/zed/src/main.rs +++ b/crates/zed/src/main.rs @@ -31,7 +31,6 @@ use std::{ ffi::OsStr, fs::OpenOptions, io::Write as _, - ops::Not, os::unix::prelude::OsStrExt, panic, path::{Path, PathBuf}, @@ -373,7 +372,6 @@ struct Panic { os_version: Option, architecture: String, panicked_on: u128, - identifying_backtrace: Option>, } #[derive(Serialize)] @@ -401,61 +399,18 @@ fn init_panic_hook(app: &App) { .unwrap_or_else(|| "Box".to_string()); let backtrace = Backtrace::new(); - let backtrace = backtrace + let mut backtrace = backtrace .frames() .iter() - .filter_map(|frame| { - let symbol = frame.symbols().first()?; - let path = symbol.filename()?; - Some((path, symbol.lineno(), format!("{:#}", symbol.name()?))) - }) + .filter_map(|frame| Some(format!("{:#}", frame.symbols().first()?.name()?))) .collect::>(); - let this_file_path = Path::new(file!()); - - // Find the first frame in the backtrace for this panic hook itself. Exclude - // that frame and all frames before it. - let mut start_frame_ix = 0; - let mut codebase_root_path = None; - for (ix, (path, _, _)) in backtrace.iter().enumerate() { - if path.ends_with(this_file_path) { - start_frame_ix = ix + 1; - codebase_root_path = path.ancestors().nth(this_file_path.components().count()); - break; - } - } - - // Exclude any subsequent frames inside of rust's panic handling system. - while let Some((path, _, _)) = backtrace.get(start_frame_ix) { - if path.starts_with("/rustc") { - start_frame_ix += 1; - } else { - break; - } - } - - // Build two backtraces: - // * one for display, which includes symbol names for all frames, and files - // and line numbers for symbols in this codebase - // * one for identification and de-duplication, which only includes symbol - // names for symbols in this codebase. 
- let mut display_backtrace = Vec::new(); - let mut identifying_backtrace = Vec::new(); - for (path, line, symbol) in &backtrace[start_frame_ix..] { - display_backtrace.push(symbol.clone()); - - if let Some(codebase_root_path) = &codebase_root_path { - if let Ok(suffix) = path.strip_prefix(&codebase_root_path) { - identifying_backtrace.push(symbol.clone()); - - let display_path = suffix.to_string_lossy(); - if let Some(line) = line { - display_backtrace.push(format!(" {display_path}:{line}")); - } else { - display_backtrace.push(format!(" {display_path}")); - } - } - } + // Strip out leading stack frames for rust panic-handling. + if let Some(ix) = backtrace + .iter() + .position(|name| name == "rust_begin_unwind") + { + backtrace.drain(0..=ix); } let panic_data = Panic { @@ -477,29 +432,27 @@ fn init_panic_hook(app: &App) { .duration_since(UNIX_EPOCH) .unwrap() .as_millis(), - backtrace: display_backtrace, - identifying_backtrace: identifying_backtrace - .is_empty() - .not() - .then_some(identifying_backtrace), + backtrace, }; - if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() { - if is_pty { + if is_pty { + if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() { eprintln!("{}", panic_data_json); return; } - - let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string(); - let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp)); - let panic_file = std::fs::OpenOptions::new() - .append(true) - .create(true) - .open(&panic_file_path) - .log_err(); - if let Some(mut panic_file) = panic_file { - write!(&mut panic_file, "{}", panic_data_json).log_err(); - panic_file.flush().log_err(); + } else { + if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() { + let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string(); + let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp)); + let panic_file = 
std::fs::OpenOptions::new() + .append(true) + .create(true) + .open(&panic_file_path) + .log_err(); + if let Some(mut panic_file) = panic_file { + writeln!(&mut panic_file, "{}", panic_data_json).log_err(); + panic_file.flush().log_err(); + } } } })); @@ -531,23 +484,45 @@ fn upload_previous_panics(http: Arc, cx: &mut AppContext) { } if telemetry_settings.diagnostics { - let panic_data_text = smol::fs::read_to_string(&child_path) + let panic_file_content = smol::fs::read_to_string(&child_path) .await .context("error reading panic file")?; - let body = serde_json::to_string(&PanicRequest { - panic: serde_json::from_str(&panic_data_text)?, - token: ZED_SECRET_CLIENT_TOKEN.into(), - }) - .unwrap(); - - let request = Request::post(&panic_report_url) - .redirect_policy(isahc::config::RedirectPolicy::Follow) - .header("Content-Type", "application/json") - .body(body.into())?; - let response = http.send(request).await.context("error sending panic")?; - if !response.status().is_success() { - log::error!("Error uploading panic to server: {}", response.status()); + let panic = serde_json::from_str(&panic_file_content) + .ok() + .or_else(|| { + panic_file_content + .lines() + .next() + .and_then(|line| serde_json::from_str(line).ok()) + }) + .unwrap_or_else(|| { + log::error!( + "failed to deserialize panic file {:?}", + panic_file_content + ); + None + }); + + if let Some(panic) = panic { + let body = serde_json::to_string(&PanicRequest { + panic, + token: ZED_SECRET_CLIENT_TOKEN.into(), + }) + .unwrap(); + + let request = Request::post(&panic_report_url) + .redirect_policy(isahc::config::RedirectPolicy::Follow) + .header("Content-Type", "application/json") + .body(body.into())?; + let response = + http.send(request).await.context("error sending panic")?; + if !response.status().is_success() { + log::error!( + "Error uploading panic to server: {}", + response.status() + ); + } } } diff --git a/crates/zed/src/zed.rs b/crates/zed/src/zed.rs index 
088d26be78ad3048fba5516ac3f67eb3f846d600..bcdfe57a469013251994b4112500daa4a7489027 100644 --- a/crates/zed/src/zed.rs +++ b/crates/zed/src/zed.rs @@ -384,6 +384,8 @@ pub fn initialize_workspace( workspace.toggle_dock(project_panel_position, cx); } + cx.focus_self(); + workspace.add_panel(terminal_panel, cx); if let Some(assistant_panel) = assistant_panel { workspace.add_panel(assistant_panel, cx); diff --git a/docs/backend-development.md b/docs/backend-development.md new file mode 100644 index 0000000000000000000000000000000000000000..59100d3628e6a1e1202c1a4759c2f6a3d594e5fd --- /dev/null +++ b/docs/backend-development.md @@ -0,0 +1,52 @@ +[⬅ Back to Index](./index.md) + +# Developing Zed's Backend + +Zed's backend consists of the following components: + +- The Zed.dev web site + - implemented in the [`zed.dev`](https://github.com/zed-industries/zed.dev) repository + - hosted on [Vercel](https://vercel.com/zed-industries/zed-dev). +- The Zed Collaboration server + - implemented in the [`crates/collab`](https://github.com/zed-industries/zed/tree/main/crates/collab) directory of the main `zed` repository + - hosted on [DigitalOcean](https://cloud.digitalocean.com/projects/6c680a82-9d3b-4f1a-91e5-63a6ca4a8611), using Kubernetes +- The Zed Postgres database + - defined via migrations in the [`crates/collab/migrations`](https://github.com/zed-industries/zed/tree/main/crates/collab/migrations) directory + - hosted on DigitalOcean + +--- + +## Local Development + +Here's some things you need to develop backend code locally. + +### Dependencies + +- **Postgres** - download [Postgres.app](https://postgresapp.com). + +### Setup + +1. Check out the `zed` and `zed.dev` repositories into a common parent directory +2. Set the `GITHUB_TOKEN` environment variable to one of your GitHub personal access tokens (PATs). + + - You can create a PAT [here](https://github.com/settings/tokens). 
+ - You may want to add something like this to your `~/.zshrc`: + + ``` + export GITHUB_TOKEN= + ``` + +3. In the `zed.dev` directory, run `npm install` to install dependencies. +4. In the `zed` directory, run `script/bootstrap` to set up the database +5. In the `zed` directory, run `foreman start` to start both servers + +--- + +## Production Debugging + +### Datadog + +Zed uses Datadog to collect metrics and logs from backend services. The Zed organization lives within Datadog's _US5_ [site](https://docs.datadoghq.com/getting_started/site/), so it can be accessed at [us5.datadoghq.com](https://us5.datadoghq.com). Useful things to look at in Datadog: + +- The [Logs](https://us5.datadoghq.com/logs) page shows logs from Zed.dev and the Collab server, and the internals of Zed's Kubernetes cluster. +- The [collab metrics dashboard](https://us5.datadoghq.com/dashboard/y2d-gxz-h4h/collab?from_ts=1660517946462&to_ts=1660604346462&live=true) shows metrics about the running collab server diff --git a/docs/building-zed.md b/docs/building-zed.md new file mode 100644 index 0000000000000000000000000000000000000000..78653571adfccdb1fca8cefe9f6408ddf995ea56 --- /dev/null +++ b/docs/building-zed.md @@ -0,0 +1,79 @@ +[⬅ Back to Index](./index.md) + +# Building Zed + +How to build Zed from source for the first time. + +## Process + +Expect this to take 30min to an hour! Some of these steps will take quite a while based on your connection speed, and how long your first build will be. + +1. Install the [GitHub CLI](https://cli.github.com/): + - `brew install gh` +1. Clone the `zed` repo + - `gh repo clone zed-industries/zed` +1. Install Xcode from the macOS App Store +1. Install [Postgres](https://postgresapp.com) +1. Install rust/rustup + - `curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh` +1. Install the wasm toolchain + - `rustup target add wasm32-wasi` +1. 
Generate a GitHub API Key + - Go to https://github.com/settings/tokens and Generate new token + - GitHub currently provides two kinds of tokens: + - Classic Tokens, where only `repo` (Full control of private repositories) OAuth scope has to be selected + Unfortunately, unselecting `repo` scope and selecting each of its inner scopes instead does not allow the token users to read from private repositories + - (not applicable) Fine-grained Tokens, at the moment of writing, did not allow any kind of access of non-owned private repos + - Keep the token in the browser tab/editor for the next two steps +1. Open Postgres.app +1. From `./path/to/zed/`: + - Run: + - `GITHUB_TOKEN={yourGithubAPIToken} script/bootstrap` + - Replace `{yourGithubAPIToken}` with the API token you generated above. + - Consider removing the token (if it's fine for you to recreate such tokens during occasional migrations) or store this token somewhere safe (like your Zed 1Password vault). + - If you get: + - ```bash + Error: Cannot install in Homebrew on ARM processor in Intel default prefix (/usr/local)! + Please create a new installation in /opt/homebrew using one of the + "Alternative Installs" from: + https://docs.brew.sh/Installation + ``` + - In that case try: + - `/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)"` + - If Homebrew is not in your PATH: + - Replace `{username}` with your home folder name (usually your login name) + - `echo 'eval "$(/opt/homebrew/bin/brew shellenv)"' >> /Users/{username}/.zprofile` + - `eval "$(/opt/homebrew/bin/brew shellenv)"` +1. 
To run the Zed app: + - If you are working on zed: + - `cargo run` + - If you are just using the latest version, but not working on zed: + - `cargo run --release` + - If you need to run the collaboration server locally: + - `script/zed-with-local-servers` + +## Troubleshooting + +### `error: failed to run custom build command for gpui v0.1.0 (/Users/path/to/zed)` + +- Try `xcode-select --switch /Applications/Xcode.app/Contents/Developer` + +### `xcrun: error: unable to find utility "metal", not a developer tool or in PATH` + +### Seeding errors during `script/bootstrap` runs + +``` +seeding database... +thread 'main' panicked at 'failed to deserialize github user from 'https://api.github.com/orgs/zed-industries/teams/staff/members': reqwest::Error { kind: Decode, source: Error("invalid type: map, expected a sequence", line: 1, column: 0) }', crates/collab/src/bin/seed.rs:111:10 +``` + +Wrong permissions for `GITHUB_TOKEN` token used, the token needs to be able to read from private repos. +For Classic GitHub Tokens, that requires the OAuth scope `repo` (search the scope name above for more details) + +Same command + +`sudo xcode-select --switch /Applications/Xcode.app/Contents/Developer` + +### If you experience errors that mention some dependency is using unstable features + +Try `cargo clean` and `cargo build` diff --git a/docs/company-and-vision.md b/docs/company-and-vision.md new file mode 100644 index 0000000000000000000000000000000000000000..389d0774a78ab162e46d938c952ff071c3874334 --- /dev/null +++ b/docs/company-and-vision.md @@ -0,0 +1,34 @@ +[⬅ Back to Index](./index.md) + +# Company & Vision + +## Vision + +Our goal is to make Zed the primary tool software teams use to collaborate. + +To do this, Zed will... + +* Make collaboration a first-class feature of the code authoring environment. +* Enable text-based conversations about any piece of text, independent of whether/when it was committed to version control. 
+* Make it smooth to edit and discuss code with teammates in real time. +* Make it easy to recall past conversations about any area of the code. + +We believe the best way to make collaboration amazing is to build it into a new editor rather than retrofitting an existing editor. This means that in order for a team to adopt Zed for collaboration, each team member will need to adopt it as their editor as well. + +For this reason, we need to deliver a clearly superior experience as a single-user code editor in addition to being an excellent collaboration tool. This will take time, but we believe the dominance of VS Code demonstrates that it's possible for a single tool to capture substantial market share. We can proceed incrementally, capturing one team at a time and gradually transitioning conversations away from GitHub. + +## Zed Values + +Everyone wants to work quickly and have a lot of users. What are we unwilling to sacrifice in pursuit of those goals? + +- **Performance.** Speed is core to our brand and value proposition. It's important that we consistently deliver a response in less than 8ms on modern hardware for fine-grained actions. Coarse-grained actions should render feedback within 50ms. We consider the performance goals of the product at all times, and take the time to ensure our code meets them with reasonable usage. Once we have met our goals, we assess the impact vs effort of further performance investment and know when to say when. We measure our performance in the field and make an effort to maintain or improve real-world performance and promptly address regressions. + +- **Craftsmanship.** Zed is a premium product, and we put care into design and user experience. We can always cut scope, but what we do ship should be quality. Incomplete is okay, so long as we're executing on a coherent subset well. Half-baked, unintuitive, or broken is not okay. + +- **Shipping.** Knowledge matters only in as much as it drives results. 
We're here to build a real product in the real world. We care a lot about the experience of developing Zed, but we care about the user's experience more. + +- **Code quality.** This enables craftsmanship. Nobody is creative in a trash heap, and we're willing to dedicate time to keep our codebase clean. If we're spending no time refactoring, we are likely underinvesting. When we realize a design flaw, we assess its centrality to the rest of the system and consider budgeting time to address it. If we're spending all of our time refactoring, we are likely either overinvesting or paying off debt from past underinvestment. It's up to each engineer to allocate a reasonable refactoring budget. We shouldn't be navel gazing, but we also shouldn't be afraid to invest. + +- **Pairing.** Zed depends on regular pair programming to promote cohesion on our remote team. We believe pairing is a powerful substitute for bureaucratic management, excessive documentation, and tedious code review. Nobody has to pair all day, every day, but everyone is responsible for pairing at least 2 hours a week with a variety of other engineers. If anyone wants to pair all day every day, that is explicitly endorsed and credited. If pairing temporarily reduces our throughput due to working on one thing instead of two, we trust that it will pay for itself in the long term by increasing our velocity and allowing us to more effectively grow our team. + +- **Long-term thinking.** The Zed vision began several years ago, and we expect Zed to be around many years from today. We must always be mindful to avoid overengineering for the future, but we should also keep the long-term in mind. Are we building a system our future selves would want to work on in 5 years? 
diff --git a/docs/design-tools.md b/docs/design-tools.md new file mode 100644 index 0000000000000000000000000000000000000000..70e545981145726086c0926c82c819db88c15e1f --- /dev/null +++ b/docs/design-tools.md @@ -0,0 +1,74 @@ +[⬅ Back to Index](./index.md) + +# Design Tools & Links + +Generally useful tools and resources for design. + +## General + +[Names of Signs & Symbols](https://www.prepressure.com/fonts/basics/character-names#curlybrackets) + +[The Noun Project](https://thenounproject.com/) - Icons for everything, attempts to describe all of human language visually. + +[SVG Repo](https://www.svgrepo.com/) - Open-licensed SVG Vector and Icons + +[Font Awesome](https://fontawesome.com/) - High quality icons, has been around for many years. + +--- + +## Color + +[Opacity/Transparency Hex Values](https://gist.github.com/lopspower/03fb1cc0ac9f32ef38f4) + +[Color Ramp Generator](https://lyft-colorbox.herokuapp.com) + +[Designing a Comprehensive Color System +](https://www.rethinkhq.com/videos/designing-a-comprehensive-color-system-for-lyft) - [Linda Dong](https://twitter.com/lindadong) + +--- + +## Figma & Plugins + +[Figma Plugins for Designers](https://www.uiprep.com/blog/21-best-figma-plugins-for-designers-in-2021) + +[Icon Resizer](https://www.figma.com/community/plugin/739117729229117975/Icon-Resizer) + +[Code Syntax Highlighter](https://www.figma.com/community/plugin/938793197191698232/Code-Syntax-Highlighter) + +[Proportional Scale](https://www.figma.com/community/plugin/756895186298946525/Proportional-Scale) + +[LilGrid](https://www.figma.com/community/plugin/795397421598343178/LilGrid) + +Organize your selection into a grid. + +[Automator](https://www.figma.com/community/plugin/1005114571859948695/Automator) + +Build photoshop-style batch actions to automate things. + +[Figma Tokens](https://www.figma.com/community/plugin/843461159747178978/Figma-Tokens) + +Use tokens in Figma and generate JSON from them. 
+ +--- + +## Design Systems + +### Naming + +[Naming Design Tokens](https://uxdesign.cc/naming-design-tokens-9454818ed7cb) + +### Storybook + +[Collaboration with design tokens and storybook](https://zure.com/blog/collaboration-with-design-tokens-and-storybook/) + +### Example DS Documentation + +[Tailwind CSS Documentation](https://tailwindcss.com/docs/container) + +[Material Design Docs](https://material.io/design/color/the-color-system.html#color-usage-and-palettes) + +[Carbon Design System Docs](https://www.carbondesignsystem.com) + +[Adobe Spectrum](https://spectrum.adobe.com/) + - Great documentation, like [Color System](https://spectrum.adobe.com/page/color-system/) and [Design Tokens](https://spectrum.adobe.com/page/design-tokens/). + - A good place to start if thinking about building a design system. diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 0000000000000000000000000000000000000000..0fc5bfbc893209dba2c72bcbd1548598eeb957b5 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,14 @@ +[⬅ Back to Index](./index.md) + +# Welcome to Zed + +Welcome! These internal docs are a work in progress. You can contribute to them by submitting a PR directly! + +## Contents + +- [The Company](./company-and-vision.md) +- [Tools We Use](./tools.md) +- [Building Zed](./building-zed.md) +- [Release Process](./release-process.md) +- [Backend Development](./backend-development.md) +- [Design Tools & Links](./design-tools.md) diff --git a/docs/release-process.md b/docs/release-process.md new file mode 100644 index 0000000000000000000000000000000000000000..ce43d647bd723d1343d00d3f1c571c29e2df6092 --- /dev/null +++ b/docs/release-process.md @@ -0,0 +1,96 @@ +[⬅ Back to Index](./index.md) + +# Zed's Release Process + +The process to create and ship a Zed release + +## Overview + +### Release Channels + +Users of Zed can choose between two _release channels_ - 'Stable' and 'Preview'. 
Most people use Stable, but Preview exists so that the Zed team and other early-adopters can test new features before they are released to our general user-base. + +### Weekly (Minor) Releases + +We normally publish new releases of Zed on Wednesdays, for both the Stable and Preview channels. For each of these releases, we bump Zed's _minor_ version number. + +For the Preview channel, we build the new release based on what's on the `main` branch. For the Stable channel, we build the new release based on the last Preview release. + +### Hotfix (Patch) Releases + +When we find a _regression_ in Zed (a bug that wasn't present in an earlier version), or find a significant bug in a newly-released feature, we typically publish a hotfix release. For these releases, we bump Zed's _patch_ version number. + +### Server Deployments + +Often, changes in the Zed app require corresponding changes in the `collab` server. At the current stage of our company, we don't attempt to keep our server backwards-compatible with older versions of the app. Instead, when making a change, we simply bump Zed's _protocol version_ number (in the `rpc` crate), which causes the server to recognize that it isn't compatible with earlier versions of the Zed app. + +This means that when releasing a new version of Zed that has changes to the RPC protocol, we need to deploy a new version of the `collab` server at the same time. + +## Instructions + +### Publishing a Minor Release + +1. Announce your intent to publish a new version in Discord. This gives other people a chance to raise concerns or postpone the release if they want to get something merged before publishing a new version. +1. Open your terminal and `cd` into your local copy of Zed. Checkout `main` and perform a `git pull` to ensure you have the latest version. +1. Run the following command, which will update two git branches and two git tags (one for each release channel): + + ``` + script/bump-zed-minor-versions + ``` + +1. 
The script will make local changes only, and print out a shell command that you can use to push all of these branches and tags. +1. Pushing the two new tags will trigger two CI builds that, when finished, will create two draft releases (Stable and Preview) containing `Zed.dmg` files. +1. Now you need to write the release notes for the Stable and Preview releases. For the Stable release, you can just copy the release notes from the last week's Preview release, plus any hotfixes that were published on the Preview channel since then. Some of the hotfixes may not be relevant for the Stable release notes, if they were fixing bugs that were only present in Preview. +1. For the Preview release, you can retrieve the list of changes by running this command (make sure you have at least `Node 18` installed): + + ``` + GITHUB_ACCESS_TOKEN=your_access_token script/get-preview-channel-changes + ``` + +1. The script will list all the merged pull requests and you can use it as a reference to write the release notes. If there were protocol changes, it will also emit a warning. +1. Once CI creates the draft releases, add each release's notes and save the drafts. +1. If there have been server-side changes since the last release, you'll need to re-deploy the `collab` server. See below. +1. Before publishing, download the Zed.dmg and smoke test it to ensure everything looks good. + +### Publishing a Patch Release + +1. Announce your intent to publish a new patch version in Discord. +1. Open your terminal and `cd` into your local copy of Zed. Check out the branch corresponding to the release channel where the fix is needed. For example, if the fix is for a bug in Stable, and the current stable version is `0.63.0`, then checkout the branch `v0.63.x`. Run `git pull` to ensure your branch is up-to-date. +1. Find the merge commit where your bug-fix landed on `main`. You can browse the merged pull requests on main by running `git log main --grep Merge`. +1. 
Cherry-pick those commits onto the current release branch: + + ``` + git cherry-pick -m1 + ``` + +1. Run the following command, which will bump the version of Zed and create a new tag: + + ``` + script/bump-zed-patch-version + ``` + +1. The script will make local changes only, and print out a shell command that you can use to push the branch and tag. +1. Pushing the new tag will trigger a CI build that, when finished, will create a draft release containing a `Zed.dmg` file. +1. Once the draft release is created, fill in the release notes based on the bugfixes that you cherry-picked. +1. If any of the bug-fixes require server-side changes, you'll need to re-deploy the `collab` server. See below. +1. Before publishing, download the Zed.dmg and smoke test it to ensure everything looks good. +1. Clicking publish on the release will cause old Zed instances to auto-update and the Zed.dev releases page to re-build and display the new release. + +### Deploying the Server + +1. Deploying the server is a two-step process that begins with pushing a tag. 1. Check out the commit you'd like to deploy. Often it will be the head of `main`, but could be on any branch. +1. Run the following script, which will bump the version of the `collab` crate and create a new tag. The script takes an argument `minor` or `patch`, to indicate how to increment the version. If you're releasing new features, use `minor`. If it's just a bugfix, use `patch` + + ``` + script/bump-collab-version patch + ``` + +1. This script will make local changes only, and print out a shell command that you can use to push the branch and tag. +1. Pushing the new tag will trigger a CI build that, when finished, will upload a new versioned docker image to the DigitalOcean docker registry. +1. Once that CI job completes, you will be able to run the following command to deploy that docker image. The script takes two arguments: an environment (`production`, `preview`, or `staging`), and a version number (e.g. `0.10.1`). 
+ + ``` + script/deploy preview 0.10.1 + ``` + +1. This command should complete quickly, updating the given environment to use the given version number of the `collab` server. diff --git a/docs/tools.md b/docs/tools.md new file mode 100644 index 0000000000000000000000000000000000000000..6e424a6f8145bc26235cc4e6eeca2db1d52df234 --- /dev/null +++ b/docs/tools.md @@ -0,0 +1,82 @@ +[⬅ Back to Index](./index.md) + +# Tools + +Tools to get started at Zed. Work in progress, submit a PR to add any missing tools here! + +--- + +## Everyday Tools + +### Calendar + +To gain access to company calendar, visit [this link](https://calendar.google.com/calendar/u/0/r?cid=Y18xOGdzcGE1aG5wdHJocGRoNWtlb2tlbWxzc0Bncm91cC5jYWxlbmRhci5nb29nbGUuY29t). + +If you would like the company calendar to be synced with a calendar application (Apple Calendar, etc.): + +- Add your company account (i.e `joseph@zed.dev`) to your calendar application +- Visit [this link](https://calendar.google.com/calendar/u/0/syncselect), check `Zed Industries (Read Only)` under `Shared Calendars`, and save it. + +### 1Password + +We have a shared company 1Password with all of our credentials. To gain access: + +1. Go to [zed-industries.1password.com](https://zed-industries.1password.com). +1. Sign in with your `@zed.dev` email address. +1. Make your account and let an admin know you've signed up. +1. Once they approve your sign up, you'll have access to all of the company credentials. + +### Slack + +Have a team member add you to the [Zed Industries](https://zed-industries.slack.com/) slack. + +### Discord + +We have a discord community. You can use [this link](https://discord.gg/SSD9eJrn6s) to join. **!Don't share this link, this is specifically for team members!** + +Once you have joined the community, let a team member know and we can add your correct role. + +--- + +## Engineering + +### Github + +For now, all the Zed source code lives on [Github](https://github.com/zed-industries). 
A founder will need to add you to the team and set up the appropriate permissions. + +Useful repos: +- [zed-industries/zed](https://github.com/zed-industries/zed) - Zed source +- [zed-industries/zed.dev](https://github.com/zed-industries/zed.dev) - Zed.dev site and collab API +- [zed-industries/docs](https://github.com/zed-industries/docs) - Zed public docs +- [zed-industries/community](https://github.com/zed-industries/community) - Zed community feedback & discussion + +### Vercel + +We use Vercel for all of our web deployments and some backend things. If you sign up with your `@zed.dev` email you should be invited to join the team automatically. If not, ask a founder to invite you to the Vercel team. + +### Environment Variables + +You can get access to many of our shared environment variables through 1Password and Vercel. For 1Password, search for the value you are looking for, or sort by passwords or API credentials. + +For Vercel, go to `settings` -> `Environment Variables` (either on the entire org, or on a specific project depending on where it is shared.) For a given Vercel project if you have their CLI installed you can use `vercel pull` or `vercel env` to pull values down directly. More on those in their [CLI docs](https://vercel.com/docs/cli/env). + +--- + +## Design + +### Figma + +We use Figma for all of our design work. To gain access: + +1. Use [this link](https://www.figma.com/team_invite/redeem/Xg4RcNXHhwP5netIvVBmKQ) to join the Figma team. + +1. You should now have access to all of the company files. +1. You should go to the team page and "favorite" (star) any relevant projects so they show up in your sidebar. +1. Download the [Figma app](https://www.figma.com/downloads/) for easier access on desktop. + +### Campsite + +We use Campsite to review and discuss designs. To gain access: + +1. Download the [Campsite app](https://campsite.design/desktop/download). +1. Open it and sign in with your `@zed.dev` email address. +1. 
You can access our company campsite directly: [app.campsite.design/zed](https://app.campsite.design/zed)