.gitignore | 1
Cargo.lock | 44
assets/keymaps/atom.json | 35
assets/keymaps/default.json | 4
crates/ai/Cargo.toml | 1
crates/ai/src/ai.rs | 2
crates/ai/src/assistant.rs | 809
crates/editor/src/display_map/block_map.rs | 4
crates/editor/src/display_map/fold_map.rs | 12
crates/editor/src/display_map/wrap_map.rs | 8
crates/editor/src/multi_buffer.rs | 10
crates/gpui/src/app.rs | 8
crates/gpui/src/app/action.rs | 8
crates/gpui/src/app/window.rs | 2
crates/gpui/src/elements/list.rs | 10
crates/gpui/src/platform/mac/platform.rs | 8
crates/language/src/language.rs | 123
crates/language/src/syntax_map.rs | 8
crates/live_kit_client/LiveKitBridge/Sources/LiveKitBridge/LiveKitBridge.swift | 70
crates/live_kit_client/examples/test_app.rs | 74
crates/live_kit_client/src/live_kit_client.rs | 2
crates/live_kit_client/src/prod.rs | 190
crates/live_kit_client/src/test.rs | 126
crates/lsp/src/lsp.rs | 27
crates/project/src/project.rs | 38
crates/project/src/worktree.rs | 4
crates/rope/src/rope.rs | 2
crates/settings/Cargo.toml | 4
crates/settings/src/keymap_file.rs | 51
crates/settings/src/settings_store.rs | 5
crates/sum_tree/src/cursor.rs | 4
crates/sum_tree/src/sum_tree.rs | 22
crates/sum_tree/src/tree_map.rs | 6
crates/text/src/text.rs | 30
crates/workspace/src/persistence.rs | 27
crates/workspace/src/persistence/model.rs | 6
crates/workspace/src/workspace.rs | 58
crates/zed/src/languages/c.rs | 82
crates/zed/src/languages/elixir.rs | 68
crates/zed/src/languages/elixir/highlights.scm | 9
crates/zed/src/languages/go.rs | 64
crates/zed/src/languages/heex/highlights.scm | 15
crates/zed/src/languages/heex/injections.scm | 18
crates/zed/src/languages/html.rs | 22
crates/zed/src/languages/json.rs | 13
crates/zed/src/languages/language_plugin.rs | 13
crates/zed/src/languages/lua.rs | 29
crates/zed/src/languages/python.rs | 13
crates/zed/src/languages/ruby.rs | 13
crates/zed/src/languages/rust.rs | 26
crates/zed/src/languages/typescript.rs | 30
crates/zed/src/languages/yaml.rs | 15
crates/zed/src/main.rs | 145
crates/zed/src/zed.rs | 2
54 files changed, 1,826 insertions(+), 594 deletions(-)
@@ -18,4 +18,5 @@ DerivedData/
.swiftpm/config/registries.json
.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata
.netrc
+.swiftpm
**/*.db
@@ -114,6 +114,7 @@ dependencies = [
"serde",
"serde_json",
"settings",
+ "smol",
"theme",
"tiktoken-rs",
"util",
@@ -593,7 +594,7 @@ dependencies = [
"http",
"http-body",
"hyper",
- "itoa",
+ "itoa 1.0.6",
"matchit",
"memchr",
"mime",
@@ -3011,7 +3012,7 @@ checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482"
dependencies = [
"bytes 1.4.0",
"fnv",
- "itoa",
+ "itoa 1.0.6",
]
[[package]]
@@ -3070,7 +3071,7 @@ dependencies = [
"http-body",
"httparse",
"httpdate",
- "itoa",
+ "itoa 1.0.6",
"pin-project-lite 0.2.9",
"socket2",
"tokio",
@@ -3336,6 +3337,12 @@ dependencies = [
"either",
]
+[[package]]
+name = "itoa"
+version = "0.4.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4"
+
[[package]]
name = "itoa"
version = "1.0.6"
@@ -3396,12 +3403,6 @@ dependencies = [
"wasm-bindgen",
]
-[[package]]
-name = "json_comments"
-version = "0.2.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "41ee439ee368ba4a77ac70d04f14015415af8600d6c894dc1f11bd79758c57d5"
-
[[package]]
name = "jwt"
version = "0.16.0"
@@ -5667,7 +5668,7 @@ dependencies = [
"bitflags",
"errno 0.2.8",
"io-lifetimes 0.5.3",
- "itoa",
+ "itoa 1.0.6",
"libc",
"linux-raw-sys 0.0.42",
"once_cell",
@@ -6099,7 +6100,19 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "057d394a50403bcac12672b2b18fb387ab6d289d957dab67dd201875391e52f1"
dependencies = [
"indexmap",
- "itoa",
+ "itoa 1.0.6",
+ "ryu",
+ "serde",
+]
+
+[[package]]
+name = "serde_json_lenient"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7d7b9ce5b0a63c6269b9623ed828b39259545a6ec0d8a35d6135ad6af6232add"
+dependencies = [
+ "indexmap",
+ "itoa 0.4.8",
"ryu",
"serde",
]
@@ -6122,7 +6135,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd"
dependencies = [
"form_urlencoded",
- "itoa",
+ "itoa 1.0.6",
"ryu",
"serde",
]
@@ -6148,7 +6161,7 @@ dependencies = [
"fs",
"futures 0.3.28",
"gpui",
- "json_comments",
+ "indoc",
"lazy_static",
"postage",
"pretty_assertions",
@@ -6157,6 +6170,7 @@ dependencies = [
"serde",
"serde_derive",
"serde_json",
+ "serde_json_lenient",
"smallvec",
"sqlez",
"staff_mode",
@@ -6507,7 +6521,7 @@ dependencies = [
"hkdf",
"hmac 0.12.1",
"indexmap",
- "itoa",
+ "itoa 1.0.6",
"libc",
"libsqlite3-sys",
"log",
@@ -6993,7 +7007,7 @@ version = "0.3.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8f3403384eaacbca9923fa06940178ac13e4edb725486d70e8e15881d0c836cc"
dependencies = [
- "itoa",
+ "itoa 1.0.6",
"serde",
"time-core",
"time-macros",
@@ -55,7 +55,40 @@
"context": "Pane",
"bindings": {
"alt-cmd-/": "search::ToggleRegex",
- "ctrl-0": "project_panel::ToggleFocus"
+ "ctrl-0": "project_panel::ToggleFocus",
+ "cmd-1": [
+ "pane::ActivateItem",
+ 0
+ ],
+ "cmd-2": [
+ "pane::ActivateItem",
+ 1
+ ],
+ "cmd-3": [
+ "pane::ActivateItem",
+ 2
+ ],
+ "cmd-4": [
+ "pane::ActivateItem",
+ 3
+ ],
+ "cmd-5": [
+ "pane::ActivateItem",
+ 4
+ ],
+ "cmd-6": [
+ "pane::ActivateItem",
+ 5
+ ],
+ "cmd-7": [
+ "pane::ActivateItem",
+ 6
+ ],
+ "cmd-8": [
+ "pane::ActivateItem",
+ 7
+ ],
+ "cmd-9": "pane::ActivateLastItem"
}
},
{
@@ -200,7 +200,9 @@
"context": "AssistantEditor > Editor",
"bindings": {
"cmd-enter": "assistant::Assist",
- "cmd->": "assistant::QuoteSelection"
+ "cmd->": "assistant::QuoteSelection",
+ "shift-enter": "assistant::Split",
+ "ctrl-r": "assistant::CycleMessageRole"
}
},
{
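
The new `cmd-1` through `cmd-8` bindings use the two-element `[action, argument]` form, which is what the later `keymap_file.rs` and `gpui` changes cater to by passing the argument along as a `serde_json::Value`. A hedged sketch of how such entries can be modeled with an untagged serde enum; the names here are illustrative, not Zed's actual keymap types.

```rust
// Hedged sketch: a keymap entry is either a bare action name
// ("pane::ActivateLastItem") or an [action, argument] pair
// (["pane::ActivateItem", 0]).
use serde::Deserialize;
use serde_json::Value;
use std::collections::BTreeMap;

#[derive(Debug, Deserialize)]
#[serde(untagged)]
enum ActionSpec {
    Name(String),
    WithArgument(String, Value),
}

fn main() -> serde_json::Result<()> {
    let bindings: BTreeMap<String, ActionSpec> = serde_json::from_str(
        r#"{
            "cmd-1": ["pane::ActivateItem", 0],
            "cmd-9": "pane::ActivateLastItem"
        }"#,
    )?;
    for (keystroke, action) in &bindings {
        match action {
            ActionSpec::Name(name) => println!("{keystroke} -> {name}"),
            ActionSpec::WithArgument(name, arg) => println!("{keystroke} -> {name}({arg})"),
        }
    }
    Ok(())
}
```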
@@ -28,6 +28,7 @@ isahc.workspace = true
schemars.workspace = true
serde.workspace = true
serde_json.workspace = true
+smol.workspace = true
tiktoken-rs = "0.4"
[dev-dependencies]
@@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize};
use std::fmt::{self, Display};
// Data types for chat completion requests
-#[derive(Serialize)]
+#[derive(Debug, Serialize)]
struct OpenAIRequest {
model: String,
messages: Vec<RequestMessage>,
@@ -8,7 +8,7 @@ use collections::{HashMap, HashSet};
use editor::{
display_map::{BlockDisposition, BlockId, BlockProperties, BlockStyle, ToDisplayPoint},
scroll::autoscroll::{Autoscroll, AutoscrollStrategy},
- Anchor, Editor, ToOffset as _,
+ Anchor, Editor, ToOffset,
};
use fs::Fs;
use futures::{io::BufReader, AsyncBufReadExt, AsyncReadExt, Stream, StreamExt};
@@ -40,7 +40,15 @@ const OPENAI_API_URL: &'static str = "https://api.openai.com/v1";
actions!(
assistant,
- [NewContext, Assist, QuoteSelection, ToggleFocus, ResetKey]
+ [
+ NewContext,
+ Assist,
+ Split,
+ CycleMessageRole,
+ QuoteSelection,
+ ToggleFocus,
+ ResetKey
+ ]
);
pub fn init(cx: &mut AppContext) {
@@ -64,6 +72,8 @@ pub fn init(cx: &mut AppContext) {
cx.capture_action(AssistantEditor::cancel_last_assist);
cx.add_action(AssistantEditor::quote_selection);
cx.capture_action(AssistantEditor::copy);
+ cx.capture_action(AssistantEditor::split);
+ cx.capture_action(AssistantEditor::cycle_message_role);
cx.add_action(AssistantPanel::save_api_key);
cx.add_action(AssistantPanel::reset_api_key);
cx.add_action(
@@ -438,7 +448,7 @@ enum AssistantEvent {
struct Assistant {
buffer: ModelHandle<Buffer>,
- messages: Vec<Message>,
+ message_anchors: Vec<MessageAnchor>,
messages_metadata: HashMap<MessageId, MessageMetadata>,
next_message_id: MessageId,
summary: Option<String>,
@@ -463,7 +473,7 @@ impl Assistant {
language_registry: Arc<LanguageRegistry>,
cx: &mut ModelContext<Self>,
) -> Self {
- let model = "gpt-3.5-turbo";
+ let model = "gpt-3.5-turbo-0613";
let markdown = language_registry.language_for_name("Markdown");
let buffer = cx.add_model(|cx| {
let mut buffer = Buffer::new(0, "", cx);
@@ -483,7 +493,7 @@ impl Assistant {
});
let mut this = Self {
- messages: Default::default(),
+ message_anchors: Default::default(),
messages_metadata: Default::default(),
next_message_id: Default::default(),
summary: None,
@@ -498,17 +508,17 @@ impl Assistant {
api_key,
buffer,
};
- let message = Message {
+ let message = MessageAnchor {
id: MessageId(post_inc(&mut this.next_message_id.0)),
start: language::Anchor::MIN,
};
- this.messages.push(message.clone());
+ this.message_anchors.push(message.clone());
this.messages_metadata.insert(
message.id,
MessageMetadata {
role: Role::User,
sent_at: Local::now(),
- error: None,
+ status: MessageStatus::Done,
},
);
@@ -533,7 +543,7 @@ impl Assistant {
fn count_remaining_tokens(&mut self, cx: &mut ModelContext<Self>) {
let messages = self
- .open_ai_request_messages(cx)
+ .messages(cx)
.into_iter()
.filter_map(|message| {
Some(tiktoken_rs::ChatCompletionRequestMessage {
@@ -542,7 +552,7 @@ impl Assistant {
Role::Assistant => "assistant".into(),
Role::System => "system".into(),
},
- content: message.content,
+ content: self.buffer.read(cx).text_for_range(message.range).collect(),
name: None,
})
})
@@ -579,96 +589,169 @@ impl Assistant {
cx.notify();
}
- fn assist(&mut self, cx: &mut ModelContext<Self>) -> Option<(Message, Message)> {
- let request = OpenAIRequest {
- model: self.model.clone(),
- messages: self.open_ai_request_messages(cx),
- stream: true,
- };
+ fn assist(
+ &mut self,
+ selected_messages: HashSet<MessageId>,
+ cx: &mut ModelContext<Self>,
+ ) -> Vec<MessageAnchor> {
+ let mut user_messages = Vec::new();
+ let mut tasks = Vec::new();
+ for selected_message_id in selected_messages {
+ let selected_message_role =
+ if let Some(metadata) = self.messages_metadata.get(&selected_message_id) {
+ metadata.role
+ } else {
+ continue;
+ };
+
+ if selected_message_role == Role::Assistant {
+ if let Some(user_message) = self.insert_message_after(
+ selected_message_id,
+ Role::User,
+ MessageStatus::Done,
+ cx,
+ ) {
+ user_messages.push(user_message);
+ } else {
+ continue;
+ }
+ } else {
+ let request = OpenAIRequest {
+ model: self.model.clone(),
+ messages: self
+ .messages(cx)
+ .filter(|message| matches!(message.status, MessageStatus::Done))
+ .flat_map(|message| {
+ let mut system_message = None;
+ if message.id == selected_message_id {
+ system_message = Some(RequestMessage {
+ role: Role::System,
+ content: concat!(
+ "Treat the following messages as additional knowledge you have learned about, ",
+ "but act as if they were not part of this conversation. That is, treat them ",
+ "as if the user didn't see them and couldn't possibly inquire about them."
+ ).into()
+ });
+ }
+
+ Some(message.to_open_ai_message(self.buffer.read(cx))).into_iter().chain(system_message)
+ })
+ .chain(Some(RequestMessage {
+ role: Role::System,
+ content: format!(
+ "Direct your reply to message with id {}. Do not include a [Message X] header.",
+ selected_message_id.0
+ ),
+ }))
+ .collect(),
+ stream: true,
+ };
+
+ let Some(api_key) = self.api_key.borrow().clone() else { continue };
+ let stream = stream_completion(api_key, cx.background().clone(), request);
+ let assistant_message = self
+ .insert_message_after(
+ selected_message_id,
+ Role::Assistant,
+ MessageStatus::Pending,
+ cx,
+ )
+ .unwrap();
+
+ tasks.push(cx.spawn_weak({
+ |this, mut cx| async move {
+ let assistant_message_id = assistant_message.id;
+ let stream_completion = async {
+ let mut messages = stream.await?;
+
+ while let Some(message) = messages.next().await {
+ let mut message = message?;
+ if let Some(choice) = message.choices.pop() {
+ this.upgrade(&cx)
+ .ok_or_else(|| anyhow!("assistant was dropped"))?
+ .update(&mut cx, |this, cx| {
+ let text: Arc<str> = choice.delta.content?.into();
+ let message_ix = this.message_anchors.iter().position(
+ |message| message.id == assistant_message_id,
+ )?;
+ this.buffer.update(cx, |buffer, cx| {
+ let offset = this.message_anchors[message_ix + 1..]
+ .iter()
+ .find(|message| message.start.is_valid(buffer))
+ .map_or(buffer.len(), |message| {
+ message
+ .start
+ .to_offset(buffer)
+ .saturating_sub(1)
+ });
+ buffer.edit([(offset..offset, text)], None, cx);
+ });
+ cx.emit(AssistantEvent::StreamedCompletion);
+
+ Some(())
+ });
+ }
+ smol::future::yield_now().await;
+ }
- let api_key = self.api_key.borrow().clone()?;
- let stream = stream_completion(api_key, cx.background().clone(), request);
- let assistant_message =
- self.insert_message_after(self.messages.last()?.id, Role::Assistant, cx)?;
- let user_message = self.insert_message_after(assistant_message.id, Role::User, cx)?;
- let task = cx.spawn_weak({
- |this, mut cx| async move {
- let assistant_message_id = assistant_message.id;
- let stream_completion = async {
- let mut messages = stream.await?;
-
- while let Some(message) = messages.next().await {
- let mut message = message?;
- if let Some(choice) = message.choices.pop() {
this.upgrade(&cx)
.ok_or_else(|| anyhow!("assistant was dropped"))?
.update(&mut cx, |this, cx| {
- let text: Arc<str> = choice.delta.content?.into();
- let message_ix = this
- .messages
- .iter()
- .position(|message| message.id == assistant_message_id)?;
- this.buffer.update(cx, |buffer, cx| {
- let offset = if message_ix + 1 == this.messages.len() {
- buffer.len()
- } else {
- this.messages[message_ix + 1]
- .start
- .to_offset(buffer)
- .saturating_sub(1)
- };
- buffer.edit([(offset..offset, text)], None, cx);
+ this.pending_completions.retain(|completion| {
+ completion.id != this.completion_count
});
- cx.emit(AssistantEvent::StreamedCompletion);
-
- Some(())
+ this.summarize(cx);
});
- }
- }
- this.upgrade(&cx)
- .ok_or_else(|| anyhow!("assistant was dropped"))?
- .update(&mut cx, |this, cx| {
- this.pending_completions
- .retain(|completion| completion.id != this.completion_count);
- this.summarize(cx);
- });
-
- anyhow::Ok(())
- };
-
- let result = stream_completion.await;
- if let Some(this) = this.upgrade(&cx) {
- this.update(&mut cx, |this, cx| {
- if let Err(error) = result {
- if let Some(metadata) =
- this.messages_metadata.get_mut(&assistant_message.id)
- {
- metadata.error = Some(error.to_string().trim().into());
- cx.notify();
- }
+ anyhow::Ok(())
+ };
+
+ let result = stream_completion.await;
+ if let Some(this) = this.upgrade(&cx) {
+ this.update(&mut cx, |this, cx| {
+ if let Some(metadata) =
+ this.messages_metadata.get_mut(&assistant_message.id)
+ {
+ match result {
+ Ok(_) => {
+ metadata.status = MessageStatus::Done;
+ }
+ Err(error) => {
+ metadata.status = MessageStatus::Error(
+ error.to_string().trim().into(),
+ );
+ }
+ }
+ cx.notify();
+ }
+ });
}
- });
- }
+ }
+ }));
}
- });
+ }
- self.pending_completions.push(PendingCompletion {
- id: post_inc(&mut self.completion_count),
- _task: task,
- });
- Some((assistant_message, user_message))
+ if !tasks.is_empty() {
+ self.pending_completions.push(PendingCompletion {
+ id: post_inc(&mut self.completion_count),
+ _tasks: tasks,
+ });
+ }
+
+ user_messages
}
fn cancel_last_assist(&mut self) -> bool {
self.pending_completions.pop().is_some()
}
- fn cycle_message_role(&mut self, id: MessageId, cx: &mut ModelContext<Self>) {
- if let Some(metadata) = self.messages_metadata.get_mut(&id) {
- metadata.role.cycle();
- cx.emit(AssistantEvent::MessagesEdited);
- cx.notify();
+ fn cycle_message_roles(&mut self, ids: HashSet<MessageId>, cx: &mut ModelContext<Self>) {
+ for id in ids {
+ if let Some(metadata) = self.messages_metadata.get_mut(&id) {
+ metadata.role.cycle();
+ cx.emit(AssistantEvent::MessagesEdited);
+ cx.notify();
+ }
}
}
@@ -676,32 +759,34 @@ impl Assistant {
&mut self,
message_id: MessageId,
role: Role,
+ status: MessageStatus,
cx: &mut ModelContext<Self>,
- ) -> Option<Message> {
+ ) -> Option<MessageAnchor> {
if let Some(prev_message_ix) = self
- .messages
+ .message_anchors
.iter()
.position(|message| message.id == message_id)
{
let start = self.buffer.update(cx, |buffer, cx| {
- let offset = self.messages[prev_message_ix + 1..]
+ let offset = self.message_anchors[prev_message_ix + 1..]
.iter()
.find(|message| message.start.is_valid(buffer))
.map_or(buffer.len(), |message| message.start.to_offset(buffer) - 1);
buffer.edit([(offset..offset, "\n")], None, cx);
buffer.anchor_before(offset + 1)
});
- let message = Message {
+ let message = MessageAnchor {
id: MessageId(post_inc(&mut self.next_message_id.0)),
start,
};
- self.messages.insert(prev_message_ix + 1, message.clone());
+ self.message_anchors
+ .insert(prev_message_ix + 1, message.clone());
self.messages_metadata.insert(
message.id,
MessageMetadata {
role,
sent_at: Local::now(),
- error: None,
+ status,
},
);
cx.emit(AssistantEvent::MessagesEdited);
@@ -711,20 +796,129 @@ impl Assistant {
}
}
+ fn split_message(
+ &mut self,
+ range: Range<usize>,
+ cx: &mut ModelContext<Self>,
+ ) -> (Option<MessageAnchor>, Option<MessageAnchor>) {
+ let start_message = self.message_for_offset(range.start, cx);
+ let end_message = self.message_for_offset(range.end, cx);
+ if let Some((start_message, end_message)) = start_message.zip(end_message) {
+ // Prevent splitting when range spans multiple messages.
+ if start_message.index != end_message.index {
+ return (None, None);
+ }
+
+ let message = start_message;
+ let role = message.role;
+ let mut edited_buffer = false;
+
+ let mut suffix_start = None;
+ if range.start > message.range.start && range.end < message.range.end - 1 {
+ if self.buffer.read(cx).chars_at(range.end).next() == Some('\n') {
+ suffix_start = Some(range.end + 1);
+ } else if self.buffer.read(cx).reversed_chars_at(range.end).next() == Some('\n') {
+ suffix_start = Some(range.end);
+ }
+ }
+
+ let suffix = if let Some(suffix_start) = suffix_start {
+ MessageAnchor {
+ id: MessageId(post_inc(&mut self.next_message_id.0)),
+ start: self.buffer.read(cx).anchor_before(suffix_start),
+ }
+ } else {
+ self.buffer.update(cx, |buffer, cx| {
+ buffer.edit([(range.end..range.end, "\n")], None, cx);
+ });
+ edited_buffer = true;
+ MessageAnchor {
+ id: MessageId(post_inc(&mut self.next_message_id.0)),
+ start: self.buffer.read(cx).anchor_before(range.end + 1),
+ }
+ };
+
+ self.message_anchors
+ .insert(message.index + 1, suffix.clone());
+ self.messages_metadata.insert(
+ suffix.id,
+ MessageMetadata {
+ role,
+ sent_at: Local::now(),
+ status: MessageStatus::Done,
+ },
+ );
+
+ let new_messages = if range.start == range.end || range.start == message.range.start {
+ (None, Some(suffix))
+ } else {
+ let mut prefix_end = None;
+ if range.start > message.range.start && range.end < message.range.end - 1 {
+ if self.buffer.read(cx).chars_at(range.start).next() == Some('\n') {
+ prefix_end = Some(range.start + 1);
+ } else if self.buffer.read(cx).reversed_chars_at(range.start).next()
+ == Some('\n')
+ {
+ prefix_end = Some(range.start);
+ }
+ }
+
+ let selection = if let Some(prefix_end) = prefix_end {
+ cx.emit(AssistantEvent::MessagesEdited);
+ MessageAnchor {
+ id: MessageId(post_inc(&mut self.next_message_id.0)),
+ start: self.buffer.read(cx).anchor_before(prefix_end),
+ }
+ } else {
+ self.buffer.update(cx, |buffer, cx| {
+ buffer.edit([(range.start..range.start, "\n")], None, cx)
+ });
+ edited_buffer = true;
+ MessageAnchor {
+ id: MessageId(post_inc(&mut self.next_message_id.0)),
+ start: self.buffer.read(cx).anchor_before(range.end + 1),
+ }
+ };
+
+ self.message_anchors
+ .insert(message.index + 1, selection.clone());
+ self.messages_metadata.insert(
+ selection.id,
+ MessageMetadata {
+ role,
+ sent_at: Local::now(),
+ status: MessageStatus::Done,
+ },
+ );
+ (Some(selection), Some(suffix))
+ };
+
+ if !edited_buffer {
+ cx.emit(AssistantEvent::MessagesEdited);
+ }
+ new_messages
+ } else {
+ (None, None)
+ }
+ }
+
fn summarize(&mut self, cx: &mut ModelContext<Self>) {
- if self.messages.len() >= 2 && self.summary.is_none() {
+ if self.message_anchors.len() >= 2 && self.summary.is_none() {
let api_key = self.api_key.borrow().clone();
if let Some(api_key) = api_key {
- let mut messages = self.open_ai_request_messages(cx);
- messages.truncate(2);
- messages.push(RequestMessage {
- role: Role::User,
- content: "Summarize the conversation into a short title without punctuation"
- .into(),
- });
+ let messages = self
+ .messages(cx)
+ .take(2)
+ .map(|message| message.to_open_ai_message(self.buffer.read(cx)))
+ .chain(Some(RequestMessage {
+ role: Role::User,
+ content:
+ "Summarize the conversation into a short title without punctuation"
+ .into(),
+ }));
let request = OpenAIRequest {
model: self.model.clone(),
- messages,
+ messages: messages.collect(),
stream: true,
};
@@ -752,49 +946,69 @@ impl Assistant {
}
}
- fn open_ai_request_messages(&self, cx: &AppContext) -> Vec<RequestMessage> {
- let buffer = self.buffer.read(cx);
- self.messages(cx)
- .map(|(_message, metadata, range)| RequestMessage {
- role: metadata.role,
- content: buffer.text_for_range(range).collect(),
- })
- .collect()
+ fn message_for_offset(&self, offset: usize, cx: &AppContext) -> Option<Message> {
+ self.messages_for_offsets([offset], cx).pop()
}
- fn message_id_for_offset(&self, offset: usize, cx: &AppContext) -> Option<MessageId> {
- Some(
- self.messages(cx)
- .find(|(_, _, range)| range.contains(&offset))
- .map(|(message, _, _)| message)
- .or(self.messages.last())?
- .id,
- )
+ fn messages_for_offsets(
+ &self,
+ offsets: impl IntoIterator<Item = usize>,
+ cx: &AppContext,
+ ) -> Vec<Message> {
+ let mut result = Vec::new();
+
+ let buffer_len = self.buffer.read(cx).len();
+ let mut messages = self.messages(cx).peekable();
+ let mut offsets = offsets.into_iter().peekable();
+ while let Some(offset) = offsets.next() {
+ // Skip messages that start after the offset.
+ while messages.peek().map_or(false, |message| {
+ message.range.end < offset || (message.range.end == offset && offset < buffer_len)
+ }) {
+ messages.next();
+ }
+ let Some(message) = messages.peek() else { continue };
+
+ // Skip offsets that are in the same message.
+ while offsets.peek().map_or(false, |offset| {
+ message.range.contains(offset) || message.range.end == buffer_len
+ }) {
+ offsets.next();
+ }
+
+ result.push(message.clone());
+ }
+ result
}
- fn messages<'a>(
- &'a self,
- cx: &'a AppContext,
- ) -> impl 'a + Iterator<Item = (&Message, &MessageMetadata, Range<usize>)> {
+ fn messages<'a>(&'a self, cx: &'a AppContext) -> impl 'a + Iterator<Item = Message> {
let buffer = self.buffer.read(cx);
- let mut messages = self.messages.iter().peekable();
+ let mut message_anchors = self.message_anchors.iter().enumerate().peekable();
iter::from_fn(move || {
- while let Some(message) = messages.next() {
- let metadata = self.messages_metadata.get(&message.id)?;
- let message_start = message.start.to_offset(buffer);
+ while let Some((ix, message_anchor)) = message_anchors.next() {
+ let metadata = self.messages_metadata.get(&message_anchor.id)?;
+ let message_start = message_anchor.start.to_offset(buffer);
let mut message_end = None;
- while let Some(next_message) = messages.peek() {
+ while let Some((_, next_message)) = message_anchors.peek() {
if next_message.start.is_valid(buffer) {
message_end = Some(next_message.start);
break;
} else {
- messages.next();
+ message_anchors.next();
}
}
let message_end = message_end
.unwrap_or(language::Anchor::MAX)
.to_offset(buffer);
- return Some((message, metadata, message_start..message_end));
+ return Some(Message {
+ index: ix,
+ range: message_start..message_end,
+ id: message_anchor.id,
+ anchor: message_anchor.start,
+ role: metadata.role,
+ sent_at: metadata.sent_at,
+ status: metadata.status.clone(),
+ });
}
None
})
@@ -803,7 +1017,7 @@ impl Assistant {
struct PendingCompletion {
id: usize,
- _task: Task<()>,
+ _tasks: Vec<Task<()>>,
}
enum AssistantEditorEvent {
@@ -856,34 +1070,31 @@ impl AssistantEditor {
}
fn assist(&mut self, _: &Assist, cx: &mut ViewContext<Self>) {
- let user_message = self.assistant.update(cx, |assistant, cx| {
- let editor = self.editor.read(cx);
- let newest_selection = editor
- .selections
- .newest_anchor()
- .head()
- .to_offset(&editor.buffer().read(cx).snapshot(cx));
- let message_id = assistant.message_id_for_offset(newest_selection, cx)?;
- let metadata = assistant.messages_metadata.get(&message_id)?;
- let user_message = if metadata.role == Role::User {
- let (_, user_message) = assistant.assist(cx)?;
- user_message
- } else {
- let user_message = assistant.insert_message_after(message_id, Role::User, cx)?;
- user_message
- };
- Some(user_message)
+ let cursors = self.cursors(cx);
+
+ let user_messages = self.assistant.update(cx, |assistant, cx| {
+ let selected_messages = assistant
+ .messages_for_offsets(cursors, cx)
+ .into_iter()
+ .map(|message| message.id)
+ .collect();
+ assistant.assist(selected_messages, cx)
});
-
- if let Some(user_message) = user_message {
- let cursor = user_message
- .start
- .to_offset(&self.assistant.read(cx).buffer.read(cx));
+ let new_selections = user_messages
+ .iter()
+ .map(|message| {
+ let cursor = message
+ .start
+ .to_offset(self.assistant.read(cx).buffer.read(cx));
+ cursor..cursor
+ })
+ .collect::<Vec<_>>();
+ if !new_selections.is_empty() {
self.editor.update(cx, |editor, cx| {
editor.change_selections(
Some(Autoscroll::Strategy(AutoscrollStrategy::Fit)),
cx,
- |selections| selections.select_ranges([cursor..cursor]),
+ |selections| selections.select_ranges(new_selections),
);
});
}
@@ -898,6 +1109,26 @@ impl AssistantEditor {
}
}
+ fn cycle_message_role(&mut self, _: &CycleMessageRole, cx: &mut ViewContext<Self>) {
+ let cursors = self.cursors(cx);
+ self.assistant.update(cx, |assistant, cx| {
+ let messages = assistant
+ .messages_for_offsets(cursors, cx)
+ .into_iter()
+ .map(|message| message.id)
+ .collect();
+ assistant.cycle_message_roles(messages, cx)
+ });
+ }
+
+ fn cursors(&self, cx: &AppContext) -> Vec<usize> {
+ let selections = self.editor.read(cx).selections.all::<usize>(cx);
+ selections
+ .into_iter()
+ .map(|selection| selection.head())
+ .collect()
+ }
+
fn handle_assistant_event(
&mut self,
_: ModelHandle<Assistant>,
@@ -982,14 +1213,14 @@ impl AssistantEditor {
.assistant
.read(cx)
.messages(cx)
- .map(|(message, metadata, _)| BlockProperties {
- position: buffer.anchor_in_excerpt(excerpt_id, message.start),
+ .map(|message| BlockProperties {
+ position: buffer.anchor_in_excerpt(excerpt_id, message.anchor),
height: 2,
style: BlockStyle::Sticky,
render: Arc::new({
let assistant = self.assistant.clone();
- let metadata = metadata.clone();
- let message = message.clone();
+ // let metadata = message.metadata.clone();
+ // let message = message.clone();
move |cx| {
enum Sender {}
enum ErrorTooltip {}
@@ -1000,7 +1231,7 @@ impl AssistantEditor {
let sender = MouseEventHandler::<Sender, _>::new(
message_id.0,
cx,
- |state, _| match metadata.role {
+ |state, _| match message.role {
Role::User => {
let style = style.user_sender.style_for(state, false);
Label::new("You", style.text.clone())
@@ -1026,7 +1257,10 @@ impl AssistantEditor {
let assistant = assistant.clone();
move |_, _, cx| {
assistant.update(cx, |assistant, cx| {
- assistant.cycle_message_role(message_id, cx)
+ assistant.cycle_message_roles(
+ HashSet::from_iter(Some(message_id)),
+ cx,
+ )
})
}
});
@@ -1035,29 +1269,35 @@ impl AssistantEditor {
.with_child(sender.aligned())
.with_child(
Label::new(
- metadata.sent_at.format("%I:%M%P").to_string(),
+ message.sent_at.format("%I:%M%P").to_string(),
style.sent_at.text.clone(),
)
.contained()
.with_style(style.sent_at.container)
.aligned(),
)
- .with_children(metadata.error.clone().map(|error| {
- Svg::new("icons/circle_x_mark_12.svg")
- .with_color(style.error_icon.color)
- .constrained()
- .with_width(style.error_icon.width)
- .contained()
- .with_style(style.error_icon.container)
- .with_tooltip::<ErrorTooltip>(
- message_id.0,
- error,
- None,
- theme.tooltip.clone(),
- cx,
+ .with_children(
+ if let MessageStatus::Error(error) = &message.status {
+ Some(
+ Svg::new("icons/circle_x_mark_12.svg")
+ .with_color(style.error_icon.color)
+ .constrained()
+ .with_width(style.error_icon.width)
+ .contained()
+ .with_style(style.error_icon.container)
+ .with_tooltip::<ErrorTooltip>(
+ message_id.0,
+ error.to_string(),
+ None,
+ theme.tooltip.clone(),
+ cx,
+ )
+ .aligned(),
)
- .aligned()
- }))
+ } else {
+ None
+ },
+ )
.aligned()
.left()
.contained()
@@ -1147,15 +1387,15 @@ impl AssistantEditor {
let selection = editor.selections.newest::<usize>(cx);
let mut copied_text = String::new();
let mut spanned_messages = 0;
- for (_message, metadata, message_range) in assistant.messages(cx) {
- if message_range.start >= selection.range().end {
+ for message in assistant.messages(cx) {
+ if message.range.start >= selection.range().end {
break;
- } else if message_range.end >= selection.range().start {
- let range = cmp::max(message_range.start, selection.range().start)
- ..cmp::min(message_range.end, selection.range().end);
+ } else if message.range.end >= selection.range().start {
+ let range = cmp::max(message.range.start, selection.range().start)
+ ..cmp::min(message.range.end, selection.range().end);
if !range.is_empty() {
spanned_messages += 1;
- write!(&mut copied_text, "## {}\n\n", metadata.role).unwrap();
+ write!(&mut copied_text, "## {}\n\n", message.role).unwrap();
for chunk in assistant.buffer.read(cx).text_for_range(range) {
copied_text.push_str(&chunk);
}
@@ -1174,11 +1414,24 @@ impl AssistantEditor {
cx.propagate_action();
}
+ fn split(&mut self, _: &Split, cx: &mut ViewContext<Self>) {
+ self.assistant.update(cx, |assistant, cx| {
+ let selections = self.editor.read(cx).selections.disjoint_anchors();
+ for selection in selections.into_iter() {
+ let buffer = self.editor.read(cx).buffer().read(cx).snapshot(cx);
+ let range = selection
+ .map(|endpoint| endpoint.to_offset(&buffer))
+ .range();
+ assistant.split_message(range, cx);
+ }
+ });
+ }
+
fn cycle_model(&mut self, cx: &mut ViewContext<Self>) {
self.assistant.update(cx, |assistant, cx| {
let new_model = match assistant.model.as_str() {
- "gpt-4" => "gpt-3.5-turbo",
- _ => "gpt-4",
+ "gpt-4-0613" => "gpt-3.5-turbo-0613",
+ _ => "gpt-4-0613",
};
assistant.set_model(new_model.into(), cx);
});
@@ -1283,7 +1536,7 @@ impl Item for AssistantEditor {
struct MessageId(usize);
#[derive(Clone, Debug)]
-struct Message {
+struct MessageAnchor {
id: MessageId,
start: language::Anchor,
}
@@ -1292,7 +1545,36 @@ struct Message {
struct MessageMetadata {
role: Role,
sent_at: DateTime<Local>,
- error: Option<String>,
+ status: MessageStatus,
+}
+
+#[derive(Clone, Debug)]
+enum MessageStatus {
+ Pending,
+ Done,
+ Error(Arc<str>),
+}
+
+#[derive(Clone, Debug)]
+pub struct Message {
+ range: Range<usize>,
+ index: usize,
+ id: MessageId,
+ anchor: language::Anchor,
+ role: Role,
+ sent_at: DateTime<Local>,
+ status: MessageStatus,
+}
+
+impl Message {
+ fn to_open_ai_message(&self, buffer: &Buffer) -> RequestMessage {
+ let mut content = format!("[Message {}]\n", self.id.0).to_string();
+ content.extend(buffer.text_for_range(self.range.clone()));
+ RequestMessage {
+ role: self.role,
+ content,
+ }
+ }
}
async fn stream_completion(
@@ -1392,7 +1674,7 @@ mod tests {
let assistant = cx.add_model(|cx| Assistant::new(Default::default(), registry, cx));
let buffer = assistant.read(cx).buffer.clone();
- let message_1 = assistant.read(cx).messages[0].clone();
+ let message_1 = assistant.read(cx).message_anchors[0].clone();
assert_eq!(
messages(&assistant, cx),
vec![(message_1.id, Role::User, 0..0)]
@@ -1400,7 +1682,7 @@ mod tests {
let message_2 = assistant.update(cx, |assistant, cx| {
assistant
- .insert_message_after(message_1.id, Role::Assistant, cx)
+ .insert_message_after(message_1.id, Role::Assistant, MessageStatus::Done, cx)
.unwrap()
});
assert_eq!(
@@ -1424,7 +1706,7 @@ mod tests {
let message_3 = assistant.update(cx, |assistant, cx| {
assistant
- .insert_message_after(message_2.id, Role::User, cx)
+ .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
.unwrap()
});
assert_eq!(
@@ -1438,7 +1720,7 @@ mod tests {
let message_4 = assistant.update(cx, |assistant, cx| {
assistant
- .insert_message_after(message_2.id, Role::User, cx)
+ .insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
.unwrap()
});
assert_eq!(
@@ -1499,7 +1781,7 @@ mod tests {
// Ensure we can still insert after a merged message.
let message_5 = assistant.update(cx, |assistant, cx| {
assistant
- .insert_message_after(message_1.id, Role::System, cx)
+ .insert_message_after(message_1.id, Role::System, MessageStatus::Done, cx)
.unwrap()
});
assert_eq!(
@@ -1512,6 +1794,159 @@ mod tests {
);
}
+ #[gpui::test]
+ fn test_message_splitting(cx: &mut AppContext) {
+ let registry = Arc::new(LanguageRegistry::test());
+ let assistant = cx.add_model(|cx| Assistant::new(Default::default(), registry, cx));
+ let buffer = assistant.read(cx).buffer.clone();
+
+ let message_1 = assistant.read(cx).message_anchors[0].clone();
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![(message_1.id, Role::User, 0..0)]
+ );
+
+ buffer.update(cx, |buffer, cx| {
+ buffer.edit([(0..0, "aaa\nbbb\nccc\nddd\n")], None, cx)
+ });
+
+ let (_, message_2) =
+ assistant.update(cx, |assistant, cx| assistant.split_message(3..3, cx));
+ let message_2 = message_2.unwrap();
+
+ // We recycle newlines in the middle of a split message
+ assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc\nddd\n");
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![
+ (message_1.id, Role::User, 0..4),
+ (message_2.id, Role::User, 4..16),
+ ]
+ );
+
+ let (_, message_3) =
+ assistant.update(cx, |assistant, cx| assistant.split_message(3..3, cx));
+ let message_3 = message_3.unwrap();
+
+ // We don't recycle newlines at the end of a split message
+ assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n");
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![
+ (message_1.id, Role::User, 0..4),
+ (message_3.id, Role::User, 4..5),
+ (message_2.id, Role::User, 5..17),
+ ]
+ );
+
+ let (_, message_4) =
+ assistant.update(cx, |assistant, cx| assistant.split_message(9..9, cx));
+ let message_4 = message_4.unwrap();
+ assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\nccc\nddd\n");
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![
+ (message_1.id, Role::User, 0..4),
+ (message_3.id, Role::User, 4..5),
+ (message_2.id, Role::User, 5..9),
+ (message_4.id, Role::User, 9..17),
+ ]
+ );
+
+ let (_, message_5) =
+ assistant.update(cx, |assistant, cx| assistant.split_message(9..9, cx));
+ let message_5 = message_5.unwrap();
+ assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\nddd\n");
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![
+ (message_1.id, Role::User, 0..4),
+ (message_3.id, Role::User, 4..5),
+ (message_2.id, Role::User, 5..9),
+ (message_4.id, Role::User, 9..10),
+ (message_5.id, Role::User, 10..18),
+ ]
+ );
+
+ let (message_6, message_7) =
+ assistant.update(cx, |assistant, cx| assistant.split_message(14..16, cx));
+ let message_6 = message_6.unwrap();
+ let message_7 = message_7.unwrap();
+ assert_eq!(buffer.read(cx).text(), "aaa\n\nbbb\n\nccc\ndd\nd\n");
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![
+ (message_1.id, Role::User, 0..4),
+ (message_3.id, Role::User, 4..5),
+ (message_2.id, Role::User, 5..9),
+ (message_4.id, Role::User, 9..10),
+ (message_5.id, Role::User, 10..14),
+ (message_6.id, Role::User, 14..17),
+ (message_7.id, Role::User, 17..19),
+ ]
+ );
+ }
+
+ #[gpui::test]
+ fn test_messages_for_offsets(cx: &mut AppContext) {
+ let registry = Arc::new(LanguageRegistry::test());
+ let assistant = cx.add_model(|cx| Assistant::new(Default::default(), registry, cx));
+ let buffer = assistant.read(cx).buffer.clone();
+
+ let message_1 = assistant.read(cx).message_anchors[0].clone();
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![(message_1.id, Role::User, 0..0)]
+ );
+
+ buffer.update(cx, |buffer, cx| buffer.edit([(0..0, "aaa")], None, cx));
+ let message_2 = assistant
+ .update(cx, |assistant, cx| {
+ assistant.insert_message_after(message_1.id, Role::User, MessageStatus::Done, cx)
+ })
+ .unwrap();
+ buffer.update(cx, |buffer, cx| buffer.edit([(4..4, "bbb")], None, cx));
+
+ let message_3 = assistant
+ .update(cx, |assistant, cx| {
+ assistant.insert_message_after(message_2.id, Role::User, MessageStatus::Done, cx)
+ })
+ .unwrap();
+ buffer.update(cx, |buffer, cx| buffer.edit([(8..8, "ccc")], None, cx));
+
+ assert_eq!(buffer.read(cx).text(), "aaa\nbbb\nccc");
+ assert_eq!(
+ messages(&assistant, cx),
+ vec![
+ (message_1.id, Role::User, 0..4),
+ (message_2.id, Role::User, 4..8),
+ (message_3.id, Role::User, 8..11)
+ ]
+ );
+
+ assert_eq!(
+ message_ids_for_offsets(&assistant, &[0, 4, 9], cx),
+ [message_1.id, message_2.id, message_3.id]
+ );
+ assert_eq!(
+ message_ids_for_offsets(&assistant, &[0, 1, 11], cx),
+ [message_1.id, message_3.id]
+ );
+
+ fn message_ids_for_offsets(
+ assistant: &ModelHandle<Assistant>,
+ offsets: &[usize],
+ cx: &AppContext,
+ ) -> Vec<MessageId> {
+ assistant
+ .read(cx)
+ .messages_for_offsets(offsets.iter().copied(), cx)
+ .into_iter()
+ .map(|message| message.id)
+ .collect()
+ }
+ }
+
fn messages(
assistant: &ModelHandle<Assistant>,
cx: &AppContext,
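
The reworked `assist` path above requests `stream: true` and appends each `choice.delta.content` chunk to the buffer as it arrives. A minimal sketch of the kind of server-sent-events decoding that implies, using a hard-coded payload instead of a real HTTP response; the structs only loosely mirror the request/response types in this diff.

```rust
// Hedged sketch: each SSE "data:" line carries a JSON chunk whose
// choices[0].delta.content is an incremental piece of the reply.
// A real client would read these lines from the HTTP response body.
use serde::Deserialize;

#[derive(Deserialize)]
struct Chunk {
    choices: Vec<Choice>,
}

#[derive(Deserialize)]
struct Choice {
    delta: Delta,
}

#[derive(Deserialize)]
struct Delta {
    content: Option<String>,
}

fn main() -> serde_json::Result<()> {
    let body = concat!(
        "data: {\"choices\":[{\"delta\":{\"content\":\"Hel\"}}]}\n\n",
        "data: {\"choices\":[{\"delta\":{\"content\":\"lo\"}}]}\n\n",
        "data: [DONE]\n\n",
    );

    let mut reply = String::new();
    for line in body.lines() {
        let Some(data) = line.strip_prefix("data: ") else { continue };
        if data == "[DONE]" {
            break;
        }
        let mut chunk: Chunk = serde_json::from_str(data)?;
        if let Some(content) = chunk.choices.pop().and_then(|choice| choice.delta.content) {
            reply.push_str(&content);
        }
    }
    assert_eq!(reply, "Hello");
    Ok(())
}
```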
@@ -243,7 +243,7 @@ impl BlockMap {
// Preserve any old transforms that precede this edit.
let old_start = WrapRow(edit.old.start);
let new_start = WrapRow(edit.new.start);
- new_transforms.push_tree(cursor.slice(&old_start, Bias::Left, &()), &());
+ new_transforms.append(cursor.slice(&old_start, Bias::Left, &()), &());
if let Some(transform) = cursor.item() {
if transform.is_isomorphic() && old_start == cursor.end(&()) {
new_transforms.push(transform.clone(), &());
@@ -425,7 +425,7 @@ impl BlockMap {
push_isomorphic(&mut new_transforms, extent_after_edit);
}
- new_transforms.push_tree(cursor.suffix(&()), &());
+ new_transforms.append(cursor.suffix(&()), &());
debug_assert_eq!(
new_transforms.summary().input_rows,
wrap_snapshot.max_point().row() + 1
@@ -115,10 +115,10 @@ impl<'a> FoldMapWriter<'a> {
let mut new_tree = SumTree::new();
let mut cursor = self.0.folds.cursor::<Fold>();
for fold in folds {
- new_tree.push_tree(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
+ new_tree.append(cursor.slice(&fold, Bias::Right, &buffer), &buffer);
new_tree.push(fold, &buffer);
}
- new_tree.push_tree(cursor.suffix(&buffer), &buffer);
+ new_tree.append(cursor.suffix(&buffer), &buffer);
new_tree
};
@@ -165,10 +165,10 @@ impl<'a> FoldMapWriter<'a> {
let mut cursor = self.0.folds.cursor::<usize>();
let mut folds = SumTree::new();
for fold_ix in fold_ixs_to_delete {
- folds.push_tree(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
+ folds.append(cursor.slice(&fold_ix, Bias::Right, &buffer), &buffer);
cursor.next(&buffer);
}
- folds.push_tree(cursor.suffix(&buffer), &buffer);
+ folds.append(cursor.suffix(&buffer), &buffer);
folds
};
@@ -302,7 +302,7 @@ impl FoldMap {
cursor.seek(&0, Bias::Right, &());
while let Some(mut edit) = buffer_edits_iter.next() {
- new_transforms.push_tree(cursor.slice(&edit.old.start, Bias::Left, &()), &());
+ new_transforms.append(cursor.slice(&edit.old.start, Bias::Left, &()), &());
edit.new.start -= edit.old.start - cursor.start();
edit.old.start = *cursor.start();
@@ -412,7 +412,7 @@ impl FoldMap {
}
}
- new_transforms.push_tree(cursor.suffix(&()), &());
+ new_transforms.append(cursor.suffix(&()), &());
if new_transforms.is_empty() {
let text_summary = new_buffer.text_summary();
new_transforms.push(
@@ -353,7 +353,7 @@ impl WrapSnapshot {
}
old_cursor.next(&());
- new_transforms.push_tree(
+ new_transforms.append(
old_cursor.slice(&next_edit.old.start, Bias::Right, &()),
&(),
);
@@ -366,7 +366,7 @@ impl WrapSnapshot {
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
- new_transforms.push_tree(old_cursor.suffix(&()), &());
+ new_transforms.append(old_cursor.suffix(&()), &());
}
}
}
@@ -500,7 +500,7 @@ impl WrapSnapshot {
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
- new_transforms.push_tree(
+ new_transforms.append(
old_cursor.slice(
&TabPoint::new(next_edit.old_rows.start, 0),
Bias::Right,
@@ -517,7 +517,7 @@ impl WrapSnapshot {
new_transforms.push_or_extend(Transform::isomorphic(summary));
}
old_cursor.next(&());
- new_transforms.push_tree(old_cursor.suffix(&()), &());
+ new_transforms.append(old_cursor.suffix(&()), &());
}
}
}
@@ -1010,7 +1010,7 @@ impl MultiBuffer {
let suffix = cursor.suffix(&());
let changed_trailing_excerpt = suffix.is_empty();
- new_excerpts.push_tree(suffix, &());
+ new_excerpts.append(suffix, &());
drop(cursor);
snapshot.excerpts = new_excerpts;
snapshot.excerpt_ids = new_excerpt_ids;
@@ -1193,7 +1193,7 @@ impl MultiBuffer {
while let Some(excerpt_id) = excerpt_ids.next() {
// Seek to the next excerpt to remove, preserving any preceding excerpts.
let locator = snapshot.excerpt_locator_for_id(excerpt_id);
- new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
+ new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
if let Some(mut excerpt) = cursor.item() {
if excerpt.id != excerpt_id {
@@ -1245,7 +1245,7 @@ impl MultiBuffer {
}
let suffix = cursor.suffix(&());
let changed_trailing_excerpt = suffix.is_empty();
- new_excerpts.push_tree(suffix, &());
+ new_excerpts.append(suffix, &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@@ -1509,7 +1509,7 @@ impl MultiBuffer {
let mut cursor = snapshot.excerpts.cursor::<(Option<&Locator>, usize)>();
for (locator, buffer, buffer_edited) in excerpts_to_edit {
- new_excerpts.push_tree(cursor.slice(&Some(locator), Bias::Left, &()), &());
+ new_excerpts.append(cursor.slice(&Some(locator), Bias::Left, &()), &());
let old_excerpt = cursor.item().unwrap();
let buffer = buffer.read(cx);
let buffer_id = buffer.remote_id();
@@ -1549,7 +1549,7 @@ impl MultiBuffer {
new_excerpts.push(new_excerpt, &());
cursor.next(&());
}
- new_excerpts.push_tree(cursor.suffix(&()), &());
+ new_excerpts.append(cursor.suffix(&()), &());
drop(cursor);
snapshot.excerpts = new_excerpts;
@@ -445,7 +445,7 @@ type WindowBoundsCallback = Box<dyn FnMut(WindowBounds, Uuid, &mut WindowContext
type KeystrokeCallback =
Box<dyn FnMut(&Keystroke, &MatchResult, Option<&Box<dyn Action>>, &mut WindowContext) -> bool>;
type ActiveLabeledTasksCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
-type DeserializeActionCallback = fn(json: &str) -> anyhow::Result<Box<dyn Action>>;
+type DeserializeActionCallback = fn(json: serde_json::Value) -> anyhow::Result<Box<dyn Action>>;
type WindowShouldCloseSubscriptionCallback = Box<dyn FnMut(&mut AppContext) -> bool>;
pub struct AppContext {
@@ -624,14 +624,14 @@ impl AppContext {
pub fn deserialize_action(
&self,
name: &str,
- argument: Option<&str>,
+ argument: Option<serde_json::Value>,
) -> Result<Box<dyn Action>> {
let callback = self
.action_deserializers
.get(name)
.ok_or_else(|| anyhow!("unknown action {}", name))?
.1;
- callback(argument.unwrap_or("{}"))
+ callback(argument.unwrap_or_else(|| serde_json::Value::Object(Default::default())))
.with_context(|| format!("invalid data for action {}", name))
}
@@ -5573,7 +5573,7 @@ mod tests {
let action1 = cx
.deserialize_action(
"test::something::ComplexAction",
- Some(r#"{"arg": "a", "count": 5}"#),
+ Some(serde_json::from_str(r#"{"arg": "a", "count": 5}"#).unwrap()),
)
.unwrap();
let action2 = cx
@@ -11,7 +11,7 @@ pub trait Action: 'static {
fn qualified_name() -> &'static str
where
Self: Sized;
- fn from_json_str(json: &str) -> anyhow::Result<Box<dyn Action>>
+ fn from_json_str(json: serde_json::Value) -> anyhow::Result<Box<dyn Action>>
where
Self: Sized;
}
@@ -38,7 +38,7 @@ macro_rules! actions {
$crate::__impl_action! {
$namespace,
$name,
- fn from_json_str(_: &str) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
+ fn from_json_str(_: $crate::serde_json::Value) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
Ok(Box::new(Self))
}
}
@@ -58,8 +58,8 @@ macro_rules! impl_actions {
$crate::__impl_action! {
$namespace,
$name,
- fn from_json_str(json: &str) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
- Ok(Box::new($crate::serde_json::from_str::<Self>(json)?))
+ fn from_json_str(json: $crate::serde_json::Value) -> $crate::anyhow::Result<Box<dyn $crate::Action>> {
+ Ok(Box::new($crate::serde_json::from_value::<Self>(json)?))
}
}
)*
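
With `from_json_str` now taking a `serde_json::Value`, a keymap argument no longer has to be re-serialized to a string before an action is constructed; the macro simply feeds the value to `serde_json::from_value`. A hedged, standalone sketch of that pattern outside gpui's macros; `ComplexAction` mirrors the shape used in the app.rs test above, but its definition here is illustrative, not gpui's.

```rust
// Hedged sketch: build an action from a serde_json::Value with from_value
// instead of reparsing a string with from_str.
use serde::Deserialize;
use serde_json::{json, Value};

#[derive(Debug, Default, Deserialize)]
#[serde(default)]
struct ComplexAction {
    arg: Option<String>,
    count: usize,
}

fn deserialize_action(argument: Option<Value>) -> serde_json::Result<ComplexAction> {
    // Same default as the AppContext change: a missing argument becomes `{}`.
    serde_json::from_value(argument.unwrap_or_else(|| Value::Object(Default::default())))
}

fn main() -> serde_json::Result<()> {
    let with_arg = deserialize_action(Some(json!({ "arg": "a", "count": 5 })))?;
    let without_arg = deserialize_action(None)?;
    println!("{with_arg:?} {without_arg:?}");
    Ok(())
}
```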
@@ -394,7 +394,7 @@ impl<'a> WindowContext<'a> {
.iter()
.filter_map(move |(name, (type_id, deserialize))| {
if let Some(action_depth) = handler_depths_by_action_type.get(type_id).copied() {
- let action = deserialize("{}").ok()?;
+ let action = deserialize(serde_json::Value::Object(Default::default())).ok()?;
let bindings = self
.keystroke_matcher
.bindings_for_action_type(*type_id)
@@ -211,7 +211,7 @@ impl<V: View> Element<V> for List<V> {
let mut cursor = old_items.cursor::<Count>();
if state.rendered_range.start < new_rendered_range.start {
- new_items.push_tree(
+ new_items.append(
cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
&(),
);
@@ -221,7 +221,7 @@ impl<V: View> Element<V> for List<V> {
cursor.next(&());
}
}
- new_items.push_tree(
+ new_items.append(
cursor.slice(&Count(new_rendered_range.start), Bias::Right, &()),
&(),
);
@@ -230,7 +230,7 @@ impl<V: View> Element<V> for List<V> {
cursor.seek(&Count(new_rendered_range.end), Bias::Right, &());
if new_rendered_range.end < state.rendered_range.start {
- new_items.push_tree(
+ new_items.append(
cursor.slice(&Count(state.rendered_range.start), Bias::Right, &()),
&(),
);
@@ -240,7 +240,7 @@ impl<V: View> Element<V> for List<V> {
cursor.next(&());
}
- new_items.push_tree(cursor.suffix(&()), &());
+ new_items.append(cursor.suffix(&()), &());
state.items = new_items;
state.rendered_range = new_rendered_range;
@@ -413,7 +413,7 @@ impl<V: View> ListState<V> {
old_heights.seek_forward(&Count(old_range.end), Bias::Right, &());
new_heights.extend((0..count).map(|_| ListItem::Unrendered), &());
- new_heights.push_tree(old_heights.suffix(&()), &());
+ new_heights.append(old_heights.suffix(&()), &());
drop(old_heights);
state.items = new_heights;
}
@@ -786,7 +786,7 @@ impl platform::Platform for MacPlatform {
fn set_cursor_style(&self, style: CursorStyle) {
unsafe {
- let cursor: id = match style {
+ let new_cursor: id = match style {
CursorStyle::Arrow => msg_send![class!(NSCursor), arrowCursor],
CursorStyle::ResizeLeftRight => {
msg_send![class!(NSCursor), resizeLeftRightCursor]
@@ -795,7 +795,11 @@ impl platform::Platform for MacPlatform {
CursorStyle::PointingHand => msg_send![class!(NSCursor), pointingHandCursor],
CursorStyle::IBeam => msg_send![class!(NSCursor), IBeamCursor],
};
- let _: () = msg_send![cursor, set];
+
+ let old_cursor: id = msg_send![class!(NSCursor), currentCursor];
+ if new_cursor != old_cursor {
+ let _: () = msg_send![new_cursor, set];
+ }
}
}
@@ -17,7 +17,7 @@ use futures::{
future::{BoxFuture, Shared},
FutureExt, TryFutureExt as _,
};
-use gpui::{executor::Background, AppContext, Task};
+use gpui::{executor::Background, AppContext, AsyncAppContext, Task};
use highlight_map::HighlightMap;
use lazy_static::lazy_static;
use lsp::{CodeActionKind, LanguageServerBinaries, LanguageServerBinary};
@@ -118,27 +118,46 @@ impl CachedLspAdapter {
pub async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
- self.adapter.fetch_latest_server_version(http).await
+ self.adapter.fetch_latest_server_version(delegate).await
+ }
+
+ pub fn will_fetch_server(
+ &self,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ self.adapter.will_fetch_server(delegate, cx)
+ }
+
+ pub fn will_start_server(
+ &self,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ self.adapter.will_start_server(delegate, cx)
}
pub async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
self.adapter
- .fetch_server_binary(version, http, container_dir)
+ .fetch_server_binary(version, container_dir, delegate)
.await
}
pub async fn cached_server_binary(
&self,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Option<LanguageServerBinary> {
- self.adapter.cached_server_binary(container_dir).await
+ self.adapter
+ .cached_server_binary(container_dir, delegate)
+ .await
}
pub async fn installation_test_binary(
@@ -187,23 +206,48 @@ impl CachedLspAdapter {
}
}
+pub trait LspAdapterDelegate: Send + Sync {
+ fn show_notification(&self, message: &str, cx: &mut AppContext);
+ fn http_client(&self) -> Arc<dyn HttpClient>;
+}
+
#[async_trait]
pub trait LspAdapter: 'static + Send + Sync {
async fn name(&self) -> LanguageServerName;
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>>;
+ fn will_fetch_server(
+ &self,
+ _: &Arc<dyn LspAdapterDelegate>,
+ _: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ None
+ }
+
+ fn will_start_server(
+ &self,
+ _: &Arc<dyn LspAdapterDelegate>,
+ _: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ None
+ }
+
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary>;
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary>;
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary>;
async fn installation_test_binary(
&self,
@@ -523,7 +567,7 @@ pub struct LanguageRegistry {
lsp_binary_paths: Mutex<
HashMap<
LanguageServerName,
- Shared<BoxFuture<'static, Result<LanguageServerBinaries, Arc<anyhow::Error>>>>,
+ Shared<Task<Result<LanguageServerBinaries, Arc<anyhow::Error>>>>,
>,
>,
executor: Option<Arc<Background>>,
@@ -821,7 +865,7 @@ impl LanguageRegistry {
language: Arc<Language>,
adapter: Arc<CachedLspAdapter>,
root_path: Arc<Path>,
- http_client: Arc<dyn HttpClient>,
+ delegate: Arc<dyn LspAdapterDelegate>,
cx: &mut AppContext,
) -> Option<PendingLanguageServer> {
let server_id = self.state.write().next_language_server_id();
@@ -873,7 +917,6 @@ impl LanguageRegistry {
.log_err()?;
let this = self.clone();
let language = language.clone();
- let http_client = http_client.clone();
let container_dir: Arc<Path> = Arc::from(download_dir.join(adapter.name.0.as_ref()));
let root_path = root_path.clone();
let adapter = adapter.clone();
@@ -882,28 +925,35 @@ impl LanguageRegistry {
let task = {
let container_dir = container_dir.clone();
- cx.spawn(|cx| async move {
+ cx.spawn(|mut cx| async move {
login_shell_env_loaded.await;
let mut lock = this.lsp_binary_paths.lock();
let entry = lock
.entry(adapter.name.clone())
.or_insert_with(|| {
- get_binaries(
- adapter.clone(),
- language.clone(),
- http_client,
- container_dir,
- lsp_binary_statuses,
- )
- .map_err(Arc::new)
- .boxed()
+ cx.spawn(|cx| {
+ get_binaries(
+ adapter.clone(),
+ language.clone(),
+ delegate.clone(),
+ container_dir,
+ lsp_binary_statuses,
+ cx,
+ )
+ .map_err(Arc::new)
+ })
.shared()
})
.clone();
drop(lock);
let binaries = entry.clone().map_err(|e| anyhow!(e)).await?;
+
+ if let Some(task) = adapter.will_start_server(&delegate, &mut cx) {
+ task.await?;
+ }
+
println!("starting server");
let server = lsp::LanguageServer::new(
server_id,
@@ -1001,9 +1051,10 @@ impl Default for LanguageRegistry {
async fn get_binaries(
adapter: Arc<CachedLspAdapter>,
language: Arc<Language>,
- http_client: Arc<dyn HttpClient>,
+ delegate: Arc<dyn LspAdapterDelegate>,
container_dir: Arc<Path>,
statuses: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
+ mut cx: AsyncAppContext,
) -> Result<LanguageServerBinaries> {
if !container_dir.exists() {
smol::fs::create_dir_all(&container_dir)
@@ -1011,11 +1062,15 @@ async fn get_binaries(
.context("failed to create container directory")?;
}
+ if let Some(task) = adapter.will_fetch_server(&delegate, &mut cx) {
+ task.await?;
+ }
+
println!("fetching binary");
let binary = fetch_latest_binary(
adapter.clone(),
language.clone(),
- http_client,
+ delegate.as_ref(),
&container_dir,
statuses.clone(),
)
@@ -1023,7 +1078,7 @@ async fn get_binaries(
if let Err(error) = binary.as_ref() {
if let Some(binary) = adapter
- .cached_server_binary(container_dir.to_path_buf())
+ .cached_server_binary(container_dir.to_path_buf(), delegate.as_ref())
.await
{
statuses
@@ -1054,7 +1109,7 @@ async fn get_binaries(
async fn fetch_latest_binary(
adapter: Arc<CachedLspAdapter>,
language: Arc<Language>,
- http_client: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
container_dir: &Path,
lsp_binary_statuses_tx: async_broadcast::Sender<(Arc<Language>, LanguageServerBinaryStatus)>,
) -> Result<LanguageServerBinaries> {
@@ -1066,15 +1121,13 @@ async fn fetch_latest_binary(
))
.await?;
- let version_info = adapter
- .fetch_latest_server_version(http_client.clone())
- .await?;
+ let version_info = adapter.fetch_latest_server_version(delegate).await?;
lsp_binary_statuses_tx
.broadcast((language.clone(), LanguageServerBinaryStatus::Downloading))
.await?;
let binary = adapter
- .fetch_server_binary(version_info, http_client, container_dir.to_path_buf())
+ .fetch_server_binary(version_info, container_dir.to_path_buf(), delegate)
.await?;
let installation_test_binary = adapter
.installation_test_binary(container_dir.to_path_buf())
@@ -1605,7 +1658,7 @@ impl LspAdapter for Arc<FakeLspAdapter> {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
unreachable!();
}
@@ -1613,13 +1666,17 @@ impl LspAdapter for Arc<FakeLspAdapter> {
async fn fetch_server_binary(
&self,
_: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
_: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
unreachable!();
}
- async fn cached_server_binary(&self, _: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ _: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
unreachable!();
}
@@ -288,7 +288,7 @@ impl SyntaxSnapshot {
};
if target.cmp(&cursor.start(), text).is_gt() {
let slice = cursor.slice(&target, Bias::Left, text);
- layers.push_tree(slice, text);
+ layers.append(slice, text);
}
}
// If this layer follows all of the edits, then preserve it and any
@@ -303,7 +303,7 @@ impl SyntaxSnapshot {
Bias::Left,
text,
);
- layers.push_tree(slice, text);
+ layers.append(slice, text);
continue;
};
@@ -369,7 +369,7 @@ impl SyntaxSnapshot {
cursor.next(text);
}
- layers.push_tree(cursor.suffix(&text), &text);
+ layers.append(cursor.suffix(&text), &text);
drop(cursor);
self.layers = layers;
}
@@ -478,7 +478,7 @@ impl SyntaxSnapshot {
if bounded_position.cmp(&cursor.start(), &text).is_gt() {
let slice = cursor.slice(&bounded_position, Bias::Left, text);
if !slice.is_empty() {
- layers.push_tree(slice, &text);
+ layers.append(slice, &text);
if changed_regions.prune(cursor.end(text), text) {
done = false;
}
@@ -6,17 +6,23 @@ import ScreenCaptureKit
class LKRoomDelegate: RoomDelegate {
var data: UnsafeRawPointer
var onDidDisconnect: @convention(c) (UnsafeRawPointer) -> Void
+ var onDidSubscribeToRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
+ var onDidUnsubscribeFromRemoteAudioTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
var onDidSubscribeToRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void
var onDidUnsubscribeFromRemoteVideoTrack: @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
init(
data: UnsafeRawPointer,
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
+ onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
+ onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void)
{
self.data = data
self.onDidDisconnect = onDidDisconnect
+ self.onDidSubscribeToRemoteAudioTrack = onDidSubscribeToRemoteAudioTrack
+ self.onDidUnsubscribeFromRemoteAudioTrack = onDidUnsubscribeFromRemoteAudioTrack
self.onDidSubscribeToRemoteVideoTrack = onDidSubscribeToRemoteVideoTrack
self.onDidUnsubscribeFromRemoteVideoTrack = onDidUnsubscribeFromRemoteVideoTrack
}
@@ -30,12 +36,16 @@ class LKRoomDelegate: RoomDelegate {
func room(_ room: Room, participant: RemoteParticipant, didSubscribe publication: RemoteTrackPublication, track: Track) {
if track.kind == .video {
self.onDidSubscribeToRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
+ } else if track.kind == .audio {
+ self.onDidSubscribeToRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString, Unmanaged.passUnretained(track).toOpaque())
}
}
func room(_ room: Room, participant: RemoteParticipant, didUnsubscribe publication: RemoteTrackPublication, track: Track) {
if track.kind == .video {
self.onDidUnsubscribeFromRemoteVideoTrack(self.data, participant.identity as CFString, track.sid! as CFString)
+ } else if track.kind == .audio {
+ self.onDidUnsubscribeFromRemoteAudioTrack(self.data, participant.identity as CFString, track.sid! as CFString)
}
}
}
@@ -77,12 +87,16 @@ class LKVideoRenderer: NSObject, VideoRenderer {
public func LKRoomDelegateCreate(
data: UnsafeRawPointer,
onDidDisconnect: @escaping @convention(c) (UnsafeRawPointer) -> Void,
+ onDidSubscribeToRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
+ onDidUnsubscribeFromRemoteAudioTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void,
onDidSubscribeToRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString, UnsafeRawPointer) -> Void,
onDidUnsubscribeFromRemoteVideoTrack: @escaping @convention(c) (UnsafeRawPointer, CFString, CFString) -> Void
) -> UnsafeMutableRawPointer {
let delegate = LKRoomDelegate(
data: data,
onDidDisconnect: onDidDisconnect,
+ onDidSubscribeToRemoteAudioTrack: onDidSubscribeToRemoteAudioTrack,
+ onDidUnsubscribeFromRemoteAudioTrack: onDidUnsubscribeFromRemoteAudioTrack,
onDidSubscribeToRemoteVideoTrack: onDidSubscribeToRemoteVideoTrack,
onDidUnsubscribeFromRemoteVideoTrack: onDidUnsubscribeFromRemoteVideoTrack
)
@@ -123,6 +137,18 @@ public func LKRoomPublishVideoTrack(room: UnsafeRawPointer, track: UnsafeRawPoin
}
}
+@_cdecl("LKRoomPublishAudioTrack")
+public func LKRoomPublishAudioTrack(room: UnsafeRawPointer, track: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, UnsafeMutableRawPointer?, CFString?) -> Void, callback_data: UnsafeRawPointer) {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+ let track = Unmanaged<LocalAudioTrack>.fromOpaque(track).takeUnretainedValue()
+ room.localParticipant?.publishAudioTrack(track: track).then { publication in
+ callback(callback_data, Unmanaged.passRetained(publication).toOpaque(), nil)
+ }.catch { error in
+ callback(callback_data, nil, error.localizedDescription as CFString)
+ }
+}
+
+
@_cdecl("LKRoomUnpublishTrack")
public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawPointer) {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
@@ -130,6 +156,20 @@ public func LKRoomUnpublishTrack(room: UnsafeRawPointer, publication: UnsafeRawP
let _ = room.localParticipant?.unpublish(publication: publication)
}
+@_cdecl("LKRoomAudioTracksForRemoteParticipant")
+public func LKRoomAudioTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
+ let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
+
+ for (_, participant) in room.remoteParticipants {
+ if participant.identity == participantId as String {
+ return participant.audioTracks.compactMap { $0.track as? RemoteAudioTrack } as CFArray?
+ }
+ }
+
+ return nil;
+}
+
+
@_cdecl("LKRoomVideoTracksForRemoteParticipant")
public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, participantId: CFString) -> CFArray? {
let room = Unmanaged<Room>.fromOpaque(room).takeUnretainedValue()
@@ -143,6 +183,17 @@ public func LKRoomVideoTracksForRemoteParticipant(room: UnsafeRawPointer, partic
return nil;
}
+@_cdecl("LKLocalAudioTrackCreateTrack")
+public func LKLocalAudioTrackCreateTrack() -> UnsafeMutableRawPointer {
+ let track = LocalAudioTrack.createTrack(options: AudioCaptureOptions(
+ echoCancellation: true,
+ noiseSuppression: true
+ ))
+
+ return Unmanaged.passRetained(track).toOpaque()
+}
+
+
@_cdecl("LKCreateScreenShareTrackForDisplay")
public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer) -> UnsafeMutableRawPointer {
let display = Unmanaged<MacOSDisplay>.fromOpaque(display).takeUnretainedValue()
@@ -150,6 +201,19 @@ public func LKCreateScreenShareTrackForDisplay(display: UnsafeMutableRawPointer)
return Unmanaged.passRetained(track).toOpaque()
}
+@_cdecl("LKRemoteAudioTrackStart")
+public func LKRemoteAudioTrackStart(track: UnsafeRawPointer, onStart: @escaping @convention(c) (UnsafeRawPointer, Bool) -> Void, callbackData: UnsafeRawPointer) {
+ let track = Unmanaged<Track>.fromOpaque(track).takeUnretainedValue() as! RemoteAudioTrack
+
+ track.start().then { success in
+ onStart(callbackData, success)
+ }
+ .catch { _ in
+ onStart(callbackData, false)
+ }
+}
+
+
@_cdecl("LKVideoRendererCreate")
public func LKVideoRendererCreate(data: UnsafeRawPointer, onFrame: @escaping @convention(c) (UnsafeRawPointer, CVPixelBuffer) -> Bool, onDrop: @escaping @convention(c) (UnsafeRawPointer) -> Void) -> UnsafeMutableRawPointer {
Unmanaged.passRetained(LKVideoRenderer(data: data, onFrame: onFrame, onDrop: onDrop)).toOpaque()
@@ -169,6 +233,12 @@ public func LKRemoteVideoTrackGetSid(track: UnsafeRawPointer) -> CFString {
return track.sid! as CFString
}
+@_cdecl("LKRemoteAudioTrackGetSid")
+public func LKRemoteAudioTrackGetSid(track: UnsafeRawPointer) -> CFString {
+ let track = Unmanaged<RemoteAudioTrack>.fromOpaque(track).takeUnretainedValue()
+ return track.sid! as CFString
+}
+
@_cdecl("LKDisplaySources")
public func LKDisplaySources(data: UnsafeRawPointer, callback: @escaping @convention(c) (UnsafeRawPointer, CFArray?, CFString?) -> Void) {
MacOSScreenCapturer.sources(for: .display, includeCurrentApplication: false, preferredMethod: .legacy).then { displaySources in
@@ -1,6 +1,10 @@
+use std::time::Duration;
+
use futures::StreamExt;
use gpui::{actions, keymap_matcher::Binding, Menu, MenuItem};
-use live_kit_client::{LocalVideoTrack, RemoteVideoTrackUpdate, Room};
+use live_kit_client::{
+ LocalAudioTrack, LocalVideoTrack, RemoteAudioTrackUpdate, RemoteVideoTrackUpdate, Room,
+};
use live_kit_server::token::{self, VideoGrant};
use log::LevelFilter;
use simplelog::SimpleLogger;
@@ -11,6 +15,12 @@ fn main() {
SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
gpui::App::new(()).unwrap().run(|cx| {
+ #[cfg(any(test, feature = "test-support"))]
+ println!("USING TEST LIVEKIT");
+
+ #[cfg(not(any(test, feature = "test-support")))]
+ println!("USING REAL LIVEKIT");
+
cx.platform().activate(true);
cx.add_global_action(quit);
@@ -49,35 +59,75 @@ fn main() {
let room_b = Room::new();
room_b.connect(&live_kit_url, &user2_token).await.unwrap();
- let mut track_changes = room_b.remote_video_track_updates();
+ let mut audio_track_updates = room_b.remote_audio_track_updates();
+ let audio_track = LocalAudioTrack::create();
+ let audio_track_publication = room_a.publish_audio_track(&audio_track).await.unwrap();
+
+ if let RemoteAudioTrackUpdate::Subscribed(track) =
+ audio_track_updates.next().await.unwrap()
+ {
+ let remote_tracks = room_b.remote_audio_tracks("test-participant-1");
+ assert_eq!(remote_tracks.len(), 1);
+ assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
+ assert_eq!(track.publisher_id(), "test-participant-1");
+ } else {
+ panic!("unexpected message");
+ }
+
+ println!("Pausing for 5 seconds to test audio, make some noise!");
+ let timer = cx.background().timer(Duration::from_secs(5));
+ timer.await;
+
+ let remote_audio_track = room_b
+ .remote_audio_tracks("test-participant-1")
+ .pop()
+ .unwrap();
+ room_a.unpublish_track(audio_track_publication);
+ if let RemoteAudioTrackUpdate::Unsubscribed {
+ publisher_id,
+ track_id,
+ } = audio_track_updates.next().await.unwrap()
+ {
+ assert_eq!(publisher_id, "test-participant-1");
+ assert_eq!(remote_audio_track.sid(), track_id);
+ assert_eq!(room_b.remote_audio_tracks("test-participant-1").len(), 0);
+ } else {
+ panic!("unexpected message");
+ }
+ let mut video_track_updates = room_b.remote_video_track_updates();
let displays = room_a.display_sources().await.unwrap();
let display = displays.into_iter().next().unwrap();
- let track_a = LocalVideoTrack::screen_share_for_display(&display);
- let track_a_publication = room_a.publish_video_track(&track_a).await.unwrap();
+ let local_video_track = LocalVideoTrack::screen_share_for_display(&display);
+ let local_video_track_publication = room_a
+ .publish_video_track(&local_video_track)
+ .await
+ .unwrap();
- if let RemoteVideoTrackUpdate::Subscribed(track) = track_changes.next().await.unwrap() {
- let remote_tracks = room_b.remote_video_tracks("test-participant-1");
- assert_eq!(remote_tracks.len(), 1);
- assert_eq!(remote_tracks[0].publisher_id(), "test-participant-1");
+ if let RemoteVideoTrackUpdate::Subscribed(track) =
+ video_track_updates.next().await.unwrap()
+ {
+ let remote_video_tracks = room_b.remote_video_tracks("test-participant-1");
+ assert_eq!(remote_video_tracks.len(), 1);
+ assert_eq!(remote_video_tracks[0].publisher_id(), "test-participant-1");
assert_eq!(track.publisher_id(), "test-participant-1");
} else {
panic!("unexpected message");
}
- let remote_track = room_b
+ let remote_video_track = room_b
.remote_video_tracks("test-participant-1")
.pop()
.unwrap();
- room_a.unpublish_track(track_a_publication);
+ room_a.unpublish_track(local_video_track_publication);
if let RemoteVideoTrackUpdate::Unsubscribed {
publisher_id,
track_id,
- } = track_changes.next().await.unwrap()
+ } = video_track_updates.next().await.unwrap()
{
assert_eq!(publisher_id, "test-participant-1");
- assert_eq!(remote_track.sid(), track_id);
+ assert_eq!(remote_video_track.sid(), track_id);
assert_eq!(room_b.remote_video_tracks("test-participant-1").len(), 0);
} else {
panic!("unexpected message");
@@ -4,7 +4,7 @@ pub mod prod;
pub use prod::*;
#[cfg(any(test, feature = "test-support"))]
-mod test;
+pub mod test;
#[cfg(any(test, feature = "test-support"))]
pub use test::*;
@@ -21,6 +21,17 @@ extern "C" {
fn LKRoomDelegateCreate(
callback_data: *mut c_void,
on_did_disconnect: extern "C" fn(callback_data: *mut c_void),
+ on_did_subscribe_to_remote_audio_track: extern "C" fn(
+ callback_data: *mut c_void,
+ publisher_id: CFStringRef,
+ track_id: CFStringRef,
+ remote_track: *const c_void,
+ ),
+ on_did_unsubscribe_from_remote_audio_track: extern "C" fn(
+ callback_data: *mut c_void,
+ publisher_id: CFStringRef,
+ track_id: CFStringRef,
+ ),
on_did_subscribe_to_remote_video_track: extern "C" fn(
callback_data: *mut c_void,
publisher_id: CFStringRef,
@@ -49,7 +60,18 @@ extern "C" {
callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
callback_data: *mut c_void,
);
+ fn LKRoomPublishAudioTrack(
+ room: *const c_void,
+ track: *const c_void,
+ callback: extern "C" fn(*mut c_void, *mut c_void, CFStringRef),
+ callback_data: *mut c_void,
+ );
fn LKRoomUnpublishTrack(room: *const c_void, publication: *const c_void);
+ fn LKRoomAudioTracksForRemoteParticipant(
+ room: *const c_void,
+ participant_id: CFStringRef,
+ ) -> CFArrayRef;
+
fn LKRoomVideoTracksForRemoteParticipant(
room: *const c_void,
participant_id: CFStringRef,
@@ -61,6 +83,13 @@ extern "C" {
on_drop: extern "C" fn(callback_data: *mut c_void),
) -> *const c_void;
+ fn LKRemoteAudioTrackGetSid(track: *const c_void) -> CFStringRef;
+ // fn LKRemoteAudioTrackStart(
+ // track: *const c_void,
+ // callback: extern "C" fn(*mut c_void, bool),
+ // callback_data: *mut c_void
+ // );
+
fn LKVideoTrackAddRenderer(track: *const c_void, renderer: *const c_void);
fn LKRemoteVideoTrackGetSid(track: *const c_void) -> CFStringRef;
@@ -73,6 +102,7 @@ extern "C" {
),
);
fn LKCreateScreenShareTrackForDisplay(display: *const c_void) -> *const c_void;
+ fn LKLocalAudioTrackCreateTrack() -> *const c_void;
}
pub type Sid = String;
@@ -89,6 +119,7 @@ pub struct Room {
watch::Sender<ConnectionState>,
watch::Receiver<ConnectionState>,
)>,
+ remote_audio_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteAudioTrackUpdate>>>,
remote_video_track_subscribers: Mutex<Vec<mpsc::UnboundedSender<RemoteVideoTrackUpdate>>>,
_delegate: RoomDelegate,
}
@@ -100,6 +131,7 @@ impl Room {
Self {
native_room: unsafe { LKRoomCreate(delegate.native_delegate) },
connection: Mutex::new(watch::channel_with(ConnectionState::Disconnected)),
+ remote_audio_track_subscribers: Default::default(),
remote_video_track_subscribers: Default::default(),
_delegate: delegate,
}
@@ -191,6 +223,32 @@ impl Room {
async { rx.await.unwrap().context("error publishing video track") }
}
+ pub fn publish_audio_track(
+ self: &Arc<Self>,
+ track: &LocalAudioTrack,
+ ) -> impl Future<Output = Result<LocalTrackPublication>> {
+ let (tx, rx) = oneshot::channel::<Result<LocalTrackPublication>>();
+ extern "C" fn callback(tx: *mut c_void, publication: *mut c_void, error: CFStringRef) {
+ let tx =
+ unsafe { Box::from_raw(tx as *mut oneshot::Sender<Result<LocalTrackPublication>>) };
+ if error.is_null() {
+ let _ = tx.send(Ok(LocalTrackPublication(publication)));
+ } else {
+ let error = unsafe { CFString::wrap_under_get_rule(error).to_string() };
+ let _ = tx.send(Err(anyhow!(error)));
+ }
+ }
+ unsafe {
+ LKRoomPublishAudioTrack(
+ self.native_room,
+ track.0,
+ callback,
+ Box::into_raw(Box::new(tx)) as *mut c_void,
+ );
+ }
+ async { rx.await.unwrap().context("error publishing audio track") }
+ }
+
pub fn unpublish_track(&self, publication: LocalTrackPublication) {
unsafe {
LKRoomUnpublishTrack(self.native_room, publication.0);
@@ -226,12 +284,65 @@ impl Room {
}
}
+ pub fn remote_audio_tracks(&self, participant_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
+ unsafe {
+ let tracks = LKRoomAudioTracksForRemoteParticipant(
+ self.native_room,
+ CFString::new(participant_id).as_concrete_TypeRef(),
+ );
+
+ if tracks.is_null() {
+ Vec::new()
+ } else {
+ let tracks = CFArray::wrap_under_get_rule(tracks);
+ tracks
+ .into_iter()
+ .map(|native_track| {
+ let native_track = *native_track;
+ let id =
+ CFString::wrap_under_get_rule(LKRemoteAudioTrackGetSid(native_track))
+ .to_string();
+ Arc::new(RemoteAudioTrack::new(
+ native_track,
+ id,
+ participant_id.into(),
+ ))
+ })
+ .collect()
+ }
+ }
+ }
+
+ pub fn remote_audio_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteAudioTrackUpdate> {
+ let (tx, rx) = mpsc::unbounded();
+ self.remote_audio_track_subscribers.lock().push(tx);
+ rx
+ }
+
pub fn remote_video_track_updates(&self) -> mpsc::UnboundedReceiver<RemoteVideoTrackUpdate> {
let (tx, rx) = mpsc::unbounded();
self.remote_video_track_subscribers.lock().push(tx);
rx
}
+ fn did_subscribe_to_remote_audio_track(&self, track: RemoteAudioTrack) {
+ let track = Arc::new(track);
+ self.remote_audio_track_subscribers.lock().retain(|tx| {
+ tx.unbounded_send(RemoteAudioTrackUpdate::Subscribed(track.clone()))
+ .is_ok()
+ });
+ }
+
+ fn did_unsubscribe_from_remote_audio_track(&self, publisher_id: String, track_id: String) {
+ self.remote_audio_track_subscribers.lock().retain(|tx| {
+ tx.unbounded_send(RemoteAudioTrackUpdate::Unsubscribed {
+ publisher_id: publisher_id.clone(),
+ track_id: track_id.clone(),
+ })
+ .is_ok()
+ });
+ }
+
fn did_subscribe_to_remote_video_track(&self, track: RemoteVideoTrack) {
let track = Arc::new(track);
self.remote_video_track_subscribers.lock().retain(|tx| {
@@ -294,6 +405,8 @@ impl RoomDelegate {
LKRoomDelegateCreate(
weak_room as *mut c_void,
Self::on_did_disconnect,
+ Self::on_did_subscribe_to_remote_audio_track,
+ Self::on_did_unsubscribe_from_remote_audio_track,
Self::on_did_subscribe_to_remote_video_track,
Self::on_did_unsubscribe_from_remote_video_track,
)
@@ -312,6 +425,36 @@ impl RoomDelegate {
let _ = Weak::into_raw(room);
}
+ extern "C" fn on_did_subscribe_to_remote_audio_track(
+ room: *mut c_void,
+ publisher_id: CFStringRef,
+ track_id: CFStringRef,
+ track: *const c_void,
+ ) {
+ let room = unsafe { Weak::from_raw(room as *mut Room) };
+ let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
+ let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+ let track = RemoteAudioTrack::new(track, track_id, publisher_id);
+ if let Some(room) = room.upgrade() {
+ room.did_subscribe_to_remote_audio_track(track);
+ }
+ let _ = Weak::into_raw(room);
+ }
+
+ extern "C" fn on_did_unsubscribe_from_remote_audio_track(
+ room: *mut c_void,
+ publisher_id: CFStringRef,
+ track_id: CFStringRef,
+ ) {
+ let room = unsafe { Weak::from_raw(room as *mut Room) };
+ let publisher_id = unsafe { CFString::wrap_under_get_rule(publisher_id).to_string() };
+ let track_id = unsafe { CFString::wrap_under_get_rule(track_id).to_string() };
+ if let Some(room) = room.upgrade() {
+ room.did_unsubscribe_from_remote_audio_track(publisher_id, track_id);
+ }
+ let _ = Weak::into_raw(room);
+ }
+
extern "C" fn on_did_subscribe_to_remote_video_track(
room: *mut c_void,
publisher_id: CFStringRef,
@@ -352,6 +495,20 @@ impl Drop for RoomDelegate {
}
}
+pub struct LocalAudioTrack(*const c_void);
+
+impl LocalAudioTrack {
+ pub fn create() -> Self {
+ Self(unsafe { LKLocalAudioTrackCreateTrack() })
+ }
+}
+
+impl Drop for LocalAudioTrack {
+ fn drop(&mut self) {
+ unsafe { CFRelease(self.0) }
+ }
+}
+
pub struct LocalVideoTrack(*const c_void);
impl LocalVideoTrack {
@@ -374,6 +531,34 @@ impl Drop for LocalTrackPublication {
}
}
+#[derive(Debug)]
+pub struct RemoteAudioTrack {
+ _native_track: *const c_void,
+ sid: Sid,
+ publisher_id: String,
+}
+
+impl RemoteAudioTrack {
+ fn new(native_track: *const c_void, sid: Sid, publisher_id: String) -> Self {
+ unsafe {
+ CFRetain(native_track);
+ }
+ Self {
+ _native_track: native_track,
+ sid,
+ publisher_id,
+ }
+ }
+
+ pub fn sid(&self) -> &str {
+ &self.sid
+ }
+
+ pub fn publisher_id(&self) -> &str {
+ &self.publisher_id
+ }
+}
+
#[derive(Debug)]
pub struct RemoteVideoTrack {
native_track: *const c_void,
@@ -453,6 +638,11 @@ pub enum RemoteVideoTrackUpdate {
Unsubscribed { publisher_id: Sid, track_id: Sid },
}
+pub enum RemoteAudioTrackUpdate {
+ Subscribed(Arc<RemoteAudioTrack>),
+ Unsubscribed { publisher_id: Sid, track_id: Sid },
+}
+
pub struct MacOSDisplay(*const c_void);
impl MacOSDisplay {
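
The new `publish_audio_track` reuses the callback pattern already used for video: a `oneshot::Sender` is boxed, handed to the Swift bridge as an opaque `callback_data` pointer, and reconstructed exactly once inside the `extern "C"` callback. Below is a simplified, self-contained sketch of that ownership handoff; it substitutes a synchronous `std::sync::mpsc` channel and a closure for `LKRoomPublishAudioTrack`, so it only illustrates the pointer round-trip, not the real FFI.

```rust
use std::ffi::c_void;
use std::sync::mpsc;

// The callback the "native" side invokes when publishing finishes.
extern "C" fn on_published(callback_data: *mut c_void, ok: bool) {
    // Safety: callback_data came from Box::into_raw below and is consumed exactly once.
    let tx = unsafe { Box::from_raw(callback_data as *mut mpsc::Sender<bool>) };
    let _ = tx.send(ok);
}

// Hand the sender across the C boundary, then wait for the callback's answer.
fn publish(native_call: impl Fn(extern "C" fn(*mut c_void, bool), *mut c_void)) -> bool {
    let (tx, rx) = mpsc::channel::<bool>();
    native_call(on_published, Box::into_raw(Box::new(tx)) as *mut c_void);
    rx.recv().unwrap()
}

fn main() {
    // Stand-in for LKRoomPublishAudioTrack: call the callback straight away with success.
    let ok = publish(|callback, data| callback(data, true));
    println!("published: {ok}");
}
```

The invariant that matters is that `Box::into_raw` and `Box::from_raw` are paired exactly once per call; otherwise the sender either leaks or is freed twice.
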
@@ -67,7 +67,7 @@ impl TestServer {
}
}
- async fn create_room(&self, room: String) -> Result<()> {
+ pub async fn create_room(&self, room: String) -> Result<()> {
self.background.simulate_random_delay().await;
let mut server_rooms = self.rooms.lock();
if server_rooms.contains_key(&room) {
@@ -104,7 +104,7 @@ impl TestServer {
room_name
))
} else {
- for track in &room.tracks {
+ for track in &room.video_tracks {
client_room
.0
.lock()
@@ -182,7 +182,7 @@ impl TestServer {
frames_rx: local_track.frames_rx.clone(),
});
- room.tracks.push(track.clone());
+ room.video_tracks.push(track.clone());
for (id, client_room) in &room.client_rooms {
if *id != identity {
@@ -199,6 +199,43 @@ impl TestServer {
Ok(())
}
+ async fn publish_audio_track(
+ &self,
+ token: String,
+ _local_track: &LocalAudioTrack,
+ ) -> Result<()> {
+ self.background.simulate_random_delay().await;
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let identity = claims.sub.unwrap().to_string();
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+
+ let track = Arc::new(RemoteAudioTrack {
+ sid: nanoid::nanoid!(17),
+ publisher_id: identity.clone(),
+ });
+
+ room.audio_tracks.push(track.clone());
+
+ for (id, client_room) in &room.client_rooms {
+ if *id != identity {
+ let _ = client_room
+ .0
+ .lock()
+ .audio_track_updates
+ .0
+ .try_broadcast(RemoteAudioTrackUpdate::Subscribed(track.clone()))
+ .unwrap();
+ }
+ }
+
+ Ok(())
+ }
+
fn video_tracks(&self, token: String) -> Result<Vec<Arc<RemoteVideoTrack>>> {
let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
let room_name = claims.video.room.unwrap();
@@ -207,14 +244,26 @@ impl TestServer {
let room = server_rooms
.get_mut(&*room_name)
.ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
- Ok(room.tracks.clone())
+ Ok(room.video_tracks.clone())
+ }
+
+ fn audio_tracks(&self, token: String) -> Result<Vec<Arc<RemoteAudioTrack>>> {
+ let claims = live_kit_server::token::validate(&token, &self.secret_key)?;
+ let room_name = claims.video.room.unwrap();
+
+ let mut server_rooms = self.rooms.lock();
+ let room = server_rooms
+ .get_mut(&*room_name)
+ .ok_or_else(|| anyhow!("room {} does not exist", room_name))?;
+ Ok(room.audio_tracks.clone())
}
}
#[derive(Default)]
struct TestServerRoom {
client_rooms: HashMap<Sid, Arc<Room>>,
- tracks: Vec<Arc<RemoteVideoTrack>>,
+ video_tracks: Vec<Arc<RemoteVideoTrack>>,
+ audio_tracks: Vec<Arc<RemoteAudioTrack>>,
}
impl TestServerRoom {}
@@ -266,6 +315,10 @@ struct RoomState {
watch::Receiver<ConnectionState>,
),
display_sources: Vec<MacOSDisplay>,
+ audio_track_updates: (
+ async_broadcast::Sender<RemoteAudioTrackUpdate>,
+ async_broadcast::Receiver<RemoteAudioTrackUpdate>,
+ ),
video_track_updates: (
async_broadcast::Sender<RemoteVideoTrackUpdate>,
async_broadcast::Receiver<RemoteVideoTrackUpdate>,
@@ -286,6 +339,7 @@ impl Room {
connection: watch::channel_with(ConnectionState::Disconnected),
display_sources: Default::default(),
video_track_updates: async_broadcast::broadcast(128),
+ audio_track_updates: async_broadcast::broadcast(128),
})))
}
@@ -327,8 +381,34 @@ impl Room {
Ok(LocalTrackPublication)
}
}
+ pub fn publish_audio_track(
+ self: &Arc<Self>,
+ track: &LocalAudioTrack,
+ ) -> impl Future<Output = Result<LocalTrackPublication>> {
+ let this = self.clone();
+ let track = track.clone();
+ async move {
+ this.test_server()
+ .publish_audio_track(this.token(), &track)
+ .await?;
+ Ok(LocalTrackPublication)
+ }
+ }
- pub fn unpublish_track(&self, _: LocalTrackPublication) {}
+ pub fn unpublish_track(&self, _publication: LocalTrackPublication) {}
+
+ pub fn remote_audio_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteAudioTrack>> {
+ if !self.is_connected() {
+ return Vec::new();
+ }
+
+ self.test_server()
+ .audio_tracks(self.token())
+ .unwrap()
+ .into_iter()
+ .filter(|track| track.publisher_id() == publisher_id)
+ .collect()
+ }
pub fn remote_video_tracks(&self, publisher_id: &str) -> Vec<Arc<RemoteVideoTrack>> {
if !self.is_connected() {
@@ -343,6 +423,10 @@ impl Room {
.collect()
}
+ pub fn remote_audio_track_updates(&self) -> impl Stream<Item = RemoteAudioTrackUpdate> {
+ self.0.lock().audio_track_updates.1.clone()
+ }
+
pub fn remote_video_track_updates(&self) -> impl Stream<Item = RemoteVideoTrackUpdate> {
self.0.lock().video_track_updates.1.clone()
}
@@ -404,6 +488,15 @@ impl LocalVideoTrack {
}
}
+#[derive(Clone)]
+pub struct LocalAudioTrack;
+
+impl LocalAudioTrack {
+ pub fn create() -> Self {
+ Self
+ }
+}
+
pub struct RemoteVideoTrack {
sid: Sid,
publisher_id: Sid,
@@ -424,12 +517,33 @@ impl RemoteVideoTrack {
}
}
+pub struct RemoteAudioTrack {
+ sid: Sid,
+ publisher_id: Sid,
+}
+
+impl RemoteAudioTrack {
+ pub fn sid(&self) -> &str {
+ &self.sid
+ }
+
+ pub fn publisher_id(&self) -> &str {
+ &self.publisher_id
+ }
+}
+
#[derive(Clone)]
pub enum RemoteVideoTrackUpdate {
Subscribed(Arc<RemoteVideoTrack>),
Unsubscribed { publisher_id: Sid, track_id: Sid },
}
+#[derive(Clone)]
+pub enum RemoteAudioTrackUpdate {
+ Subscribed(Arc<RemoteAudioTrack>),
+ Unsubscribed { publisher_id: Sid, track_id: Sid },
+}
+
#[derive(Clone)]
pub struct MacOSDisplay {
frames: (
@@ -117,14 +117,14 @@ struct Notification<'a, T> {
params: T,
}
-#[derive(Deserialize)]
+#[derive(Debug, Clone, Deserialize)]
struct AnyNotification<'a> {
#[serde(default)]
id: Option<usize>,
#[serde(borrow)]
method: &'a str,
- #[serde(borrow)]
- params: &'a RawValue,
+ #[serde(borrow, default)]
+ params: Option<&'a RawValue>,
}
#[derive(Debug, Serialize, Deserialize)]
@@ -171,9 +171,12 @@ impl LanguageServer {
"unhandled notification {}:\n{}",
notification.method,
serde_json::to_string_pretty(
- &Value::from_str(notification.params.get()).unwrap()
+ ¬ification
+ .params
+ .and_then(|params| Value::from_str(params.get()).ok())
+ .unwrap_or(Value::Null)
)
- .unwrap()
+ .unwrap(),
);
},
);
@@ -313,7 +316,11 @@ impl LanguageServer {
if let Ok(msg) = serde_json::from_slice::<AnyNotification>(&buffer) {
if let Some(handler) = notification_handlers.lock().get_mut(msg.method) {
- handler(msg.id, msg.params.get(), cx.clone());
+ handler(
+ msg.id,
+ &msg.params.map(|params| params.get()).unwrap_or("null"),
+ cx.clone(),
+ );
} else {
on_unhandled_notification(msg);
}
@@ -864,7 +871,13 @@ impl LanguageServer {
cx,
move |msg| {
notifications_tx
- .try_send((msg.method.to_string(), msg.params.get().to_string()))
+ .try_send((
+ msg.method.to_string(),
+ msg.params
+ .map(|raw_value| raw_value.get())
+ .unwrap_or("null")
+ .to_string(),
+ ))
.ok();
},
)),
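
Making `params` optional matches JSON-RPC, where notifications may omit `params` entirely; with the previous `&RawValue` field such messages failed to deserialize at all. A small standalone illustration of the new shape (assumes `serde`, plus `serde_json` with its `raw_value` feature, as dependencies):

```rust
use serde::Deserialize;
use serde_json::value::RawValue;

// Same shape as the patched AnyNotification: `params` may be absent.
#[derive(Debug, Deserialize)]
struct AnyNotification<'a> {
    #[serde(default)]
    id: Option<usize>,
    #[serde(borrow)]
    method: &'a str,
    #[serde(borrow, default)]
    params: Option<&'a RawValue>,
}

fn main() {
    // Previously this failed with "missing field `params`".
    let json = r#"{"method":"exit"}"#;
    let msg: AnyNotification = serde_json::from_str(json).unwrap();
    assert!(msg.id.is_none());
    println!(
        "{} -> {}",
        msg.method,
        msg.params.map(|p| p.get()).unwrap_or("null")
    );
}
```
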
@@ -38,9 +38,9 @@ use language::{
},
range_from_lsp, range_to_lsp, Anchor, Bias, Buffer, CachedLspAdapter, CodeAction, CodeLabel,
Completion, Diagnostic, DiagnosticEntry, DiagnosticSet, Diff, Event as BufferEvent, File as _,
- Language, LanguageRegistry, LanguageServerName, LocalFile, OffsetRangeExt, Operation, Patch,
- PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset, ToPointUtf16, Transaction,
- Unclipped,
+ Language, LanguageRegistry, LanguageServerName, LocalFile, LspAdapterDelegate, OffsetRangeExt,
+ Operation, Patch, PendingLanguageServer, PointUtf16, TextBufferSnapshot, ToOffset,
+ ToPointUtf16, Transaction, Unclipped,
};
use log::error;
use lsp::{
@@ -76,8 +76,8 @@ use std::{
};
use terminals::Terminals;
use util::{
- debug_panic, defer, merge_json_value_into, paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc,
- ResultExt, TryFutureExt as _,
+ debug_panic, defer, http::HttpClient, merge_json_value_into,
+ paths::LOCAL_SETTINGS_RELATIVE_PATH, post_inc, ResultExt, TryFutureExt as _,
};
pub use fs::*;
@@ -254,6 +254,7 @@ pub enum Event {
LanguageServerAdded(LanguageServerId),
LanguageServerRemoved(LanguageServerId),
LanguageServerLog(LanguageServerId, String),
+ Notification(String),
ActiveEntryChanged(Option<ProjectEntryId>),
WorktreeAdded,
WorktreeRemoved(WorktreeId),
@@ -444,6 +445,11 @@ pub enum FormatTrigger {
Manual,
}
+struct ProjectLspAdapterDelegate {
+ project: ModelHandle<Project>,
+ http_client: Arc<dyn HttpClient>,
+}
+
impl FormatTrigger {
fn from_proto(value: i32) -> FormatTrigger {
match value {
@@ -2427,7 +2433,7 @@ impl Project {
language.clone(),
adapter.clone(),
worktree_path,
- self.client.http_client(),
+ ProjectLspAdapterDelegate::new(self, cx),
cx,
) {
Some(pending_server) => pending_server,
@@ -7481,6 +7487,26 @@ impl<P: AsRef<Path>> From<(WorktreeId, P)> for ProjectPath {
}
}
+impl ProjectLspAdapterDelegate {
+ fn new(project: &Project, cx: &ModelContext<Project>) -> Arc<Self> {
+ Arc::new(Self {
+ project: cx.handle(),
+ http_client: project.client.http_client(),
+ })
+ }
+}
+
+impl LspAdapterDelegate for ProjectLspAdapterDelegate {
+ fn show_notification(&self, message: &str, cx: &mut AppContext) {
+ self.project
+ .update(cx, |_, cx| cx.emit(Event::Notification(message.to_owned())));
+ }
+
+ fn http_client(&self) -> Arc<dyn HttpClient> {
+ self.http_client.clone()
+ }
+}
+
fn split_operations(
mut operations: Vec<proto::Operation>,
) -> impl Iterator<Item = Vec<proto::Operation>> {
@@ -1470,7 +1470,7 @@ impl Snapshot {
break;
}
}
- new_entries_by_path.push_tree(cursor.suffix(&()), &());
+ new_entries_by_path.append(cursor.suffix(&()), &());
new_entries_by_path
};
@@ -2259,7 +2259,7 @@ impl BackgroundScannerState {
let mut cursor = self.snapshot.entries_by_path.cursor::<TraversalProgress>();
new_entries = cursor.slice(&TraversalTarget::Path(path), Bias::Left, &());
removed_entries = cursor.slice(&TraversalTarget::PathSuccessor(path), Bias::Left, &());
- new_entries.push_tree(cursor.suffix(&()), &());
+ new_entries.append(cursor.suffix(&()), &());
}
self.snapshot.entries_by_path = new_entries;
@@ -53,7 +53,7 @@ impl Rope {
}
}
- self.chunks.push_tree(chunks.suffix(&()), &());
+ self.chunks.append(chunks.suffix(&()), &());
self.check_invariants();
}
@@ -21,7 +21,7 @@ util = { path = "../util" }
anyhow.workspace = true
futures.workspace = true
-json_comments = "0.2"
+serde_json_lenient = {version = "0.1", features = ["preserve_order", "raw_value"]}
lazy_static.workspace = true
postage.workspace = true
rust-embed.workspace = true
@@ -37,6 +37,6 @@ tree-sitter-json = "*"
[dev-dependencies]
gpui = { path = "../gpui", features = ["test-support"] }
fs = { path = "../fs", features = ["test-support"] }
-
+indoc.workspace = true
pretty_assertions = "1.3.0"
unindent.workspace = true
@@ -1,5 +1,5 @@
use crate::{settings_store::parse_json_with_comments, SettingsAssets};
-use anyhow::{Context, Result};
+use anyhow::{anyhow, Context, Result};
use collections::BTreeMap;
use gpui::{keymap_matcher::Binding, AppContext};
use schemars::{
@@ -8,7 +8,7 @@ use schemars::{
JsonSchema,
};
use serde::Deserialize;
-use serde_json::{value::RawValue, Value};
+use serde_json::Value;
use util::{asset_str, ResultExt};
#[derive(Deserialize, Default, Clone, JsonSchema)]
@@ -24,7 +24,7 @@ pub struct KeymapBlock {
#[derive(Deserialize, Default, Clone)]
#[serde(transparent)]
-pub struct KeymapAction(Box<RawValue>);
+pub struct KeymapAction(Value);
impl JsonSchema for KeymapAction {
fn schema_name() -> String {
@@ -37,11 +37,12 @@ impl JsonSchema for KeymapAction {
}
#[derive(Deserialize)]
-struct ActionWithData(Box<str>, Box<RawValue>);
+struct ActionWithData(Box<str>, Value);
impl KeymapFile {
pub fn load_asset(asset_path: &str, cx: &mut AppContext) -> Result<()> {
let content = asset_str::<SettingsAssets>(asset_path);
+
Self::parse(content.as_ref())?.add_to_cx(cx)
}
@@ -54,18 +55,27 @@ impl KeymapFile {
let bindings = bindings
.into_iter()
.filter_map(|(keystroke, action)| {
- let action = action.0.get();
+ let action = action.0;
// This is a workaround for a limitation in serde: serde-rs/json#497
// We want to deserialize the action data as a `RawValue` so that we can
// deserialize the action itself dynamically directly from the JSON
// string. But `RawValue` currently does not work inside of an untagged enum.
- if action.starts_with('[') {
- let ActionWithData(name, data) = serde_json::from_str(action).log_err()?;
- cx.deserialize_action(&name, Some(data.get()))
+ if let Value::Array(items) = action {
+ let Ok([name, data]): Result<[serde_json::Value; 2], _> = items.try_into() else {
+ return Some(Err(anyhow!("Expected array of length 2")));
+ };
+ let serde_json::Value::String(name) = name else {
+ return Some(Err(anyhow!("Expected first item in array to be a string.")))
+ };
+ cx.deserialize_action(
+ &name,
+ Some(data),
+ )
+ } else if let Value::String(name) = action {
+ cx.deserialize_action(&name, None)
} else {
- let name = serde_json::from_str(action).log_err()?;
- cx.deserialize_action(name, None)
+ return Some(Err(anyhow!("Expected two-element array or string, got {:?}", action)));
}
.with_context(|| {
format!(
@@ -118,3 +128,24 @@ impl KeymapFile {
serde_json::to_value(root_schema).unwrap()
}
}
+
+#[cfg(test)]
+mod tests {
+ use crate::KeymapFile;
+
+ #[test]
+ fn can_deserialize_keymap_with_trailing_comma() {
+ let json = indoc::indoc! {r#"[
+ // Standard macOS bindings
+ {
+ "bindings": {
+ "up": "menu::SelectPrev",
+ },
+ },
+ ]"#};
+ KeymapFile::parse(json).unwrap();
+ }
+}
@@ -834,11 +834,8 @@ fn to_pretty_json(value: &impl Serialize, indent_size: usize, indent_prefix_len:
}
pub fn parse_json_with_comments<T: DeserializeOwned>(content: &str) -> Result<T> {
- Ok(serde_json::from_reader(
- json_comments::CommentSettings::c_style().strip_comments(content.as_bytes()),
- )?)
+ Ok(serde_json_lenient::from_str(content)?)
}
-
#[cfg(test)]
mod tests {
use super::*;
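
The switch from `json_comments` to `serde_json_lenient` in `parse_json_with_comments` above is what allows the new keymap test to include a trailing comma as well as comments. A rough sketch of the behavior difference, using a hypothetical `Settings` struct and assuming `serde`, `anyhow`, and `serde_json_lenient` as dependencies:

```rust
use serde::Deserialize;

// Hypothetical settings struct, just to have something to deserialize into.
#[derive(Debug, Deserialize)]
struct Settings {
    theme: String,
}

fn main() -> anyhow::Result<()> {
    // Plain serde_json rejects both the comment and the trailing comma;
    // the lenient parser used by parse_json_with_comments accepts them.
    let text = r#"{
        // user settings
        "theme": "one-dark",
    }"#;
    let settings: Settings = serde_json_lenient::from_str(text)?;
    println!("theme = {}", settings.theme);
    Ok(())
}
```
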
@@ -669,7 +669,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for () {
impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
fn begin_leaf(&mut self) {}
fn end_leaf(&mut self, cx: &<T::Summary as Summary>::Context) {
- self.tree.push_tree(
+ self.tree.append(
SumTree(Arc::new(Node::Leaf {
summary: mem::take(&mut self.leaf_summary),
items: mem::take(&mut self.leaf_items),
@@ -689,7 +689,7 @@ impl<'a, T: Item> SeekAggregate<'a, T> for SliceSeekAggregate<T> {
_: &T::Summary,
cx: &<T::Summary as Summary>::Context,
) {
- self.tree.push_tree(tree.clone(), cx);
+ self.tree.append(tree.clone(), cx);
}
}
@@ -268,7 +268,7 @@ impl<T: Item> SumTree<T> {
for item in iter {
if leaf.is_some() && leaf.as_ref().unwrap().items().len() == 2 * TREE_BASE {
- self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx);
+ self.append(SumTree(Arc::new(leaf.take().unwrap())), cx);
}
if leaf.is_none() {
@@ -295,13 +295,13 @@ impl<T: Item> SumTree<T> {
}
if leaf.is_some() {
- self.push_tree(SumTree(Arc::new(leaf.take().unwrap())), cx);
+ self.append(SumTree(Arc::new(leaf.take().unwrap())), cx);
}
}
pub fn push(&mut self, item: T, cx: &<T::Summary as Summary>::Context) {
let summary = item.summary();
- self.push_tree(
+ self.append(
SumTree(Arc::new(Node::Leaf {
summary: summary.clone(),
items: ArrayVec::from_iter(Some(item)),
@@ -311,11 +311,11 @@ impl<T: Item> SumTree<T> {
);
}
- pub fn push_tree(&mut self, other: Self, cx: &<T::Summary as Summary>::Context) {
+ pub fn append(&mut self, other: Self, cx: &<T::Summary as Summary>::Context) {
if !other.0.is_leaf() || !other.0.items().is_empty() {
if self.0.height() < other.0.height() {
for tree in other.0.child_trees() {
- self.push_tree(tree.clone(), cx);
+ self.append(tree.clone(), cx);
}
} else if let Some(split_tree) = self.push_tree_recursive(other, cx) {
*self = Self::from_child_trees(self.clone(), split_tree, cx);
@@ -512,7 +512,7 @@ impl<T: KeyedItem> SumTree<T> {
}
}
new_tree.push(item, cx);
- new_tree.push_tree(cursor.suffix(cx), cx);
+ new_tree.append(cursor.suffix(cx), cx);
new_tree
};
replaced
@@ -529,7 +529,7 @@ impl<T: KeyedItem> SumTree<T> {
cursor.next(cx);
}
}
- new_tree.push_tree(cursor.suffix(cx), cx);
+ new_tree.append(cursor.suffix(cx), cx);
new_tree
};
removed
@@ -563,7 +563,7 @@ impl<T: KeyedItem> SumTree<T> {
{
new_tree.extend(buffered_items.drain(..), cx);
let slice = cursor.slice(&new_key, Bias::Left, cx);
- new_tree.push_tree(slice, cx);
+ new_tree.append(slice, cx);
old_item = cursor.item();
}
@@ -583,7 +583,7 @@ impl<T: KeyedItem> SumTree<T> {
}
new_tree.extend(buffered_items, cx);
- new_tree.push_tree(cursor.suffix(cx), cx);
+ new_tree.append(cursor.suffix(cx), cx);
new_tree
};
@@ -719,7 +719,7 @@ mod tests {
let mut tree2 = SumTree::new();
tree2.extend(50..100, &());
- tree1.push_tree(tree2, &());
+ tree1.append(tree2, &());
assert_eq!(
tree1.items(&()),
(0..20).chain(50..100).collect::<Vec<u8>>()
@@ -766,7 +766,7 @@ mod tests {
let mut new_tree = cursor.slice(&Count(splice_start), Bias::Right, &());
new_tree.extend(new_items, &());
cursor.seek(&Count(splice_end), Bias::Right, &());
- new_tree.push_tree(cursor.slice(&tree_end, Bias::Right, &()), &());
+ new_tree.append(cursor.slice(&tree_end, Bias::Right, &()), &());
new_tree
};
@@ -67,7 +67,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
removed = Some(cursor.item().unwrap().value.clone());
cursor.next(&());
}
- new_tree.push_tree(cursor.suffix(&()), &());
+ new_tree.append(cursor.suffix(&()), &());
drop(cursor);
self.0 = new_tree;
removed
@@ -79,7 +79,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
let mut cursor = self.0.cursor::<MapKeyRef<'_, K>>();
let mut new_tree = cursor.slice(&start, Bias::Left, &());
cursor.seek(&end, Bias::Left, &());
- new_tree.push_tree(cursor.suffix(&()), &());
+ new_tree.append(cursor.suffix(&()), &());
drop(cursor);
self.0 = new_tree;
}
@@ -117,7 +117,7 @@ impl<K: Clone + Debug + Default + Ord, V: Clone + Debug> TreeMap<K, V> {
new_tree.push(updated, &());
cursor.next(&());
}
- new_tree.push_tree(cursor.suffix(&()), &());
+ new_tree.append(cursor.suffix(&()), &());
drop(cursor);
self.0 = new_tree;
result
@@ -600,7 +600,7 @@ impl Buffer {
let mut old_fragments = self.fragments.cursor::<FragmentTextSummary>();
let mut new_fragments =
old_fragments.slice(&edits.peek().unwrap().0.start, Bias::Right, &None);
- new_ropes.push_tree(new_fragments.summary().text);
+ new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().visible;
for (range, new_text) in edits {
@@ -625,8 +625,8 @@ impl Buffer {
}
let slice = old_fragments.slice(&range.start, Bias::Right, &None);
- new_ropes.push_tree(slice.summary().text);
- new_fragments.push_tree(slice, &None);
+ new_ropes.append(slice.summary().text);
+ new_fragments.append(slice, &None);
fragment_start = old_fragments.start().visible;
}
@@ -728,8 +728,8 @@ impl Buffer {
}
let suffix = old_fragments.suffix(&None);
- new_ropes.push_tree(suffix.summary().text);
- new_fragments.push_tree(suffix, &None);
+ new_ropes.append(suffix.summary().text);
+ new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish();
drop(old_fragments);
@@ -828,7 +828,7 @@ impl Buffer {
Bias::Left,
&cx,
);
- new_ropes.push_tree(new_fragments.summary().text);
+ new_ropes.append(new_fragments.summary().text);
let mut fragment_start = old_fragments.start().0.full_offset();
for (range, new_text) in edits {
@@ -854,8 +854,8 @@ impl Buffer {
let slice =
old_fragments.slice(&VersionedFullOffset::Offset(range.start), Bias::Left, &cx);
- new_ropes.push_tree(slice.summary().text);
- new_fragments.push_tree(slice, &None);
+ new_ropes.append(slice.summary().text);
+ new_fragments.append(slice, &None);
fragment_start = old_fragments.start().0.full_offset();
}
@@ -986,8 +986,8 @@ impl Buffer {
}
let suffix = old_fragments.suffix(&cx);
- new_ropes.push_tree(suffix.summary().text);
- new_fragments.push_tree(suffix, &None);
+ new_ropes.append(suffix.summary().text);
+ new_fragments.append(suffix, &None);
let (visible_text, deleted_text) = new_ropes.finish();
drop(old_fragments);
@@ -1056,8 +1056,8 @@ impl Buffer {
for fragment_id in self.fragment_ids_for_edits(undo.counts.keys()) {
let preceding_fragments = old_fragments.slice(&Some(fragment_id), Bias::Left, &None);
- new_ropes.push_tree(preceding_fragments.summary().text);
- new_fragments.push_tree(preceding_fragments, &None);
+ new_ropes.append(preceding_fragments.summary().text);
+ new_fragments.append(preceding_fragments, &None);
if let Some(fragment) = old_fragments.item() {
let mut fragment = fragment.clone();
@@ -1087,8 +1087,8 @@ impl Buffer {
}
let suffix = old_fragments.suffix(&None);
- new_ropes.push_tree(suffix.summary().text);
- new_fragments.push_tree(suffix, &None);
+ new_ropes.append(suffix.summary().text);
+ new_fragments.append(suffix, &None);
drop(old_fragments);
let (visible_text, deleted_text) = new_ropes.finish();
@@ -2070,7 +2070,7 @@ impl<'a> RopeBuilder<'a> {
}
}
- fn push_tree(&mut self, len: FragmentTextSummary) {
+ fn append(&mut self, len: FragmentTextSummary) {
self.push(len.visible, true, true);
self.push(len.deleted, false, false);
}
@@ -162,6 +162,12 @@ define_connection! {
ALTER TABLE workspaces ADD COLUMN right_dock_active_panel TEXT;
ALTER TABLE workspaces ADD COLUMN bottom_dock_visible INTEGER; //bool
ALTER TABLE workspaces ADD COLUMN bottom_dock_active_panel TEXT;
+ ),
+ // Add panel zoom persistence
+ sql!(
+ ALTER TABLE workspaces ADD COLUMN left_dock_zoom INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN right_dock_zoom INTEGER; //bool
+ ALTER TABLE workspaces ADD COLUMN bottom_dock_zoom INTEGER; //bool
)];
}
@@ -196,10 +202,13 @@ impl WorkspaceDb {
display,
left_dock_visible,
left_dock_active_panel,
+ left_dock_zoom,
right_dock_visible,
right_dock_active_panel,
+ right_dock_zoom,
bottom_dock_visible,
- bottom_dock_active_panel
+ bottom_dock_active_panel,
+ bottom_dock_zoom
FROM workspaces
WHERE workspace_location = ?
})
@@ -244,22 +253,28 @@ impl WorkspaceDb {
workspace_location,
left_dock_visible,
left_dock_active_panel,
+ left_dock_zoom,
right_dock_visible,
right_dock_active_panel,
+ right_dock_zoom,
bottom_dock_visible,
bottom_dock_active_panel,
+ bottom_dock_zoom,
timestamp
)
- VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, CURRENT_TIMESTAMP)
+ VALUES (?1, ?2, ?3, ?4, ?5, ?6, ?7, ?8, ?9, ?10, ?11, CURRENT_TIMESTAMP)
ON CONFLICT DO
UPDATE SET
workspace_location = ?2,
left_dock_visible = ?3,
left_dock_active_panel = ?4,
- right_dock_visible = ?5,
- right_dock_active_panel = ?6,
- bottom_dock_visible = ?7,
- bottom_dock_active_panel = ?8,
+ left_dock_zoom = ?5,
+ right_dock_visible = ?6,
+ right_dock_active_panel = ?7,
+ right_dock_zoom = ?8,
+ bottom_dock_visible = ?9,
+ bottom_dock_active_panel = ?10,
+ bottom_dock_zoom = ?11,
timestamp = CURRENT_TIMESTAMP
))?((workspace.id, &workspace.location, workspace.docks))
.context("Updating workspace")?;
@@ -100,16 +100,19 @@ impl Bind for DockStructure {
pub struct DockData {
pub(crate) visible: bool,
pub(crate) active_panel: Option<String>,
+ pub(crate) zoom: bool,
}
impl Column for DockData {
fn column(statement: &mut Statement, start_index: i32) -> Result<(Self, i32)> {
let (visible, next_index) = Option::<bool>::column(statement, start_index)?;
let (active_panel, next_index) = Option::<String>::column(statement, next_index)?;
+ let (zoom, next_index) = Option::<bool>::column(statement, next_index)?;
Ok((
DockData {
visible: visible.unwrap_or(false),
active_panel,
+ zoom: zoom.unwrap_or(false),
},
next_index,
))
@@ -119,7 +122,8 @@ impl Column for DockData {
impl Bind for DockData {
fn bind(&self, statement: &Statement, start_index: i32) -> Result<i32> {
let next_index = statement.bind(&self.visible, start_index)?;
- statement.bind(&self.active_panel, next_index)
+ let next_index = statement.bind(&self.active_panel, next_index)?;
+ statement.bind(&self.zoom, next_index)
}
}
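
For context on the `DockData` changes: `Bind` and `Column` implementations thread a column index through each field, so persisting the new `zoom` flag only requires continuing the index chain after `active_panel`. The toy example below illustrates that convention with stand-in types; `Statement`, `bind_bool`, and `bind_text` here are simplified placeholders, not the real `sqlez` API.

```rust
// Stand-ins for the real Statement/Bind machinery; only the
// index-threading convention is illustrated here.
#[derive(Default)]
struct Statement {
    bound: Vec<String>,
}

impl Statement {
    fn bind_bool(&mut self, value: bool, index: i32) -> i32 {
        self.bound.push(format!("?{index} = {value}"));
        index + 1
    }
    fn bind_text(&mut self, value: &Option<String>, index: i32) -> i32 {
        self.bound.push(format!("?{index} = {value:?}"));
        index + 1
    }
}

struct DockData {
    visible: bool,
    active_panel: Option<String>,
    zoom: bool, // the column added by this change
}

impl DockData {
    // Each bind consumes one placeholder and returns the next index,
    // so the new field simply continues the chain.
    fn bind(&self, statement: &mut Statement, start_index: i32) -> i32 {
        let next_index = statement.bind_bool(self.visible, start_index);
        let next_index = statement.bind_text(&self.active_panel, next_index);
        statement.bind_bool(self.zoom, next_index)
    }
}

fn main() {
    let mut statement = Statement::default();
    let dock = DockData {
        visible: true,
        active_panel: Some("project_panel".into()),
        zoom: false,
    };
    let next = dock.bind(&mut statement, 3);
    println!("next placeholder: ?{next}; bound: {:?}", statement.bound);
}
```
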
@@ -553,6 +553,10 @@ impl Workspace {
}
}
+ project::Event::Notification(message) => this.show_notification(0, cx, |cx| {
+ cx.add_view(|_| MessageNotification::new(message.clone()))
+ }),
+
_ => {}
}
cx.notify()
@@ -1599,9 +1603,7 @@ impl Workspace {
focus_center = true;
}
} else {
- if active_panel.is_zoomed(cx) {
- cx.focus(active_panel.as_any());
- }
+ cx.focus(active_panel.as_any());
reveal_dock = true;
}
}
@@ -2850,7 +2852,7 @@ impl Workspace {
cx.notify();
}
- fn serialize_workspace(&self, cx: &AppContext) {
+ fn serialize_workspace(&self, cx: &ViewContext<Self>) {
fn serialize_pane_handle(
pane_handle: &ViewHandle<Pane>,
cx: &AppContext,
@@ -2893,7 +2895,7 @@ impl Workspace {
}
}
- fn build_serialized_docks(this: &Workspace, cx: &AppContext) -> DockStructure {
+ fn build_serialized_docks(this: &Workspace, cx: &ViewContext<Workspace>) -> DockStructure {
let left_dock = this.left_dock.read(cx);
let left_visible = left_dock.is_open();
let left_active_panel = left_dock.visible_panel().and_then(|panel| {
@@ -2902,6 +2904,10 @@ impl Workspace {
.to_string(),
)
});
+ let left_dock_zoom = left_dock
+ .visible_panel()
+ .map(|panel| panel.is_zoomed(cx))
+ .unwrap_or(false);
let right_dock = this.right_dock.read(cx);
let right_visible = right_dock.is_open();
@@ -2911,6 +2917,10 @@ impl Workspace {
.to_string(),
)
});
+ let right_dock_zoom = right_dock
+ .visible_panel()
+ .map(|panel| panel.is_zoomed(cx))
+ .unwrap_or(false);
let bottom_dock = this.bottom_dock.read(cx);
let bottom_visible = bottom_dock.is_open();
@@ -2920,19 +2930,26 @@ impl Workspace {
.to_string(),
)
});
+ let bottom_dock_zoom = bottom_dock
+ .visible_panel()
+ .map(|panel| panel.is_zoomed(cx))
+ .unwrap_or(false);
DockStructure {
left: DockData {
visible: left_visible,
active_panel: left_active_panel,
+ zoom: left_dock_zoom,
},
right: DockData {
visible: right_visible,
active_panel: right_active_panel,
+ zoom: right_dock_zoom,
},
bottom: DockData {
visible: bottom_visible,
active_panel: bottom_active_panel,
+ zoom: bottom_dock_zoom,
},
}
}
@@ -3045,14 +3062,31 @@ impl Workspace {
dock.activate_panel(ix, cx);
}
}
+ dock.active_panel()
+ .map(|panel| {
+ panel.set_zoomed(docks.left.zoom, cx)
+ });
+ if docks.left.visible && docks.left.zoom {
+ cx.focus_self()
+ }
});
+ // TODO: I think the bug is that setting zoom or active undoes the bottom zoom or something
workspace.right_dock.update(cx, |dock, cx| {
dock.set_open(docks.right.visible, cx);
if let Some(active_panel) = docks.right.active_panel {
if let Some(ix) = dock.panel_index_for_ui_name(&active_panel, cx) {
dock.activate_panel(ix, cx);
}
}
+ dock.active_panel()
+ .map(|panel| {
+ panel.set_zoomed(docks.right.zoom, cx)
+ });
+
+ if docks.right.visible && docks.right.zoom {
+ cx.focus_self()
+ }
});
workspace.bottom_dock.update(cx, |dock, cx| {
dock.set_open(docks.bottom.visible, cx);
@@ -3061,8 +3095,18 @@ impl Workspace {
dock.activate_panel(ix, cx);
}
}
+
+ dock.active_panel()
+ .map(|panel| {
+ panel.set_zoomed(docks.bottom.zoom, cx)
+ });
+
+ if docks.bottom.visible && docks.bottom.zoom {
+ cx.focus_self()
+ }
});
+
cx.notify();
})?;
@@ -4425,7 +4469,7 @@ mod tests {
workspace.read_with(cx, |workspace, cx| {
assert!(workspace.right_dock().read(cx).is_open());
assert!(!panel.is_zoomed(cx));
- assert!(!panel.has_focus(cx));
+ assert!(panel.has_focus(cx));
});
// Focus and zoom panel
@@ -4500,7 +4544,7 @@ mod tests {
workspace.read_with(cx, |workspace, cx| {
let pane = pane.read(cx);
assert!(!pane.is_zoomed());
- assert!(pane.has_focus());
+ assert!(!pane.has_focus());
assert!(workspace.right_dock().read(cx).is_open());
assert!(workspace.zoomed.is_none());
});
@@ -5,12 +5,11 @@ pub use language::*;
use lsp::LanguageServerBinary;
use smol::fs::{self, File};
use std::{any::Any, path::PathBuf, sync::Arc};
-use util::fs::remove_matching;
-use util::github::latest_github_release;
-use util::http::HttpClient;
-use util::ResultExt;
-
-use util::github::GitHubLspBinaryVersion;
+use util::{
+ fs::remove_matching,
+ github::{latest_github_release, GitHubLspBinaryVersion},
+ ResultExt,
+};
pub struct CLspAdapter;
@@ -22,9 +21,9 @@ impl super::LspAdapter for CLspAdapter {
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
- let release = latest_github_release("clangd/clangd", false, http).await?;
+ let release = latest_github_release("clangd/clangd", false, delegate.http_client()).await?;
let asset_name = format!("clangd-mac-{}.zip", release.name);
let asset = release
.assets
@@ -41,8 +40,8 @@ impl super::LspAdapter for CLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let zip_path = container_dir.join(format!("clangd_{}.zip", version.name));
@@ -50,7 +49,8 @@ impl super::LspAdapter for CLspAdapter {
let binary_path = version_dir.join("bin/clangd");
if fs::metadata(&binary_path).await.is_err() {
- let mut response = http
+ let mut response = delegate
+ .http_client()
.get(&version.url, Default::default(), true)
.await
.context("error downloading release")?;
@@ -82,39 +82,19 @@ impl super::LspAdapter for CLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
- (|| async move {
- let mut last_clangd_dir = None;
- let mut entries = fs::read_dir(&container_dir).await?;
- while let Some(entry) = entries.next().await {
- let entry = entry?;
- if entry.file_type().await?.is_dir() {
- last_clangd_dir = Some(entry.path());
- }
- }
- let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
- let clangd_bin = clangd_dir.join("bin/clangd");
- if clangd_bin.exists() {
- Ok(LanguageServerBinary {
- path: clangd_bin,
- arguments: vec![],
- })
- } else {
- Err(anyhow!(
- "missing clangd binary in directory {:?}",
- clangd_dir
- ))
- }
- })()
- .await
- .log_err()
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
+ get_cached_server_binary(container_dir).await
}
async fn installation_test_binary(
&self,
container_dir: PathBuf,
) -> Option<LanguageServerBinary> {
- self.cached_server_binary(container_dir)
+ get_cached_server_binary(container_dir)
.await
.map(|mut binary| {
binary.arguments = vec!["--help".into()];
@@ -259,6 +239,34 @@ impl super::LspAdapter for CLspAdapter {
}
}
+async fn get_cached_server_binary(container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ (|| async move {
+ let mut last_clangd_dir = None;
+ let mut entries = fs::read_dir(&container_dir).await?;
+ while let Some(entry) = entries.next().await {
+ let entry = entry?;
+ if entry.file_type().await?.is_dir() {
+ last_clangd_dir = Some(entry.path());
+ }
+ }
+ let clangd_dir = last_clangd_dir.ok_or_else(|| anyhow!("no cached binary"))?;
+ let clangd_bin = clangd_dir.join("bin/clangd");
+ if clangd_bin.exists() {
+ Ok(LanguageServerBinary {
+ path: clangd_bin,
+ arguments: vec![],
+ })
+ } else {
+ Err(anyhow!(
+ "missing clangd binary in directory {:?}",
+ clangd_dir
+ ))
+ }
+ })()
+ .await
+ .log_err()
+}
+
#[cfg(test)]
mod tests {
use gpui::TestAppContext;
@@ -1,16 +1,23 @@
use anyhow::{anyhow, Context, Result};
use async_trait::async_trait;
use futures::StreamExt;
+use gpui::{AsyncAppContext, Task};
pub use language::*;
use lsp::{CompletionItemKind, LanguageServerBinary, SymbolKind};
use smol::fs::{self, File};
-use std::{any::Any, path::PathBuf, sync::Arc};
-use util::fs::remove_matching;
-use util::github::latest_github_release;
-use util::http::HttpClient;
-use util::ResultExt;
-
-use util::github::GitHubLspBinaryVersion;
+use std::{
+ any::Any,
+ path::PathBuf,
+ sync::{
+ atomic::{AtomicBool, Ordering::SeqCst},
+ Arc,
+ },
+};
+use util::{
+ fs::remove_matching,
+ github::{latest_github_release, GitHubLspBinaryVersion},
+ ResultExt,
+};
pub struct ElixirLspAdapter;
@@ -20,11 +27,43 @@ impl LspAdapter for ElixirLspAdapter {
LanguageServerName("elixir-ls".into())
}
+ fn will_start_server(
+ &self,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);
+
+ const NOTIFICATION_MESSAGE: &str = "Could not run the elixir language server, `elixir-ls`, because `elixir` was not found.";
+
+ let delegate = delegate.clone();
+ Some(cx.spawn(|mut cx| async move {
+ let elixir_output = smol::process::Command::new("elixir")
+ .args(["--version"])
+ .output()
+ .await;
+ if elixir_output.is_err() {
+ if DID_SHOW_NOTIFICATION
+ .compare_exchange(false, true, SeqCst, SeqCst)
+ .is_ok()
+ {
+ cx.update(|cx| {
+ delegate.show_notification(NOTIFICATION_MESSAGE, cx);
+ })
+ }
+ return Err(anyhow!("cannot run elixir-ls"));
+ }
+
+ Ok(())
+ }))
+ }
+
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
- let release = latest_github_release("elixir-lsp/elixir-ls", false, http).await?;
+ let release =
+ latest_github_release("elixir-lsp/elixir-ls", false, delegate.http_client()).await?;
let asset_name = "elixir-ls.zip";
let asset = release
.assets
@@ -41,8 +80,8 @@ impl LspAdapter for ElixirLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let zip_path = container_dir.join(format!("elixir-ls_{}.zip", version.name));
@@ -50,7 +89,8 @@ impl LspAdapter for ElixirLspAdapter {
let binary_path = version_dir.join("language_server.sh");
if fs::metadata(&binary_path).await.is_err() {
- let mut response = http
+ let mut response = delegate
+ .http_client()
.get(&version.url, Default::default(), true)
.await
.context("error downloading release")?;
@@ -88,7 +128,11 @@ impl LspAdapter for ElixirLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last = None;
let mut entries = fs::read_dir(&container_dir).await?;
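
Both the Elixir and Go adapters guard their new "missing toolchain" notification with a static `AtomicBool`, so the message is shown at most once per process even if several projects trigger the check. The pattern in isolation, as a runnable sketch:

```rust
use std::sync::atomic::{AtomicBool, Ordering::SeqCst};

static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);

// compare_exchange(false, true, ..) succeeds only for the first caller,
// so the warning is surfaced at most once per process.
fn notify_once(message: &str) {
    if DID_SHOW_NOTIFICATION
        .compare_exchange(false, true, SeqCst, SeqCst)
        .is_ok()
    {
        println!("notification: {message}");
    }
}

fn main() {
    notify_once("Could not run the elixir language server, `elixir-ls`, because `elixir` was not found.");
    notify_once("Could not run the elixir language server, `elixir-ls`, because `elixir` was not found."); // suppressed
}
```
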
@@ -36,8 +36,6 @@
(char) @constant
-(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded
-
(escape_sequence) @string.escape
[
@@ -146,3 +144,10 @@
"<<"
">>"
] @punctuation.bracket
+
+(interpolation "#{" @punctuation.special "}" @punctuation.special) @embedded
+
+((sigil
+ (sigil_name) @_sigil_name
+ (quoted_content) @embedded)
+ (#eq? @_sigil_name "H"))
@@ -1,17 +1,24 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
+use gpui::{AsyncAppContext, Task};
pub use language::*;
use lazy_static::lazy_static;
use lsp::LanguageServerBinary;
use regex::Regex;
use smol::{fs, process};
-use std::ffi::{OsStr, OsString};
-use std::{any::Any, ops::Range, path::PathBuf, str, sync::Arc};
-use util::fs::remove_matching;
-use util::github::latest_github_release;
-use util::http::HttpClient;
-use util::ResultExt;
+use std::{
+ any::Any,
+ ffi::{OsStr, OsString},
+ ops::Range,
+ path::PathBuf,
+ str,
+ sync::{
+ atomic::{AtomicBool, Ordering::SeqCst},
+ Arc,
+ },
+};
+use util::{fs::remove_matching, github::latest_github_release, ResultExt};
fn server_binary_arguments() -> Vec<OsString> {
vec!["-mode=stdio".into()]
@@ -32,9 +39,9 @@ impl super::LspAdapter for GoLspAdapter {
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
- let release = latest_github_release("golang/tools", false, http).await?;
+ let release = latest_github_release("golang/tools", false, delegate.http_client()).await?;
let version: Option<String> = release.name.strip_prefix("gopls/v").map(str::to_string);
if version.is_none() {
log::warn!(
@@ -45,11 +52,39 @@ impl super::LspAdapter for GoLspAdapter {
Ok(Box::new(version) as Box<_>)
}
+ fn will_fetch_server(
+ &self,
+ delegate: &Arc<dyn LspAdapterDelegate>,
+ cx: &mut AsyncAppContext,
+ ) -> Option<Task<Result<()>>> {
+ static DID_SHOW_NOTIFICATION: AtomicBool = AtomicBool::new(false);
+
+ const NOTIFICATION_MESSAGE: &str =
+ "Could not install the Go language server `gopls`, because `go` was not found.";
+
+ let delegate = delegate.clone();
+ Some(cx.spawn(|mut cx| async move {
+ let install_output = process::Command::new("go").args(["version"]).output().await;
+ if install_output.is_err() {
+ if DID_SHOW_NOTIFICATION
+ .compare_exchange(false, true, SeqCst, SeqCst)
+ .is_ok()
+ {
+ cx.update(|cx| {
+ delegate.show_notification(NOTIFICATION_MESSAGE, cx);
+ })
+ }
+ return Err(anyhow!("cannot install gopls"));
+ }
+ Ok(())
+ }))
+ }
+
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<Option<String>>().unwrap();
let this = *self;
@@ -69,7 +104,10 @@ impl super::LspAdapter for GoLspAdapter {
});
}
}
- } else if let Some(path) = this.cached_server_binary(container_dir.clone()).await {
+ } else if let Some(path) = this
+ .cached_server_binary(container_dir.clone(), delegate)
+ .await
+ {
return Ok(path);
}
@@ -106,7 +144,11 @@ impl super::LspAdapter for GoLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?;
@@ -1,17 +1,11 @@
; HEEx delimiters
[
- "%>"
"--%>"
"-->"
"/>"
"<!"
"<!--"
"<"
- "<%!--"
- "<%"
- "<%#"
- "<%%="
- "<%="
"</"
"</:"
"<:"
@@ -20,6 +14,15 @@
"}"
] @punctuation.bracket
+[
+ "<%!--"
+ "<%"
+ "<%#"
+ "<%%="
+ "<%="
+ "%>"
+] @keyword
+
; HEEx operators are highlighted as such
"=" @operator
@@ -1,11 +1,13 @@
-((directive (partial_expression_value) @content)
- (#set! language "elixir")
- (#set! include-children)
- (#set! combined))
-
-; Regular expression_values do not need to be combined
-((directive (expression_value) @content)
- (#set! language "elixir"))
+(
+ (directive
+ [
+ (partial_expression_value)
+ (expression_value)
+ (ending_expression_value)
+ ] @content)
+ (#set! language "elixir")
+ (#set! combined)
+)
; expressions live within HTML tags, and do not need to be combined
; <link href={ Routes.static_path(..) } />
@@ -1,15 +1,17 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
use smol::fs;
-use std::ffi::OsString;
-use std::path::Path;
-use std::{any::Any, path::PathBuf, sync::Arc};
-use util::http::HttpClient;
+use std::{
+ any::Any,
+ ffi::OsString,
+ path::{Path, PathBuf},
+ sync::Arc,
+};
use util::ResultExt;
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -37,7 +39,7 @@ impl LspAdapter for HtmlLspAdapter {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
@@ -49,8 +51,8 @@ impl LspAdapter for HtmlLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(Self::SERVER_PATH);
@@ -70,7 +72,11 @@ impl LspAdapter for HtmlLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
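Each `cached_server_binary` body keeps the `(|| async move { ... })()` shape: an immediately-invoked async closure that returns a `Result`, so `?` can be used internally while the method itself yields an `Option`. A rough sketch of the pattern using the same crates the adapters already pull in (smol, futures, anyhow); the helper name is made up:

    use futures::StreamExt;
    use std::path::{Path, PathBuf};

    // Hypothetical helper: return some entry from a directory, swallowing errors.
    async fn any_entry(dir: &Path) -> Option<PathBuf> {
        (|| async move {
            let mut found = None;
            let mut entries = smol::fs::read_dir(dir).await?;
            while let Some(entry) = entries.next().await {
                found = Some(entry?.path());
            }
            anyhow::Ok(found)
        })()
        .await
        .ok()
        .flatten()
    }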
@@ -3,7 +3,7 @@ use async_trait::async_trait;
use collections::HashMap;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::AppContext;
-use language::{LanguageRegistry, LanguageServerName, LspAdapter};
+use language::{LanguageRegistry, LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::json;
@@ -17,7 +17,6 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use util::http::HttpClient;
use util::{paths, ResultExt};
const SERVER_PATH: &'static str =
@@ -46,7 +45,7 @@ impl LspAdapter for JsonLspAdapter {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(
self.node
@@ -58,8 +57,8 @@ impl LspAdapter for JsonLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(SERVER_PATH);
@@ -79,7 +78,11 @@ impl LspAdapter for JsonLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
@@ -3,11 +3,10 @@ use async_trait::async_trait;
use collections::HashMap;
use futures::lock::Mutex;
use gpui::executor::Background;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use plugin_runtime::{Plugin, PluginBinary, PluginBuilder, WasiFn};
use std::{any::Any, path::PathBuf, sync::Arc};
-use util::http::HttpClient;
use util::ResultExt;
#[allow(dead_code)]
@@ -73,7 +72,7 @@ impl LspAdapter for PluginLspAdapter {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
let runtime = self.runtime.clone();
let function = self.fetch_latest_server_version;
@@ -93,8 +92,8 @@ impl LspAdapter for PluginLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = *version.downcast::<String>().unwrap();
let runtime = self.runtime.clone();
@@ -111,7 +110,11 @@ impl LspAdapter for PluginLspAdapter {
.await
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
let runtime = self.runtime.clone();
let function = self.cached_server_binary;
@@ -3,13 +3,15 @@ use async_compression::futures::bufread::GzipDecoder;
use async_tar::Archive;
use async_trait::async_trait;
use futures::{io::BufReader, StreamExt};
-use language::LanguageServerName;
+use language::{LanguageServerName, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use smol::fs;
-use std::{any::Any, env::consts, ffi::OsString, path::PathBuf, sync::Arc};
-use util::{async_iife, github::latest_github_release, http::HttpClient, ResultExt};
-
-use util::github::GitHubLspBinaryVersion;
+use std::{any::Any, env::consts, ffi::OsString, path::PathBuf};
+use util::{
+ async_iife,
+ github::{latest_github_release, GitHubLspBinaryVersion},
+ ResultExt,
+};
#[derive(Copy, Clone)]
pub struct LuaLspAdapter;
@@ -29,9 +31,11 @@ impl super::LspAdapter for LuaLspAdapter {
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
- let release = latest_github_release("LuaLS/lua-language-server", false, http).await?;
+ let release =
+ latest_github_release("LuaLS/lua-language-server", false, delegate.http_client())
+ .await?;
let version = release.name.clone();
let platform = match consts::ARCH {
"x86_64" => "x64",
@@ -54,15 +58,16 @@ impl super::LspAdapter for LuaLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let binary_path = container_dir.join("bin/lua-language-server");
if fs::metadata(&binary_path).await.is_err() {
- let mut response = http
+ let mut response = delegate
+ .http_client()
.get(&version.url, Default::default(), true)
.await
.map_err(|err| anyhow!("error downloading release: {}", err))?;
@@ -82,7 +87,11 @@ impl super::LspAdapter for LuaLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
async_iife!({
let mut last_binary_path = None;
let mut entries = fs::read_dir(&container_dir).await?;
@@ -1,7 +1,7 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::StreamExt;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use smol::fs;
@@ -11,7 +11,6 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use util::http::HttpClient;
use util::ResultExt;
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -38,7 +37,7 @@ impl LspAdapter for PythonLspAdapter {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(self.node.npm_package_latest_version("pyright").await?) as Box<_>)
}
@@ -46,8 +45,8 @@ impl LspAdapter for PythonLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(Self::SERVER_PATH);
@@ -64,7 +63,11 @@ impl LspAdapter for PythonLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
@@ -1,9 +1,8 @@
use anyhow::{anyhow, Result};
use async_trait::async_trait;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::LanguageServerBinary;
use std::{any::Any, path::PathBuf, sync::Arc};
-use util::http::HttpClient;
pub struct RubyLanguageServer;
@@ -15,7 +14,7 @@ impl LspAdapter for RubyLanguageServer {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(()))
}
@@ -23,13 +22,17 @@ impl LspAdapter for RubyLanguageServer {
async fn fetch_server_binary(
&self,
_version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
_container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
Err(anyhow!("solargraph must be installed manually"))
}
- async fn cached_server_binary(&self, _container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ _: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
Some(LanguageServerBinary {
path: "solargraph".into(),
arguments: vec!["stdio".into()],
@@ -8,10 +8,11 @@ use lsp::LanguageServerBinary;
use regex::Regex;
use smol::fs::{self, File};
use std::{any::Any, borrow::Cow, env::consts, path::PathBuf, str, sync::Arc};
-use util::fs::remove_matching;
-use util::github::{latest_github_release, GitHubLspBinaryVersion};
-use util::http::HttpClient;
-use util::ResultExt;
+use util::{
+ fs::remove_matching,
+ github::{latest_github_release, GitHubLspBinaryVersion},
+ ResultExt,
+};
pub struct RustLspAdapter;
@@ -23,9 +24,11 @@ impl LspAdapter for RustLspAdapter {
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
- let release = latest_github_release("rust-analyzer/rust-analyzer", false, http).await?;
+ let release =
+ latest_github_release("rust-analyzer/rust-analyzer", false, delegate.http_client())
+ .await?;
let asset_name = format!("rust-analyzer-{}-apple-darwin.gz", consts::ARCH);
let asset = release
.assets
@@ -41,14 +44,15 @@ impl LspAdapter for RustLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let destination_path = container_dir.join(format!("rust-analyzer-{}", version.name));
if fs::metadata(&destination_path).await.is_err() {
- let mut response = http
+ let mut response = delegate
+ .http_client()
.get(&version.url, Default::default(), true)
.await
.map_err(|err| anyhow!("error downloading release: {}", err))?;
@@ -70,7 +74,11 @@ impl LspAdapter for RustLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last = None;
let mut entries = fs::read_dir(&container_dir).await?;
@@ -4,7 +4,7 @@ use async_tar::Archive;
use async_trait::async_trait;
use futures::{future::BoxFuture, FutureExt};
use gpui::AppContext;
-use language::{LanguageServerName, LspAdapter};
+use language::{LanguageServerName, LspAdapter, LspAdapterDelegate};
use lsp::{CodeActionKind, LanguageServerBinary};
use node_runtime::NodeRuntime;
use serde_json::{json, Value};
@@ -16,7 +16,7 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use util::{fs::remove_matching, github::latest_github_release, http::HttpClient};
+use util::{fs::remove_matching, github::latest_github_release};
use util::{github::GitHubLspBinaryVersion, ResultExt};
fn typescript_server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -58,7 +58,7 @@ impl LspAdapter for TypeScriptLspAdapter {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
Ok(Box::new(TypeScriptVersions {
typescript_version: self.node.npm_package_latest_version("typescript").await?,
@@ -72,8 +72,8 @@ impl LspAdapter for TypeScriptLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<TypeScriptVersions>().unwrap();
let server_path = container_dir.join(Self::NEW_SERVER_PATH);
@@ -99,7 +99,11 @@ impl LspAdapter for TypeScriptLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let old_server_path = container_dir.join(Self::OLD_SERVER_PATH);
let new_server_path = container_dir.join(Self::NEW_SERVER_PATH);
@@ -204,12 +208,13 @@ impl LspAdapter for EsLintLspAdapter {
async fn fetch_latest_server_version(
&self,
- http: Arc<dyn HttpClient>,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Send + Any>> {
        // At the time of writing, the latest vscode-eslint release dates from 2020 and requires
        // custom LSP protocol extensions to be handled before it can fully initialize. Download
        // the latest prerelease instead to sidestep this issue.
- let release = latest_github_release("microsoft/vscode-eslint", true, http).await?;
+ let release =
+ latest_github_release("microsoft/vscode-eslint", true, delegate.http_client()).await?;
Ok(Box::new(GitHubLspBinaryVersion {
name: release.name,
url: release.tarball_url,
@@ -219,8 +224,8 @@ impl LspAdapter for EsLintLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- http: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ delegate: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<GitHubLspBinaryVersion>().unwrap();
let destination_path = container_dir.join(format!("vscode-eslint-{}", version.name));
@@ -229,7 +234,8 @@ impl LspAdapter for EsLintLspAdapter {
if fs::metadata(&server_path).await.is_err() {
remove_matching(&container_dir, |entry| entry != destination_path).await;
- let mut response = http
+ let mut response = delegate
+ .http_client()
.get(&version.url, Default::default(), true)
.await
.map_err(|err| anyhow!("error downloading release: {}", err))?;
@@ -257,7 +263,11 @@ impl LspAdapter for EsLintLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
        // This is unfortunate, but we don't know which version is installed, so we can't build the path directly
let mut dir = fs::read_dir(&container_dir).await?;
@@ -2,7 +2,9 @@ use anyhow::{anyhow, Result};
use async_trait::async_trait;
use futures::{future::BoxFuture, FutureExt, StreamExt};
use gpui::AppContext;
-use language::{language_settings::all_language_settings, LanguageServerName, LspAdapter};
+use language::{
+ language_settings::all_language_settings, LanguageServerName, LspAdapter, LspAdapterDelegate,
+};
use lsp::LanguageServerBinary;
use node_runtime::NodeRuntime;
use serde_json::Value;
@@ -14,7 +16,6 @@ use std::{
path::{Path, PathBuf},
sync::Arc,
};
-use util::http::HttpClient;
use util::ResultExt;
fn server_binary_arguments(server_path: &Path) -> Vec<OsString> {
@@ -41,7 +42,7 @@ impl LspAdapter for YamlLspAdapter {
async fn fetch_latest_server_version(
&self,
- _: Arc<dyn HttpClient>,
+ _: &dyn LspAdapterDelegate,
) -> Result<Box<dyn 'static + Any + Send>> {
Ok(Box::new(
self.node
@@ -53,8 +54,8 @@ impl LspAdapter for YamlLspAdapter {
async fn fetch_server_binary(
&self,
version: Box<dyn 'static + Send + Any>,
- _: Arc<dyn HttpClient>,
container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
) -> Result<LanguageServerBinary> {
let version = version.downcast::<String>().unwrap();
let server_path = container_dir.join(Self::SERVER_PATH);
@@ -71,7 +72,11 @@ impl LspAdapter for YamlLspAdapter {
})
}
- async fn cached_server_binary(&self, container_dir: PathBuf) -> Option<LanguageServerBinary> {
+ async fn cached_server_binary(
+ &self,
+ container_dir: PathBuf,
+ _: &dyn LspAdapterDelegate,
+ ) -> Option<LanguageServerBinary> {
(|| async move {
let mut last_version_dir = None;
let mut entries = fs::read_dir(&container_dir).await?;
@@ -31,7 +31,6 @@ use std::{
ffi::OsStr,
fs::OpenOptions,
io::Write as _,
- ops::Not,
os::unix::prelude::OsStrExt,
panic,
path::{Path, PathBuf},
@@ -373,7 +372,6 @@ struct Panic {
os_version: Option<String>,
architecture: String,
panicked_on: u128,
- identifying_backtrace: Option<Vec<String>>,
}
#[derive(Serialize)]
@@ -401,61 +399,18 @@ fn init_panic_hook(app: &App) {
.unwrap_or_else(|| "Box<Any>".to_string());
let backtrace = Backtrace::new();
- let backtrace = backtrace
+ let mut backtrace = backtrace
.frames()
.iter()
- .filter_map(|frame| {
- let symbol = frame.symbols().first()?;
- let path = symbol.filename()?;
- Some((path, symbol.lineno(), format!("{:#}", symbol.name()?)))
- })
+ .filter_map(|frame| Some(format!("{:#}", frame.symbols().first()?.name()?)))
.collect::<Vec<_>>();
- let this_file_path = Path::new(file!());
-
- // Find the first frame in the backtrace for this panic hook itself. Exclude
- // that frame and all frames before it.
- let mut start_frame_ix = 0;
- let mut codebase_root_path = None;
- for (ix, (path, _, _)) in backtrace.iter().enumerate() {
- if path.ends_with(this_file_path) {
- start_frame_ix = ix + 1;
- codebase_root_path = path.ancestors().nth(this_file_path.components().count());
- break;
- }
- }
-
- // Exclude any subsequent frames inside of rust's panic handling system.
- while let Some((path, _, _)) = backtrace.get(start_frame_ix) {
- if path.starts_with("/rustc") {
- start_frame_ix += 1;
- } else {
- break;
- }
- }
-
- // Build two backtraces:
- // * one for display, which includes symbol names for all frames, and files
- // and line numbers for symbols in this codebase
- // * one for identification and de-duplication, which only includes symbol
- // names for symbols in this codebase.
- let mut display_backtrace = Vec::new();
- let mut identifying_backtrace = Vec::new();
- for (path, line, symbol) in &backtrace[start_frame_ix..] {
- display_backtrace.push(symbol.clone());
-
- if let Some(codebase_root_path) = &codebase_root_path {
- if let Ok(suffix) = path.strip_prefix(&codebase_root_path) {
- identifying_backtrace.push(symbol.clone());
-
- let display_path = suffix.to_string_lossy();
- if let Some(line) = line {
- display_backtrace.push(format!(" {display_path}:{line}"));
- } else {
- display_backtrace.push(format!(" {display_path}"));
- }
- }
- }
+ // Strip out leading stack frames for rust panic-handling.
+ if let Some(ix) = backtrace
+ .iter()
+ .position(|name| name == "rust_begin_unwind")
+ {
+ backtrace.drain(0..=ix);
}
let panic_data = Panic {
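The panic hook now records plain symbol names and simply drops every frame up to and including `rust_begin_unwind`, rather than reconstructing file paths and a separate identifying backtrace. A small self-contained sketch of that trimming step (frame names below are illustrative):

    // Drop everything up to and including the first rust_begin_unwind frame,
    // so the reported backtrace starts at the panic site.
    fn trim_backtrace(mut frames: Vec<String>) -> Vec<String> {
        if let Some(ix) = frames.iter().position(|name| name == "rust_begin_unwind") {
            frames.drain(0..=ix);
        }
        frames
    }

    fn main() {
        let frames = vec![
            "std::panicking::begin_panic_handler".to_string(),
            "rust_begin_unwind".to_string(),
            "zed::handler_that_panicked".to_string(),
        ];
        assert_eq!(trim_backtrace(frames), vec!["zed::handler_that_panicked"]);
    }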
@@ -477,29 +432,27 @@ fn init_panic_hook(app: &App) {
.duration_since(UNIX_EPOCH)
.unwrap()
.as_millis(),
- backtrace: display_backtrace,
- identifying_backtrace: identifying_backtrace
- .is_empty()
- .not()
- .then_some(identifying_backtrace),
+ backtrace,
};
- if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
- if is_pty {
+ if is_pty {
+ if let Some(panic_data_json) = serde_json::to_string_pretty(&panic_data).log_err() {
eprintln!("{}", panic_data_json);
return;
}
-
- let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
- let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
- let panic_file = std::fs::OpenOptions::new()
- .append(true)
- .create(true)
- .open(&panic_file_path)
- .log_err();
- if let Some(mut panic_file) = panic_file {
- write!(&mut panic_file, "{}", panic_data_json).log_err();
- panic_file.flush().log_err();
+ } else {
+ if let Some(panic_data_json) = serde_json::to_string(&panic_data).log_err() {
+ let timestamp = chrono::Utc::now().format("%Y_%m_%d %H_%M_%S").to_string();
+ let panic_file_path = paths::LOGS_DIR.join(format!("zed-{}.panic", timestamp));
+ let panic_file = std::fs::OpenOptions::new()
+ .append(true)
+ .create(true)
+ .open(&panic_file_path)
+ .log_err();
+ if let Some(mut panic_file) = panic_file {
+ writeln!(&mut panic_file, "{}", panic_data_json).log_err();
+ panic_file.flush().log_err();
+ }
}
}
}));
@@ -531,23 +484,45 @@ fn upload_previous_panics(http: Arc<dyn HttpClient>, cx: &mut AppContext) {
}
if telemetry_settings.diagnostics {
- let panic_data_text = smol::fs::read_to_string(&child_path)
+ let panic_file_content = smol::fs::read_to_string(&child_path)
.await
.context("error reading panic file")?;
- let body = serde_json::to_string(&PanicRequest {
- panic: serde_json::from_str(&panic_data_text)?,
- token: ZED_SECRET_CLIENT_TOKEN.into(),
- })
- .unwrap();
-
- let request = Request::post(&panic_report_url)
- .redirect_policy(isahc::config::RedirectPolicy::Follow)
- .header("Content-Type", "application/json")
- .body(body.into())?;
- let response = http.send(request).await.context("error sending panic")?;
- if !response.status().is_success() {
- log::error!("Error uploading panic to server: {}", response.status());
+ let panic = serde_json::from_str(&panic_file_content)
+ .ok()
+ .or_else(|| {
+ panic_file_content
+ .lines()
+ .next()
+ .and_then(|line| serde_json::from_str(line).ok())
+ })
+ .unwrap_or_else(|| {
+ log::error!(
+ "failed to deserialize panic file {:?}",
+ panic_file_content
+ );
+ None
+ });
+
+ if let Some(panic) = panic {
+ let body = serde_json::to_string(&PanicRequest {
+ panic,
+ token: ZED_SECRET_CLIENT_TOKEN.into(),
+ })
+ .unwrap();
+
+ let request = Request::post(&panic_report_url)
+ .redirect_policy(isahc::config::RedirectPolicy::Follow)
+ .header("Content-Type", "application/json")
+ .body(body.into())?;
+ let response =
+ http.send(request).await.context("error sending panic")?;
+ if !response.status().is_success() {
+ log::error!(
+ "Error uploading panic to server: {}",
+ response.status()
+ );
+ }
}
}
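Because panics are now appended as one compact JSON object per line (the `writeln!` change above), the uploader tries to parse the whole file first, which still handles older pretty-printed single-object files, and otherwise falls back to just the first line. A sketch of that fallback with a stand-in struct (not the real `Panic` type), assuming serde/serde_json as already used in the crate:

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct PanicStub {
        payload: String,
    }

    fn parse_panic_file(content: &str) -> Option<PanicStub> {
        // Whole file first; if that fails (e.g. several objects on separate
        // lines), fall back to parsing only the first line.
        serde_json::from_str(content).ok().or_else(|| {
            content
                .lines()
                .next()
                .and_then(|line| serde_json::from_str(line).ok())
        })
    }

    fn main() {
        let two = "{\"payload\":\"boom\"}\n{\"payload\":\"again\"}";
        assert_eq!(parse_panic_file(two).unwrap().payload, "boom");
    }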
@@ -384,6 +384,8 @@ pub fn initialize_workspace(
workspace.toggle_dock(project_panel_position, cx);
}
+ cx.focus_self();
+
workspace.add_panel(terminal_panel, cx);
if let Some(assistant_panel) = assistant_panel {
workspace.add_panel(assistant_panel, cx);