Detailed changes
@@ -0,0 +1,44 @@
+{{#if language_name}}
+Here's a {{language_name}} file that the user is going to ask you to make an edit to.
+{{else}}
+Here's a file of text that the user is going to ask you to make an edit to.
+{{/if}}
+
+The section you'll need to rewrite is marked with <rewrite_this></rewrite_this> tags.
+
+<document>
+{{{document_content}}}
+</document>
+
+{{#if is_truncated}}
+The context around the relevant section has been truncated (possibly in the middle of a line) for brevity.
+{{/if}}
+
+{{#if rewrite_section}}
+And here's the section to rewrite, repeated again for reference:
+
+<rewrite_this>
+{{{rewrite_section}}}
+</rewrite_this>
+
+{{#if diagnostic_errors}}
+Below are the diagnostic errors visible to the user. If the user requests problems to be fixed, use this information, but do not try to fix these errors if the user hasn't asked you to.
+
+{{#each diagnostic_errors}}
+<diagnostic_error>
+ <line_number>{{line_number}}</line_number>
+ <error_message>{{error_message}}</error_message>
+ <code_content>{{code_content}}</code_content>
+</diagnostic_error>
+{{/each}}
+{{/if}}
+
+{{/if}}
+
+Only make changes that are necessary to fulfill the prompt, leave everything else as-is. All surrounding {{content_type}} will be preserved.
+
+In the rewritten {{content_type}}, start at the same indentation level as in the original file.
+
+You must use one of the provided tools to make the rewrite or to provide an explanation as to why the user's request cannot be fulfilled.
+It is an error if you simply send back unstructured text. If you need to make a statement or ask a question, you must use one of the tools to do so.
+It is an error if you try to make a change that cannot be made simply by editing the rewrite_section.
@@ -4,6 +4,7 @@ mod create_directory_tool;
mod delete_path_tool;
mod diagnostics_tool;
mod edit_file_tool;
+
mod fetch_tool;
mod find_path_tool;
mod grep_tool;
@@ -12,6 +13,7 @@ mod move_path_tool;
mod now_tool;
mod open_tool;
mod read_file_tool;
+
mod terminal_tool;
mod thinking_tool;
mod web_search_tool;
@@ -25,6 +27,7 @@ pub use create_directory_tool::*;
pub use delete_path_tool::*;
pub use diagnostics_tool::*;
pub use edit_file_tool::*;
+
pub use fetch_tool::*;
pub use find_path_tool::*;
pub use grep_tool::*;
@@ -33,6 +36,7 @@ pub use move_path_tool::*;
pub use now_tool::*;
pub use open_tool::*;
pub use read_file_tool::*;
+
pub use terminal_tool::*;
pub use thinking_tool::*;
pub use web_search_tool::*;
@@ -98,7 +98,7 @@ impl Render for AgentModelSelector {
.child(
Icon::new(IconName::ChevronDown)
.color(color)
- .size(IconSize::XSmall),
+ .size(IconSize::Small),
),
move |_window, cx| {
Tooltip::for_action_in("Change Model", &ToggleModelSelector, &focus_handle, cx)
@@ -5,22 +5,26 @@ use client::telemetry::Telemetry;
use cloud_llm_client::CompletionIntent;
use collections::HashSet;
use editor::{Anchor, AnchorRangeExt, MultiBuffer, MultiBufferSnapshot, ToOffset as _, ToPoint};
+use feature_flags::{FeatureFlagAppExt as _, InlineAssistantV2FeatureFlag};
use futures::{
SinkExt, Stream, StreamExt, TryStreamExt as _,
channel::mpsc,
future::{LocalBoxFuture, Shared},
join,
};
-use gpui::{App, AppContext as _, Context, Entity, EventEmitter, Subscription, Task};
+use gpui::{App, AppContext as _, AsyncApp, Context, Entity, EventEmitter, Subscription, Task};
use language::{Buffer, IndentKind, Point, TransactionId, line_diff};
use language_model::{
- LanguageModel, LanguageModelRegistry, LanguageModelRequest, LanguageModelRequestMessage,
- LanguageModelTextStream, Role, report_assistant_event,
+ LanguageModel, LanguageModelCompletionError, LanguageModelRegistry, LanguageModelRequest,
+ LanguageModelRequestMessage, LanguageModelRequestTool, LanguageModelTextStream, Role,
+ report_assistant_event,
};
use multi_buffer::MultiBufferRow;
use parking_lot::Mutex;
use prompt_store::PromptBuilder;
use rope::Rope;
+use schemars::JsonSchema;
+use serde::{Deserialize, Serialize};
use smol::future::FutureExt;
use std::{
cmp,
@@ -34,6 +38,29 @@ use std::{
};
use streaming_diff::{CharOperation, LineDiff, LineOperation, StreamingDiff};
use telemetry_events::{AssistantEventData, AssistantKind, AssistantPhase};
+use ui::SharedString;
+
+/// Use this tool to provide a message to the user when you're unable to complete a task.
+#[derive(Debug, Serialize, Deserialize, JsonSchema)]
+pub struct FailureMessageInput {
+ /// A brief message to the user explaining why you're unable to fulfill the request or to ask a question about the request.
+ ///
+ /// The message may use markdown formatting if you wish.
+ pub message: String,
+}
+
+/// Replaces text in <rewrite_this></rewrite_this> tags with your replacement_text.
+#[derive(Debug, Serialize, Deserialize, JsonSchema)]
+pub struct RewriteSectionInput {
+ /// A brief description of the edit you have made.
+ ///
+ /// The description may use markdown formatting if you wish.
+ /// This is optional - if the edit is simple or obvious, you should leave it empty.
+ pub description: String,
+
+ /// The text to replace the section with.
+ pub replacement_text: String,
+}
pub struct BufferCodegen {
alternatives: Vec<Entity<CodegenAlternative>>,
@@ -238,6 +265,7 @@ pub struct CodegenAlternative {
elapsed_time: Option<f64>,
completion: Option<String>,
pub message_id: Option<String>,
+ pub model_explanation: Option<SharedString>,
}
impl EventEmitter<CodegenEvent> for CodegenAlternative {}
@@ -288,14 +316,15 @@ impl CodegenAlternative {
generation: Task::ready(()),
diff: Diff::default(),
telemetry,
- _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
builder,
- active,
+ active: active,
edits: Vec::new(),
line_operations: Vec::new(),
range,
elapsed_time: None,
completion: None,
+ model_explanation: None,
+ _subscription: cx.subscribe(&buffer, Self::handle_buffer_event),
}
}
@@ -358,18 +387,124 @@ impl CodegenAlternative {
let api_key = model.api_key(cx);
let telemetry_id = model.telemetry_id();
let provider_id = model.provider_id();
- let stream: LocalBoxFuture<Result<LanguageModelTextStream>> =
- if user_prompt.trim().to_lowercase() == "delete" {
- async { Ok(LanguageModelTextStream::default()) }.boxed_local()
+
+ if cx.has_flag::<InlineAssistantV2FeatureFlag>() {
+ let request = self.build_request(&model, user_prompt, context_task, cx)?;
+ let tool_use =
+ cx.spawn(async move |_, cx| model.stream_completion_tool(request.await, cx).await);
+ self.handle_tool_use(telemetry_id, provider_id.to_string(), api_key, tool_use, cx);
+ } else {
+ let stream: LocalBoxFuture<Result<LanguageModelTextStream>> =
+ if user_prompt.trim().to_lowercase() == "delete" {
+ async { Ok(LanguageModelTextStream::default()) }.boxed_local()
+ } else {
+ let request = self.build_request(&model, user_prompt, context_task, cx)?;
+ cx.spawn(async move |_, cx| {
+ Ok(model.stream_completion_text(request.await, cx).await?)
+ })
+ .boxed_local()
+ };
+ self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
+ }
+
+ Ok(())
+ }
+
+ fn build_request_v2(
+ &self,
+ model: &Arc<dyn LanguageModel>,
+ user_prompt: String,
+ context_task: Shared<Task<Option<LoadedContext>>>,
+ cx: &mut App,
+ ) -> Result<Task<LanguageModelRequest>> {
+ let buffer = self.buffer.read(cx).snapshot(cx);
+ let language = buffer.language_at(self.range.start);
+ let language_name = if let Some(language) = language.as_ref() {
+ if Arc::ptr_eq(language, &language::PLAIN_TEXT) {
+ None
} else {
- let request = self.build_request(&model, user_prompt, context_task, cx)?;
- cx.spawn(async move |_, cx| {
- Ok(model.stream_completion_text(request.await, cx).await?)
- })
- .boxed_local()
+ Some(language.name())
+ }
+ } else {
+ None
+ };
+
+ let language_name = language_name.as_ref();
+ let start = buffer.point_to_buffer_offset(self.range.start);
+ let end = buffer.point_to_buffer_offset(self.range.end);
+ let (buffer, range) = if let Some((start, end)) = start.zip(end) {
+ let (start_buffer, start_buffer_offset) = start;
+ let (end_buffer, end_buffer_offset) = end;
+ if start_buffer.remote_id() == end_buffer.remote_id() {
+ (start_buffer.clone(), start_buffer_offset..end_buffer_offset)
+ } else {
+ anyhow::bail!("invalid transformation range");
+ }
+ } else {
+ anyhow::bail!("invalid transformation range");
+ };
+
+ let system_prompt = self
+ .builder
+ .generate_inline_transformation_prompt_v2(
+ language_name,
+ buffer,
+ range.start.0..range.end.0,
+ )
+ .context("generating content prompt")?;
+
+ let temperature = AgentSettings::temperature_for_model(model, cx);
+
+ let tool_input_format = model.tool_input_format();
+
+ Ok(cx.spawn(async move |_cx| {
+ let mut messages = vec![LanguageModelRequestMessage {
+ role: Role::System,
+ content: vec![system_prompt.into()],
+ cache: false,
+ reasoning_details: None,
+ }];
+
+ let mut user_message = LanguageModelRequestMessage {
+ role: Role::User,
+ content: Vec::new(),
+ cache: false,
+ reasoning_details: None,
};
- self.handle_stream(telemetry_id, provider_id.to_string(), api_key, stream, cx);
- Ok(())
+
+ if let Some(context) = context_task.await {
+ context.add_to_request_message(&mut user_message);
+ }
+
+ user_message.content.push(user_prompt.into());
+ messages.push(user_message);
+
+ let tools = vec![
+ LanguageModelRequestTool {
+ name: "rewrite_section".to_string(),
+ description: "Replaces text in <rewrite_this></rewrite_this> tags with your replacement_text.".to_string(),
+ input_schema: language_model::tool_schema::root_schema_for::<RewriteSectionInput>(tool_input_format).to_value(),
+ },
+ LanguageModelRequestTool {
+ name: "failure_message".to_string(),
+ description: "Use this tool to provide a message to the user when you're unable to complete a task.".to_string(),
+ input_schema: language_model::tool_schema::root_schema_for::<FailureMessageInput>(tool_input_format).to_value(),
+ },
+ ];
+
+ LanguageModelRequest {
+ thread_id: None,
+ prompt_id: None,
+ intent: Some(CompletionIntent::InlineAssist),
+ mode: None,
+ tools,
+ tool_choice: None,
+ stop: Vec::new(),
+ temperature,
+ messages,
+ thinking_allowed: false,
+ }
+ }))
}
fn build_request(
@@ -379,6 +514,10 @@ impl CodegenAlternative {
context_task: Shared<Task<Option<LoadedContext>>>,
cx: &mut App,
) -> Result<Task<LanguageModelRequest>> {
+ if cx.has_flag::<InlineAssistantV2FeatureFlag>() {
+ return self.build_request_v2(model, user_prompt, context_task, cx);
+ }
+
let buffer = self.buffer.read(cx).snapshot(cx);
let language = buffer.language_at(self.range.start);
let language_name = if let Some(language) = language.as_ref() {
@@ -510,6 +649,7 @@ impl CodegenAlternative {
self.generation = cx.spawn(async move |codegen, cx| {
let stream = stream.await;
+
let token_usage = stream
.as_ref()
.ok()
@@ -899,6 +1039,101 @@ impl CodegenAlternative {
.ok();
})
}
+
+ fn handle_tool_use(
+ &mut self,
+ _telemetry_id: String,
+ _provider_id: String,
+ _api_key: Option<String>,
+ tool_use: impl 'static
+ + Future<
+ Output = Result<language_model::LanguageModelToolUse, LanguageModelCompletionError>,
+ >,
+ cx: &mut Context<Self>,
+ ) {
+ self.diff = Diff::default();
+ self.status = CodegenStatus::Pending;
+
+ self.generation = cx.spawn(async move |codegen, cx| {
+ let finish_with_status = |status: CodegenStatus, cx: &mut AsyncApp| {
+ let _ = codegen.update(cx, |this, cx| {
+ this.status = status;
+ cx.emit(CodegenEvent::Finished);
+ cx.notify();
+ });
+ };
+
+ let tool_use = tool_use.await;
+
+ match tool_use {
+ Ok(tool_use) if tool_use.name.as_ref() == "rewrite_section" => {
+ // Parse the input JSON into RewriteSectionInput
+ match serde_json::from_value::<RewriteSectionInput>(tool_use.input) {
+ Ok(input) => {
+ // Store the description if non-empty
+ let description = if !input.description.trim().is_empty() {
+ Some(input.description.clone())
+ } else {
+ None
+ };
+
+ // Apply the replacement text to the buffer and compute diff
+ let batch_diff_task = codegen
+ .update(cx, |this, cx| {
+ this.model_explanation = description.map(Into::into);
+ let range = this.range.clone();
+ this.apply_edits(
+ std::iter::once((range, input.replacement_text)),
+ cx,
+ );
+ this.reapply_batch_diff(cx)
+ })
+ .ok();
+
+ // Wait for the diff computation to complete
+ if let Some(diff_task) = batch_diff_task {
+ diff_task.await;
+ }
+
+ finish_with_status(CodegenStatus::Done, cx);
+ return;
+ }
+ Err(e) => {
+ finish_with_status(CodegenStatus::Error(e.into()), cx);
+ return;
+ }
+ }
+ }
+ Ok(tool_use) if tool_use.name.as_ref() == "failure_message" => {
+ // Handle failure message tool use
+ match serde_json::from_value::<FailureMessageInput>(tool_use.input) {
+ Ok(input) => {
+ let _ = codegen.update(cx, |this, _cx| {
+ // Store the failure message as the tool description
+ this.model_explanation = Some(input.message.into());
+ });
+ finish_with_status(CodegenStatus::Done, cx);
+ return;
+ }
+ Err(e) => {
+ finish_with_status(CodegenStatus::Error(e.into()), cx);
+ return;
+ }
+ }
+ }
+ Ok(_tool_use) => {
+ // Unexpected tool.
+ finish_with_status(CodegenStatus::Done, cx);
+ return;
+ }
+ Err(e) => {
+ finish_with_status(CodegenStatus::Error(e.into()), cx);
+ return;
+ }
+ }
+ });
+ cx.notify();
+ }
}
#[derive(Copy, Clone, Debug)]
@@ -387,17 +387,9 @@ impl InlineAssistant {
let mut selections = Vec::<Selection<Point>>::new();
let mut newest_selection = None;
for mut selection in initial_selections {
- if selection.end > selection.start {
- selection.start.column = 0;
- // If the selection ends at the start of the line, we don't want to include it.
- if selection.end.column == 0 {
- selection.end.row -= 1;
- }
- selection.end.column = snapshot
- .buffer_snapshot()
- .line_len(MultiBufferRow(selection.end.row));
- } else if let Some(fold) =
- snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
+ if selection.end == selection.start
+ && let Some(fold) =
+ snapshot.crease_for_buffer_row(MultiBufferRow(selection.end.row))
{
selection.start = fold.range().start;
selection.end = fold.range().end;
@@ -424,6 +416,15 @@ impl InlineAssistant {
}
}
}
+ } else {
+ selection.start.column = 0;
+ // If the selection ends at the start of the line, we don't want to include it.
+ if selection.end.column == 0 && selection.start.row != selection.end.row {
+ selection.end.row -= 1;
+ }
+ selection.end.column = snapshot
+ .buffer_snapshot()
+ .line_len(MultiBufferRow(selection.end.row));
}
if let Some(prev_selection) = selections.last_mut()
@@ -544,14 +545,15 @@ impl InlineAssistant {
}
}
- let [prompt_block_id, end_block_id] =
- self.insert_assist_blocks(editor, &range, &prompt_editor, cx);
+ let [prompt_block_id, tool_description_block_id, end_block_id] =
+ self.insert_assist_blocks(&editor, &range, &prompt_editor, cx);
assists.push((
assist_id,
range.clone(),
prompt_editor,
prompt_block_id,
+ tool_description_block_id,
end_block_id,
));
}
@@ -570,7 +572,15 @@ impl InlineAssistant {
};
let mut assist_group = InlineAssistGroup::new();
- for (assist_id, range, prompt_editor, prompt_block_id, end_block_id) in assists {
+ for (
+ assist_id,
+ range,
+ prompt_editor,
+ prompt_block_id,
+ tool_description_block_id,
+ end_block_id,
+ ) in assists
+ {
let codegen = prompt_editor.read(cx).codegen().clone();
self.assists.insert(
@@ -581,6 +591,7 @@ impl InlineAssistant {
editor,
&prompt_editor,
prompt_block_id,
+ tool_description_block_id,
end_block_id,
range,
codegen,
@@ -689,7 +700,7 @@ impl InlineAssistant {
range: &Range<Anchor>,
prompt_editor: &Entity<PromptEditor<BufferCodegen>>,
cx: &mut App,
- ) -> [CustomBlockId; 2] {
+ ) -> [CustomBlockId; 3] {
let prompt_editor_height = prompt_editor.update(cx, |prompt_editor, cx| {
prompt_editor
.editor
@@ -703,6 +714,14 @@ impl InlineAssistant {
render: build_assist_editor_renderer(prompt_editor),
priority: 0,
},
+ // Placeholder for tool description - will be updated dynamically
+ BlockProperties {
+ style: BlockStyle::Flex,
+ placement: BlockPlacement::Below(range.end),
+ height: Some(0),
+ render: Arc::new(|_cx| div().into_any_element()),
+ priority: 0,
+ },
BlockProperties {
style: BlockStyle::Sticky,
placement: BlockPlacement::Below(range.end),
@@ -721,7 +740,7 @@ impl InlineAssistant {
editor.update(cx, |editor, cx| {
let block_ids = editor.insert_blocks(assist_blocks, None, cx);
- [block_ids[0], block_ids[1]]
+ [block_ids[0], block_ids[1], block_ids[2]]
})
}
@@ -1113,6 +1132,9 @@ impl InlineAssistant {
let mut to_remove = decorations.removed_line_block_ids;
to_remove.insert(decorations.prompt_block_id);
to_remove.insert(decorations.end_block_id);
+ if let Some(tool_description_block_id) = decorations.model_explanation {
+ to_remove.insert(tool_description_block_id);
+ }
editor.remove_blocks(to_remove, None, cx);
});
@@ -1433,8 +1455,60 @@ impl InlineAssistant {
let old_snapshot = codegen.snapshot(cx);
let old_buffer = codegen.old_buffer(cx);
let deleted_row_ranges = codegen.diff(cx).deleted_row_ranges.clone();
+ // let model_explanation = codegen.model_explanation(cx);
editor.update(cx, |editor, cx| {
+ // Update tool description block
+ // if let Some(description) = model_explanation {
+ // if let Some(block_id) = decorations.model_explanation {
+ // editor.remove_blocks(HashSet::from_iter([block_id]), None, cx);
+ // let new_block_id = editor.insert_blocks(
+ // [BlockProperties {
+ // style: BlockStyle::Flex,
+ // placement: BlockPlacement::Below(assist.range.end),
+ // height: Some(1),
+ // render: Arc::new({
+ // let description = description.clone();
+ // move |cx| {
+ // div()
+ // .w_full()
+ // .py_1()
+ // .px_2()
+ // .bg(cx.theme().colors().editor_background)
+ // .border_y_1()
+ // .border_color(cx.theme().status().info_border)
+ // .child(
+ // Label::new(description.clone())
+ // .color(Color::Muted)
+ // .size(LabelSize::Small),
+ // )
+ // .into_any_element()
+ // }
+ // }),
+ // priority: 0,
+ // }],
+ // None,
+ // cx,
+ // );
+ // decorations.model_explanation = new_block_id.into_iter().next();
+ // }
+ // } else if let Some(block_id) = decorations.model_explanation {
+ // // Hide the block if there's no description
+ // editor.remove_blocks(HashSet::from_iter([block_id]), None, cx);
+ // let new_block_id = editor.insert_blocks(
+ // [BlockProperties {
+ // style: BlockStyle::Flex,
+ // placement: BlockPlacement::Below(assist.range.end),
+ // height: Some(0),
+ // render: Arc::new(|_cx| div().into_any_element()),
+ // priority: 0,
+ // }],
+ // None,
+ // cx,
+ // );
+ // decorations.model_explanation = new_block_id.into_iter().next();
+ // }
+
let old_blocks = mem::take(&mut decorations.removed_line_block_ids);
editor.remove_blocks(old_blocks, None, cx);
@@ -1686,6 +1760,7 @@ impl InlineAssist {
editor: &Entity<Editor>,
prompt_editor: &Entity<PromptEditor<BufferCodegen>>,
prompt_block_id: CustomBlockId,
+ tool_description_block_id: CustomBlockId,
end_block_id: CustomBlockId,
range: Range<Anchor>,
codegen: Entity<BufferCodegen>,
@@ -1700,7 +1775,8 @@ impl InlineAssist {
decorations: Some(InlineAssistDecorations {
prompt_block_id,
prompt_editor: prompt_editor.clone(),
- removed_line_block_ids: HashSet::default(),
+ removed_line_block_ids: Default::default(),
+ model_explanation: Some(tool_description_block_id),
end_block_id,
}),
range,
@@ -1804,6 +1880,7 @@ struct InlineAssistDecorations {
prompt_block_id: CustomBlockId,
prompt_editor: Entity<PromptEditor<BufferCodegen>>,
removed_line_block_ids: HashSet<CustomBlockId>,
+ model_explanation: Option<CustomBlockId>,
end_block_id: CustomBlockId,
}
@@ -11,9 +11,10 @@ use editor::{
use fs::Fs;
use gpui::{
AnyElement, App, Context, CursorStyle, Entity, EventEmitter, FocusHandle, Focusable,
- Subscription, TextStyle, WeakEntity, Window,
+ Subscription, TextStyle, TextStyleRefinement, WeakEntity, Window,
};
use language_model::{LanguageModel, LanguageModelRegistry};
+use markdown::{HeadingLevelStyles, Markdown, MarkdownElement, MarkdownStyle};
use parking_lot::Mutex;
use project::Project;
use prompt_store::PromptStore;
@@ -65,7 +66,7 @@ impl<T: 'static> Render for PromptEditor<T> {
const RIGHT_PADDING: Pixels = px(9.);
- let (left_gutter_width, right_padding) = match &self.mode {
+ let (left_gutter_width, right_padding, explanation) = match &self.mode {
PromptEditorMode::Buffer {
id: _,
codegen,
@@ -83,11 +84,17 @@ impl<T: 'static> Render for PromptEditor<T> {
let left_gutter_width = gutter.full_width() + (gutter.margin / 2.0);
let right_padding = editor_margins.right + RIGHT_PADDING;
- (left_gutter_width, right_padding)
+ let explanation = codegen
+ .active_alternative()
+ .read(cx)
+ .model_explanation
+ .clone();
+
+ (left_gutter_width, right_padding, explanation)
}
PromptEditorMode::Terminal { .. } => {
// Give the equivalent of the same left-padding that we're using on the right
- (Pixels::from(40.0), Pixels::from(24.))
+ (Pixels::from(40.0), Pixels::from(24.), None)
}
};
@@ -111,18 +118,30 @@ impl<T: 'static> Render for PromptEditor<T> {
this.trigger_completion_menu(window, cx);
}));
+ let markdown = window.use_state(cx, |_, cx| Markdown::new("".into(), None, None, cx));
+
+ if let Some(explanation) = &explanation {
+ markdown.update(cx, |markdown, cx| {
+ markdown.reset(explanation.clone(), cx);
+ });
+ }
+
+ let explanation_label = self
+ .render_markdown(markdown, markdown_style(window, cx))
+ .into_any_element();
+
v_flex()
.key_context("PromptEditor")
.capture_action(cx.listener(Self::paste))
- .bg(cx.theme().colors().editor_background)
.block_mouse_except_scroll()
- .gap_0p5()
- .border_y_1()
- .border_color(cx.theme().status().info_border)
.size_full()
.pt_0p5()
.pb(bottom_padding)
.pr(right_padding)
+ .bg(cx.theme().colors().editor_background)
+ .gap_0p5()
+ .border_y_1()
+ .border_color(cx.theme().colors().border)
.child(
h_flex()
.items_start()
@@ -139,12 +158,12 @@ impl<T: 'static> Render for PromptEditor<T> {
.capture_action(cx.listener(Self::cycle_next))
.child(
WithRemSize::new(ui_font_size)
+ .h_full()
+ .w(left_gutter_width)
.flex()
.flex_row()
.flex_shrink_0()
.items_center()
- .h_full()
- .w(left_gutter_width)
.justify_center()
.gap_2()
.child(self.render_close_button(cx))
@@ -177,26 +196,82 @@ impl<T: 'static> Render for PromptEditor<T> {
.flex_row()
.items_center()
.gap_1()
+ .child(add_context_button)
+ .child(self.model_selector.clone())
.children(buttons),
),
),
)
- .child(
- WithRemSize::new(ui_font_size)
- .flex()
- .flex_row()
- .items_center()
- .child(h_flex().flex_shrink_0().w(left_gutter_width))
- .child(
- h_flex()
- .w_full()
- .pl_1()
- .items_start()
- .justify_between()
- .child(add_context_button)
- .child(self.model_selector.clone()),
- ),
- )
+ .when_some(explanation, |this, _| {
+ this.child(
+ h_flex()
+ .size_full()
+ .child(div().w(left_gutter_width + px(6.)))
+ .child(
+ div()
+ .size_full()
+ .min_w_0()
+ .pb_px()
+ .pl_1()
+ .flex_1()
+ .border_t_1()
+ .border_color(cx.theme().colors().border_variant)
+ .child(explanation_label),
+ ),
+ )
+ })
+ }
+}
+
+fn markdown_style(window: &Window, cx: &App) -> MarkdownStyle {
+ let theme_settings = ThemeSettings::get_global(cx);
+ let colors = cx.theme().colors();
+ let mut text_style = window.text_style();
+
+ text_style.refine(&TextStyleRefinement {
+ font_family: Some(theme_settings.ui_font.family.clone()),
+ color: Some(colors.text),
+ ..Default::default()
+ });
+
+ MarkdownStyle {
+ base_text_style: text_style.clone(),
+ syntax: cx.theme().syntax().clone(),
+ selection_background_color: colors.element_selection_background,
+ heading_level_styles: Some(HeadingLevelStyles {
+ h1: Some(TextStyleRefinement {
+ font_size: Some(rems(1.15).into()),
+ ..Default::default()
+ }),
+ h2: Some(TextStyleRefinement {
+ font_size: Some(rems(1.1).into()),
+ ..Default::default()
+ }),
+ h3: Some(TextStyleRefinement {
+ font_size: Some(rems(1.05).into()),
+ ..Default::default()
+ }),
+ h4: Some(TextStyleRefinement {
+ font_size: Some(rems(1.).into()),
+ ..Default::default()
+ }),
+ h5: Some(TextStyleRefinement {
+ font_size: Some(rems(0.95).into()),
+ ..Default::default()
+ }),
+ h6: Some(TextStyleRefinement {
+ font_size: Some(rems(0.875).into()),
+ ..Default::default()
+ }),
+ }),
+ inline_code: TextStyleRefinement {
+ font_family: Some(theme_settings.buffer_font.family.clone()),
+ font_fallbacks: theme_settings.buffer_font.fallbacks.clone(),
+ font_features: Some(theme_settings.buffer_font.features.clone()),
+ background_color: Some(colors.editor_foreground.opacity(0.08)),
+ ..Default::default()
+ },
+ ..Default::default()
}
}
@@ -759,6 +834,10 @@ impl<T: 'static> PromptEditor<T> {
})
.into_any_element()
}
+
+ fn render_markdown(&self, markdown: Entity<Markdown>, style: MarkdownStyle) -> MarkdownElement {
+ MarkdownElement::new(markdown, style)
+ }
}
pub enum PromptEditorMode {
@@ -11,3 +11,9 @@ pub struct PanicFeatureFlag;
impl FeatureFlag for PanicFeatureFlag {
const NAME: &'static str = "panic";
}
+
+pub struct InlineAssistantV2FeatureFlag;
+
+impl FeatureFlag for InlineAssistantV2FeatureFlag {
+ const NAME: &'static str = "inline-assistant-v2";
+}
@@ -707,6 +707,40 @@ pub trait LanguageModel: Send + Sync {
.boxed()
}
+ fn stream_completion_tool(
+ &self,
+ request: LanguageModelRequest,
+ cx: &AsyncApp,
+ ) -> BoxFuture<'static, Result<LanguageModelToolUse, LanguageModelCompletionError>> {
+ let future = self.stream_completion(request, cx);
+
+ async move {
+ let events = future.await?;
+ let mut events = events.fuse();
+
+ // Iterate through events until we find a complete ToolUse
+ while let Some(event) = events.next().await {
+ match event {
+ Ok(LanguageModelCompletionEvent::ToolUse(tool_use))
+ if tool_use.is_input_complete =>
+ {
+ return Ok(tool_use);
+ }
+ Err(err) => {
+ return Err(err);
+ }
+ _ => {}
+ }
+ }
+
+ // Stream ended without a complete tool use
+ Err(LanguageModelCompletionError::Other(anyhow::anyhow!(
+ "Stream ended without receiving a complete tool use"
+ )))
+ }
+ .boxed()
+ }
+
fn cache_configuration(&self) -> Option<LanguageModelCacheConfiguration> {
None
}
@@ -94,6 +94,16 @@ pub struct ContentPromptContext {
pub diagnostic_errors: Vec<ContentPromptDiagnosticContext>,
}
+#[derive(Serialize)]
+pub struct ContentPromptContextV2 {
+ pub content_type: String,
+ pub language_name: Option<String>,
+ pub is_truncated: bool,
+ pub document_content: String,
+ pub rewrite_section: Option<String>,
+ pub diagnostic_errors: Vec<ContentPromptDiagnosticContext>,
+}
+
#[derive(Serialize)]
pub struct TerminalAssistantPromptContext {
pub os: String,
@@ -276,6 +286,88 @@ impl PromptBuilder {
Ok(())
}
+ pub fn generate_inline_transformation_prompt_v2(
+ &self,
+ language_name: Option<&LanguageName>,
+ buffer: BufferSnapshot,
+ range: Range<usize>,
+ ) -> Result<String, RenderError> {
+ let content_type = match language_name.as_ref().map(|l| l.as_ref()) {
+ None | Some("Markdown" | "Plain Text") => "text",
+ Some(_) => "code",
+ };
+
+ const MAX_CTX: usize = 50000;
+ let is_insert = range.is_empty();
+ let mut is_truncated = false;
+
+ let before_range = 0..range.start;
+ let truncated_before = if before_range.len() > MAX_CTX {
+ is_truncated = true;
+ let start = buffer.clip_offset(range.start - MAX_CTX, text::Bias::Right);
+ start..range.start
+ } else {
+ before_range
+ };
+
+ let after_range = range.end..buffer.len();
+ let truncated_after = if after_range.len() > MAX_CTX {
+ is_truncated = true;
+ let end = buffer.clip_offset(range.end + MAX_CTX, text::Bias::Left);
+ range.end..end
+ } else {
+ after_range
+ };
+
+ let mut document_content = String::new();
+ for chunk in buffer.text_for_range(truncated_before) {
+ document_content.push_str(chunk);
+ }
+ if is_insert {
+ document_content.push_str("<insert_here></insert_here>");
+ } else {
+ document_content.push_str("<rewrite_this>\n");
+ for chunk in buffer.text_for_range(range.clone()) {
+ document_content.push_str(chunk);
+ }
+ document_content.push_str("\n</rewrite_this>");
+ }
+ for chunk in buffer.text_for_range(truncated_after) {
+ document_content.push_str(chunk);
+ }
+
+ let rewrite_section = if !is_insert {
+ let mut section = String::new();
+ for chunk in buffer.text_for_range(range.clone()) {
+ section.push_str(chunk);
+ }
+ Some(section)
+ } else {
+ None
+ };
+ let diagnostics = buffer.diagnostics_in_range::<_, Point>(range, false);
+ let diagnostic_errors: Vec<ContentPromptDiagnosticContext> = diagnostics
+ .map(|entry| {
+ let start = entry.range.start;
+ ContentPromptDiagnosticContext {
+ line_number: (start.row + 1) as usize,
+ error_message: entry.diagnostic.message.clone(),
+ code_content: buffer.text_for_range(entry.range).collect(),
+ }
+ })
+ .collect();
+
+ let context = ContentPromptContextV2 {
+ content_type: content_type.to_string(),
+ language_name: language_name.map(|s| s.to_string()),
+ is_truncated,
+ document_content,
+ rewrite_section,
+ diagnostic_errors,
+ };
+ self.handlebars.lock().render("content_prompt_v2", &context)
+ }
+
pub fn generate_inline_transformation_prompt(
&self,
user_prompt: String,