rename templates to prompts in ai crate

Created by KCaverly

Change summary

crates/ai/src/ai.rs                         |  2 +-
crates/ai/src/prompts/base.rs               |  2 +-
crates/ai/src/prompts/file_context.rs       |  4 ++--
crates/ai/src/prompts/generate.rs           |  2 +-
crates/ai/src/prompts/mod.rs                |  0 
crates/ai/src/prompts/preamble.rs           |  2 +-
crates/ai/src/prompts/repository_context.rs |  2 +-
crates/assistant/src/assistant_panel.rs     |  2 +-
crates/assistant/src/prompts.rs             | 10 +++++-----
9 files changed, 13 insertions(+), 13 deletions(-)
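
The rename is mechanical: the module directory moves from crates/ai/src/templates/ to crates/ai/src/prompts/ and every import path is updated to match; no types or behavior change. As a rough caller-side sketch (the paths are taken from the diff below; the snippet itself is illustrative and not part of the change):

    // Before the rename, callers imported prompt types from `ai::templates`:
    // use ai::templates::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
    // use ai::templates::repository_context::PromptCodeSnippet;

    // After the rename, the same items are imported from `ai::prompts`:
    use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
    use ai::prompts::file_context::FileContext;
    use ai::prompts::generate::GenerateInlineContent;
    use ai::prompts::preamble::EngineerPreamble;
    use ai::prompts::repository_context::{PromptCodeSnippet, RepositoryContext};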

Detailed changes

crates/ai/src/ai.rs

@@ -1,4 +1,4 @@
 pub mod completion;
 pub mod embedding;
 pub mod models;
-pub mod templates;
+pub mod prompts;

crates/ai/src/templates/base.rs → crates/ai/src/prompts/base.rs

@@ -6,7 +6,7 @@ use language::BufferSnapshot;
 use util::ResultExt;
 
 use crate::models::LanguageModel;
-use crate::templates::repository_context::PromptCodeSnippet;
+use crate::prompts::repository_context::PromptCodeSnippet;
 
 pub(crate) enum PromptFileType {
     Text,

crates/ai/src/templates/file_context.rs → crates/ai/src/prompts/file_context.rs

@@ -4,8 +4,8 @@ use language::ToOffset;
 
 use crate::models::LanguageModel;
 use crate::models::TruncationDirection;
-use crate::templates::base::PromptArguments;
-use crate::templates::base::PromptTemplate;
+use crate::prompts::base::PromptArguments;
+use crate::prompts::base::PromptTemplate;
 use std::fmt::Write;
 use std::ops::Range;
 use std::sync::Arc;

crates/ai/src/templates/generate.rs → crates/ai/src/prompts/generate.rs

@@ -1,4 +1,4 @@
-use crate::templates::base::{PromptArguments, PromptFileType, PromptTemplate};
+use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
 use anyhow::anyhow;
 use std::fmt::Write;
 

crates/ai/src/templates/preamble.rs → crates/ai/src/prompts/preamble.rs

@@ -1,4 +1,4 @@
-use crate::templates::base::{PromptArguments, PromptFileType, PromptTemplate};
+use crate::prompts::base::{PromptArguments, PromptFileType, PromptTemplate};
 use std::fmt::Write;
 
 pub struct EngineerPreamble {}

crates/ai/src/templates/repository_context.rs → crates/ai/src/prompts/repository_context.rs

@@ -1,4 +1,4 @@
-use crate::templates::base::{PromptArguments, PromptTemplate};
+use crate::prompts::base::{PromptArguments, PromptTemplate};
 use std::fmt::Write;
 use std::{ops::Range, path::PathBuf};
 

crates/assistant/src/assistant_panel.rs

@@ -9,7 +9,7 @@ use ai::{
     completion::{
         stream_completion, OpenAICompletionProvider, OpenAIRequest, RequestMessage, OPENAI_API_URL,
     },
-    templates::repository_context::PromptCodeSnippet,
+    prompts::repository_context::PromptCodeSnippet,
 };
 use anyhow::{anyhow, Result};
 use chrono::{DateTime, Local};

crates/assistant/src/prompts.rs

@@ -1,9 +1,9 @@
 use ai::models::{LanguageModel, OpenAILanguageModel};
-use ai::templates::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
-use ai::templates::file_context::FileContext;
-use ai::templates::generate::GenerateInlineContent;
-use ai::templates::preamble::EngineerPreamble;
-use ai::templates::repository_context::{PromptCodeSnippet, RepositoryContext};
+use ai::prompts::base::{PromptArguments, PromptChain, PromptPriority, PromptTemplate};
+use ai::prompts::file_context::FileContext;
+use ai::prompts::generate::GenerateInlineContent;
+use ai::prompts::preamble::EngineerPreamble;
+use ai::prompts::repository_context::{PromptCodeSnippet, RepositoryContext};
 use language::{BufferSnapshot, OffsetRangeExt, ToOffset};
 use std::cmp::{self, Reverse};
 use std::ops::Range;