Detailed changes
@@ -941,6 +941,7 @@ impl LanguageModel for CloudLanguageModel {
request,
model.id(),
model.supports_parallel_tool_calls(),
+ model.supports_prompt_cache_key(),
None,
None,
);
@@ -370,6 +370,7 @@ impl LanguageModel for OpenAiLanguageModel {
request,
self.model.id(),
self.model.supports_parallel_tool_calls(),
+ self.model.supports_prompt_cache_key(),
self.max_output_tokens(),
self.model.reasoning_effort(),
);
@@ -386,6 +387,7 @@ pub fn into_open_ai(
request: LanguageModelRequest,
model_id: &str,
supports_parallel_tool_calls: bool,
+ supports_prompt_cache_key: bool,
max_output_tokens: Option<u64>,
reasoning_effort: Option<ReasoningEffort>,
) -> open_ai::Request {
@@ -477,7 +479,11 @@ pub fn into_open_ai(
} else {
None
},
- prompt_cache_key: request.thread_id,
+ prompt_cache_key: if supports_prompt_cache_key {
+ request.thread_id
+ } else {
+ None
+ },
tools: request
.tools
.into_iter()
@@ -355,10 +355,13 @@ impl LanguageModel for OpenAiCompatibleLanguageModel {
LanguageModelCompletionError,
>,
> {
+ let supports_parallel_tool_calls = true;
+ let supports_prompt_cache_key = false;
let request = into_open_ai(
request,
&self.model.name,
- true,
+ supports_parallel_tool_calls,
+ supports_prompt_cache_key,
self.max_output_tokens(),
None,
);
@@ -355,6 +355,7 @@ impl LanguageModel for VercelLanguageModel {
request,
self.model.id(),
self.model.supports_parallel_tool_calls(),
+ self.model.supports_prompt_cache_key(),
self.max_output_tokens(),
None,
);
@@ -359,6 +359,7 @@ impl LanguageModel for XAiLanguageModel {
request,
self.model.id(),
self.model.supports_parallel_tool_calls(),
+ self.model.supports_prompt_cache_key(),
self.max_output_tokens(),
None,
);
@@ -236,6 +236,13 @@ impl Model {
Self::O1 | Self::O3 | Self::O3Mini | Self::O4Mini | Model::Custom { .. } => false,
}
}
+
+ /// Returns whether the given model supports the `prompt_cache_key` parameter.
+ ///
+ /// If the model does not support the parameter, it must not be included in the request.
+ pub fn supports_prompt_cache_key(&self) -> bool {
+ true
+ }
}
#[derive(Debug, Serialize, Deserialize)]
@@ -71,4 +71,8 @@ impl Model {
Model::Custom { .. } => false,
}
}
+
+ pub fn supports_prompt_cache_key(&self) -> bool {
+ false
+ }
}
@@ -105,6 +105,10 @@ impl Model {
}
}
+ pub fn supports_prompt_cache_key(&self) -> bool {
+ false
+ }
+
pub fn supports_tool(&self) -> bool {
match self {
Self::Grok2Vision