@@ -1387,11 +1387,6 @@ impl ExtensionStore {
// Tuple is (provider_info, models, is_authenticated)
let mut llm_providers_with_models = Vec::new();
if !extension.manifest.language_model_providers.is_empty() {
- eprintln!(
- "Extension {} declares {} LLM providers in manifest, querying...",
- extension.manifest.id,
- extension.manifest.language_model_providers.len()
- );
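+ // Ask the running Wasm extension for the LLM providers it implements.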
let providers_result = wasm_extension
.call(|ext, store| {
async move { ext.call_llm_providers(store).await }.boxed()
@@ -1399,11 +1394,6 @@ impl ExtensionStore {
.await;
if let Ok(Ok(providers)) = providers_result {
- eprintln!(
- "Extension {} returned {} LLM providers",
- extension.manifest.id,
- providers.len()
- );
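+ // For each provider, collect its models and authentication state.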
for provider_info in providers {
let models_result = wasm_extension
.call({
@@ -1421,7 +1411,7 @@ impl ExtensionStore {
let models: Vec<LlmModelInfo> = match models_result {
Ok(Ok(Ok(models))) => models,
Ok(Ok(Err(e))) => {
- eprintln!(
+ log::error!(
"Failed to get models for LLM provider {} in extension {}: {}",
provider_info.id,
extension.manifest.id,
@@ -1430,7 +1420,7 @@ impl ExtensionStore {
Vec::new()
}
Ok(Err(e)) => {
- eprintln!(
+ log::error!(
"Wasm error calling llm_provider_models for {} in extension {}: {:?}",
provider_info.id,
extension.manifest.id,
@@ -1439,7 +1429,7 @@ impl ExtensionStore {
Vec::new()
}
Err(e) => {
- eprintln!(
+ log::error!(
"Extension call failed for llm_provider_models {} in extension {}: {:?}",
provider_info.id,
extension.manifest.id,
@@ -1468,17 +1458,11 @@ impl ExtensionStore {
.unwrap_or(Ok(false))
.unwrap_or(false);
- eprintln!(
- "LLM provider {} has {} models, is_authenticated={}",
- provider_info.id,
- models.len(),
- is_authenticated
- );
llm_providers_with_models
.push((provider_info, models, is_authenticated));
}
} else {
- eprintln!(
+ log::error!(
"Failed to get LLM providers from extension {}: {:?}",
extension.manifest.id,
providers_result
@@ -1576,23 +1560,18 @@ impl ExtensionStore {
this.proxy.register_language_model_provider(
provider_id.clone(),
Box::new(move |cx: &mut App| {
- eprintln!("register_fn closure called, creating provider");
let provider = Arc::new(ExtensionLanguageModelProvider::new(
wasm_ext, pinfo, mods, auth, cx,
));
- eprintln!("Provider created, registering with registry");
language_model::LanguageModelRegistry::global(cx).update(
cx,
|registry, cx| {
- eprintln!("Inside registry.register_provider");
registry.register_provider(provider, cx);
},
);
- eprintln!("Provider registered");
}),
cx,
);
- eprintln!("register_language_model_provider call completed for {}", provider_id);
}
}
@@ -665,11 +665,22 @@ impl LanguageModel for ExtensionLanguageModel {
let provider_id = self.provider_info.id.clone();
let model_id = self.model_info.id.clone();
+ eprintln!(
+ "[EXT LLM DEBUG] stream_completion called for provider={}, model={}",
+ provider_id, model_id
+ );
+
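+ // Convert the editor-side request into the WIT types shared with the extension.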
let wit_request = convert_request_to_wit(request);
+ eprintln!(
+ "[EXT LLM DEBUG] Converted request: {} messages, {} tools",
+ wit_request.messages.len(),
+ wit_request.tools.len()
+ );
async move {
// Start the stream
- let stream_id = extension
+ eprintln!("[EXT LLM DEBUG] Calling llm_stream_completion_start...");
+ let stream_id_result = extension
.call({
let provider_id = provider_id.clone();
let model_id = model_id.clone();
@@ -690,10 +701,19 @@ impl LanguageModel for ExtensionLanguageModel {
.boxed()
}
})
- .await
+ .await;
+
+ eprintln!(
+ "[EXT LLM DEBUG] llm_stream_completion_start result: {:?}",
+ stream_id_result
+ );
+
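+ // Two Result layers: the outer one from the Wasm call itself, the inner one from the extension's handler.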
+ let stream_id = stream_id_result
.map_err(LanguageModelCompletionError::Other)?
.map_err(LanguageModelCompletionError::Other)?;
+ eprintln!("[EXT LLM DEBUG] Got stream_id: {}", stream_id);
+
// Create a stream that polls for events
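+ // The unfold state is (extension handle, stream id, done flag); the flag records whether a Stop event has been seen.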
let stream = futures::stream::unfold(
(extension.clone(), stream_id, false),
@@ -721,12 +741,37 @@ impl LanguageModel for ExtensionLanguageModel {
match result {
Ok(Some(event)) => {
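+ // Summarize the event for the debug line below, borrowing it so it can still be converted afterwards.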
+ let event_desc = match &event {
+ LlmCompletionEvent::Started => "Started".to_string(),
+ LlmCompletionEvent::Text(t) => format!("Text: {:?}", t),
+ LlmCompletionEvent::Thinking(th) => {
+ format!("Thinking: {:?}", th.text)
+ }
+ LlmCompletionEvent::RedactedThinking(r) => {
+ format!("RedactedThinking: {:?}", r)
+ }
+ LlmCompletionEvent::ToolUse(tu) => {
+ format!("ToolUse: name={}, input={}", tu.name, tu.input)
+ }
+ LlmCompletionEvent::ToolUseJsonParseError(e) => {
+ format!("ToolUseJsonParseError: {:?}", e.error)
+ }
+ LlmCompletionEvent::Stop(r) => format!("Stop({:?})", r),
+ LlmCompletionEvent::Usage(u) => {
+ format!("Usage: in={}, out={}", u.input_tokens, u.output_tokens)
+ }
+ LlmCompletionEvent::ReasoningDetails(d) => {
+ format!("ReasoningDetails: {:?}", d)
+ }
+ };
+ eprintln!("[EXT LLM DEBUG] Got event: {}", event_desc);
let converted = convert_completion_event(event);
let is_done =
matches!(&converted, Ok(LanguageModelCompletionEvent::Stop(_)));
Some((converted, (extension, stream_id, is_done)))
}
Ok(None) => {
+ eprintln!("[EXT LLM DEBUG] Stream returned None, closing");
// Stream complete, close it
let _ = extension
.call({
@@ -743,10 +788,13 @@ impl LanguageModel for ExtensionLanguageModel {
.await;
None
}
- Err(e) => Some((
- Err(LanguageModelCompletionError::Other(e)),
- (extension, stream_id, true),
- )),
+ Err(e) => {
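+ // Surface the polling error to the consumer and mark the stream as finished.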
+ eprintln!("[EXT LLM DEBUG] Stream error: {:?}", e);
+ Some((
+ Err(LanguageModelCompletionError::Other(e)),
+ (extension, stream_id, true),
+ ))
+ }
}
},
);