Detailed changes
@@ -25,6 +25,8 @@ third-party = [
{ name = "reqwest", version = "0.11.27" },
# build of remote_server should not include scap / its x11 dependency
{ name = "scap", git = "https://github.com/zed-industries/scap", rev = "808aa5c45b41e8f44729d02e38fd00a2fe2722e7" },
+ # build of remote_server should not need to depend on libalsa through rodio
+ { name = "rodio" },
]
[final-excludes]
@@ -7883,6 +7883,12 @@ dependencies = [
"windows-sys 0.59.0",
]
+[[package]]
+name = "hound"
+version = "3.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "62adaabb884c94955b19907d60019f4e145d091c75345379e70d1ee696f7854f"
+
[[package]]
name = "html5ever"
version = "0.27.0"
@@ -9711,6 +9717,7 @@ dependencies = [
"objc",
"parking_lot",
"postage",
+ "rodio",
"scap",
"serde",
"serde_json",
@@ -13972,6 +13979,7 @@ checksum = "e40ecf59e742e03336be6a3d53755e789fd05a059fa22dfa0ed624722319e183"
dependencies = [
"cpal",
"dasp_sample",
+ "hound",
"num-rational",
"symphonia",
"tracing",
@@ -20576,6 +20584,7 @@ dependencies = [
"language_tools",
"languages",
"libc",
+ "livekit_client",
"log",
"markdown",
"markdown_preview",
@@ -363,6 +363,7 @@ remote_server = { path = "crates/remote_server" }
repl = { path = "crates/repl" }
reqwest_client = { path = "crates/reqwest_client" }
rich_text = { path = "crates/rich_text" }
+rodio = { version = "0.21.1", default-features = false }
rope = { path = "crates/rope" }
rpc = { path = "crates/rpc" }
rules_library = { path = "crates/rules_library" }
@@ -564,7 +565,6 @@ reqwest = { git = "https://github.com/zed-industries/reqwest.git", rev = "951c77
"socks",
"stream",
] }
-rodio = { version = "0.21.1", default-features = false }
rsa = "0.9.6"
runtimelib = { git = "https://github.com/ConradIrwin/runtimed", rev = "7130c804216b6914355d15d0b91ea91f6babd734", default-features = false, features = [
"async-dispatcher-runtime",
@@ -18,6 +18,6 @@ collections.workspace = true
derive_more.workspace = true
gpui.workspace = true
parking_lot.workspace = true
-rodio = { workspace = true, features = ["wav", "playback", "tracing"] }
+rodio = { workspace = true, features = [ "wav", "playback", "tracing" ] }
util.workspace = true
workspace-hack.workspace = true
@@ -39,6 +39,8 @@ tokio-tungstenite.workspace = true
util.workspace = true
workspace-hack.workspace = true
+rodio = { workspace = true, features = ["wav_output"] }
+
[target.'cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))'.dependencies]
libwebrtc = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks" }
livekit = { rev = "5f04705ac3f356350ae31534ffbc476abc9ea83d", git = "https://github.com/zed-industries/livekit-rust-sdks", features = [
@@ -1,7 +1,13 @@
+use anyhow::Context as _;
use collections::HashMap;
mod remote_video_track_view;
+use cpal::traits::HostTrait as _;
pub use remote_video_track_view::{RemoteVideoTrackView, RemoteVideoTrackViewEvent};
+use rodio::DeviceTrait as _;
+
+mod record;
+pub use record::CaptureInput;
#[cfg(not(any(
test,
@@ -18,6 +24,8 @@ mod livekit_client;
)))]
pub use livekit_client::*;
+// If you need proper LSP support in livekit_client, you have to comment out
+// the mocks and tests below.
#[cfg(any(
test,
feature = "test-support",
@@ -168,3 +176,59 @@ pub enum RoomEvent {
Reconnecting,
Reconnected,
}
+
+pub(crate) fn default_device(
+ input: bool,
+) -> anyhow::Result<(cpal::Device, cpal::SupportedStreamConfig)> {
+ let device;
+ let config;
+ if input {
+ device = cpal::default_host()
+ .default_input_device()
+ .context("no audio input device available")?;
+ config = device
+ .default_input_config()
+ .context("failed to get default input config")?;
+ } else {
+ device = cpal::default_host()
+ .default_output_device()
+ .context("no audio output device available")?;
+ config = device
+ .default_output_config()
+ .context("failed to get default output config")?;
+ }
+ Ok((device, config))
+}
+
+pub(crate) fn get_sample_data(
+ sample_format: cpal::SampleFormat,
+ data: &cpal::Data,
+) -> anyhow::Result<Vec<i16>> {
+ match sample_format {
+ cpal::SampleFormat::I8 => Ok(convert_sample_data::<i8, i16>(data)),
+ cpal::SampleFormat::I16 => Ok(data.as_slice::<i16>().unwrap().to_vec()),
+ cpal::SampleFormat::I24 => Ok(convert_sample_data::<cpal::I24, i16>(data)),
+ cpal::SampleFormat::I32 => Ok(convert_sample_data::<i32, i16>(data)),
+ cpal::SampleFormat::I64 => Ok(convert_sample_data::<i64, i16>(data)),
+ cpal::SampleFormat::U8 => Ok(convert_sample_data::<u8, i16>(data)),
+ cpal::SampleFormat::U16 => Ok(convert_sample_data::<u16, i16>(data)),
+ cpal::SampleFormat::U32 => Ok(convert_sample_data::<u32, i16>(data)),
+ cpal::SampleFormat::U64 => Ok(convert_sample_data::<u64, i16>(data)),
+ cpal::SampleFormat::F32 => Ok(convert_sample_data::<f32, i16>(data)),
+ cpal::SampleFormat::F64 => Ok(convert_sample_data::<f64, i16>(data)),
+ _ => anyhow::bail!("Unsupported sample format"),
+ }
+}
+
+pub(crate) fn convert_sample_data<
+ TSource: cpal::SizedSample,
+ TDest: cpal::SizedSample + cpal::FromSample<TSource>,
+>(
+ data: &cpal::Data,
+) -> Vec<TDest> {
+ data.as_slice::<TSource>()
+ .unwrap()
+ .iter()
+ .map(|e| e.to_sample::<TDest>())
+ .collect()
+}
@@ -8,6 +8,8 @@ use gpui_tokio::Tokio;
use playback::capture_local_video_track;
mod playback;
+#[cfg(feature = "record-microphone")]
+mod record;
use crate::{LocalTrack, Participant, RemoteTrack, RoomEvent, TrackPublication};
pub use playback::AudioStream;
@@ -1,7 +1,6 @@
use anyhow::{Context as _, Result};
-use cpal::traits::{DeviceTrait, HostTrait, StreamTrait as _};
-use cpal::{Data, FromSample, I24, SampleFormat, SizedSample};
+use cpal::traits::{DeviceTrait, StreamTrait as _};
use futures::channel::mpsc::UnboundedSender;
use futures::{Stream, StreamExt as _};
use gpui::{
@@ -166,7 +165,7 @@ impl AudioStack {
) -> Result<()> {
loop {
let mut device_change_listener = DeviceChangeListener::new(false)?;
- let (output_device, output_config) = default_device(false)?;
+ let (output_device, output_config) = crate::default_device(false)?;
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
let mixer = mixer.clone();
let apm = apm.clone();
@@ -238,7 +237,7 @@ impl AudioStack {
) -> Result<()> {
loop {
let mut device_change_listener = DeviceChangeListener::new(true)?;
- let (device, config) = default_device(true)?;
+ let (device, config) = crate::default_device(true)?;
let (end_on_drop_tx, end_on_drop_rx) = std::sync::mpsc::channel::<()>();
let apm = apm.clone();
let frame_tx = frame_tx.clone();
@@ -262,7 +261,7 @@ impl AudioStack {
config.sample_format(),
move |data, _: &_| {
let data =
- Self::get_sample_data(config.sample_format(), data).log_err();
+ crate::get_sample_data(config.sample_format(), data).log_err();
let Some(data) = data else {
return;
};
@@ -320,33 +319,6 @@ impl AudioStack {
drop(end_on_drop_tx)
}
}
-
- fn get_sample_data(sample_format: SampleFormat, data: &Data) -> Result<Vec<i16>> {
- match sample_format {
- SampleFormat::I8 => Ok(Self::convert_sample_data::<i8, i16>(data)),
- SampleFormat::I16 => Ok(data.as_slice::<i16>().unwrap().to_vec()),
- SampleFormat::I24 => Ok(Self::convert_sample_data::<I24, i16>(data)),
- SampleFormat::I32 => Ok(Self::convert_sample_data::<i32, i16>(data)),
- SampleFormat::I64 => Ok(Self::convert_sample_data::<i64, i16>(data)),
- SampleFormat::U8 => Ok(Self::convert_sample_data::<u8, i16>(data)),
- SampleFormat::U16 => Ok(Self::convert_sample_data::<u16, i16>(data)),
- SampleFormat::U32 => Ok(Self::convert_sample_data::<u32, i16>(data)),
- SampleFormat::U64 => Ok(Self::convert_sample_data::<u64, i16>(data)),
- SampleFormat::F32 => Ok(Self::convert_sample_data::<f32, i16>(data)),
- SampleFormat::F64 => Ok(Self::convert_sample_data::<f64, i16>(data)),
- _ => anyhow::bail!("Unsupported sample format"),
- }
- }
-
- fn convert_sample_data<TSource: SizedSample, TDest: SizedSample + FromSample<TSource>>(
- data: &Data,
- ) -> Vec<TDest> {
- data.as_slice::<TSource>()
- .unwrap()
- .iter()
- .map(|e| e.to_sample::<TDest>())
- .collect()
- }
}
use super::LocalVideoTrack;
@@ -393,27 +365,6 @@ pub(crate) async fn capture_local_video_track(
))
}
-fn default_device(input: bool) -> Result<(cpal::Device, cpal::SupportedStreamConfig)> {
- let device;
- let config;
- if input {
- device = cpal::default_host()
- .default_input_device()
- .context("no audio input device available")?;
- config = device
- .default_input_config()
- .context("failed to get default input config")?;
- } else {
- device = cpal::default_host()
- .default_output_device()
- .context("no audio output device available")?;
- config = device
- .default_output_config()
- .context("failed to get default output config")?;
- }
- Ok((device, config))
-}
-
#[derive(Clone)]
struct AudioMixerSource {
ssrc: i32,
@@ -0,0 +1,91 @@
+use std::{
+ env,
+ path::{Path, PathBuf},
+ sync::{Arc, Mutex},
+ time::Duration,
+};
+
+use anyhow::{Context, Result};
+use cpal::traits::{DeviceTrait, StreamTrait};
+use rodio::{buffer::SamplesBuffer, conversions::SampleTypeConverter};
+use util::ResultExt;
+
+pub struct CaptureInput {
+ pub name: String,
+ config: cpal::SupportedStreamConfig,
+ samples: Arc<Mutex<Vec<i16>>>,
+ _stream: cpal::Stream,
+}
+
+impl CaptureInput {
+ pub fn start() -> anyhow::Result<Self> {
+ let (device, config) = crate::default_device(true)?;
+ let name = device.name().unwrap_or("<unknown>".to_string());
+ log::info!("Using microphone: {}", name);
+
+ let samples = Arc::new(Mutex::new(Vec::new()));
+ let stream = start_capture(device, config.clone(), samples.clone())?;
+
+ Ok(Self {
+ name,
+ _stream: stream,
+ config,
+ samples,
+ })
+ }
+
+ pub fn finish(self) -> Result<PathBuf> {
+ let name = self.name;
+ let mut path = env::current_dir().context("Could not get current dir")?;
+ path.push(&format!("test_recording_{name}.wav"));
+ log::info!("Test recording written to: {}", path.display());
+ write_out(self.samples, self.config, &path)?;
+ Ok(path)
+ }
+}
+
+fn start_capture(
+ device: cpal::Device,
+ config: cpal::SupportedStreamConfig,
+ samples: Arc<Mutex<Vec<i16>>>,
+) -> Result<cpal::Stream> {
+ let stream = device
+ .build_input_stream_raw(
+ &config.config(),
+ config.sample_format(),
+ move |data, _: &_| {
+ let data = crate::get_sample_data(config.sample_format(), data).log_err();
+ let Some(data) = data else {
+ return;
+ };
+ samples
+ .try_lock()
+ .expect("Only locked after stream ends")
+ .extend_from_slice(&data);
+ },
+ |err| log::error!("error capturing audio track: {:?}", err),
+ Some(Duration::from_millis(100)),
+ )
+ .context("failed to build input stream")?;
+
+ stream.play()?;
+ Ok(stream)
+}
+
+fn write_out(
+ samples: Arc<Mutex<Vec<i16>>>,
+ config: cpal::SupportedStreamConfig,
+ path: &Path,
+) -> Result<()> {
+ let samples = std::mem::take(
+ &mut *samples
+ .try_lock()
+ .expect("Stream has ended, callback cant hold the lock"),
+ );
+ let samples: Vec<f32> = SampleTypeConverter::<_, f32>::new(samples.into_iter()).collect();
+ let mut samples = SamplesBuffer::new(config.channels(), config.sample_rate().0, samples);
+ match rodio::output_to_wav(&mut samples, path) {
+ Ok(_) => Ok(()),
+ Err(e) => Err(anyhow::anyhow!("Failed to write wav file: {}", e)),
+ }
+}
@@ -6,6 +6,7 @@ use gpui::{
Task, svg,
};
use parking_lot::Mutex;
+
use std::ops::Deref;
use std::sync::{Arc, LazyLock};
use std::{any::TypeId, time::Duration};
@@ -189,6 +190,7 @@ impl Workspace {
cx.notify();
}
+ /// Dismisses the notification with the given ID and prevents it from being shown again.
pub fn suppress_notification(&mut self, id: &NotificationId, cx: &mut Context<Self>) {
self.dismiss_notification(id, cx);
self.suppressed_notifications.insert(id.clone());
@@ -462,16 +464,144 @@ impl EventEmitter<SuppressEvent> for ErrorMessagePrompt {}
impl Notification for ErrorMessagePrompt {}
+#[derive(IntoElement, RegisterComponent)]
+pub struct NotificationFrame {
+ title: Option<SharedString>,
+ show_suppress_button: bool,
+ show_close_button: bool,
+ close: Option<Box<dyn Fn(&bool, &mut Window, &mut App) + 'static>>,
+ contents: Option<AnyElement>,
+ suffix: Option<AnyElement>,
+}
+
+impl NotificationFrame {
+ pub fn new() -> Self {
+ Self {
+ title: None,
+ contents: None,
+ suffix: None,
+ show_suppress_button: true,
+ show_close_button: true,
+ close: None,
+ }
+ }
+
+ pub fn with_title(mut self, title: Option<impl Into<SharedString>>) -> Self {
+ self.title = title.map(Into::into);
+ self
+ }
+
+ pub fn with_content(self, content: impl IntoElement) -> Self {
+ Self {
+ contents: Some(content.into_any_element()),
+ ..self
+ }
+ }
+
+ /// Determines whether the notification can be suppressed.
+ /// Suppressed notifications will not be shown anymore.
+ pub fn show_suppress_button(mut self, show: bool) -> Self {
+ self.show_suppress_button = show;
+ self
+ }
+
+ pub fn show_close_button(mut self, show: bool) -> Self {
+ self.show_close_button = show;
+ self
+ }
+
+ pub fn on_close(self, on_close: impl Fn(&bool, &mut Window, &mut App) + 'static) -> Self {
+ Self {
+ close: Some(Box::new(on_close)),
+ ..self
+ }
+ }
+
+ pub fn with_suffix(mut self, suffix: impl IntoElement) -> Self {
+ self.suffix = Some(suffix.into_any_element());
+ self
+ }
+}
+
+impl RenderOnce for NotificationFrame {
+ fn render(mut self, window: &mut Window, cx: &mut App) -> impl IntoElement {
+ let entity = window.current_view();
+ let show_suppress_button = self.show_suppress_button;
+ let suppress = show_suppress_button && window.modifiers().shift;
+ let (close_id, close_icon) = if suppress {
+ ("suppress", IconName::Minimize)
+ } else {
+ ("close", IconName::Close)
+ };
+
+ v_flex()
+ .occlude()
+ .p_3()
+ .gap_2()
+ .elevation_3(cx)
+ .child(
+ h_flex()
+ .gap_4()
+ .justify_between()
+ .items_start()
+ .child(
+ v_flex()
+ .gap_0p5()
+ .when_some(self.title.clone(), |div, title| {
+ div.child(Label::new(title))
+ })
+ .child(div().max_w_96().children(self.contents)),
+ )
+ .when(self.show_close_button, |this| {
+ this.on_modifiers_changed(move |_, _, cx| cx.notify(entity))
+ .child(
+ IconButton::new(close_id, close_icon)
+ .tooltip(move |window, cx| {
+ if suppress {
+ Tooltip::for_action(
+ "Suppress.\nClose with click.",
+ &SuppressNotification,
+ window,
+ cx,
+ )
+ } else if show_suppress_button {
+ Tooltip::for_action(
+ "Close.\nSuppress with shift-click.",
+ &menu::Cancel,
+ window,
+ cx,
+ )
+ } else {
+ Tooltip::for_action("Close", &menu::Cancel, window, cx)
+ }
+ })
+ .on_click({
+ let close = self.close.take();
+ move |_, window, cx| {
+ if let Some(close) = &close {
+ close(&suppress, window, cx)
+ }
+ }
+ }),
+ )
+ }),
+ )
+ .children(self.suffix)
+ }
+}
+
+impl Component for NotificationFrame {}
+
pub mod simple_message_notification {
use std::sync::Arc;
use gpui::{
- AnyElement, ClickEvent, DismissEvent, EventEmitter, FocusHandle, Focusable, ParentElement,
- Render, SharedString, Styled, div,
+ AnyElement, DismissEvent, EventEmitter, FocusHandle, Focusable, ParentElement, Render,
+ SharedString, Styled,
};
- use ui::{Tooltip, prelude::*};
+ use ui::prelude::*;
- use crate::SuppressNotification;
+ use crate::notifications::NotificationFrame;
use super::{Notification, SuppressEvent};
@@ -631,6 +761,8 @@ pub mod simple_message_notification {
self
}
+ /// Determines whether the notification can be suppressed.
+ /// Suppressed notifications will not be shown anymore.
pub fn show_suppress_button(mut self, show: bool) -> Self {
self.show_suppress_button = show;
self
@@ -647,71 +779,19 @@ pub mod simple_message_notification {
impl Render for MessageNotification {
fn render(&mut self, window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
- let show_suppress_button = self.show_suppress_button;
- let suppress = show_suppress_button && window.modifiers().shift;
- let (close_id, close_icon) = if suppress {
- ("suppress", IconName::Minimize)
- } else {
- ("close", IconName::Close)
- };
-
- v_flex()
- .occlude()
- .p_3()
- .gap_2()
- .elevation_3(cx)
- .child(
- h_flex()
- .gap_4()
- .justify_between()
- .items_start()
- .child(
- v_flex()
- .gap_0p5()
- .when_some(self.title.clone(), |element, title| {
- element.child(Label::new(title))
- })
- .child(div().max_w_96().child((self.build_content)(window, cx))),
- )
- .when(self.show_close_button, |this| {
- this.on_modifiers_changed(cx.listener(|_, _, _, cx| cx.notify()))
- .child(
- IconButton::new(close_id, close_icon)
- .tooltip(move |window, cx| {
- if suppress {
- Tooltip::for_action(
- "Suppress.\nClose with click.",
- &SuppressNotification,
- window,
- cx,
- )
- } else if show_suppress_button {
- Tooltip::for_action(
- "Close.\nSuppress with shift-click.",
- &menu::Cancel,
- window,
- cx,
- )
- } else {
- Tooltip::for_action(
- "Close",
- &menu::Cancel,
- window,
- cx,
- )
- }
- })
- .on_click(cx.listener(move |_, _: &ClickEvent, _, cx| {
- if suppress {
- cx.emit(SuppressEvent);
- } else {
- cx.emit(DismissEvent);
- }
- })),
- )
- }),
- )
- .child(
+ NotificationFrame::new()
+ .with_title(self.title.clone())
+ .with_content((self.build_content)(window, cx))
+ .show_close_button(self.show_close_button)
+ .show_suppress_button(self.show_suppress_button)
+ .on_close(cx.listener(|_, suppress, _, cx| {
+ if *suppress {
+ cx.emit(SuppressEvent);
+ } else {
+ cx.emit(DismissEvent);
+ }
+ }))
+ .with_suffix(
h_flex()
.gap_1()
.children(self.primary_message.iter().map(|message| {
@@ -15,6 +15,8 @@ mod toast_layer;
mod toolbar;
mod workspace_settings;
+pub use crate::notifications::NotificationFrame;
+pub use dock::Panel;
pub use toast_layer::{ToastAction, ToastLayer, ToastView};
use anyhow::{Context as _, Result, anyhow};
@@ -24,7 +26,6 @@ use client::{
proto::{self, ErrorCode, PanelId, PeerId},
};
use collections::{HashMap, HashSet, hash_map};
-pub use dock::Panel;
use dock::{Dock, DockPosition, PanelButtons, PanelHandle, RESIZE_HANDLE_SIZE};
use futures::{
Future, FutureExt, StreamExt,
@@ -82,6 +82,7 @@ inspector_ui.workspace = true
install_cli.workspace = true
jj_ui.workspace = true
journal.workspace = true
+livekit_client.workspace = true
language.workspace = true
language_extension.workspace = true
language_model.workspace = true
@@ -56,6 +56,7 @@ use settings::{
initial_local_debug_tasks_content, initial_project_settings_content, initial_tasks_content,
update_settings_file,
};
+use std::time::{Duration, Instant};
use std::{
borrow::Cow,
path::{Path, PathBuf},
@@ -69,13 +70,17 @@ use util::markdown::MarkdownString;
use util::{ResultExt, asset_str};
use uuid::Uuid;
use vim_mode_setting::VimModeSetting;
-use workspace::notifications::{NotificationId, dismiss_app_notification, show_app_notification};
+use workspace::notifications::{
+ NotificationId, SuppressEvent, dismiss_app_notification, show_app_notification,
+};
use workspace::{
AppState, NewFile, NewWindow, OpenLog, Toast, Workspace, WorkspaceSettings,
create_and_open_local_file, notifications::simple_message_notification::MessageNotification,
open_new,
};
-use workspace::{CloseIntent, CloseWindow, RestoreBanner, with_active_or_new_workspace};
+use workspace::{
+ CloseIntent, CloseWindow, NotificationFrame, RestoreBanner, with_active_or_new_workspace,
+};
use workspace::{Pane, notifications::DetachAndPromptErr};
use zed_actions::{
OpenAccountSettings, OpenBrowser, OpenDocs, OpenServerSettings, OpenSettings, OpenZedUrl, Quit,
@@ -117,6 +122,14 @@ actions!(
]
);
+actions!(
+ dev,
+ [
+ /// Record 10s of audio from your current microphone
+ CaptureAudio
+ ]
+);
+
pub fn init(cx: &mut App) {
#[cfg(target_os = "macos")]
cx.on_action(|_: &Hide, cx| cx.hide());
@@ -897,7 +910,11 @@ fn register_actions(
.detach();
}
}
+ })
+ .register_action(|workspace, _: &CaptureAudio, window, cx| {
+ capture_audio(workspace, window, cx);
});
+
if workspace.project().read(cx).is_via_ssh() {
workspace.register_action({
move |workspace, _: &OpenServerSettings, window, cx| {
@@ -1806,6 +1823,107 @@ fn open_settings_file(
.detach_and_log_err(cx);
}
+fn capture_audio(workspace: &mut Workspace, _: &mut Window, cx: &mut Context<Workspace>) {
+ #[derive(Default)]
+ enum State {
+ Recording(livekit_client::CaptureInput),
+ Failed(String),
+ Finished(PathBuf),
+ // Used during state switch. Should never occur naturally.
+ #[default]
+ Invalid,
+ }
+
+ struct CaptureAudioNotification {
+ focus_handle: gpui::FocusHandle,
+ start_time: Instant,
+ state: State,
+ }
+
+ impl gpui::EventEmitter<DismissEvent> for CaptureAudioNotification {}
+ impl gpui::EventEmitter<SuppressEvent> for CaptureAudioNotification {}
+ impl gpui::Focusable for CaptureAudioNotification {
+ fn focus_handle(&self, _cx: &App) -> gpui::FocusHandle {
+ self.focus_handle.clone()
+ }
+ }
+ impl workspace::notifications::Notification for CaptureAudioNotification {}
+
+ const AUDIO_RECORDING_TIME_SECS: u64 = 10;
+
+ impl Render for CaptureAudioNotification {
+ fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
+ let elapsed = self.start_time.elapsed().as_secs();
+ let message = match &self.state {
+ State::Recording(capture) => format!(
+ "Recording {} seconds of audio from input: '{}'",
+ AUDIO_RECORDING_TIME_SECS.saturating_sub(elapsed),
+ capture.name,
+ ),
+ State::Failed(e) => format!("Error capturing audio: {e}"),
+ State::Finished(path) => format!("Audio recorded to {}", path.display()),
+ State::Invalid => "Error invalid state".to_string(),
+ };
+
+ NotificationFrame::new()
+ .with_title(Some("Recording Audio"))
+ .show_suppress_button(false)
+ .on_close(cx.listener(|_, _, _, cx| {
+ cx.emit(DismissEvent);
+ }))
+ .with_content(message)
+ }
+ }
+
+ impl CaptureAudioNotification {
+ fn finish(&mut self) {
+ let state = std::mem::take(&mut self.state);
+ self.state = if let State::Recording(capture) = state {
+ match capture.finish() {
+ Ok(path) => State::Finished(path),
+ Err(e) => State::Failed(e.to_string()),
+ }
+ } else {
+ state
+ };
+ }
+
+ fn new(cx: &mut Context<Self>) -> Self {
+ cx.spawn(async move |this, cx| {
+ for _ in 0..AUDIO_RECORDING_TIME_SECS {
+ cx.background_executor().timer(Duration::from_secs(1)).await;
+ this.update(cx, |_, cx| {
+ cx.notify();
+ })?;
+ }
+
+ this.update(cx, |this, cx| {
+ this.finish();
+ cx.notify();
+ })?;
+
+ anyhow::Ok(())
+ })
+ .detach();
+
+ let state = match livekit_client::CaptureInput::start() {
+ Ok(capture_input) => State::Recording(capture_input),
+ Err(err) => State::Failed(format!("Error starting audio capture: {}", err)),
+ };
+
+ Self {
+ focus_handle: cx.focus_handle(),
+ start_time: Instant::now(),
+ state,
+ }
+ }
+ }
+
+ workspace.show_notification(NotificationId::unique::<CaptureAudio>(), cx, |cx| {
+ cx.new(CaptureAudioNotification::new)
+ });
+}
+
#[cfg(test)]
mod tests {
use super::*;