// audio.rs

  1use anyhow::{Context as _, Result};
  2use collections::HashMap;
  3use gpui::{App, BackgroundExecutor, BorrowAppContext, Global};
  4
  5#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
  6mod non_windows_and_freebsd_deps {
  7    pub(super) use gpui::AsyncApp;
  8    pub(super) use libwebrtc::native::apm;
  9    pub(super) use log::info;
 10    pub(super) use parking_lot::Mutex;
 11    pub(super) use rodio::cpal::Sample;
 12    pub(super) use rodio::source::LimitSettings;
 13    pub(super) use std::sync::Arc;
 14}
 15
 16#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
 17use non_windows_and_freebsd_deps::*;
 18
 19use rodio::{
 20    Decoder, OutputStream, OutputStreamBuilder, Source, mixer::Mixer, nz, source::Buffered,
 21};
 22use settings::Settings;
 23use std::{io::Cursor, num::NonZero, path::PathBuf, sync::atomic::Ordering, time::Duration};
 24use util::ResultExt;
 25
 26mod audio_settings;
 27mod replays;
 28mod rodio_ext;
 29pub use audio_settings::AudioSettings;
 30pub use rodio_ext::RodioExt;
 31
 32use crate::audio_settings::LIVE_SETTINGS;
 33
// We are migrating to 16kHz sample rate from 48kHz. In the future
// once we are reasonably sure most users have upgraded we will
// remove the LEGACY parameters.
//
// We migrate to 16kHz because it is sufficient for speech and required
// by the denoiser and future Speech to Text layers.
pub const SAMPLE_RATE: NonZero<u32> = nz!(16000);
pub const CHANNEL_COUNT: NonZero<u16> = nz!(1);
pub const BUFFER_SIZE: usize = // echo canceller and livekit want 10ms of audio
    (SAMPLE_RATE.get() as usize / 100) * CHANNEL_COUNT.get() as usize;

// Pre-migration format; `open_microphone` still converts its output back to
// these parameters before handing the stream on (see `constant_params` there).
pub const LEGACY_SAMPLE_RATE: NonZero<u32> = nz!(48000);
pub const LEGACY_CHANNEL_COUNT: NonZero<u16> = nz!(2);

// How much recent audio each replay buffer retains (used by `save_replays`).
pub const REPLAY_DURATION: Duration = Duration::from_secs(30);
 49
/// Sets up the audio subsystem: registers [`AudioSettings`] and initializes
/// the `LIVE_SETTINGS` cache from the current settings. Intended to be called
/// once during app startup, before any audio is played or captured.
pub fn init(cx: &mut App) {
    AudioSettings::register(cx);
    LIVE_SETTINGS.initialize(cx);
}
 54
 55#[derive(Debug, Copy, Clone, Eq, Hash, PartialEq)]
 56pub enum Sound {
 57    Joined,
 58    Leave,
 59    Mute,
 60    Unmute,
 61    StartScreenshare,
 62    StopScreenshare,
 63    AgentDone,
 64}
 65
 66impl Sound {
 67    fn file(&self) -> &'static str {
 68        match self {
 69            Self::Joined => "joined_call",
 70            Self::Leave => "leave_call",
 71            Self::Mute => "mute",
 72            Self::Unmute => "unmute",
 73            Self::StartScreenshare => "start_screenshare",
 74            Self::StopScreenshare => "stop_screenshare",
 75            Self::AgentDone => "agent_done",
 76        }
 77    }
 78}
 79
/// Global audio state: the output stream and mixer, the echo canceller shared
/// between playback and capture, a cache of decoded UI sounds, and the replay
/// buffers used for debugging call audio.
pub struct Audio {
    // Keeps the OS output stream alive; `None` until the first playback
    // (see `ensure_output_exists`) and dropped again by `end_call`.
    output_handle: Option<OutputStream>,
    // Mixer all app audio is added to; created together with `output_handle`.
    output_mixer: Option<Mixer>,
    // The webrtc apm does not compile on windows-gnu & freebsd (see cfg gates
    // throughout this file). Shared with the capture side via `VoipParts`.
    #[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
    pub echo_canceller: Arc<Mutex<apm::AudioProcessingModule>>,
    // Decoded UI sounds, cached so each asset is loaded and decoded only once.
    source_cache: HashMap<Sound, Buffered<Decoder<Cursor<Vec<u8>>>>>,
    replays: replays::Replays,
}
 88
impl Default for Audio {
    fn default() -> Self {
        Self {
            output_handle: Default::default(),
            output_mixer: Default::default(),
            #[cfg(not(any(
                all(target_os = "windows", target_env = "gnu"),
                target_os = "freebsd"
            )))]
            // NOTE(review): the four booleans presumably select which apm
            // stages are enabled (first one — echo cancellation — on, the
            // rest off); confirm against `apm::AudioProcessingModule::new`.
            echo_canceller: Arc::new(Mutex::new(apm::AudioProcessingModule::new(
                true, false, false, false,
            ))),
            source_cache: Default::default(),
            replays: Default::default(),
        }
    }
}
106
// Lets `Audio` live as a gpui global, accessed via `cx.update_default_global`.
impl Global for Audio {}
108
impl Audio {
    /// Returns the mixer all app audio should be added to, opening the OS
    /// default output stream on first use.
    ///
    /// The app mixer is not handed to the output stream directly: on
    /// supported platforms its output side is tapped so that every buffer
    /// headed for the speakers is also fed to the echo canceller as the
    /// "reverse stream".
    fn ensure_output_exists(&mut self) -> Result<&Mixer> {
        if self.output_handle.is_none() {
            self.output_handle = Some(
                OutputStreamBuilder::open_default_stream()
                    .context("Could not open default output stream")?,
            );
            if let Some(output_handle) = &self.output_handle {
                let (mixer, source) = rodio::mixer::mixer(CHANNEL_COUNT, SAMPLE_RATE);
                // Keep a silent source in the mixer,
                // or the mixer will end immediately as its empty.
                mixer.add(rodio::source::Zero::new(CHANNEL_COUNT, SAMPLE_RATE));
                self.output_mixer = Some(mixer);

                // The webrtc apm is not yet compiling for windows & freebsd
                #[cfg(not(any(
                    any(all(target_os = "windows", target_env = "gnu")),
                    target_os = "freebsd"
                )))]
                let echo_canceller = Arc::clone(&self.echo_canceller);
                #[cfg(not(any(
                    any(all(target_os = "windows", target_env = "gnu")),
                    target_os = "freebsd"
                )))]
                // Tap everything we play into the apm's reverse stream so it
                // can be subtracted from the microphone input later.
                let source = source.inspect_buffer::<BUFFER_SIZE, _>(move |buffer| {
                    // The apm operates on 10ms chunks of i16 samples
                    // (BUFFER_SIZE is sized accordingly).
                    let mut buf: [i16; _] = buffer.map(|s| s.to_sample());
                    echo_canceller
                        .lock()
                        .process_reverse_stream(
                            &mut buf,
                            SAMPLE_RATE.get() as i32,
                            CHANNEL_COUNT.get().into(),
                        )
                        .expect("Audio input and output threads should not panic");
                });
                output_handle.mixer().add(source);
            }
        }

        Ok(self
            .output_mixer
            .as_ref()
            .expect("we only get here if opening the outputstream succeeded"))
    }

    /// Packages the recorded replay buffers into a tar archive on a
    /// background task; resolves to the archive path and the duration of
    /// audio captured.
    pub fn save_replays(
        &self,
        executor: BackgroundExecutor,
    ) -> gpui::Task<anyhow::Result<(PathBuf, Duration)>> {
        self.replays.replays_to_tar(executor)
    }

    /// Opens the default microphone and builds the call-audio processing
    /// chain: downmix to mono, resample to `SAMPLE_RATE`, limit, echo-cancel,
    /// denoise and auto-gain (the last two toggleable live via settings),
    /// while also recording into the replay buffer. The returned stream is
    /// converted back to the legacy 48kHz/stereo format (see `LEGACY_*`).
    #[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
    pub fn open_microphone(voip_parts: VoipParts) -> anyhow::Result<impl Source> {
        let stream = rodio::microphone::MicrophoneBuilder::new()
            .default_device()?
            .default_config()?
            .prefer_sample_rates([
                SAMPLE_RATE, // sample rates trivially resamplable to `SAMPLE_RATE`
                SAMPLE_RATE.saturating_mul(nz!(2)),
                SAMPLE_RATE.saturating_mul(nz!(3)),
                SAMPLE_RATE.saturating_mul(nz!(4)),
            ])
            .prefer_channel_counts([nz!(2)])
            // .prefer_channel_counts([CHANNEL_COUNT, CHANNEL_COUNT.saturating_mul(nz!(2))])
            .prefer_buffer_sizes(512..)
            .open_stream()?;
        info!("Opened microphone: {:?}", stream.config());

        let (replay, stream) = stream
            .possibly_disconnected_channels_to_mono()
            .constant_samplerate(SAMPLE_RATE)
            // .constant_params(CHANNEL_COUNT, SAMPLE_RATE)
            .limit(LimitSettings::live_performance())
            .process_buffer::<BUFFER_SIZE, _>(move |buffer| {
                // The apm needs i16 samples: convert, process, and copy the
                // result back only when processing succeeded — on error the
                // unprocessed audio passes through and the error is logged.
                let mut int_buffer: [i16; _] = buffer.map(|s| s.to_sample());
                if voip_parts
                    .echo_canceller
                    .lock()
                    .process_stream(
                        &mut int_buffer,
                        SAMPLE_RATE.get() as i32,
                        CHANNEL_COUNT.get() as i32,
                    )
                    .context("livekit audio processor error")
                    .log_err()
                    .is_some()
                {
                    for (sample, processed) in buffer.iter_mut().zip(&int_buffer) {
                        *sample = (*processed).to_sample();
                    }
                }
            })
            .denoise()
            .context("Could not set up denoiser")?
            // Poll the live settings periodically so toggling takes effect
            // without reopening the microphone.
            .periodic_access(Duration::from_millis(100), move |denoise| {
                denoise.set_enabled(LIVE_SETTINGS.denoise.load(Ordering::Relaxed));
            })
            .automatic_gain_control(1.0, 2.0, 0.0, 5.0)
            .periodic_access(Duration::from_millis(100), move |agc_source| {
                agc_source
                    .set_enabled(LIVE_SETTINGS.auto_microphone_volume.load(Ordering::Relaxed));
            })
            .replayable(REPLAY_DURATION)?;

        voip_parts
            .replays
            .add_voip_stream("local microphone".to_string(), replay);

        // Convert back to the pre-migration wire format.
        let stream = stream.constant_params(LEGACY_CHANNEL_COUNT, LEGACY_SAMPLE_RATE);

        Ok(stream)
    }

    /// Plays a remote participant's audio through the shared output mixer.
    /// Auto speaker volume can be toggled live via settings; streams from
    /// staff members are additionally recorded under `speaker_name` for
    /// replay.
    pub fn play_voip_stream(
        source: impl rodio::Source + Send + 'static,
        speaker_name: String,
        is_staff: bool,
        cx: &mut App,
    ) -> anyhow::Result<()> {
        let (replay_source, source) = source
            .constant_params(CHANNEL_COUNT, SAMPLE_RATE)
            .automatic_gain_control(1.0, 2.0, 0.0, 5.0)
            .periodic_access(Duration::from_millis(100), move |agc_source| {
                agc_source.set_enabled(LIVE_SETTINGS.auto_speaker_volume.load(Ordering::Relaxed));
            })
            .replayable(REPLAY_DURATION)
            .expect("REPLAY_DURATION is longer than 100ms");

        cx.update_default_global(|this: &mut Self, _cx| {
            let output_mixer = this
                .ensure_output_exists()
                .context("Could not get output mixer")?;
            output_mixer.add(source);
            if is_staff {
                this.replays.add_voip_stream(speaker_name, replay_source);
            }
            Ok(())
        })
    }

    /// Plays a short UI sound effect. Failures (missing asset, no output
    /// device) are logged rather than propagated.
    pub fn play_sound(sound: Sound, cx: &mut App) {
        cx.update_default_global(|this: &mut Self, cx| {
            let source = this.sound_source(sound, cx).log_err()?;
            let output_mixer = this
                .ensure_output_exists()
                .context("Could not get output mixer")
                .log_err()?;

            output_mixer.add(source);
            Some(())
        });
    }

    /// Drops the OS output stream; the next playback reopens it (and
    /// rebuilds the mixer) via `ensure_output_exists`.
    pub fn end_call(cx: &mut App) {
        cx.update_default_global(|this: &mut Self, _cx| {
            this.output_handle.take();
        });
    }

    /// Loads and decodes the wav asset for `sound`, caching the buffered
    /// decoder so each asset is read and decoded only once per run.
    fn sound_source(&mut self, sound: Sound, cx: &App) -> Result<impl Source + use<>> {
        if let Some(wav) = self.source_cache.get(&sound) {
            return Ok(wav.clone());
        }

        let path = format!("sounds/{}.wav", sound.file());
        let bytes = cx
            .asset_source()
            .load(&path)?
            .map(anyhow::Ok)
            .with_context(|| format!("No asset available for path {path}"))??
            .into_owned();
        let cursor = Cursor::new(bytes);
        let source = Decoder::new(cursor)?.buffered();

        self.source_cache.insert(sound, source.clone());

        Ok(source)
    }
}
288
/// The subset of the global [`Audio`] state needed to set up microphone
/// capture (see `Audio::open_microphone`), extracted so capture can proceed
/// without holding on to the gpui global.
#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
pub struct VoipParts {
    // Shared with playback, which feeds it the reverse (speaker) stream.
    echo_canceller: Arc<Mutex<apm::AudioProcessingModule>>,
    replays: replays::Replays,
}
294
295#[cfg(not(any(all(target_os = "windows", target_env = "gnu"), target_os = "freebsd")))]
296impl VoipParts {
297    pub fn new(cx: &AsyncApp) -> anyhow::Result<Self> {
298        let (apm, replays) = cx.try_read_default_global::<Audio, _>(|audio, _| {
299            (Arc::clone(&audio.echo_canceller), audio.replays.clone())
300        })?;
301
302        Ok(Self {
303            echo_canceller: apm,
304            replays,
305        })
306    }
307}