Detailed changes
@@ -612,6 +612,19 @@ dependencies = [
"winapi 0.3.9",
]
+[[package]]
+name = "audio"
+version = "0.1.0"
+dependencies = [
+ "anyhow",
+ "collections",
+ "gpui",
+ "log",
+ "parking_lot 0.11.2",
+ "rodio",
+ "util",
+]
+
[[package]]
name = "auto_update"
version = "0.1.0"
@@ -1028,6 +1041,7 @@ version = "0.1.0"
dependencies = [
"anyhow",
"async-broadcast",
+ "audio",
"client",
"collections",
"fs",
@@ -1039,7 +1053,6 @@ dependencies = [
"media",
"postage",
"project",
- "rodio",
"settings",
"util",
]
@@ -1392,6 +1405,7 @@ version = "0.15.0"
dependencies = [
"anyhow",
"async-tungstenite",
+ "audio",
"axum",
"axum-extra",
"base64 0.13.1",
@@ -9331,6 +9345,7 @@ dependencies = [
"async-recursion 0.3.2",
"async-tar",
"async-trait",
+ "audio",
"auto_update",
"backtrace",
"breadcrumbs",
@@ -2,6 +2,7 @@
members = [
"crates/activity_indicator",
"crates/ai",
+ "crates/audio",
"crates/auto_update",
"crates/breadcrumbs",
"crates/call",
@@ -0,0 +1,23 @@
+[package]
+name = "audio"
+version = "0.1.0"
+edition = "2021"
+publish = false
+
+[lib]
+path = "src/audio.rs"
+doctest = false
+
+[dependencies]
+gpui = { path = "../gpui" }
+collections = { path = "../collections" }
+util = { path = "../util" }
+
+rodio = "0.17.1"
+
+log.workspace = true
+
+anyhow.workspace = true
+parking_lot.workspace = true
+
+[dev-dependencies]
@@ -0,0 +1,44 @@
+use std::{io::Cursor, sync::Arc};
+
+use anyhow::Result;
+use collections::HashMap;
+use gpui::{AppContext, AssetSource};
+use rodio::{
+    source::{Buffered, SamplesConverter},
+    Decoder, Source,
+};
+
+type Sound = Buffered<SamplesConverter<Decoder<Cursor<Vec<u8>>>, f32>>;
+
+pub struct SoundRegistry {
+    cache: Arc<parking_lot::Mutex<HashMap<String, Sound>>>,
+    assets: Box<dyn AssetSource>,
+}
+
+impl SoundRegistry {
+    pub fn new(source: impl AssetSource) -> Arc<Self> {
+        Arc::new(Self {
+            cache: Default::default(),
+            assets: Box::new(source),
+        })
+    }
+
+    pub fn global(cx: &AppContext) -> Arc<Self> {
+        cx.global::<Arc<Self>>().clone()
+    }
+
+    pub fn get(&self, name: &str) -> Result<impl Source<Item = f32>> {
+        if let Some(wav) = self.cache.lock().get(name) {
+            return Ok(wav.clone());
+        }
+
+        let path = format!("sounds/{}.wav", name);
+        let bytes = self.assets.load(&path)?.into_owned();
+        let cursor = Cursor::new(bytes);
+        let source = Decoder::new(cursor)?.convert_samples::<f32>().buffered();
+
+        self.cache.lock().insert(name.to_string(), source.clone());
+
+        Ok(source)
+    }
+}
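
Note on SoundRegistry::get: each sound is decoded once and memoized as a buffered source, so later lookups only clone the cached samples. A rough caller-side sketch (MySoundAssets is a stand-in for whatever gpui::AssetSource implementation serves the sounds/ directory, not part of this change):

    // Hypothetical caller, for illustration only.
    let registry = SoundRegistry::new(MySoundAssets);
    let first = registry.get("joined")?;  // decodes sounds/joined.wav and caches the buffered samples
    let again = registry.get("joined")?;  // served from the in-memory cache, no second decode
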
@@ -0,0 +1,59 @@
+use assets::SoundRegistry;
+use gpui::{AppContext, AssetSource};
+use rodio::{OutputStream, OutputStreamHandle};
+use util::ResultExt;
+
+mod assets;
+
+pub fn init(source: impl AssetSource, cx: &mut AppContext) {
+    cx.set_global(SoundRegistry::new(source));
+    cx.set_global(Audio::new());
+}
+
+pub enum Sound {
+    Joined,
+    Leave,
+    Mute,
+    Unmute,
+}
+
+impl Sound {
+    fn file(&self) -> &'static str {
+        match self {
+            Self::Joined => "joined",
+            Self::Leave => "leave",
+            Self::Mute => "mute",
+            Self::Unmute => "unmute",
+        }
+    }
+}
+
+pub struct Audio {
+    _output_stream: Option<OutputStream>,
+    output_handle: Option<OutputStreamHandle>,
+}
+
+impl Audio {
+    pub fn new() -> Self {
+        let (_output_stream, output_handle) = OutputStream::try_default().log_err().unzip();
+
+        Self {
+            _output_stream,
+            output_handle,
+        }
+    }
+
+    pub fn play_sound(sound: Sound, cx: &AppContext) {
+        let this = cx.global::<Self>();
+
+        let Some(output_handle) = this.output_handle.as_ref() else {
+            return;
+        };
+
+        let Some(source) = SoundRegistry::global(cx).get(sound.file()).log_err() else {
+            return;
+        };
+
+        output_handle.play_raw(source).log_err();
+    }
+}
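
Worth noting: rodio's OutputStream must stay alive for its handle to keep playing, which is why Audio holds _output_stream next to output_handle; if no output device is available, both stay None and play_sound silently becomes a no-op. A minimal sketch of the intended wiring, mirroring the zed and collab-test changes further down (Assets is the application's existing gpui::AssetSource):

    // Once at startup:
    audio::init(Assets, cx);

    // Later, anywhere an &AppContext is available:
    Audio::play_sound(Sound::Joined, cx);
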
@@ -19,6 +19,7 @@ test-support = [
]
[dependencies]
+audio = { path = "../audio" }
client = { path = "../client" }
collections = { path = "../collections" }
gpui = { path = "../gpui" }
@@ -30,7 +31,6 @@ media = { path = "../media" }
project = { path = "../project" }
settings = { path = "../settings" }
util = { path = "../util" }
-rodio = "0.17.1"
anyhow.workspace = true
async-broadcast = "0.4"
@@ -1,18 +1,16 @@
-mod assets;
pub mod participant;
pub mod room;
use std::sync::Arc;
use anyhow::{anyhow, Result};
-use assets::SoundRegistry;
use client::{proto, Client, TypedEnvelope, User, UserStore};
use collections::HashSet;
use futures::{future::Shared, FutureExt};
use postage::watch;
use gpui::{
-    AppContext, AssetSource, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription,
+    AppContext, AsyncAppContext, Entity, ModelContext, ModelHandle, Subscription,
    Task, WeakModelHandle,
};
use project::Project;
@@ -23,10 +21,8 @@ pub use room::Room;
pub fn init(
    client: Arc<Client>,
    user_store: ModelHandle<UserStore>,
-    source: impl AssetSource,
    cx: &mut AppContext,
) {
-    cx.set_global(SoundRegistry::new(source));
    let active_call = cx.add_model(|cx| ActiveCall::new(client, user_store, cx));
    cx.set_global(active_call);
}
@@ -1,9 +1,9 @@
use crate::{
-    assets::SoundRegistry,
    participant::{LocalParticipant, ParticipantLocation, RemoteParticipant, RemoteVideoTrack},
    IncomingCall,
};
use anyhow::{anyhow, Result};
+use audio::{Audio, Sound};
use client::{
    proto::{self, PeerId},
    Client, TypedEnvelope, User, UserStore,
@@ -19,30 +19,11 @@ use live_kit_client::{
};
use postage::stream::Stream;
use project::Project;
-use rodio::{OutputStream, OutputStreamHandle, Source};
use std::{future::Future, mem, pin::Pin, sync::Arc, time::Duration};
use util::{post_inc, ResultExt, TryFutureExt};
pub const RECONNECT_TIMEOUT: Duration = Duration::from_secs(30);
-enum Sound {
-    Joined,
-    Leaved,
-    Mute,
-    Unmute,
-}
-
-impl Sound {
-    fn file(&self) -> &'static str {
-        match self {
-            Self::Joined => "joined",
-            Self::Leaved => "leave",
-            Self::Mute => "mute",
-            Self::Unmute => "unmute",
-        }
-    }
-}
-
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum Event {
ParticipantLocationChanged {
@@ -68,8 +49,6 @@ pub enum Event {
pub struct Room {
    id: u64,
    live_kit: Option<LiveKitRoom>,
-    _sound_output_stream: Option<OutputStream>,
-    sound_output_handle: Option<OutputStreamHandle>,
    status: RoomStatus,
    shared_projects: HashSet<WeakModelHandle<Project>>,
    joined_projects: HashSet<WeakModelHandle<Project>>,
@@ -173,6 +152,7 @@ impl Room {
            let connect = room.connect(&connection_info.server_url, &connection_info.token);
            cx.spawn(|this, mut cx| async move {
                connect.await?;
+
                this.update(&mut cx, |this, cx| this.share_microphone(cx))
                    .await?;
@@ -198,14 +178,11 @@ impl Room {
        let maintain_connection =
            cx.spawn_weak(|this, cx| Self::maintain_connection(this, client.clone(), cx).log_err());
-        let (sound_output_stream, sound_output_handle) =
-            OutputStream::try_default().log_err().unzip();
+        Audio::play_sound(Sound::Joined, cx);
        Self {
            id,
            live_kit: live_kit_room,
-            _sound_output_stream: sound_output_stream,
-            sound_output_handle,
            status: RoomStatus::Online,
            shared_projects: Default::default(),
            joined_projects: Default::default(),
@@ -292,6 +269,7 @@ impl Room {
                room.apply_room_update(room_proto, cx)?;
                anyhow::Ok(())
            })?;
+
            Ok(room)
        })
    }
@@ -333,6 +311,8 @@ impl Room {
            }
        }
+        Audio::play_sound(Sound::Leave, cx);
+
        self.status = RoomStatus::Offline;
        self.remote_participants.clear();
        self.pending_participants.clear();
@@ -937,18 +917,6 @@ impl Room {
        })
    }
-    fn play_sound(&self, sound: Sound, cx: &AppContext) {
-        let Some(output_handle) = self.sound_output_handle.as_ref() else {
-            return;
-        };
-
-        let Some(source) = SoundRegistry::global(cx).get(sound.file()) else {
-            return;
-        };
-
-        output_handle.play_raw(source.convert_samples()).log_err();
-    }
-
    pub fn join_project(
        &mut self,
        id: u64,
@@ -962,8 +930,6 @@ impl Room {
            let project =
                Project::remote(id, client, user_store, language_registry, fs, cx.clone()).await?;
-            cx.read(|cx| this.read(cx).play_sound(Sound::Joined, cx));
-
            this.update(&mut cx, |this, cx| {
                this.joined_projects.retain(|project| {
                    if let Some(project) = project.upgrade(cx) {
@@ -1269,38 +1235,20 @@ impl Room {
            })
        })
    }
-    fn set_mute(
-        live_kit: &mut LiveKitRoom,
-        should_mute: bool,
-        cx: &mut ModelContext<Self>,
-    ) -> Result<Task<Result<()>>> {
-        if !should_mute {
-            // clear user muting state.
-            live_kit.muted_by_user = false;
-        }
-        match &mut live_kit.microphone_track {
-            LocalTrack::None => Err(anyhow!("microphone was not shared")),
-            LocalTrack::Pending { muted, .. } => {
-                *muted = should_mute;
-                cx.notify();
-                Ok(Task::Ready(Some(Ok(()))))
-            }
-            LocalTrack::Published {
-                track_publication,
-                muted,
-            } => {
-                *muted = should_mute;
-                cx.notify();
-                Ok(cx.background().spawn(track_publication.set_mute(*muted)))
-            }
-        }
-    }
+
    pub fn toggle_mute(&mut self, cx: &mut ModelContext<Self>) -> Result<Task<Result<()>>> {
        let should_mute = !self.is_muted();
        if let Some(live_kit) = self.live_kit.as_mut() {
-            let ret = Self::set_mute(live_kit, should_mute, cx);
+            let (ret_task, old_muted) = live_kit.set_mute(should_mute, cx)?;
            live_kit.muted_by_user = should_mute;
-            ret
+
+            if old_muted == true && live_kit.deafened == true {
+                if let Some(task) = self.toggle_deafen(cx).ok() {
+                    task.detach();
+                }
+            }
+
+            Ok(ret_task)
        } else {
            Err(anyhow!("LiveKit not started"))
        }
@@ -1316,7 +1264,7 @@ impl Room {
            // When deafening, mute user's mic as well.
            // When undeafening, unmute user's mic unless it was manually muted prior to deafening.
            if live_kit.deafened || !live_kit.muted_by_user {
-                mute_task = Some(Self::set_mute(live_kit, live_kit.deafened, cx)?);
+                mute_task = Some(live_kit.set_mute(live_kit.deafened, cx)?.0);
            };
            for participant in self.remote_participants.values() {
                for track in live_kit
@@ -1389,6 +1337,48 @@ struct LiveKitRoom {
    _maintain_tracks: [Task<()>; 2],
}
+impl LiveKitRoom {
+    fn set_mute(
+        self: &mut LiveKitRoom,
+        should_mute: bool,
+        cx: &mut ModelContext<Room>,
+    ) -> Result<(Task<Result<()>>, bool)> {
+        if !should_mute {
+            // clear user muting state.
+            self.muted_by_user = false;
+        }
+
+        let (result, old_muted) = match &mut self.microphone_track {
+            LocalTrack::None => Err(anyhow!("microphone was not shared")),
+            LocalTrack::Pending { muted, .. } => {
+                let old_muted = *muted;
+                *muted = should_mute;
+                cx.notify();
+                Ok((Task::Ready(Some(Ok(()))), old_muted))
+            }
+            LocalTrack::Published {
+                track_publication,
+                muted,
+            } => {
+                let old_muted = *muted;
+                *muted = should_mute;
+                cx.notify();
+                Ok((cx.background().spawn(track_publication.set_mute(*muted)), old_muted))
+            }
+        }?;
+
+        if old_muted != should_mute {
+            if should_mute {
+                Audio::play_sound(Sound::Mute, cx);
+            } else {
+                Audio::play_sound(Sound::Unmute, cx);
+            }
+        }
+
+        Ok((result, old_muted))
+    }
+}
+
enum LocalTrack {
    None,
    Pending {
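
The new LiveKitRoom::set_mute returns the previous mute state alongside the task, and plays the Mute/Unmute sound only when the state actually changes. That returned flag is what lets toggle_mute above also lift an active deafen when the user unmutes. Condensed, the caller-side flow added in this diff is roughly:

    // Summary of the control flow above; names as in the diff, not new API.
    let (task, was_muted) = live_kit.set_mute(should_mute, cx)?; // plays Mute/Unmute on a real change
    live_kit.muted_by_user = should_mute;
    if was_muted && live_kit.deafened {
        // Unmuting while deafened also toggles deafen off.
        if let Ok(deafen_task) = self.toggle_deafen(cx) {
            deafen_task.detach();
        }
    }
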
@@ -14,6 +14,7 @@ name = "seed"
required-features = ["seed-support"]
[dependencies]
+audio = { path = "../audio" }
collections = { path = "../collections" }
live_kit_server = { path = "../live_kit_server" }
rpc = { path = "../rpc" }
@@ -203,7 +203,8 @@ impl TestServer {
            language::init(cx);
            editor::init_settings(cx);
            workspace::init(app_state.clone(), cx);
-            call::init(client.clone(), user_store.clone(), (), cx);
+            audio::init((), cx);
+            call::init(client.clone(), user_store.clone(), cx);
        });
        client
@@ -16,6 +16,7 @@ name = "Zed"
path = "src/main.rs"
[dependencies]
+audio = { path = "../audio" }
activity_indicator = { path = "../activity_indicator" }
auto_update = { path = "../auto_update" }
breadcrumbs = { path = "../breadcrumbs" }
@@ -180,6 +180,8 @@ fn main() {
            background_actions,
        });
        cx.set_global(Arc::downgrade(&app_state));
+
+        audio::init(Assets, cx);
        auto_update::init(http.clone(), client::ZED_SERVER_URL.clone(), cx);
        workspace::init(app_state.clone(), cx);
@@ -190,7 +192,7 @@ fn main() {
        theme_selector::init(cx);
        activity_indicator::init(cx);
        language_tools::init(cx);
-        call::init(app_state.client.clone(), app_state.user_store.clone(), Assets, cx);
+        call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
        collab_ui::init(&app_state, cx);
        feedback::init(cx);
        welcome::init(cx);
@@ -2160,7 +2160,8 @@ mod tests {
        state.initialize_workspace = initialize_workspace;
        state.build_window_options = build_window_options;
        theme::init((), cx);
-        call::init(app_state.client.clone(), app_state.user_store.clone(), (), cx);
+        audio::init((), cx);
+        call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
        workspace::init(app_state.clone(), cx);
        Project::init_settings(cx);
        language::init(cx);