1use std::sync::Arc;
2
3use futures::StreamExt;
4use gpui::{
5 actions, bounds, div, point,
6 prelude::{FluentBuilder as _, IntoElement},
7 px, rgb, size, AppContext as _, AsyncApp, Bounds, Context, Entity, InteractiveElement,
8 KeyBinding, Menu, MenuItem, ParentElement, Pixels, Render, ScreenCaptureStream, SharedString,
9 StatefulInteractiveElement as _, Styled, Task, Window, WindowBounds, WindowHandle,
10 WindowOptions,
11};
12use livekit_client::{
13 AudioStream, LocalTrackPublication, Participant, ParticipantIdentity, RemoteParticipant,
14 RemoteTrackPublication, RemoteVideoTrack, RemoteVideoTrackView, Room, RoomEvent,
15};
16
17use livekit_api::token::{self, VideoGrant};
18use log::LevelFilter;
19use simplelog::SimpleLogger;
20
// Declares the `Quit` action in the `livekit_client` action namespace,
// bound to cmd-q and the app menu in `main`.
actions!(livekit_client, [Quit]);
22
23fn main() {
24 SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
25
26 gpui::Application::new().run(|cx| {
27 #[cfg(any(test, feature = "test-support"))]
28 println!("USING TEST LIVEKIT");
29
30 #[cfg(not(any(test, feature = "test-support")))]
31 println!("USING REAL LIVEKIT");
32
33 gpui_tokio::init(cx);
34
35 cx.activate(true);
36 cx.on_action(quit);
37 cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
38 cx.set_menus(vec![Menu {
39 name: "Zed".into(),
40 items: vec![MenuItem::Action {
41 name: "Quit".into(),
42 action: Box::new(Quit),
43 os_action: None,
44 }],
45 }]);
46
47 let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into());
48 let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into());
49 let livekit_secret = std::env::var("LIVEKIT_SECRET").unwrap_or("secret".into());
50 let height = px(800.);
51 let width = px(800.);
52
53 cx.spawn(async move |cx| {
54 let mut windows = Vec::new();
55 for i in 0..2 {
56 let token = token::create(
57 &livekit_key,
58 &livekit_secret,
59 Some(&format!("test-participant-{i}")),
60 VideoGrant::to_join("wtej-trty"),
61 )
62 .unwrap();
63
64 let bounds = bounds(point(width * i, px(0.0)), size(width, height));
65 let window = LivekitWindow::new(livekit_url.clone(), token, bounds, cx).await;
66 windows.push(window);
67 }
68 })
69 .detach();
70 });
71}
72
/// Handler for the `Quit` action: terminates the gpui application.
fn quit(_: &Quit, cx: &mut gpui::App) {
    cx.quit();
}
76
/// One window's view of the LiveKit room: the shared room handle, the local
/// tracks this window has published, and per-remote-participant playback state.
struct LivekitWindow {
    room: Arc<livekit_client::Room>,
    /// Publication for the local microphone, if currently published.
    microphone_track: Option<LocalTrackPublication>,
    /// Publication for the local screen share, if currently published.
    screen_share_track: Option<LocalTrackPublication>,
    /// Held alive to keep local microphone capture running; dropped to stop.
    microphone_stream: Option<livekit_client::AudioStream>,
    /// Held alive to keep local screen capture running; dropped to stop.
    screen_share_stream: Option<Box<dyn ScreenCaptureStream>>,
    /// Kept sorted by identity (see `remote_participant`'s binary search).
    remote_participants: Vec<(ParticipantIdentity, ParticipantState)>,
    /// Drives `RoomEvent` handling; cancelled when the window entity drops.
    _events_task: Task<()>,
}
86
/// Playback/UI state tracked for a single remote participant.
#[derive(Default)]
struct ParticipantState {
    /// Remote audio publication plus the stream playing it locally.
    audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>,
    /// True while the participant's track is muted (from Track(Un)Muted events).
    muted: bool,
    /// Remote video track plus the view entity rendering it.
    screen_share_output_view: Option<(RemoteVideoTrack, Entity<RemoteVideoTrackView>)>,
    /// True while the participant is an active speaker.
    speaking: bool,
}
94
impl LivekitWindow {
    /// Connects to the LiveKit room at `url` with `token`, then opens a gpui
    /// window whose entity owns the room connection and pumps its event
    /// stream into `handle_room_event`.
    ///
    /// On connection failure this prints a hint and exits the process with
    /// status 1 (this is a test app, not library code).
    async fn new(
        url: String,
        token: String,
        bounds: Bounds<Pixels>,
        cx: &mut AsyncApp,
    ) -> WindowHandle<Self> {
        let (room, mut events) =
            Room::connect(url.clone(), token, cx)
                .await
                .unwrap_or_else(|err| {
                    eprintln!(
                        "Failed to connect to {url}: {err}.\nTry `foreman start` to run the livekit server"
                    );

                    std::process::exit(1)
                });

        cx.update(|cx| {
            cx.open_window(
                WindowOptions {
                    window_bounds: Some(WindowBounds::Windowed(bounds)),
                    ..Default::default()
                },
                |window, cx| {
                    cx.new(|cx| {
                        // Forward every room event to the entity for as long
                        // as both the event stream and the entity are alive;
                        // `.ok()` drops events that arrive after the window
                        // entity has been released.
                        let _events_task = cx.spawn_in(window, async move |this, cx| {
                            while let Some(event) = events.next().await {
                                cx.update(|window, cx| {
                                    this.update(cx, |this: &mut LivekitWindow, cx| {
                                        this.handle_room_event(event, window, cx)
                                    })
                                })
                                .ok();
                            }
                        });

                        Self {
                            room: Arc::new(room),
                            microphone_track: None,
                            microphone_stream: None,
                            screen_share_track: None,
                            screen_share_stream: None,
                            remote_participants: Vec::new(),
                            _events_task,
                        }
                    })
                },
            )
            .unwrap()
        })
        .unwrap()
    }

    /// Updates per-participant state in response to a room event and
    /// requests a re-render.
    fn handle_room_event(&mut self, event: RoomEvent, window: &mut Window, cx: &mut Context<Self>) {
        eprintln!("event: {event:?}");

        match event {
            // Drop our playback stream / video view for whichever of the
            // participant's tracks matches the unpublished sid.
            RoomEvent::TrackUnpublished {
                publication,
                participant,
            } => {
                let output = self.remote_participant(participant);
                let unpublish_sid = publication.sid();
                if output
                    .audio_output_stream
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.audio_output_stream.take();
                }
                if output
                    .screen_share_output_view
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.screen_share_output_view.take();
                }
                cx.notify();
            }

            // Start playing a newly subscribed audio track, or build a view
            // for a newly subscribed video track. Replaces any existing
            // stream/view for that participant.
            RoomEvent::TrackSubscribed {
                publication,
                participant,
                track,
            } => {
                let room = self.room.clone();
                let output = self.remote_participant(participant);
                match track {
                    livekit_client::RemoteTrack::Audio(track) => {
                        output.audio_output_stream = Some((
                            publication.clone(),
                            room.play_remote_audio_track(&track, cx).unwrap(),
                        ));
                    }
                    livekit_client::RemoteTrack::Video(track) => {
                        output.screen_share_output_view = Some((
                            track.clone(),
                            cx.new(|cx| RemoteVideoTrackView::new(track, window, cx)),
                        ));
                    }
                }
                cx.notify();
            }

            RoomEvent::TrackMuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = true;
                    cx.notify();
                }
            }

            RoomEvent::TrackUnmuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = false;
                    cx.notify();
                }
            }

            // Recompute `speaking` for every known participant from the full
            // speaker list (participants absent from the list go quiet).
            RoomEvent::ActiveSpeakersChanged { speakers } => {
                for (identity, output) in &mut self.remote_participants {
                    output.speaking = speakers.iter().any(|speaker| {
                        if let Participant::Remote(speaker) = speaker {
                            speaker.identity() == *identity
                        } else {
                            false
                        }
                    });
                }
                cx.notify();
            }

            _ => {}
        }

        // NOTE(review): this notifies on every event, which makes the
        // per-arm `cx.notify()` calls above redundant — presumably harmless
        // (repeated notifies just re-request a render), but worth confirming
        // which of the two was intended before removing either.
        cx.notify();
    }

    /// Returns the mutable state for `participant`, inserting a default
    /// entry if this is the first event seen for them. The vec is kept
    /// sorted by identity so lookup is a binary search.
    fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState {
        match self
            .remote_participants
            .binary_search_by_key(&&participant.identity(), |row| &row.0)
        {
            Ok(ix) => &mut self.remote_participants[ix].1,
            Err(ix) => {
                self.remote_participants
                    .insert(ix, (participant.identity(), ParticipantState::default()));
                &mut self.remote_participants[ix].1
            }
        }
    }

    /// Mute/unmute the published microphone; if none is published yet,
    /// publish one asynchronously and store its publication and stream.
    fn toggle_mute(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if let Some(track) = &self.microphone_track {
            if track.is_muted() {
                track.unmute(cx);
            } else {
                track.mute(cx);
            }
            cx.notify();
        } else {
            let room = self.room.clone();
            cx.spawn_in(window, async move |this, cx| {
                let (publication, stream) = room.publish_local_microphone_track(cx).await.unwrap();
                this.update(cx, |this, cx| {
                    this.microphone_track = Some(publication);
                    this.microphone_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

    /// Stop an active screen share (unpublishing the track), or start one by
    /// capturing the first available screen source and publishing it.
    fn toggle_screen_share(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if let Some(track) = self.screen_share_track.take() {
            // Dropping the stream stops capture; unpublishing happens async.
            self.screen_share_stream.take();
            let participant = self.room.local_participant();
            cx.spawn(async move |_, cx| {
                participant.unpublish_track(track.sid(), cx).await.unwrap();
            })
            .detach();
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            let sources = cx.screen_capture_sources();
            cx.spawn_in(window, async move |this, cx| {
                // Always shares the first enumerated source — fine for a
                // test app; a real UI would let the user pick.
                let sources = sources.await.unwrap()?;
                let source = sources.into_iter().next().unwrap();

                let (publication, stream) = participant
                    .publish_screenshare_track(&*source, cx)
                    .await
                    .unwrap();
                this.update(cx, |this, cx| {
                    this.screen_share_track = Some(publication);
                    this.screen_share_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

    /// Toggles whether `identity`'s audio publication is enabled (deafen /
    /// undeafen). Returns `None` if the participant or their audio stream
    /// is unknown.
    fn toggle_remote_audio_for_participant(
        &mut self,
        identity: &ParticipantIdentity,
        cx: &mut Context<Self>,
    ) -> Option<()> {
        let participant = self.remote_participants.iter().find_map(|(id, state)| {
            if id == identity {
                Some(state)
            } else {
                None
            }
        })?;
        let publication = &participant.audio_output_stream.as_ref()?.0;
        publication.set_enabled(!publication.is_enabled(), cx);
        cx.notify();
        Some(())
    }
}
317
impl Render for LivekitWindow {
    /// Renders a toolbar (mic + screen-share toggles) above a scrollable
    /// list of remote participants, each with status text, an optional
    /// deafen button, and an optional screen-share video view.
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // Shared styling for the small toolbar/participant buttons.
        fn button() -> gpui::Div {
            div()
                .w(px(180.0))
                .h(px(30.0))
                .px_2()
                .m_2()
                .bg(rgb(0x8888ff))
        }

        div()
            .bg(rgb(0xffffff))
            .size_full()
            .flex()
            .flex_col()
            .child(
                // Toolbar row: label each button from the current state so
                // the text flips as the tracks are (un)published/(un)muted.
                div().bg(rgb(0xffd4a8)).flex().flex_row().children([
                    button()
                        .id("toggle-mute")
                        .child(if let Some(track) = &self.microphone_track {
                            if track.is_muted() {
                                "Unmute"
                            } else {
                                "Mute"
                            }
                        } else {
                            "Publish mic"
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.toggle_mute(window, cx))),
                    button()
                        .id("toggle-screen-share")
                        .child(if self.screen_share_track.is_none() {
                            "Share screen"
                        } else {
                            "Unshare screen"
                        })
                        .on_click(
                            cx.listener(|this, _, window, cx| this.toggle_screen_share(window, cx)),
                        ),
                ]),
            )
            .child(
                div()
                    .id("remote-participants")
                    .overflow_y_scroll()
                    .flex()
                    .flex_col()
                    .flex_grow()
                    .children(self.remote_participants.iter().map(|(identity, state)| {
                        div()
                            .h(px(1080.0))
                            .flex()
                            .flex_col()
                            .m_2()
                            .px_2()
                            .bg(rgb(0x8888ff))
                            // Identity label, annotated with speaking/muted
                            // status (speaking takes precedence).
                            .child(SharedString::from(if state.speaking {
                                format!("{} (speaking)", &identity.0)
                            } else if state.muted {
                                format!("{} (muted)", &identity.0)
                            } else {
                                identity.0.clone()
                            }))
                            // Deafen toggle appears only once we are playing
                            // this participant's audio.
                            .when_some(state.audio_output_stream.as_ref(), |el, state| {
                                el.child(
                                    button()
                                        .id(SharedString::from(identity.0.clone()))
                                        .child(if state.0.is_enabled() {
                                            "Deafen"
                                        } else {
                                            "Undeafen"
                                        })
                                        .on_click(cx.listener({
                                            let identity = identity.clone();
                                            move |this, _, _, cx| {
                                                this.toggle_remote_audio_for_participant(
                                                    &identity, cx,
                                                );
                                            }
                                        })),
                                )
                            })
                            // Video view for the participant's screen share,
                            // when one is subscribed.
                            .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone()))
                    })),
            )
    }
}