1#![cfg_attr(windows, allow(unused))]
2// TODO: For some reason mac build complains about import of postage::stream::Stream, but removal of
3// it causes compile errors.
4#![cfg_attr(target_os = "macos", allow(unused_imports))]
5
6use gpui::{
7 actions, bounds, div, point,
8 prelude::{FluentBuilder as _, IntoElement},
9 px, rgb, size, AppContext as _, AsyncApp, Bounds, Context, Entity, InteractiveElement,
10 KeyBinding, Menu, MenuItem, ParentElement, Pixels, Render, ScreenCaptureStream, SharedString,
11 StatefulInteractiveElement as _, Styled, Task, Window, WindowBounds, WindowHandle,
12 WindowOptions,
13};
14#[cfg(not(target_os = "windows"))]
15use livekit_client::{
16 capture_local_audio_track, capture_local_video_track,
17 id::ParticipantIdentity,
18 options::{TrackPublishOptions, VideoCodec},
19 participant::{Participant, RemoteParticipant},
20 play_remote_audio_track,
21 publication::{LocalTrackPublication, RemoteTrackPublication},
22 track::{LocalTrack, RemoteTrack, RemoteVideoTrack, TrackSource},
23 AudioStream, RemoteVideoTrackView, Room, RoomEvent, RoomOptions,
24};
25#[cfg(not(target_os = "windows"))]
26use postage::stream::Stream;
27
28#[cfg(target_os = "windows")]
29use livekit_client::{
30 participant::{Participant, RemoteParticipant},
31 publication::{LocalTrackPublication, RemoteTrackPublication},
32 track::{LocalTrack, RemoteTrack, RemoteVideoTrack},
33 AudioStream, RemoteVideoTrackView, Room, RoomEvent,
34};
35
36use livekit_server::token::{self, VideoGrant};
37use log::LevelFilter;
38use simplelog::SimpleLogger;
39
// Declares the `Quit` action in the `livekit_client` namespace so it can be
// bound to a key and dispatched through gpui's action system (see `main`).
actions!(livekit_client, [Quit]);
41
// Windows stub: the non-windows `main` below uses livekit_client APIs that are
// cfg'd out on Windows (see the import blocks above), so the binary is a no-op.
#[cfg(windows)]
fn main() {}
44
45#[cfg(not(windows))]
46fn main() {
47 SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
48
49 gpui::Application::new().run(|cx| {
50 livekit_client::init(
51 cx.background_executor().dispatcher.clone(),
52 cx.http_client(),
53 );
54
55 #[cfg(any(test, feature = "test-support"))]
56 println!("USING TEST LIVEKIT");
57
58 #[cfg(not(any(test, feature = "test-support")))]
59 println!("USING REAL LIVEKIT");
60
61 cx.activate(true);
62 cx.on_action(quit);
63 cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
64 cx.set_menus(vec![Menu {
65 name: "Zed".into(),
66 items: vec![MenuItem::Action {
67 name: "Quit".into(),
68 action: Box::new(Quit),
69 os_action: None,
70 }],
71 }]);
72
73 let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into());
74 let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into());
75 let livekit_secret = std::env::var("LIVEKIT_SECRET").unwrap_or("secret".into());
76 let height = px(800.);
77 let width = px(800.);
78
79 cx.spawn(|cx| async move {
80 let mut windows = Vec::new();
81 for i in 0..2 {
82 let token = token::create(
83 &livekit_key,
84 &livekit_secret,
85 Some(&format!("test-participant-{i}")),
86 VideoGrant::to_join("test-room"),
87 )
88 .unwrap();
89
90 let bounds = bounds(point(width * i, px(0.0)), size(width, height));
91 let window =
92 LivekitWindow::new(livekit_url.as_str(), token.as_str(), bounds, cx.clone())
93 .await;
94 windows.push(window);
95 }
96 })
97 .detach();
98 });
99}
100
/// Handler for the `Quit` action (bound to cmd-q and the app menu in `main`):
/// terminates the gpui application.
fn quit(_: &Quit, cx: &mut gpui::App) {
    cx.quit();
}
104
/// Per-window state for one local LiveKit participant: the room connection,
/// the locally published tracks, and the playback state of remote peers.
struct LivekitWindow {
    // Connected room this window's participant belongs to.
    room: Room,
    // Published microphone track, once `toggle_mute` has published one.
    microphone_track: Option<LocalTrackPublication>,
    // Published screen-share track while sharing (see `toggle_screen_share`).
    screen_share_track: Option<LocalTrackPublication>,
    // Capture stream stored alongside the mic publication — presumably held
    // to keep capture alive; confirm against capture_local_audio_track docs.
    microphone_stream: Option<AudioStream>,
    // Capture stream for the screen share; `toggle_screen_share` drops it
    // (`take()`) when sharing stops.
    screen_share_stream: Option<Box<dyn ScreenCaptureStream>>,
    // Remote participants and their playback state, kept sorted by identity
    // so `remote_participant` can binary-search.
    #[cfg(not(target_os = "windows"))]
    remote_participants: Vec<(ParticipantIdentity, ParticipantState)>,
    // Forwards room events to `handle_room_event`; dropped with the entity.
    _events_task: Task<()>,
}
115
/// Playback and UI state tracked for a single remote participant.
#[derive(Default)]
struct ParticipantState {
    // Subscribed audio publication plus its local playback stream
    // (set on RoomEvent::TrackSubscribed, cleared on TrackUnpublished).
    audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>,
    // Toggled by RoomEvent::TrackMuted / TrackUnmuted.
    muted: bool,
    // Subscribed video track plus the view entity rendering it.
    screen_share_output_view: Option<(RemoteVideoTrack, Entity<RemoteVideoTrackView>)>,
    // Recomputed on every RoomEvent::ActiveSpeakersChanged.
    speaking: bool,
}
123
#[cfg(not(windows))]
impl LivekitWindow {
    /// Connects to the room at `url` with `token`, opens a window at `bounds`,
    /// and spawns a task that forwards room events into `handle_room_event`
    /// for the lifetime of the entity (via `_events_task`).
    async fn new(
        url: &str,
        token: &str,
        bounds: Bounds<Pixels>,
        cx: AsyncApp,
    ) -> WindowHandle<Self> {
        // Connect first so the entity is constructed with a live room and
        // its event receiver already in hand.
        let (room, mut events) = Room::connect(url, token, RoomOptions::default())
            .await
            .unwrap();

        cx.update(|cx| {
            cx.open_window(
                WindowOptions {
                    window_bounds: Some(WindowBounds::Windowed(bounds)),
                    ..Default::default()
                },
                |window, cx| {
                    cx.new(|cx| {
                        // Pump room events into the entity. `.ok()` swallows
                        // the update error once the window/entity is gone;
                        // the loop ends when `events` closes.
                        let _events_task = cx.spawn_in(window, |this, mut cx| async move {
                            while let Some(event) = events.recv().await {
                                cx.update(|window, cx| {
                                    this.update(cx, |this: &mut LivekitWindow, cx| {
                                        this.handle_room_event(event, window, cx)
                                    })
                                })
                                .ok();
                            }
                        });

                        Self {
                            room,
                            microphone_track: None,
                            microphone_stream: None,
                            screen_share_track: None,
                            screen_share_stream: None,
                            remote_participants: Vec::new(),
                            _events_task,
                        }
                    })
                },
            )
            .unwrap()
        })
        .unwrap()
    }

    /// Updates per-participant state in response to a room event and asks
    /// gpui to re-render the window.
    fn handle_room_event(&mut self, event: RoomEvent, window: &mut Window, cx: &mut Context<Self>) {
        eprintln!("event: {event:?}");

        match event {
            // A remote publication went away: drop whichever local stream or
            // view was built from it, matched by track sid.
            RoomEvent::TrackUnpublished {
                publication,
                participant,
            } => {
                let output = self.remote_participant(participant);
                let unpublish_sid = publication.sid();
                if output
                    .audio_output_stream
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.audio_output_stream.take();
                }
                if output
                    .screen_share_output_view
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.screen_share_output_view.take();
                }
                cx.notify();
            }

            // A remote track became available: start local audio playback,
            // or build a view entity for remote video.
            RoomEvent::TrackSubscribed {
                publication,
                participant,
                track,
            } => {
                let output = self.remote_participant(participant);
                match track {
                    RemoteTrack::Audio(track) => {
                        output.audio_output_stream = Some((
                            publication.clone(),
                            play_remote_audio_track(&track, cx.background_executor()).unwrap(),
                        ));
                    }
                    RemoteTrack::Video(track) => {
                        output.screen_share_output_view = Some((
                            track.clone(),
                            cx.new(|cx| RemoteVideoTrackView::new(track, window, cx)),
                        ));
                    }
                }
                cx.notify();
            }

            // Mute state only matters for remote participants here; local
            // mute is reflected directly off `microphone_track` in `render`.
            RoomEvent::TrackMuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = true;
                    cx.notify();
                }
            }

            RoomEvent::TrackUnmuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = false;
                    cx.notify();
                }
            }

            // Recompute `speaking` for every known participant from the full
            // speaker list (absent participants are implicitly not speaking).
            RoomEvent::ActiveSpeakersChanged { speakers } => {
                for (identity, output) in &mut self.remote_participants {
                    output.speaking = speakers.iter().any(|speaker| {
                        if let Participant::Remote(speaker) = speaker {
                            speaker.identity() == *identity
                        } else {
                            false
                        }
                    });
                }
                cx.notify();
            }

            _ => {}
        }

        // NOTE(review): redundant with the per-arm notifies above, and fires
        // even for ignored events. Harmless, but could be consolidated.
        cx.notify();
    }

    /// Returns the mutable state for `participant`, inserting a default entry
    /// on first sight. Insertion goes at the binary-search position, which
    /// keeps the vec sorted by identity for subsequent lookups.
    fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState {
        match self
            .remote_participants
            .binary_search_by_key(&&participant.identity(), |row| &row.0)
        {
            Ok(ix) => &mut self.remote_participants[ix].1,
            Err(ix) => {
                self.remote_participants
                    .insert(ix, (participant.identity(), ParticipantState::default()));
                &mut self.remote_participants[ix].1
            }
        }
    }

    /// Flips mute on an already-published microphone track, or captures and
    /// publishes one the first time it is invoked.
    fn toggle_mute(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if let Some(track) = &self.microphone_track {
            if track.is_muted() {
                track.unmute();
            } else {
                track.mute();
            }
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            cx.spawn_in(window, |this, mut cx| async move {
                let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
                let publication = participant
                    .publish_track(
                        LocalTrack::Audio(track),
                        TrackPublishOptions {
                            source: TrackSource::Microphone,
                            ..Default::default()
                        },
                    )
                    .await
                    .unwrap();
                this.update(&mut cx, |this, cx| {
                    this.microphone_track = Some(publication);
                    // Store the stream next to the publication — presumably
                    // dropping it would end capture; confirm.
                    this.microphone_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

    /// Stops an active screen share (dropping the capture stream and
    /// unpublishing the track in the background), or starts one from the
    /// first available capture source.
    fn toggle_screen_share(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if let Some(track) = self.screen_share_track.take() {
            self.screen_share_stream.take();
            let participant = self.room.local_participant();
            cx.background_executor()
                .spawn(async move {
                    participant.unpublish_track(&track.sid()).await.unwrap();
                })
                .detach();
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            let sources = cx.screen_capture_sources();
            cx.spawn_in(window, |this, mut cx| async move {
                // Demo behavior: no picker, just grab the first screen.
                let sources = sources.await.unwrap()?;
                let source = sources.into_iter().next().unwrap();
                let (track, stream) = capture_local_video_track(&*source).await?;
                let publication = participant
                    .publish_track(
                        LocalTrack::Video(track),
                        TrackPublishOptions {
                            source: TrackSource::Screenshare,
                            video_codec: VideoCodec::H264,
                            ..Default::default()
                        },
                    )
                    .await
                    .unwrap();
                this.update(&mut cx, |this, cx| {
                    this.screen_share_track = Some(publication);
                    this.screen_share_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

    /// Flips the enabled state of a remote participant's audio publication
    /// (the "Deafen"/"Undeafen" button). Returns `None` when the participant
    /// is unknown or has no subscribed audio stream.
    fn toggle_remote_audio_for_participant(
        &mut self,
        identity: &ParticipantIdentity,

        cx: &mut Context<Self>,
    ) -> Option<()> {
        let participant = self.remote_participants.iter().find_map(|(id, state)| {
            if id == identity {
                Some(state)
            } else {
                None
            }
        })?;
        let publication = &participant.audio_output_stream.as_ref()?.0;
        publication.set_enabled(!publication.is_enabled());
        cx.notify();
        Some(())
    }
}
358
#[cfg(not(windows))]
impl Render for LivekitWindow {
    /// Renders a control bar (mic and screen-share toggles) above a
    /// scrollable list of remote participants with their status, an optional
    /// deafen toggle, and any remote screen-share video view.
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        // Shared styling for the clickable buttons below.
        fn button() -> gpui::Div {
            div()
                .w(px(180.0))
                .h(px(30.0))
                .px_2()
                .m_2()
                .bg(rgb(0x8888ff))
        }

        div()
            .bg(rgb(0xffffff))
            .size_full()
            .flex()
            .flex_col()
            // Top bar: local-track controls, labeled by current state.
            .child(
                div().bg(rgb(0xffd4a8)).flex().flex_row().children([
                    button()
                        .id("toggle-mute")
                        .child(if let Some(track) = &self.microphone_track {
                            if track.is_muted() {
                                "Unmute"
                            } else {
                                "Mute"
                            }
                        } else {
                            "Publish mic"
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.toggle_mute(window, cx))),
                    button()
                        .id("toggle-screen-share")
                        .child(if self.screen_share_track.is_none() {
                            "Share screen"
                        } else {
                            "Unshare screen"
                        })
                        .on_click(
                            cx.listener(|this, _, window, cx| this.toggle_screen_share(window, cx)),
                        ),
                ]),
            )
            // One row per remote participant.
            .child(
                div()
                    .id("remote-participants")
                    .overflow_y_scroll()
                    .flex()
                    .flex_col()
                    .flex_grow()
                    .children(self.remote_participants.iter().map(|(identity, state)| {
                        div()
                            .h(px(300.0))
                            .flex()
                            .flex_col()
                            .m_2()
                            .px_2()
                            .bg(rgb(0x8888ff))
                            // Name with a status suffix derived from room events.
                            .child(SharedString::from(if state.speaking {
                                format!("{} (speaking)", &identity.0)
                            } else if state.muted {
                                format!("{} (muted)", &identity.0)
                            } else {
                                identity.0.clone()
                            }))
                            // Deafen toggle, shown only once an audio stream
                            // exists. Note: this closure's `state` shadows the
                            // outer tuple — it is the (publication, stream) pair.
                            .when_some(state.audio_output_stream.as_ref(), |el, state| {
                                el.child(
                                    button()
                                        .id(SharedString::from(identity.0.clone()))
                                        .child(if state.0.is_enabled() {
                                            "Deafen"
                                        } else {
                                            "Undeafen"
                                        })
                                        .on_click(cx.listener({
                                            let identity = identity.clone();
                                            move |this, _, _, cx| {
                                                this.toggle_remote_audio_for_participant(
                                                    &identity, cx,
                                                );
                                            }
                                        })),
                                )
                            })
                            // Remote screen-share video, if subscribed.
                            .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone()))
                    })),
            )
    }
}