#![cfg_attr(windows, allow(unused))]
// TODO: For some reason the macOS build complains that the import of postage::stream::Stream is
// unused, but removing it causes compile errors.
#![cfg_attr(target_os = "macos", allow(unused_imports))]
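
// Example app that connects two participants to the same LiveKit room, each in its own
// window, so publishing and subscribing to audio and screen-share tracks can be
// exercised against a local LiveKit server.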

use gpui::{
    actions, bounds, div, point,
    prelude::{FluentBuilder as _, IntoElement},
    px, rgb, size, AppContext as _, AsyncApp, Bounds, Context, Entity, InteractiveElement,
    KeyBinding, Menu, MenuItem, ParentElement, Pixels, Render, ScreenCaptureStream, SharedString,
    StatefulInteractiveElement as _, Styled, Task, Window, WindowBounds, WindowHandle,
    WindowOptions,
};
#[cfg(not(target_os = "windows"))]
use livekit_client::{
    capture_local_audio_track, capture_local_video_track,
    id::ParticipantIdentity,
    options::{TrackPublishOptions, VideoCodec},
    participant::{Participant, RemoteParticipant},
    play_remote_audio_track,
    publication::{LocalTrackPublication, RemoteTrackPublication},
    track::{LocalTrack, RemoteTrack, RemoteVideoTrack, TrackSource},
    AudioStream, RemoteVideoTrackView, Room, RoomEvent, RoomOptions,
};
#[cfg(not(target_os = "windows"))]
use postage::stream::Stream;

#[cfg(target_os = "windows")]
use livekit_client::{
    participant::{Participant, RemoteParticipant},
    publication::{LocalTrackPublication, RemoteTrackPublication},
    track::{LocalTrack, RemoteTrack, RemoteVideoTrack},
    AudioStream, RemoteVideoTrackView, Room, RoomEvent,
};

use livekit_api::token::{self, VideoGrant};
use log::LevelFilter;
use simplelog::SimpleLogger;

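// Declare a `Quit` action in the `livekit_client` namespace so it can be bound to
// cmd-q and the application menu below.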
actions!(livekit_client, [Quit]);

#[cfg(windows)]
fn main() {}

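// On non-Windows targets: initialize logging and the LiveKit client, then connect two
// test participants to a local LiveKit server (defaults overridable via LIVEKIT_URL,
// LIVEKIT_KEY, and LIVEKIT_SECRET), opening one window per participant.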
#[cfg(not(windows))]
fn main() {
    SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");

    gpui::Application::new().run(|cx| {
        livekit_client::init(
            cx.background_executor().dispatcher.clone(),
            cx.http_client(),
        );

        #[cfg(any(test, feature = "test-support"))]
        println!("USING TEST LIVEKIT");

        #[cfg(not(any(test, feature = "test-support")))]
        println!("USING REAL LIVEKIT");

        cx.activate(true);
        cx.on_action(quit);
        cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
        cx.set_menus(vec![Menu {
            name: "Zed".into(),
            items: vec![MenuItem::Action {
                name: "Quit".into(),
                action: Box::new(Quit),
                os_action: None,
            }],
        }]);

        let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into());
        let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into());
        let livekit_secret = std::env::var("LIVEKIT_SECRET").unwrap_or("secret".into());
        let height = px(800.);
        let width = px(800.);

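        // Create a room token for each test participant and open their windows side by side.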
        cx.spawn(async move |cx| {
            let mut windows = Vec::new();
            for i in 0..2 {
                let token = token::create(
                    &livekit_key,
                    &livekit_secret,
                    Some(&format!("test-participant-{i}")),
                    VideoGrant::to_join("test-room"),
                )
                .unwrap();

                let bounds = bounds(point(width * i, px(0.0)), size(width, height));
                let window =
                    LivekitWindow::new(livekit_url.as_str(), token.as_str(), bounds, cx.clone())
                        .await;
                windows.push(window);
            }
        })
        .detach();
    });
}

fn quit(_: &Quit, cx: &mut gpui::App) {
    cx.quit();
}

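/// Per-window state: the connected LiveKit room, any locally published tracks and their
/// capture streams, and the playback state of each remote participant.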
struct LivekitWindow {
    room: Room,
    microphone_track: Option<LocalTrackPublication>,
    screen_share_track: Option<LocalTrackPublication>,
    microphone_stream: Option<AudioStream>,
    screen_share_stream: Option<Box<dyn ScreenCaptureStream>>,
    #[cfg(not(target_os = "windows"))]
    remote_participants: Vec<(ParticipantIdentity, ParticipantState)>,
    _events_task: Task<()>,
}

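/// Playback and rendering state tracked for a single remote participant.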
#[derive(Default)]
struct ParticipantState {
    audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>,
    muted: bool,
    screen_share_output_view: Option<(RemoteVideoTrack, Entity<RemoteVideoTrackView>)>,
    speaking: bool,
}

#[cfg(not(windows))]
impl LivekitWindow {
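    /// Connects to the room and opens a window that renders this state, forwarding
    /// room events to `handle_room_event`.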
    async fn new(
        url: &str,
        token: &str,
        bounds: Bounds<Pixels>,
        cx: AsyncApp,
    ) -> WindowHandle<Self> {
        let (room, mut events) = Room::connect(url, token, RoomOptions::default())
            .await
            .unwrap();

        cx.update(|cx| {
            cx.open_window(
                WindowOptions {
                    window_bounds: Some(WindowBounds::Windowed(bounds)),
                    ..Default::default()
                },
                |window, cx| {
                    cx.new(|cx| {
                        let _events_task = cx.spawn_in(window, async move |this, cx| {
                            while let Some(event) = events.recv().await {
                                cx.update(|window, cx| {
                                    this.update(cx, |this: &mut LivekitWindow, cx| {
                                        this.handle_room_event(event, window, cx)
                                    })
                                })
                                .ok();
                            }
                        });

                        Self {
                            room,
                            microphone_track: None,
                            microphone_stream: None,
                            screen_share_track: None,
                            screen_share_stream: None,
                            remote_participants: Vec::new(),
                            _events_task,
                        }
                    })
                },
            )
            .unwrap()
        })
        .unwrap()
    }

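    /// Updates window state in response to room events (tracks being published,
    /// subscribed, or muted, and active-speaker changes) and re-renders.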
    fn handle_room_event(&mut self, event: RoomEvent, window: &mut Window, cx: &mut Context<Self>) {
        eprintln!("event: {event:?}");

        match event {
            RoomEvent::TrackUnpublished {
                publication,
                participant,
            } => {
                let output = self.remote_participant(participant);
                let unpublish_sid = publication.sid();
                if output
                    .audio_output_stream
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.audio_output_stream.take();
                }
                if output
                    .screen_share_output_view
                    .as_ref()
                    .map_or(false, |(track, _)| track.sid() == unpublish_sid)
                {
                    output.screen_share_output_view.take();
                }
                cx.notify();
            }

            RoomEvent::TrackSubscribed {
                publication,
                participant,
                track,
            } => {
                let output = self.remote_participant(participant);
                match track {
                    RemoteTrack::Audio(track) => {
                        output.audio_output_stream = Some((
                            publication.clone(),
                            play_remote_audio_track(&track, cx.background_executor()).unwrap(),
                        ));
                    }
                    RemoteTrack::Video(track) => {
                        output.screen_share_output_view = Some((
                            track.clone(),
                            cx.new(|cx| RemoteVideoTrackView::new(track, window, cx)),
                        ));
                    }
                }
                cx.notify();
            }

            RoomEvent::TrackMuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = true;
                    cx.notify();
                }
            }

            RoomEvent::TrackUnmuted { participant, .. } => {
                if let Participant::Remote(participant) = participant {
                    self.remote_participant(participant).muted = false;
                    cx.notify();
                }
            }

            RoomEvent::ActiveSpeakersChanged { speakers } => {
                for (identity, output) in &mut self.remote_participants {
                    output.speaking = speakers.iter().any(|speaker| {
                        if let Participant::Remote(speaker) = speaker {
                            speaker.identity() == *identity
                        } else {
                            false
                        }
                    });
                }
                cx.notify();
            }

            _ => {}
        }

        cx.notify();
    }

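    /// Returns the state for the given remote participant, inserting a default entry
    /// (kept sorted by identity) if one doesn't exist yet.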
    fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState {
        match self
            .remote_participants
            .binary_search_by_key(&&participant.identity(), |row| &row.0)
        {
            Ok(ix) => &mut self.remote_participants[ix].1,
            Err(ix) => {
                self.remote_participants
                    .insert(ix, (participant.identity(), ParticipantState::default()));
                &mut self.remote_participants[ix].1
            }
        }
    }

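    /// Toggles the microphone: mutes or unmutes an existing publication, or captures
    /// and publishes a local audio track the first time it's used.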
    fn toggle_mute(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if let Some(track) = &self.microphone_track {
            if track.is_muted() {
                track.unmute();
            } else {
                track.mute();
            }
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            cx.spawn_in(window, async move |this, cx| {
                let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
                let publication = participant
                    .publish_track(
                        LocalTrack::Audio(track),
                        TrackPublishOptions {
                            source: TrackSource::Microphone,
                            ..Default::default()
                        },
                    )
                    .await
                    .unwrap();
                this.update(cx, |this, cx| {
                    this.microphone_track = Some(publication);
                    this.microphone_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

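    /// Toggles screen sharing: unpublishes the current track if there is one, or
    /// captures the first available screen and publishes it as H.264 video.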
    fn toggle_screen_share(&mut self, window: &mut Window, cx: &mut Context<Self>) {
        if let Some(track) = self.screen_share_track.take() {
            self.screen_share_stream.take();
            let participant = self.room.local_participant();
            cx.background_spawn(async move {
                participant.unpublish_track(&track.sid()).await.unwrap();
            })
            .detach();
            cx.notify();
        } else {
            let participant = self.room.local_participant();
            let sources = cx.screen_capture_sources();
            cx.spawn_in(window, async move |this, cx| {
                let sources = sources.await.unwrap()?;
                let source = sources.into_iter().next().unwrap();
                let (track, stream) = capture_local_video_track(&*source).await?;
                let publication = participant
                    .publish_track(
                        LocalTrack::Video(track),
                        TrackPublishOptions {
                            source: TrackSource::Screenshare,
                            video_codec: VideoCodec::H264,
                            ..Default::default()
                        },
                    )
                    .await
                    .unwrap();
                this.update(cx, |this, cx| {
                    this.screen_share_track = Some(publication);
                    this.screen_share_stream = Some(stream);
                    cx.notify();
                })
            })
            .detach();
        }
    }

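    /// Enables or disables playback of a remote participant's published audio track
    /// (the "Deafen"/"Undeafen" button in the UI).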
    fn toggle_remote_audio_for_participant(
        &mut self,
        identity: &ParticipantIdentity,
        cx: &mut Context<Self>,
    ) -> Option<()> {
        let participant = self.remote_participants.iter().find_map(|(id, state)| {
            if id == identity {
                Some(state)
            } else {
                None
            }
        })?;
        let publication = &participant.audio_output_stream.as_ref()?.0;
        publication.set_enabled(!publication.is_enabled());
        cx.notify();
        Some(())
    }
}

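// The UI: a row of buttons controlling the local mic and screen share, plus a
// scrollable list of remote participants showing speaking/muted state, a deafen
// toggle, and any screen-share video.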
#[cfg(not(windows))]
impl Render for LivekitWindow {
    fn render(&mut self, _window: &mut Window, cx: &mut Context<Self>) -> impl IntoElement {
        fn button() -> gpui::Div {
            div()
                .w(px(180.0))
                .h(px(30.0))
                .px_2()
                .m_2()
                .bg(rgb(0x8888ff))
        }

        div()
            .bg(rgb(0xffffff))
            .size_full()
            .flex()
            .flex_col()
            .child(
                div().bg(rgb(0xffd4a8)).flex().flex_row().children([
                    button()
                        .id("toggle-mute")
                        .child(if let Some(track) = &self.microphone_track {
                            if track.is_muted() {
                                "Unmute"
                            } else {
                                "Mute"
                            }
                        } else {
                            "Publish mic"
                        })
                        .on_click(cx.listener(|this, _, window, cx| this.toggle_mute(window, cx))),
                    button()
                        .id("toggle-screen-share")
                        .child(if self.screen_share_track.is_none() {
                            "Share screen"
                        } else {
                            "Unshare screen"
                        })
                        .on_click(
                            cx.listener(|this, _, window, cx| this.toggle_screen_share(window, cx)),
                        ),
                ]),
            )
            .child(
                div()
                    .id("remote-participants")
                    .overflow_y_scroll()
                    .flex()
                    .flex_col()
                    .flex_grow()
                    .children(self.remote_participants.iter().map(|(identity, state)| {
                        div()
                            .h(px(300.0))
                            .flex()
                            .flex_col()
                            .m_2()
                            .px_2()
                            .bg(rgb(0x8888ff))
                            .child(SharedString::from(if state.speaking {
                                format!("{} (speaking)", &identity.0)
                            } else if state.muted {
                                format!("{} (muted)", &identity.0)
                            } else {
                                identity.0.clone()
                            }))
                            .when_some(state.audio_output_stream.as_ref(), |el, state| {
                                el.child(
                                    button()
                                        .id(SharedString::from(identity.0.clone()))
                                        .child(if state.0.is_enabled() {
                                            "Deafen"
                                        } else {
                                            "Undeafen"
                                        })
                                        .on_click(cx.listener({
                                            let identity = identity.clone();
                                            move |this, _, _, cx| {
                                                this.toggle_remote_audio_for_participant(
                                                    &identity, cx,
                                                );
                                            }
                                        })),
                                )
                            })
                            .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone()))
                    })),
            )
    }
}