1#![cfg_attr(windows, allow(unused))]
// TODO: The macOS build warns that `postage::stream::Stream` is unused, yet
// removing the import causes compile errors.
4#![cfg_attr(target_os = "macos", allow(unused_imports))]
5
6use gpui::{
7 actions, bounds, div, point,
8 prelude::{FluentBuilder as _, IntoElement},
9 px, rgb, size, AsyncAppContext, Bounds, InteractiveElement, KeyBinding, Menu, MenuItem,
10 ParentElement, Pixels, Render, ScreenCaptureStream, SharedString,
11 StatefulInteractiveElement as _, Styled, Task, View, ViewContext, VisualContext, WindowBounds,
12 WindowHandle, WindowOptions,
13};
14#[cfg(not(target_os = "windows"))]
15use livekit_client::{
16 capture_local_audio_track, capture_local_video_track,
17 id::ParticipantIdentity,
18 options::{TrackPublishOptions, VideoCodec},
19 participant::{Participant, RemoteParticipant},
20 play_remote_audio_track,
21 publication::{LocalTrackPublication, RemoteTrackPublication},
22 track::{LocalTrack, RemoteTrack, RemoteVideoTrack, TrackSource},
23 AudioStream, RemoteVideoTrackView, Room, RoomEvent, RoomOptions,
24};
25#[cfg(not(target_os = "windows"))]
26use postage::stream::Stream;
27
28#[cfg(target_os = "windows")]
29use livekit_client::{
30 participant::{Participant, RemoteParticipant},
31 publication::{LocalTrackPublication, RemoteTrackPublication},
32 track::{LocalTrack, RemoteTrack, RemoteVideoTrack},
33 AudioStream, RemoteVideoTrackView, Room, RoomEvent,
34};
35
36use livekit_server::token::{self, VideoGrant};
37use log::LevelFilter;
38use simplelog::SimpleLogger;
39
// Declare the `Quit` action in the `livekit_client` action namespace.
actions!(livekit_client, [Quit]);
41
// This example is not supported on Windows; compile a no-op entry point there.
#[cfg(windows)]
fn main() {}
44
// Entry point for non-Windows platforms: connects two test participants to a
// LiveKit server and opens one `LivekitWindow` per participant.
#[cfg(not(windows))]
fn main() {
    SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");

    gpui::App::new().run(|cx| {
        livekit_client::init(
            cx.background_executor().dispatcher.clone(),
            cx.http_client(),
        );

        // Report at startup whether the test (fake) or real LiveKit backend
        // was compiled in.
        #[cfg(any(test, feature = "test-support"))]
        println!("USING TEST LIVEKIT");

        #[cfg(not(any(test, feature = "test-support")))]
        println!("USING REAL LIVEKIT");

        cx.activate(true);
        cx.on_action(quit);
        cx.bind_keys([KeyBinding::new("cmd-q", Quit, None)]);
        cx.set_menus(vec![Menu {
            name: "Zed".into(),
            items: vec![MenuItem::Action {
                name: "Quit".into(),
                action: Box::new(Quit),
                os_action: None,
            }],
        }]);

        // Connection parameters are overridable via environment variables;
        // the defaults presumably match a locally-running LiveKit dev server.
        let livekit_url = std::env::var("LIVEKIT_URL").unwrap_or("http://localhost:7880".into());
        let livekit_key = std::env::var("LIVEKIT_KEY").unwrap_or("devkey".into());
        let livekit_secret = std::env::var("LIVEKIT_SECRET").unwrap_or("secret".into());
        let height = px(800.);
        let width = px(800.);

        cx.spawn(|cx| async move {
            let mut windows = Vec::new();
            for i in 0..2 {
                // Mint a join token for participant `i` in the shared test room.
                let token = token::create(
                    &livekit_key,
                    &livekit_secret,
                    Some(&format!("test-participant-{i}")),
                    VideoGrant::to_join("test-room"),
                )
                .unwrap();

                // Tile windows horizontally so both participants are visible.
                let bounds = bounds(point(width * i, px(0.0)), size(width, height));
                let window =
                    LivekitWindow::new(livekit_url.as_str(), token.as_str(), bounds, cx.clone())
                        .await;
                windows.push(window);
            }
        })
        .detach();
    });
}
100
// Handler for the `Quit` action: terminates the application.
fn quit(_: &Quit, cx: &mut gpui::AppContext) {
    cx.quit();
}
104
/// Per-window state: one LiveKit room connection plus the local tracks this
/// window has published and the remote participants it is rendering.
struct LivekitWindow {
    room: Room,
    // Locally published tracks; `None` until the user publishes them.
    microphone_track: Option<LocalTrackPublication>,
    screen_share_track: Option<LocalTrackPublication>,
    // Capture streams kept alive while the corresponding track is published.
    microphone_stream: Option<AudioStream>,
    screen_share_stream: Option<Box<dyn ScreenCaptureStream>>,
    // Remote participants, kept sorted by identity (binary-searched in
    // `remote_participant`). cfg'd out on Windows, where this example is a stub.
    #[cfg(not(target_os = "windows"))]
    remote_participants: Vec<(ParticipantIdentity, ParticipantState)>,
    // Held so the room-event forwarding task keeps running for the window's lifetime.
    _events_task: Task<()>,
}
115
/// Per-remote-participant UI state, updated from room events.
#[derive(Default)]
struct ParticipantState {
    // Subscribed audio publication and its playback stream, if any.
    audio_output_stream: Option<(RemoteTrackPublication, AudioStream)>,
    // Whether the participant's track is currently muted.
    muted: bool,
    // Subscribed video track and the view that renders it, if any.
    screen_share_output_view: Option<(RemoteVideoTrack, View<RemoteVideoTrackView>)>,
    // Whether the participant is in the current active-speakers set.
    speaking: bool,
}
123
124#[cfg(not(windows))]
125impl LivekitWindow {
126 async fn new(
127 url: &str,
128 token: &str,
129 bounds: Bounds<Pixels>,
130 cx: AsyncAppContext,
131 ) -> WindowHandle<Self> {
132 let (room, mut events) = Room::connect(url, token, RoomOptions::default())
133 .await
134 .unwrap();
135
136 cx.update(|cx| {
137 cx.open_window(
138 WindowOptions {
139 window_bounds: Some(WindowBounds::Windowed(bounds)),
140 ..Default::default()
141 },
142 |cx| {
143 cx.new_view(|cx| {
144 let _events_task = cx.spawn(|this, mut cx| async move {
145 while let Some(event) = events.recv().await {
146 this.update(&mut cx, |this: &mut LivekitWindow, cx| {
147 this.handle_room_event(event, cx)
148 })
149 .ok();
150 }
151 });
152
153 Self {
154 room,
155 microphone_track: None,
156 microphone_stream: None,
157 screen_share_track: None,
158 screen_share_stream: None,
159 remote_participants: Vec::new(),
160 _events_task,
161 }
162 })
163 },
164 )
165 .unwrap()
166 })
167 .unwrap()
168 }
169
170 fn handle_room_event(&mut self, event: RoomEvent, cx: &mut ViewContext<Self>) {
171 eprintln!("event: {event:?}");
172
173 match event {
174 RoomEvent::TrackUnpublished {
175 publication,
176 participant,
177 } => {
178 let output = self.remote_participant(participant);
179 let unpublish_sid = publication.sid();
180 if output
181 .audio_output_stream
182 .as_ref()
183 .map_or(false, |(track, _)| track.sid() == unpublish_sid)
184 {
185 output.audio_output_stream.take();
186 }
187 if output
188 .screen_share_output_view
189 .as_ref()
190 .map_or(false, |(track, _)| track.sid() == unpublish_sid)
191 {
192 output.screen_share_output_view.take();
193 }
194 cx.notify();
195 }
196
197 RoomEvent::TrackSubscribed {
198 publication,
199 participant,
200 track,
201 } => {
202 let output = self.remote_participant(participant);
203 match track {
204 RemoteTrack::Audio(track) => {
205 output.audio_output_stream = Some((
206 publication.clone(),
207 play_remote_audio_track(&track, cx.background_executor()).unwrap(),
208 ));
209 }
210 RemoteTrack::Video(track) => {
211 output.screen_share_output_view = Some((
212 track.clone(),
213 cx.new_view(|cx| RemoteVideoTrackView::new(track, cx)),
214 ));
215 }
216 }
217 cx.notify();
218 }
219
220 RoomEvent::TrackMuted { participant, .. } => {
221 if let Participant::Remote(participant) = participant {
222 self.remote_participant(participant).muted = true;
223 cx.notify();
224 }
225 }
226
227 RoomEvent::TrackUnmuted { participant, .. } => {
228 if let Participant::Remote(participant) = participant {
229 self.remote_participant(participant).muted = false;
230 cx.notify();
231 }
232 }
233
234 RoomEvent::ActiveSpeakersChanged { speakers } => {
235 for (identity, output) in &mut self.remote_participants {
236 output.speaking = speakers.iter().any(|speaker| {
237 if let Participant::Remote(speaker) = speaker {
238 speaker.identity() == *identity
239 } else {
240 false
241 }
242 });
243 }
244 cx.notify();
245 }
246
247 _ => {}
248 }
249
250 cx.notify();
251 }
252
253 fn remote_participant(&mut self, participant: RemoteParticipant) -> &mut ParticipantState {
254 match self
255 .remote_participants
256 .binary_search_by_key(&&participant.identity(), |row| &row.0)
257 {
258 Ok(ix) => &mut self.remote_participants[ix].1,
259 Err(ix) => {
260 self.remote_participants
261 .insert(ix, (participant.identity(), ParticipantState::default()));
262 &mut self.remote_participants[ix].1
263 }
264 }
265 }
266
267 fn toggle_mute(&mut self, cx: &mut ViewContext<Self>) {
268 if let Some(track) = &self.microphone_track {
269 if track.is_muted() {
270 track.unmute();
271 } else {
272 track.mute();
273 }
274 cx.notify();
275 } else {
276 let participant = self.room.local_participant();
277 cx.spawn(|this, mut cx| async move {
278 let (track, stream) = capture_local_audio_track(cx.background_executor())?.await;
279 let publication = participant
280 .publish_track(
281 LocalTrack::Audio(track),
282 TrackPublishOptions {
283 source: TrackSource::Microphone,
284 ..Default::default()
285 },
286 )
287 .await
288 .unwrap();
289 this.update(&mut cx, |this, cx| {
290 this.microphone_track = Some(publication);
291 this.microphone_stream = Some(stream);
292 cx.notify();
293 })
294 })
295 .detach();
296 }
297 }
298
299 fn toggle_screen_share(&mut self, cx: &mut ViewContext<Self>) {
300 if let Some(track) = self.screen_share_track.take() {
301 self.screen_share_stream.take();
302 let participant = self.room.local_participant();
303 cx.background_executor()
304 .spawn(async move {
305 participant.unpublish_track(&track.sid()).await.unwrap();
306 })
307 .detach();
308 cx.notify();
309 } else {
310 let participant = self.room.local_participant();
311 let sources = cx.screen_capture_sources();
312 cx.spawn(|this, mut cx| async move {
313 let sources = sources.await.unwrap()?;
314 let source = sources.into_iter().next().unwrap();
315 let (track, stream) = capture_local_video_track(&*source).await?;
316 let publication = participant
317 .publish_track(
318 LocalTrack::Video(track),
319 TrackPublishOptions {
320 source: TrackSource::Screenshare,
321 video_codec: VideoCodec::H264,
322 ..Default::default()
323 },
324 )
325 .await
326 .unwrap();
327 this.update(&mut cx, |this, cx| {
328 this.screen_share_track = Some(publication);
329 this.screen_share_stream = Some(stream);
330 cx.notify();
331 })
332 })
333 .detach();
334 }
335 }
336
337 fn toggle_remote_audio_for_participant(
338 &mut self,
339 identity: &ParticipantIdentity,
340 cx: &mut ViewContext<Self>,
341 ) -> Option<()> {
342 let participant = self.remote_participants.iter().find_map(|(id, state)| {
343 if id == identity {
344 Some(state)
345 } else {
346 None
347 }
348 })?;
349 let publication = &participant.audio_output_stream.as_ref()?.0;
350 publication.set_enabled(!publication.is_enabled());
351 cx.notify();
352 Some(())
353 }
354}
355
#[cfg(not(windows))]
impl Render for LivekitWindow {
    /// Draws a toolbar with the local microphone and screen-share controls
    /// above a scrollable list of remote participants.
    fn render(&mut self, cx: &mut ViewContext<Self>) -> impl IntoElement {
        // Shared styling for every button in this window.
        fn button() -> gpui::Div {
            div()
                .w(px(180.0))
                .h(px(30.0))
                .px_2()
                .m_2()
                .bg(rgb(0x8888ff))
        }

        div()
            .bg(rgb(0xffffff))
            .size_full()
            .flex()
            .flex_col()
            .child(
                // Toolbar row: mic and screen-share toggles, labelled by the
                // current publication/mute state.
                div().bg(rgb(0xffd4a8)).flex().flex_row().children([
                    button()
                        .id("toggle-mute")
                        .child(if let Some(track) = &self.microphone_track {
                            if track.is_muted() {
                                "Unmute"
                            } else {
                                "Mute"
                            }
                        } else {
                            "Publish mic"
                        })
                        .on_click(cx.listener(|this, _, cx| this.toggle_mute(cx))),
                    button()
                        .id("toggle-screen-share")
                        .child(if self.screen_share_track.is_none() {
                            "Share screen"
                        } else {
                            "Unshare screen"
                        })
                        .on_click(cx.listener(|this, _, cx| this.toggle_screen_share(cx))),
                ]),
            )
            .child(
                // One tile per remote participant: a status label, an optional
                // deafen toggle, and the participant's screen-share view.
                div()
                    .id("remote-participants")
                    .overflow_y_scroll()
                    .flex()
                    .flex_col()
                    .flex_grow()
                    .children(self.remote_participants.iter().map(|(identity, state)| {
                        div()
                            .h(px(300.0))
                            .flex()
                            .flex_col()
                            .m_2()
                            .px_2()
                            .bg(rgb(0x8888ff))
                            .child(SharedString::from(if state.speaking {
                                format!("{} (speaking)", &identity.0)
                            } else if state.muted {
                                format!("{} (muted)", &identity.0)
                            } else {
                                identity.0.clone()
                            }))
                            // Only show the deafen button once an audio stream
                            // is subscribed. NOTE: the closure's `state` param
                            // shadows the outer `state` with the stream tuple.
                            .when_some(state.audio_output_stream.as_ref(), |el, state| {
                                el.child(
                                    button()
                                        .id(SharedString::from(identity.0.clone()))
                                        .child(if state.0.is_enabled() {
                                            "Deafen"
                                        } else {
                                            "Undeafen"
                                        })
                                        .on_click(cx.listener({
                                            let identity = identity.clone();
                                            move |this, _, cx| {
                                                this.toggle_remote_audio_for_participant(
                                                    &identity, cx,
                                                );
                                            }
                                        })),
                                )
                            })
                            .children(state.screen_share_output_view.as_ref().map(|e| e.1.clone()))
                    })),
            )
    }
}