use std::sync::Arc;

use call::{ActiveCall, ParticipantLocation, Room};
use client::{proto::PeerId, User};
use gpui::{actions, App, Task, Window};
use gpui::{canvas, point, AnyElement, Hsla, IntoElement, MouseButton, Path, Styled};
use rpc::proto::{self};
use theme::ActiveTheme;
use ui::{prelude::*, Avatar, AvatarAudioStatusIndicator, Facepile, TintColor, Tooltip};
use workspace::notifications::DetachAndPromptErr;

use crate::TitleBar;

actions!(
    collab,
    [ToggleScreenSharing, ToggleMute, ToggleDeafen, LeaveCall]
);

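/// Starts or stops sharing the local screen in the active call, emitting a
/// telemetry event for the change and prompting the user if sharing fails
/// (e.g. because Zed lacks screen-recording permission).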
fn toggle_screen_sharing(_: &ToggleScreenSharing, window: &mut Window, cx: &mut App) {
    let call = ActiveCall::global(cx).read(cx);
    if let Some(room) = call.room().cloned() {
        let toggle_screen_sharing = room.update(cx, |room, cx| {
            if room.is_screen_sharing() {
                telemetry::event!(
                    "Screen Share Disabled",
                    room_id = room.id(),
                    channel_id = room.channel_id(),
                );
                Task::ready(room.unshare_screen(cx))
            } else {
                telemetry::event!(
                    "Screen Share Enabled",
                    room_id = room.id(),
                    channel_id = room.channel_id(),
                );
                room.share_screen(cx)
            }
        });
        toggle_screen_sharing.detach_and_prompt_err("Sharing Screen Failed", window, cx, |e, _, _| Some(format!("{:?}\n\nPlease check that you have given Zed permissions to record your screen in Settings.", e)));
    }
}

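/// Mutes or unmutes the microphone in the active call, recording the
/// post-toggle state ("Microphone Enabled" when unmuting, "Microphone
/// Disabled" when muting) via telemetry.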
fn toggle_mute(_: &ToggleMute, cx: &mut App) {
    let call = ActiveCall::global(cx).read(cx);
    if let Some(room) = call.room().cloned() {
        room.update(cx, |room, cx| {
            let operation = if room.is_muted() {
                "Microphone Enabled"
            } else {
                "Microphone Disabled"
            };
            telemetry::event!(
                operation,
                room_id = room.id(),
                channel_id = room.channel_id(),
            );

            room.toggle_mute(cx)
        });
    }
}

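/// Toggles deafening (muting incoming audio from other participants) for the
/// active call.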
fn toggle_deafen(_: &ToggleDeafen, cx: &mut App) {
    if let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() {
        room.update(cx, |room, cx| room.toggle_deafen(cx));
    }
}

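/// Paints a thin, full-width ribbon in the given color via a custom canvas
/// path. Used in `render_collaborator_list` to underline each collaborator's
/// facepile with their cursor color.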
fn render_color_ribbon(color: Hsla) -> impl Element {
    canvas(
        move |_, _, _| {},
        move |bounds, _, window, _| {
            let height = bounds.size.height;
            let horizontal_offset = height;
            let vertical_offset = px(height.0 / 2.0);
            let mut path = Path::new(bounds.bottom_left());
            path.curve_to(
                bounds.origin + point(horizontal_offset, vertical_offset),
                bounds.origin + point(px(0.0), vertical_offset),
            );
            path.line_to(bounds.top_right() + point(-horizontal_offset, vertical_offset));
            path.curve_to(
                bounds.bottom_right(),
                bounds.top_right() + point(px(0.0), vertical_offset),
            );
            path.line_to(bounds.bottom_left());
            window.paint_path(path, color);
        },
    )
    .h_1()
    .w_full()
}

impl TitleBar {
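    /// Renders the list of call participants shown in the title bar: the
    /// current user's facepile first, then remote participants (excluding
    /// guests) sorted by participant index, each underlined with their cursor
    /// color and clickable to follow or unfollow them.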
    pub(crate) fn render_collaborator_list(
        &self,
        _: &mut Window,
        cx: &mut Context<Self>,
    ) -> impl IntoElement {
        let room = ActiveCall::global(cx).read(cx).room().cloned();
        let current_user = self.user_store.read(cx).current_user();
        let client = self.client.clone();
        let project_id = self.project.read(cx).remote_id();
        let workspace = self.workspace.upgrade();

        h_flex()
            .id("collaborator-list")
            .w_full()
            .gap_1()
            .overflow_x_scroll()
            .when_some(
                current_user.clone().zip(client.peer_id()).zip(room.clone()),
                |this, ((current_user, peer_id), room)| {
                    let player_colors = cx.theme().players();
                    let room = room.read(cx);
                    let mut remote_participants =
                        room.remote_participants().values().collect::<Vec<_>>();
                    remote_participants.sort_by_key(|p| p.participant_index.0);

                    let current_user_face_pile = self.render_collaborator(
                        &current_user,
                        peer_id,
                        true,
                        room.is_speaking(),
                        room.is_muted(),
                        None,
                        room,
                        project_id,
                        &current_user,
                        cx,
                    );

                    this.children(current_user_face_pile.map(|face_pile| {
                        v_flex()
                            .on_mouse_down(MouseButton::Left, |_, _, cx| cx.stop_propagation())
                            .child(face_pile)
                            .child(render_color_ribbon(player_colors.local().cursor))
                    }))
                    .children(remote_participants.iter().filter_map(|collaborator| {
                        let player_color =
                            player_colors.color_for_participant(collaborator.participant_index.0);
                        let is_following = workspace
                            .as_ref()?
                            .read(cx)
                            .is_being_followed(collaborator.peer_id);
                        let is_present = project_id.map_or(false, |project_id| {
                            collaborator.location
                                == ParticipantLocation::SharedProject { project_id }
                        });

                        let facepile = self.render_collaborator(
                            &collaborator.user,
                            collaborator.peer_id,
                            is_present,
                            collaborator.speaking,
                            collaborator.muted,
                            is_following.then_some(player_color.selection),
                            room,
                            project_id,
                            &current_user,
                            cx,
                        )?;

                        Some(
                            v_flex()
                                .id(("collaborator", collaborator.user.id))
                                .child(facepile)
                                .child(render_color_ribbon(player_color.cursor))
                                .cursor_pointer()
                                .on_click({
                                    let peer_id = collaborator.peer_id;
                                    cx.listener(move |this, _, window, cx| {
                                        this.workspace
                                            .update(cx, |workspace, cx| {
                                                if is_following {
                                                    workspace.unfollow(peer_id, window, cx);
                                                } else {
                                                    workspace.follow(peer_id, window, cx);
                                                }
                                            })
                                            .ok();
                                    })
                                })
                                .tooltip({
                                    let login = collaborator.user.github_login.clone();
                                    Tooltip::text(format!("Follow {login}"))
                                }),
                        )
                    }))
                },
            )
    }

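    /// Renders a single participant's facepile: their avatar (grayed out when
    /// they are not in the shared project, with a highlighted border while
    /// speaking and a muted indicator when muted), up to three follower
    /// avatars, and a "+N" label for any additional followers. Returns `None`
    /// for guests.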
    #[allow(clippy::too_many_arguments)]
    fn render_collaborator(
        &self,
        user: &Arc<User>,
        peer_id: PeerId,
        is_present: bool,
        is_speaking: bool,
        is_muted: bool,
        leader_selection_color: Option<Hsla>,
        room: &Room,
        project_id: Option<u64>,
        current_user: &Arc<User>,
        cx: &App,
    ) -> Option<Div> {
        if room.role_for_user(user.id) == Some(proto::ChannelRole::Guest) {
            return None;
        }

        const FACEPILE_LIMIT: usize = 3;
        let followers = project_id.map_or(&[] as &[_], |id| room.followers_for(peer_id, id));
        let extra_count = followers.len().saturating_sub(FACEPILE_LIMIT);

        Some(
            div()
                .m_0p5()
                .p_0p5()
                // When the collaborator is not followed, still draw this wrapper div, but leave
                // it transparent, so that it does not shift the layout when following.
                .when_some(leader_selection_color, |div, color| {
                    div.rounded_sm().bg(color)
                })
                .child(
                    Facepile::empty()
                        .child(
                            Avatar::new(user.avatar_uri.clone())
                                .grayscale(!is_present)
                                .border_color(if is_speaking {
                                    cx.theme().status().info
                                } else {
                                    // We draw the border in a transparent color rather than
                                    // omitting it, to avoid the layout shift that would come
                                    // with adding/removing the border.
                                    gpui::transparent_black()
                                })
                                .when(is_muted, |avatar| {
                                    avatar.indicator(
                                        AvatarAudioStatusIndicator::new(ui::AudioStatus::Muted)
                                            .tooltip({
                                                let github_login = user.github_login.clone();
                                                Tooltip::text(format!("{} is muted", github_login))
                                            }),
                                    )
                                }),
                        )
                        .children(followers.iter().take(FACEPILE_LIMIT).filter_map(
                            |follower_peer_id| {
                                let follower = room
                                    .remote_participants()
                                    .values()
                                    .find_map(|p| {
                                        (p.peer_id == *follower_peer_id).then_some(&p.user)
                                    })
                                    .or_else(|| {
                                        (self.client.peer_id() == Some(*follower_peer_id))
                                            .then_some(current_user)
                                    })?
                                    .clone();

                                Some(div().mt(-px(4.)).child(
                                    Avatar::new(follower.avatar_uri.clone()).size(rems(0.75)),
                                ))
                            },
                        ))
                        .children(if extra_count > 0 {
                            Some(
                                Label::new(format!("+{extra_count}"))
                                    .ml_1()
                                    .into_any_element(),
                            )
                        } else {
                            None
                        }),
                ),
        )
    }

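    /// Renders the call control buttons for the title bar: share/unshare
    /// project (local projects only), leave call, microphone mute, audio
    /// mute (deafen), and screen sharing where the platform supports it.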
    pub(crate) fn render_call_controls(
        &self,
        window: &mut Window,
        cx: &mut Context<Self>,
    ) -> Vec<AnyElement> {
        let Some(room) = ActiveCall::global(cx).read(cx).room().cloned() else {
            return Vec::new();
        };

        let is_connecting_to_project = self
            .workspace
            .update(cx, |workspace, cx| workspace.has_active_modal(window, cx))
            .unwrap_or(false);

        let room = room.read(cx);
        let project = self.project.read(cx);
        let is_local = project.is_local() || project.is_via_ssh();
        let is_shared = is_local && project.is_shared();
        let is_muted = room.is_muted();
        let muted_by_user = room.muted_by_user();
        let is_deafened = room.is_deafened().unwrap_or(false);
        let is_screen_sharing = room.is_screen_sharing();
        let can_use_microphone = room.can_use_microphone();
        let can_share_projects = room.can_share_projects();
        let screen_sharing_supported = match self.platform_style {
            PlatformStyle::Mac => true,
            PlatformStyle::Linux | PlatformStyle::Windows => false,
        };

        let mut children = Vec::new();

        if is_local && can_share_projects && !is_connecting_to_project {
            children.push(
                Button::new(
                    "toggle_sharing",
                    if is_shared { "Unshare" } else { "Share" },
                )
                .tooltip(Tooltip::text(if is_shared {
                    "Stop sharing project with call participants"
                } else {
                    "Share project with call participants"
                }))
                .style(ButtonStyle::Subtle)
                .selected_style(ButtonStyle::Tinted(TintColor::Accent))
                .toggle_state(is_shared)
                .label_size(LabelSize::Small)
                .on_click(cx.listener(move |this, _, window, cx| {
                    if is_shared {
                        this.unshare_project(&Default::default(), window, cx);
                    } else {
                        this.share_project(&Default::default(), cx);
                    }
                }))
                .into_any_element(),
            );
        }

        children.push(
            div()
                .pr_2()
                .child(
                    IconButton::new("leave-call", ui::IconName::Exit)
                        .style(ButtonStyle::Subtle)
                        .tooltip(Tooltip::text("Leave call"))
                        .icon_size(IconSize::Small)
                        .on_click(move |_, _window, cx| {
                            ActiveCall::global(cx)
                                .update(cx, |call, cx| call.hang_up(cx))
                                .detach_and_log_err(cx);
                        }),
                )
                .into_any_element(),
        );

        if can_use_microphone {
            children.push(
                IconButton::new(
                    "mute-microphone",
                    if is_muted {
                        ui::IconName::MicMute
                    } else {
                        ui::IconName::Mic
                    },
                )
                .tooltip(move |window, cx| {
                    if is_muted {
                        if is_deafened {
                            Tooltip::with_meta(
                                "Unmute Microphone",
                                None,
                                "Audio will be unmuted",
                                window,
                                cx,
                            )
                        } else {
                            Tooltip::simple("Unmute Microphone", cx)
                        }
                    } else {
                        Tooltip::simple("Mute Microphone", cx)
                    }
                })
                .style(ButtonStyle::Subtle)
                .icon_size(IconSize::Small)
                .toggle_state(is_muted)
                .selected_style(ButtonStyle::Tinted(TintColor::Error))
                .on_click(move |_, _window, cx| {
                    toggle_mute(&Default::default(), cx);
                })
                .into_any_element(),
            );
        }

        children.push(
            IconButton::new(
                "mute-sound",
                if is_deafened {
                    ui::IconName::AudioOff
                } else {
                    ui::IconName::AudioOn
                },
            )
            .style(ButtonStyle::Subtle)
            .selected_style(ButtonStyle::Tinted(TintColor::Error))
            .icon_size(IconSize::Small)
            .toggle_state(is_deafened)
            .tooltip(move |window, cx| {
                if is_deafened {
                    let label = "Unmute Audio";

                    if !muted_by_user {
                        Tooltip::with_meta(label, None, "Microphone will be unmuted", window, cx)
                    } else {
                        Tooltip::simple(label, cx)
                    }
                } else {
                    let label = "Mute Audio";

                    if !muted_by_user {
                        Tooltip::with_meta(label, None, "Microphone will be muted", window, cx)
                    } else {
                        Tooltip::simple(label, cx)
                    }
                }
            })
            .on_click(move |_, _, cx| toggle_deafen(&Default::default(), cx))
            .into_any_element(),
        );

        if can_use_microphone && screen_sharing_supported {
            children.push(
                IconButton::new("screen-share", ui::IconName::Screen)
                    .style(ButtonStyle::Subtle)
                    .icon_size(IconSize::Small)
                    .toggle_state(is_screen_sharing)
                    .selected_style(ButtonStyle::Tinted(TintColor::Accent))
                    .tooltip(Tooltip::text(if is_screen_sharing {
                        "Stop Sharing Screen"
                    } else {
                        "Share Screen"
                    }))
                    .on_click(move |_, window, cx| {
                        toggle_screen_sharing(&Default::default(), window, cx)
                    })
                    .into_any_element(),
            );
        }

        children.push(div().pr_2().into_any_element());

        children
    }
}