1mod bindings;
2mod compression_session;
3
4use crate::{bindings::SCStreamOutputType, compression_session::CompressionSession};
5use block::ConcreteBlock;
6use byteorder::{BigEndian, ReadBytesExt};
7use bytes::BytesMut;
8use cocoa::{
9 base::{id, nil, YES},
10 foundation::{NSArray, NSString, NSUInteger},
11};
12use core_foundation::{
13 base::{CFRelease, TCFType},
14 number::{CFBooleanGetValue, CFBooleanRef, CFNumberRef},
15 string::CFStringRef,
16};
17use futures::StreamExt;
18use gpui::{
19 actions,
20 elements::{Canvas, *},
21 keymap::Binding,
22 platform::current::Surface,
23 Menu, MenuItem, ViewContext,
24};
25use live_kit::Room;
26use log::LevelFilter;
27use media::{
28 core_media::{
29 kCMSampleAttachmentKey_NotSync, kCMVideoCodecType_H264, CMSampleBuffer, CMSampleBufferRef,
30 CMTimeMake,
31 },
32 core_video::{self, CVImageBuffer},
33 video_toolbox::VTCompressionSession,
34};
35use objc::{
36 class,
37 declare::ClassDecl,
38 msg_send,
39 runtime::{Class, Object, Sel},
40 sel, sel_impl,
41};
42use parking_lot::Mutex;
43use simplelog::SimpleLogger;
44use std::{ffi::c_void, ptr, slice, str, sync::Arc};
45
// Raw value of Foundation's `NSUTF8StringEncoding` constant (NSString.h);
// not exposed by the cocoa crate, so it is redeclared here.
#[allow(non_upper_case_globals)]
const NSUTF8StringEncoding: NSUInteger = 4;

// Declares the `Quit` action in the `capture` namespace for gpui's action
// dispatch (bound to cmd-q and the app menu in `main`).
actions!(capture, [Quit]);
50
51fn main() {
52 println!("Creating room...");
53 let room = Room::new();
54
55 println!("Dropping room...");
56 drop(room);
57
58 SimpleLogger::init(LevelFilter::Info, Default::default()).expect("could not initialize logger");
59
60 gpui::App::new(()).unwrap().run(|cx| {
61 cx.platform().activate(true);
62 cx.add_global_action(quit);
63
64 cx.add_bindings([Binding::new("cmd-q", Quit, None)]);
65 cx.set_menus(vec![Menu {
66 name: "Zed",
67 items: vec![MenuItem::Action {
68 name: "Quit",
69 action: Box::new(Quit),
70 }],
71 }]);
72
73 cx.add_window(Default::default(), |cx| ScreenCaptureView::new(cx));
74 });
75}
76
/// Root view of the capture window: holds and displays the most recent frame
/// captured from the screen.
struct ScreenCaptureView {
    /// Latest captured frame, if any; painted as a surface in `render`.
    image_buffer: Option<core_video::CVImageBuffer>,
}
80
impl gpui::Entity for ScreenCaptureView {
    // This view emits no events.
    type Event = ();
}
84
impl ScreenCaptureView {
    /// Creates the view and starts a ScreenCaptureKit session that streams
    /// frames of the first shareable display into `self.image_buffer`.
    ///
    /// Flow: asynchronously enumerate shareable content; in the completion
    /// block, build an `SCContentFilter` and `SCStreamConfiguration`, register
    /// a dynamically-declared `CaptureOutput` Objective-C class as the stream
    /// output (its method forwards sample buffers to a Rust closure — see
    /// `sample_output`), then start capturing. Complete frames are pushed
    /// through a watch channel; a spawned task copies each one into the view
    /// and calls `cx.notify()` to trigger a repaint.
    pub fn new(cx: &mut ViewContext<Self>) -> Self {
        // Watch channel holding the most recent captured frame. The sender is
        // wrapped in Arc<Mutex<..>> so it can be moved into the 'static
        // Objective-C callback closures below.
        let (image_buffer_tx, mut image_buffer_rx) =
            postage::watch::channel::<Option<CVImageBuffer>>();
        let image_buffer_tx = Arc::new(Mutex::new(image_buffer_tx));

        unsafe {
            // Completion handler for the SCShareableContent query issued at
            // the bottom of this unsafe block.
            let block = ConcreteBlock::new(move |content: id, error: id| {
                if !error.is_null() {
                    println!(
                        "ERROR {}",
                        string_from_objc(msg_send![error, localizedDescription])
                    );
                    return;
                }

                // Capture the first display reported by ScreenCaptureKit.
                let applications: id = msg_send![content, applications];
                let displays: id = msg_send![content, displays];
                let display: id = displays.objectAtIndex(0);
                let display_width: usize = msg_send![display, width];
                let display_height: usize = msg_send![display, height];
                // Only used by the commented-out H.264 compression path below.
                let mut compression_buffer = BytesMut::new();
                // let compression_session = CompressionSession::new(
                //     display_width,
                //     display_height,
                //     kCMVideoCodecType_H264,
                //     move |status, flags, sample_buffer| {
                //         if status != 0 {
                //             println!("error encoding frame, code: {}", status);
                //             return;
                //         }
                //         let sample_buffer = CMSampleBuffer::wrap_under_get_rule(sample_buffer);
                //
                //         let mut is_iframe = false;
                //         let attachments = sample_buffer.attachments();
                //         if let Some(attachments) = attachments.first() {
                //             is_iframe = attachments
                //                 .find(kCMSampleAttachmentKey_NotSync as CFStringRef)
                //                 .map_or(true, |not_sync| {
                //                     CFBooleanGetValue(*not_sync as CFBooleanRef)
                //                 });
                //         }
                //
                //         const START_CODE: [u8; 4] = [0x00, 0x00, 0x00, 0x01];
                //         if is_iframe {
                //             let format_description = sample_buffer.format_description();
                //             for ix in 0..format_description.h264_parameter_set_count() {
                //                 let parameter_set =
                //                     format_description.h264_parameter_set_at_index(ix).unwrap();
                //                 compression_buffer.extend_from_slice(&START_CODE);
                //                 compression_buffer.extend_from_slice(parameter_set);
                //                 let nal_unit = compression_buffer.split();
                //             }
                //         }
                //
                //         let data = sample_buffer.data();
                //         let mut data = data.bytes();
                //
                //         const AVCC_HEADER_LENGTH: usize = 4;
                //         while data.len() - AVCC_HEADER_LENGTH > 0 {
                //             let nal_unit_len = match data.read_u32::<BigEndian>() {
                //                 Ok(len) => len as usize,
                //                 Err(error) => {
                //                     log::error!("error decoding nal unit length: {}", error);
                //                     return;
                //                 }
                //             };
                //             compression_buffer.extend_from_slice(&START_CODE);
                //             compression_buffer.extend_from_slice(&data[..nal_unit_len as usize]);
                //             data = &data[nal_unit_len..];
                //
                //             let nal_unit = compression_buffer.split();
                //         }
                //     },
                // )
                // .unwrap();

                // Dynamically declare an Objective-C class with a `callback`
                // ivar and a `stream:didOutputSampleBuffer:ofType:` method
                // (implemented by `sample_output`) that invokes the Rust
                // closure stored in that ivar.
                let mut decl = ClassDecl::new("CaptureOutput", class!(NSObject)).unwrap();
                decl.add_ivar::<*mut c_void>("callback");
                decl.add_method(
                    sel!(stream:didOutputSampleBuffer:ofType:),
                    sample_output as extern "C" fn(&Object, Sel, id, id, SCStreamOutputType),
                );
                let capture_output_class = decl.register();

                let output: id = msg_send![capture_output_class, alloc];
                let output: id = msg_send![output, init];
                let surface_tx = image_buffer_tx.clone();

                // Per-frame handler: drops incomplete frames and publishes
                // each complete frame's image buffer through the watch
                // channel.
                let callback = Box::new(move |buffer: CMSampleBufferRef| {
                    let buffer = CMSampleBuffer::wrap_under_get_rule(buffer);
                    let attachments = buffer.attachments();
                    let attachments = attachments.first().expect("no attachments for sample");
                    let string = bindings::SCStreamFrameInfoStatus.0 as CFStringRef;
                    let status = core_foundation::number::CFNumber::wrap_under_get_rule(
                        *attachments.get(string) as CFNumberRef,
                    )
                    .to_i64()
                    .expect("invalid frame info status");

                    if status != bindings::SCFrameStatus_SCFrameStatusComplete {
                        println!("received incomplete frame");
                        return;
                    }

                    // NOTE(review): timing info is only needed by the disabled
                    // compression path below; it is computed but unused here.
                    let timing_info = buffer.sample_timing_info(0).unwrap();
                    let image_buffer = buffer.image_buffer();
                    // compression_session
                    //     .encode_frame(&image_buffer, timing_info)
                    //     .unwrap();
                    *surface_tx.lock().borrow_mut() = Some(image_buffer);
                }) as Box<dyn FnMut(CMSampleBufferRef)>;
                // Leak the double-boxed closure into the `callback` ivar; the
                // output object (and stream) live for the app's lifetime, so
                // it is never freed. See `sample_output` for the read side.
                let callback = Box::into_raw(Box::new(callback));
                (*output).set_ivar("callback", callback as *mut c_void);

                // Filter: the whole display, including all applications,
                // excluding no windows.
                let filter: id = msg_send![class!(SCContentFilter), alloc];
                let filter: id = msg_send![filter, initWithDisplay: display includingApplications: applications exceptingWindows: nil];
                // let filter: id = msg_send![filter, initWithDesktopIndependentWindow: window];
                let config: id = msg_send![class!(SCStreamConfiguration), alloc];
                let config: id = msg_send![config, init];
                // 2x the display's point size — presumably to capture at
                // retina (backing-pixel) resolution; TODO confirm scale
                // factor per-display rather than hard-coding 2.
                let _: () = msg_send![config, setWidth: display_width * 2];
                let _: () = msg_send![config, setHeight: display_height * 2];
                // Cap at 60 fps with a queue depth of 6 buffered frames.
                let _: () = msg_send![config, setMinimumFrameInterval: CMTimeMake(1, 60)];
                let _: () = msg_send![config, setQueueDepth: 6];
                let _: () = msg_send![config, setShowsCursor: YES];
                let _: () = msg_send![
                    config,
                    setPixelFormat: media::core_video::kCVPixelFormatType_32BGRA
                ];

                let stream: id = msg_send![class!(SCStream), alloc];
                let stream: id = msg_send![stream, initWithFilter: filter configuration: config delegate: output];
                // NOTE(review): `error` is passed by reference to
                // addStreamOutput but never inspected afterwards.
                let error: id = nil;
                let queue = bindings::dispatch_queue_create(
                    ptr::null(),
                    bindings::NSObject(ptr::null_mut()),
                );

                let _: () = msg_send![stream,
                    addStreamOutput: output type: bindings::SCStreamOutputType_SCStreamOutputTypeScreen
                    sampleHandlerQueue: queue
                    error: &error
                ];

                // Completion handler for startCapture; only reports errors.
                let start_capture_completion = ConcreteBlock::new(move |error: id| {
                    if !error.is_null() {
                        println!(
                            "error starting capture... error? {}",
                            string_from_objc(msg_send![error, localizedDescription])
                        );
                        return;
                    }

                    println!("starting capture");
                });

                assert!(!stream.is_null());
                let _: () = msg_send![
                    stream,
                    startCaptureWithCompletionHandler: start_capture_completion
                ];
            });

            // Kick off the async shareable-content enumeration; `block` runs
            // when the system responds.
            let _: id = msg_send![
                class!(SCShareableContent),
                getShareableContentWithCompletionHandler: block
            ];
        }

        // Pump frames from the watch channel into the view. Stops when the
        // view has been dropped (weak upgrade fails) or the channel closes.
        cx.spawn_weak(|this, mut cx| async move {
            while let Some(image_buffer) = image_buffer_rx.next().await {
                if let Some(this) = this.upgrade(&cx) {
                    this.update(&mut cx, |this, cx| {
                        this.image_buffer = image_buffer;
                        cx.notify();
                    })
                } else {
                    break;
                }
            }
        })
        .detach();

        Self { image_buffer: None }
    }
}
271
272impl gpui::View for ScreenCaptureView {
273 fn ui_name() -> &'static str {
274 "View"
275 }
276
277 fn render(&mut self, _: &mut gpui::RenderContext<Self>) -> gpui::ElementBox {
278 let image_buffer = self.image_buffer.clone();
279 Canvas::new(move |bounds, _, cx| {
280 if let Some(image_buffer) = image_buffer.clone() {
281 cx.scene.push_surface(Surface {
282 bounds,
283 image_buffer,
284 });
285 }
286 })
287 .boxed()
288 }
289}
290
291pub unsafe fn string_from_objc(string: id) -> String {
292 if string.is_null() {
293 Default::default()
294 } else {
295 let len = msg_send![string, lengthOfBytesUsingEncoding: NSUTF8StringEncoding];
296 let bytes = string.UTF8String() as *const u8;
297 str::from_utf8(slice::from_raw_parts(bytes, len))
298 .unwrap()
299 .to_string()
300 }
301}
302
/// Implementation of `stream:didOutputSampleBuffer:ofType:` for the
/// dynamically-declared `CaptureOutput` Objective-C class: forwards the
/// incoming sample buffer to the Rust closure stored in the instance's
/// `callback` ivar.
extern "C" fn sample_output(
    this: &Object,
    _: Sel,
    _stream: id,
    buffer: id,
    _kind: SCStreamOutputType,
) {
    unsafe {
        // SAFETY: `callback` was set (in `ScreenCaptureView::new`) to
        // `Box::into_raw` of a `Box<dyn FnMut(CMSampleBufferRef)>` before the
        // output object was registered with the stream, and is never freed,
        // so the pointer is valid for the program's lifetime.
        let callback = *this.get_ivar::<*mut c_void>("callback");
        let callback = &mut *(callback as *mut Box<dyn FnMut(CMSampleBufferRef)>);
        (*callback)(buffer as CMSampleBufferRef);
    }
}
316
317fn quit(_: &Quit, cx: &mut gpui::MutableAppContext) {
318 cx.platform().quit();
319}