diff --git a/crates/gpui/src/platform.rs b/crates/gpui/src/platform.rs
index 336aa5434195304fe8553c4660935b5838397cad..8d0fbcaed016e058e1500472cfdb4ca47400f6c5 100644
--- a/crates/gpui/src/platform.rs
+++ b/crates/gpui/src/platform.rs
@@ -47,6 +47,8 @@ use crate::{
 use anyhow::Result;
 use async_task::Runnable;
 use futures::channel::oneshot;
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;
 use image::codecs::gif::GifDecoder;
 use image::{AnimationDecoder as _, Frame};
 use raw_window_handle::{HasDisplayHandle, HasWindowHandle};
@@ -580,6 +582,14 @@ pub(crate) trait PlatformWindow: HasWindowHandle + HasDisplayHandle {
     fn native_window_id(&self) -> Option {
         None
     }
+
+    /// Renders the given scene to a texture and returns the pixel data as an RGBA image.
+    /// This does not present the frame to the screen - useful for visual testing where we want
+    /// to capture what would be rendered without displaying it or requiring the window to be visible.
+    #[cfg(any(test, feature = "test-support"))]
+    fn render_to_image(&self, _scene: &Scene) -> Result<RgbaImage> {
+        anyhow::bail!("render_to_image not implemented for this platform")
+    }
 }
 
 /// This type is public so that our test macro can generate and use it, but it should not
diff --git a/crates/gpui/src/platform/blade/blade_renderer.rs b/crates/gpui/src/platform/blade/blade_renderer.rs
index dd0be7db437fba573a1a552b52cf12d7c72f0361..a7c4dde1da57c1b3796a9e370eaa67ea64ec37c9 100644
--- a/crates/gpui/src/platform/blade/blade_renderer.rs
+++ b/crates/gpui/src/platform/blade/blade_renderer.rs
@@ -7,9 +7,13 @@ use crate::{
     PrimitiveBatch, Quad, ScaledPixels, Scene, Shadow, Size, Underline,
     get_gamma_correction_ratios,
 };
+#[cfg(any(test, feature = "test-support"))]
+use anyhow::Result;
 use blade_graphics as gpu;
 use blade_util::{BufferBelt, BufferBeltDescriptor};
 use bytemuck::{Pod, Zeroable};
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;
 #[cfg(target_os = "macos")]
 use media::core_video::CVMetalTextureCache;
 use std::sync::Arc;
@@ -917,6 +921,13 @@ impl BladeRenderer {
         self.wait_for_gpu();
         self.last_sync_point = Some(sync_point);
     }
+
+    /// Renders the scene to a texture and returns the pixel data as an RGBA image.
+    /// This is not yet implemented for BladeRenderer.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn render_to_image(&mut self, _scene: &Scene) -> Result<RgbaImage> {
+        anyhow::bail!("render_to_image is not yet implemented for BladeRenderer")
+    }
 }
 
 fn create_path_intermediate_texture(
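The pattern used here is a cfg-gated trait method with a failing default: only backends that can actually capture (the Metal renderer below) override it, while BladeRenderer keeps a bail-ing stub. A minimal, self-contained sketch of that pattern, assuming nothing from the Zed codebase: the Scene, RgbaImage, HeadlessBackend, and CapturingBackend types are illustrative stand-ins, and the test-support cfg gates are omitted so the snippet compiles on its own.

use anyhow::{Result, bail};

// Illustrative stand-ins for gpui's Scene and image::RgbaImage.
struct Scene;
struct RgbaImage(Vec<u8>);

trait PlatformWindow {
    // In the real patch this method sits behind #[cfg(any(test, feature = "test-support"))].
    fn render_to_image(&self, _scene: &Scene) -> Result<RgbaImage> {
        // Failing default: platforms without capture support compile unchanged.
        bail!("render_to_image not implemented for this platform")
    }
}

struct HeadlessBackend;
impl PlatformWindow for HeadlessBackend {} // inherits the failing default

struct CapturingBackend;
impl PlatformWindow for CapturingBackend {
    fn render_to_image(&self, _scene: &Scene) -> Result<RgbaImage> {
        // A real backend would rasterize the scene and read the pixels back here.
        Ok(RgbaImage(vec![0, 0, 0, 255]))
    }
}

fn main() {
    let scene = Scene;
    assert!(HeadlessBackend.render_to_image(&scene).is_err());
    assert!(CapturingBackend.render_to_image(&scene).is_ok());
}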
diff --git a/crates/gpui/src/platform/mac/metal_renderer.rs b/crates/gpui/src/platform/mac/metal_renderer.rs
index 550041a0ccb4cd39bc7a86317d9540e806af2a28..467510d378ecd58e49e67c78bf1821e68dd427c6 100644
--- a/crates/gpui/src/platform/mac/metal_renderer.rs
+++ b/crates/gpui/src/platform/mac/metal_renderer.rs
@@ -11,6 +11,8 @@ use cocoa::{
     foundation::{NSSize, NSUInteger},
     quartzcore::AutoresizingMask,
 };
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;
 use core_foundation::base::TCFType;
 
 use core_video::{
@@ -154,6 +156,9 @@ impl MetalRenderer {
         layer.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
         layer.set_opaque(false);
         layer.set_maximum_drawable_count(3);
+        // Allow texture reading for visual tests (captures screenshots without ScreenCaptureKit)
+        #[cfg(any(test, feature = "test-support"))]
+        layer.set_framebuffer_only(false);
         unsafe {
             let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
             let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
@@ -426,6 +431,97 @@
         }
     }
 
+    /// Renders the scene to a texture and returns the pixel data as an RGBA image.
+    /// This does not present the frame to the screen - useful for visual testing
+    /// where we want to capture what would be rendered without displaying it.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn render_to_image(&mut self, scene: &Scene) -> Result<RgbaImage> {
+        let layer = self.layer.clone();
+        let viewport_size = layer.drawable_size();
+        let viewport_size: Size<DevicePixels> = size(
+            (viewport_size.width.ceil() as i32).into(),
+            (viewport_size.height.ceil() as i32).into(),
+        );
+        let drawable = layer
+            .next_drawable()
+            .ok_or_else(|| anyhow::anyhow!("Failed to get drawable for render_to_image"))?;
+
+        loop {
+            let mut instance_buffer = self.instance_buffer_pool.lock().acquire(&self.device);
+
+            let command_buffer =
+                self.draw_primitives(scene, &mut instance_buffer, drawable, viewport_size);
+
+            match command_buffer {
+                Ok(command_buffer) => {
+                    let instance_buffer_pool = self.instance_buffer_pool.clone();
+                    let instance_buffer = Cell::new(Some(instance_buffer));
+                    let block = ConcreteBlock::new(move |_| {
+                        if let Some(instance_buffer) = instance_buffer.take() {
+                            instance_buffer_pool.lock().release(instance_buffer);
+                        }
+                    });
+                    let block = block.copy();
+                    command_buffer.add_completed_handler(&block);
+
+                    // Commit and wait for completion without presenting
+                    command_buffer.commit();
+                    command_buffer.wait_until_completed();
+
+                    // Read pixels from the texture
+                    let texture = drawable.texture();
+                    let width = texture.width() as u32;
+                    let height = texture.height() as u32;
+                    let bytes_per_row = width as usize * 4;
+                    let buffer_size = height as usize * bytes_per_row;
+
+                    let mut pixels = vec![0u8; buffer_size];
+
+                    let region = metal::MTLRegion {
+                        origin: metal::MTLOrigin { x: 0, y: 0, z: 0 },
+                        size: metal::MTLSize {
+                            width: width as u64,
+                            height: height as u64,
+                            depth: 1,
+                        },
+                    };
+
+                    texture.get_bytes(
+                        pixels.as_mut_ptr() as *mut std::ffi::c_void,
+                        bytes_per_row as u64,
+                        region,
+                        0,
+                    );
+
+                    // Convert BGRA to RGBA (swap B and R channels)
+                    for chunk in pixels.chunks_exact_mut(4) {
+                        chunk.swap(0, 2);
+                    }
+
+                    return RgbaImage::from_raw(width, height, pixels).ok_or_else(|| {
+                        anyhow::anyhow!("Failed to create RgbaImage from pixel data")
+                    });
+                }
+                Err(err) => {
+                    log::error!(
+                        "failed to render: {}. retrying with larger instance buffer size",
+                        err
+                    );
+                    let mut instance_buffer_pool = self.instance_buffer_pool.lock();
+                    let buffer_size = instance_buffer_pool.buffer_size;
+                    if buffer_size >= 256 * 1024 * 1024 {
+                        anyhow::bail!("instance buffer size grew too large: {}", buffer_size);
+                    }
+                    instance_buffer_pool.reset(buffer_size * 2);
+                    log::info!(
+                        "increased instance buffer size to {}",
+                        instance_buffer_pool.buffer_size
+                    );
+                }
+            }
+        }
+    }
+
     fn draw_primitives(
         &mut self,
         scene: &Scene,
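The readback above relies on the layer being BGRA8Unorm (set where the layer is configured) and on tightly packed rows, so bytes_per_row is width * 4 and the only post-processing needed is swapping the blue and red channels. A self-contained sketch of that conversion, runnable on its own; the sample pixel values are illustrative.

// Minimal sketch of the pixel conversion performed after texture.get_bytes(...)
// in the Metal path: each 4-byte BGRA pixel has its B and R channels swapped.
fn bgra_to_rgba_in_place(pixels: &mut [u8]) {
    for px in pixels.chunks_exact_mut(4) {
        px.swap(0, 2); // B <-> R; G and A stay in place
    }
}

fn main() {
    // One 2x1 "texture": an opaque red pixel and a semi-transparent blue pixel, in BGRA order.
    let width = 2usize;
    let bytes_per_row = width * 4; // tightly packed, as the patch assumes
    let mut pixels = vec![
        0x00, 0x00, 0xFF, 0xFF, // BGRA red
        0xFF, 0x00, 0x00, 0x80, // BGRA blue, alpha 0x80
    ];
    assert_eq!(pixels.len(), bytes_per_row);

    bgra_to_rgba_in_place(&mut pixels);
    assert_eq!(
        pixels,
        vec![0xFF, 0x00, 0x00, 0xFF, 0x00, 0x00, 0xFF, 0x80] // now RGBA
    );
    println!("converted {} pixels", pixels.len() / 4);
}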
diff --git a/crates/gpui/src/platform/mac/window.rs b/crates/gpui/src/platform/mac/window.rs
index 673e2bd7a7f859f683f8ea13a56d5c5b66177ce1..12c8059d38bbeca800831d3a091ca4a48ff6cd91 100644
--- a/crates/gpui/src/platform/mac/window.rs
+++ b/crates/gpui/src/platform/mac/window.rs
@@ -8,6 +8,8 @@ use crate::{
     WindowBounds, WindowControlArea, WindowKind, WindowParams, dispatch_get_main_queue,
     dispatch_sys::dispatch_async_f, platform::PlatformInputHandler, point, px, size,
 };
+#[cfg(any(test, feature = "test-support"))]
+use anyhow::Result;
 use block::ConcreteBlock;
 use cocoa::{
     appkit::{
@@ -25,6 +27,8 @@ use cocoa::{
         NSUserDefaults,
     },
 };
+#[cfg(any(test, feature = "test-support"))]
+use image::RgbaImage;
 use core_graphics::display::{CGDirectDisplayID, CGPoint, CGRect};
 use ctor::ctor;
 
@@ -1570,6 +1574,12 @@ impl PlatformWindow for MacWindow {
     fn native_window_id(&self) -> Option {
         Some(self.window_number())
     }
+
+    #[cfg(any(test, feature = "test-support"))]
+    fn render_to_image(&self, scene: &crate::Scene) -> Result<RgbaImage> {
+        let mut this = self.0.lock();
+        this.renderer.render_to_image(scene)
+    }
 }
 
 impl rwh::HasWindowHandle for MacWindow {
diff --git a/crates/gpui/src/window.rs b/crates/gpui/src/window.rs
index 91cb58ff35566ecd1c0476b127936bfc8eacbf04..72601c6b48a0441c625b662bf2089e3897eee4d3 100644
--- a/crates/gpui/src/window.rs
+++ b/crates/gpui/src/window.rs
@@ -1784,6 +1784,15 @@ impl Window {
         self.platform_window.native_window_id()
     }
 
+    /// Renders the current frame's scene to a texture and returns the pixel data as an RGBA image.
+    /// This does not present the frame to the screen - useful for visual testing where we want
+    /// to capture what would be rendered without displaying it or requiring the window to be visible.
+    #[cfg(any(test, feature = "test-support"))]
+    pub fn render_to_image(&self) -> anyhow::Result<image::RgbaImage> {
+        self.platform_window
+            .render_to_image(&self.rendered_frame.scene)
+    }
+
     /// Set the content size of the window.
     pub fn resize(&mut self, size: Size<Pixels>) {
         self.platform_window.resize(size);
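Together these two changes expose the capture path publicly as Window::render_to_image, gated behind the test-support feature. A hedged sketch of how a test or tool might use it, mirroring the call shape of capture_screenshot in the visual_test_runner.rs changes below; save_frame is a hypothetical helper, not part of the patch.

use anyhow::Result;
use gpui::{AnyWindowHandle, App, Window};

// Hypothetical helper: capture the window's rendered frame and write it to disk.
// Requires gpui's `test-support` feature, since Window::render_to_image is cfg-gated.
fn save_frame(window: AnyWindowHandle, cx: &mut App, path: &std::path::Path) -> Result<()> {
    // Outer `?` handles the update_window error, inner `?` the capture error.
    let image = cx.update_window(window, |_view, window: &mut Window, _cx| {
        window.render_to_image()
    })??;
    image.save(path)?; // image::RgbaImage writes common formats, chosen by file extension
    Ok(())
}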
diff --git a/crates/zed/src/visual_test_runner.rs b/crates/zed/src/visual_test_runner.rs
index f1eab7985550ede4f6058bb39c3bbf5e835592c3..783594472e024e64a46c3f02882326fc4cc54800 100644
--- a/crates/zed/src/visual_test_runner.rs
+++ b/crates/zed/src/visual_test_runner.rs
@@ -3,15 +3,13 @@
 //! This binary runs visual regression tests for Zed's UI. It captures screenshots
 //! of real Zed windows and compares them against baseline images.
 //!
-//! ## Prerequisites
+//! ## How It Works
 //!
-//! **Screen Recording Permission Required**: This tool uses macOS ScreenCaptureKit
-//! to capture window screenshots. You must grant Screen Recording permission:
-//!
-//! 1. Run this tool once - macOS will prompt for permission
-//! 2. Or manually: System Settings > Privacy & Security > Screen Recording
-//! 3. Enable the terminal app you're running from (e.g., Terminal.app, iTerm2)
-//! 4. You may need to restart your terminal after granting permission
+//! This tool uses direct texture capture - it renders the scene to a Metal texture
+//! and reads the pixels back directly. This approach:
+//! - Does NOT require Screen Recording permission
+//! - Does NOT require the window to be visible on screen
+//! - Captures raw GPUI output without system window chrome
 //!
 //! ## Usage
 //!
@@ -87,6 +85,8 @@ fn main() {
         release_channel::init(semver::Version::new(0, 0, 0), cx);
         command_palette::init(cx);
         editor::init(cx);
+        call::init(app_state.client.clone(), app_state.user_store.clone(), cx);
+        title_bar::init(cx);
         project_panel::init(cx);
         outline_panel::init(cx);
         terminal_view::init(cx);
@@ -95,12 +95,10 @@ fn main() {
 
     // Open a real Zed workspace window
     let window_size = size(px(1280.0), px(800.0));
-    // TODO: We'd like to hide this window during tests, but macOS prevents windows
-    // from being positioned fully offscreen. The proper fix is to implement direct
-    // texture capture (reading pixels from Metal texture instead of using ScreenCaptureKit).
-    // See docs/direct-texture-capture-implementation.md for details.
+    // The window can be hidden since we use direct texture capture (reading pixels from
+    // the Metal texture) instead of ScreenCaptureKit, which requires visible windows.
    let bounds = Bounds {
-        origin: point(px(100.0), px(100.0)),
+        origin: point(px(0.0), px(0.0)),
        size: window_size,
    };
 
@@ -120,8 +118,8 @@ fn main() {
        .open_window(
            WindowOptions {
                window_bounds: Some(WindowBounds::Windowed(bounds)),
-                focus: true,
-                show: true,
+                focus: false,
+                show: false,
                ..Default::default()
            },
            |window, cx| {
@@ -148,18 +146,6 @@ fn main() {
                eprintln!("Failed to add worktree: {:?}", e);
            }
-            // Activate the window and bring it to front to ensure it's visible
-            // This is needed because macOS won't render occluded windows
-            cx.update(|cx| {
-                cx.activate(true); // Activate the app
-                workspace_window
-                    .update(cx, |_, window, _| {
-                        window.activate_window(); // Bring window to front
-                    })
-                    .ok();
-            })
-            .ok();
-
            // Wait for UI to settle
            cx.background_executor()
                .timer(std::time::Duration::from_millis(500))
                .await;
@@ -262,22 +248,6 @@
            cx.background_executor()
                .timer(std::time::Duration::from_secs(2))
                .await;
 
-            // Activate window again before screenshot to ensure it's rendered
-            cx.update(|cx| {
-                workspace_window
-                    .update(cx, |_, window, _| {
-                        window.activate_window();
-                    })
-                    .ok();
-            })
-            .ok();
-
-            // One more refresh and wait
-            cx.refresh().ok();
-            cx.background_executor()
-                .timer(std::time::Duration::from_millis(500))
-                .await;
-
            // Track test results
            let mut passed = 0;
            let mut failed = 0;
@@ -393,8 +363,8 @@ async fn run_visual_test(
    cx: &mut gpui::AsyncApp,
    update_baseline: bool,
 ) -> Result {
-    // Capture the screenshot
-    let screenshot = capture_screenshot(window, cx).await?;
+    // Capture the screenshot using direct texture capture (no ScreenCaptureKit needed)
+    let screenshot = cx.update(|cx| capture_screenshot(window, cx))??;
 
    // Get paths
    let baseline_path = get_baseline_path(test_name);
@@ -550,23 +520,12 @@ fn pixels_are_similar(a: &image::Rgba<u8>, b: &image::Rgba<u8>) -> bool {
        && (a[3] as i16 - b[3] as i16).abs() <= TOLERANCE
 }
 
-async fn capture_screenshot(
-    window: gpui::AnyWindowHandle,
-    cx: &mut gpui::AsyncApp,
-) -> Result<RgbaImage> {
-    // Get the native window ID
-    let window_id = cx
-        .update(|cx| {
-            cx.update_window(window, |_view, window: &mut Window, _cx| {
-                window.native_window_id()
-            })
-        })??
-        .ok_or_else(|| anyhow::anyhow!("Failed to get native window ID"))?;
-
-    // Capture the screenshot
-    let screenshot = gpui::capture_window_screenshot(window_id)
-        .await
-        .map_err(|_| anyhow::anyhow!("Screenshot capture was cancelled"))??;
+fn capture_screenshot(window: gpui::AnyWindowHandle, cx: &mut gpui::App) -> Result<RgbaImage> {
+    // Use direct texture capture - renders the scene to a texture and reads pixels back.
+    // This does not require the window to be visible on screen.
+    let screenshot = cx.update_window(window, |_view, window: &mut Window, _cx| {
+        window.render_to_image()
+    })??;
 
    println!(
        "Screenshot captured: {}x{} pixels",
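With capture decoupled from the screen, the runner's window setup reduces to an off-screen-friendly configuration: positioned at the origin, never focused, never shown. A condensed sketch of that configuration follows; the helper function name is illustrative, not from the patch.

use gpui::{Bounds, WindowBounds, WindowOptions, point, px, size};

// Sketch of the window configuration the runner now uses: because frames are
// captured from the Metal texture rather than the screen, the window can stay
// hidden and unfocused for the whole run.
fn hidden_test_window_options() -> WindowOptions {
    let bounds = Bounds {
        origin: point(px(0.0), px(0.0)),
        size: size(px(1280.0), px(800.0)),
    };
    WindowOptions {
        window_bounds: Some(WindowBounds::Windowed(bounds)),
        focus: false,
        show: false,
        ..Default::default()
    }
}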
diff --git a/crates/zed/test_fixtures/visual_tests/project_panel.png b/crates/zed/test_fixtures/visual_tests/project_panel.png
index 61c1570fe781824d0992bf91ca96807302d1b9d7..fcfde6790f6f37f1e1e71ed0718175f7c76694b1 100644
Binary files a/crates/zed/test_fixtures/visual_tests/project_panel.png and b/crates/zed/test_fixtures/visual_tests/project_panel.png differ
diff --git a/crates/zed/test_fixtures/visual_tests/workspace_with_editor.png b/crates/zed/test_fixtures/visual_tests/workspace_with_editor.png
index c8648a8b0c01d08b283bc699d1624e06df6d0d41..6bc173cc7819ae2b3be343e19e64477b1eb1cb79 100644
Binary files a/crates/zed/test_fixtures/visual_tests/workspace_with_editor.png and b/crates/zed/test_fixtures/visual_tests/workspace_with_editor.png differ
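The regenerated baseline PNGs above are compared against fresh captures with a per-channel tolerance, as in the pixels_are_similar helper shown in the visual_test_runner.rs hunk. A self-contained sketch of that comparison strategy; the TOLERANCE value and the images_match helper here are illustrative, the real constant lives in visual_test_runner.rs.

use image::{Rgba, RgbaImage};

// Two pixels match when every RGBA channel differs by at most a small tolerance,
// which absorbs minor antialiasing and gamma differences between runs.
const TOLERANCE: i16 = 3; // illustrative value

fn pixels_are_similar(a: &Rgba<u8>, b: &Rgba<u8>) -> bool {
    (0..4).all(|i| (a[i] as i16 - b[i] as i16).abs() <= TOLERANCE)
}

fn images_match(baseline: &RgbaImage, screenshot: &RgbaImage) -> bool {
    baseline.dimensions() == screenshot.dimensions()
        && baseline
            .pixels()
            .zip(screenshot.pixels())
            .all(|(a, b)| pixels_are_similar(a, b))
}

fn main() {
    let baseline = RgbaImage::from_pixel(4, 4, Rgba([10, 20, 30, 255]));
    let capture = RgbaImage::from_pixel(4, 4, Rgba([12, 18, 30, 255]));
    println!("match: {}", images_match(&baseline, &capture)); // true with TOLERANCE = 3
}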