metal_renderer.rs

use crate::{
    point, size, AtlasTextureId, AtlasTextureKind, AtlasTile, Bounds, ContentMask, DevicePixels,
    Hsla, MetalAtlas, MonochromeSprite, Path, PathId, PathVertex, PolychromeSprite, PrimitiveBatch,
    Quad, ScaledPixels, Scene, Shadow, Size, Surface, Underline,
};
use cocoa::{
    base::{NO, YES},
    foundation::NSUInteger,
    quartzcore::AutoresizingMask,
};
use collections::HashMap;
use core_foundation::base::TCFType;
use foreign_types::ForeignType;
use media::core_video::CVMetalTextureCache;
use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
use objc::{self, msg_send, sel, sel_impl};
use smallvec::SmallVec;
use std::{ffi::c_void, mem, ptr, sync::Arc};

#[cfg(not(feature = "runtime_shaders"))]
const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
#[cfg(feature = "runtime_shaders")]
const SHADERS_SOURCE_FILE: &'static str =
    include_str!(concat!(env!("OUT_DIR"), "/stitched_shaders.metal"));
const INSTANCE_BUFFER_SIZE: usize = 32 * 1024 * 1024; // This is an arbitrary choice; there's probably a more optimal value (perhaps we could even adjust it dynamically).

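/// Metal-based renderer: owns the Metal layer, render pipeline states, and the
/// shared unit-vertex and instance buffers used to draw a `Scene` each frame.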
pub(crate) struct MetalRenderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    paths_rasterization_pipeline_state: metal::RenderPipelineState,
    path_sprites_pipeline_state: metal::RenderPipelineState,
    shadows_pipeline_state: metal::RenderPipelineState,
    quads_pipeline_state: metal::RenderPipelineState,
    underlines_pipeline_state: metal::RenderPipelineState,
    monochrome_sprites_pipeline_state: metal::RenderPipelineState,
    polychrome_sprites_pipeline_state: metal::RenderPipelineState,
    surfaces_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    sprite_atlas: Arc<MetalAtlas>,
    core_video_texture_cache: CVMetalTextureCache,
}

impl MetalRenderer {
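    /// Creates a renderer on the system default Metal device: configures the layer,
    /// compiles the shader library, builds all render pipeline states, and allocates
    /// the shared unit-quad vertex buffer and instance buffer. `is_opaque` controls
    /// whether the backing layer is marked opaque.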
    pub fn new(is_opaque: bool) -> Self {
        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(MTLPixelFormat::BGRA8Unorm);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }
        #[cfg(feature = "runtime_shaders")]
        let library = device
            .new_library_with_source(&SHADERS_SOURCE_FILE, &metal::CompileOptions::new())
            .expect("error building metal library");
        #[cfg(not(feature = "runtime_shaders"))]
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        fn to_float2_bits(point: crate::PointF) -> u64 {
            let mut output = point.y.to_bits() as u64;
            output <<= 32;
            output |= point.x.to_bits() as u64;
            output
        }

        let unit_vertices = [
            to_float2_bits(point(0., 0.)),
            to_float2_bits(point(1., 0.)),
            to_float2_bits(point(0., 1.)),
            to_float2_bits(point(0., 1.)),
            to_float2_bits(point(1., 0.)),
            to_float2_bits(point(1., 1.)),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            mem::size_of_val(&unit_vertices) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let paths_rasterization_pipeline_state = build_path_rasterization_pipeline_state(
            &device,
            &library,
            "paths_rasterization",
            "path_rasterization_vertex",
            "path_rasterization_fragment",
            MTLPixelFormat::R16Float,
        );
        let path_sprites_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "path_sprites",
            "path_sprite_vertex",
            "path_sprite_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let shadows_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadows",
            "shadow_vertex",
            "shadow_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let quads_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quads",
            "quad_vertex",
            "quad_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let underlines_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underlines",
            "underline_vertex",
            "underline_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let monochrome_sprites_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "monochrome_sprites",
            "monochrome_sprite_vertex",
            "monochrome_sprite_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let polychrome_sprites_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "polychrome_sprites",
            "polychrome_sprite_vertex",
            "polychrome_sprite_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );
        let surfaces_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surfaces",
            "surface_vertex",
            "surface_fragment",
            MTLPixelFormat::BGRA8Unorm,
        );

        let command_queue = device.new_command_queue();
        let sprite_atlas = Arc::new(MetalAtlas::new(device.clone()));

        Self {
            layer,
            command_queue,
            paths_rasterization_pipeline_state,
            path_sprites_pipeline_state,
            shadows_pipeline_state,
            quads_pipeline_state,
            underlines_pipeline_state,
            monochrome_sprites_pipeline_state,
            polychrome_sprites_pipeline_state,
            surfaces_pipeline_state,
            unit_vertices,
            instances,
            sprite_atlas,
            core_video_texture_cache: unsafe { CVMetalTextureCache::new(device.as_ptr()).unwrap() },
        }
    }

    pub fn layer(&self) -> &metal::MetalLayerRef {
        &self.layer
    }

    pub fn sprite_atlas(&self) -> &Arc<MetalAtlas> {
        &self.sprite_atlas
    }

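    /// Draws a `Scene` into the layer's next drawable: paths are first rasterized
    /// into atlas tiles, then each primitive batch is encoded in order into a single
    /// render pass, and the drawable is presented once the command buffer completes.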
    pub fn draw(&mut self, scene: &Scene) {
        let layer = self.layer.clone();
        let viewport_size = layer.drawable_size();
        let viewport_size: Size<DevicePixels> = size(
            (viewport_size.width.ceil() as i32).into(),
            (viewport_size.height.ceil() as i32).into(),
        );
        let drawable = if let Some(drawable) = layer.next_drawable() {
            drawable
        } else {
            log::error!(
                "failed to retrieve next drawable, drawable size: {:?}",
                viewport_size
            );
            return;
        };
        let command_queue = self.command_queue.clone();
        let command_buffer = command_queue.new_command_buffer();
        let mut instance_offset = 0;

        let Some(path_tiles) =
            self.rasterize_paths(scene.paths(), &mut instance_offset, command_buffer)
        else {
            panic!("failed to rasterize {} paths", scene.paths().len());
        };

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();

        color_attachment.set_texture(Some(drawable.texture()));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        let alpha = if self.layer.is_opaque() { 1. } else { 0. };
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: i32::from(viewport_size.width) as f64,
            height: i32::from(viewport_size.height) as f64,
            znear: 0.0,
            zfar: 1.0,
        });
        for batch in scene.batches() {
            let ok = match batch {
                PrimitiveBatch::Shadows(shadows) => self.draw_shadows(
                    shadows,
                    &mut instance_offset,
                    viewport_size,
                    command_encoder,
                ),
                PrimitiveBatch::Quads(quads) => {
                    self.draw_quads(quads, &mut instance_offset, viewport_size, command_encoder)
                }
                PrimitiveBatch::Paths(paths) => self.draw_paths(
                    paths,
                    &path_tiles,
                    &mut instance_offset,
                    viewport_size,
                    command_encoder,
                ),
                PrimitiveBatch::Underlines(underlines) => self.draw_underlines(
                    underlines,
                    &mut instance_offset,
                    viewport_size,
                    command_encoder,
                ),
                PrimitiveBatch::MonochromeSprites {
                    texture_id,
                    sprites,
                } => self.draw_monochrome_sprites(
                    texture_id,
                    sprites,
                    &mut instance_offset,
                    viewport_size,
                    command_encoder,
                ),
                PrimitiveBatch::PolychromeSprites {
                    texture_id,
                    sprites,
                } => self.draw_polychrome_sprites(
                    texture_id,
                    sprites,
                    &mut instance_offset,
                    viewport_size,
                    command_encoder,
                ),
                PrimitiveBatch::Surfaces(surfaces) => self.draw_surfaces(
                    surfaces,
                    &mut instance_offset,
                    viewport_size,
                    command_encoder,
                ),
            };

            if !ok {
                panic!("scene too large: {} paths, {} shadows, {} quads, {} underlines, {} mono, {} poly, {} surfaces",
                    scene.paths.len(),
                    scene.shadows.len(),
                    scene.quads.len(),
                    scene.underlines.len(),
                    scene.monochrome_sprites.len(),
                    scene.polychrome_sprites.len(),
                    scene.surfaces.len(),
                )
            }
        }

        command_encoder.end_encoding();

        self.instances.did_modify_range(NSRange {
            location: 0,
            length: instance_offset as NSUInteger,
        });

        command_buffer.commit();
        self.sprite_atlas.clear_textures(AtlasTextureKind::Path);

        command_buffer.wait_until_completed();
        drawable.present();
    }

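    /// Rasterizes each path into a tile of the path atlas, grouping vertices by
    /// destination texture so each texture needs only one render pass. Returns the
    /// tile allocated for each `PathId`, or `None` if the vertex data would overflow
    /// the instance buffer.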
    fn rasterize_paths(
        &mut self,
        paths: &[Path<ScaledPixels>],
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Option<HashMap<PathId, AtlasTile>> {
        let mut tiles = HashMap::default();
        let mut vertices_by_texture_id = HashMap::default();
        for path in paths {
            let clipped_bounds = path.bounds.intersect(&path.content_mask.bounds);

            let tile = self
                .sprite_atlas
                .allocate(clipped_bounds.size.map(Into::into), AtlasTextureKind::Path);
            vertices_by_texture_id
                .entry(tile.texture_id)
                .or_insert(Vec::new())
                .extend(path.vertices.iter().map(|vertex| PathVertex {
                    xy_position: vertex.xy_position - clipped_bounds.origin
                        + tile.bounds.origin.map(Into::into),
                    st_position: vertex.st_position,
                    content_mask: ContentMask {
                        bounds: tile.bounds.map(Into::into),
                    },
                }));
            tiles.insert(path.id, tile);
        }

        for (texture_id, vertices) in vertices_by_texture_id {
            align_offset(offset);
            let vertices_bytes_len = mem::size_of_val(vertices.as_slice());
            let next_offset = *offset + vertices_bytes_len;
            if next_offset > INSTANCE_BUFFER_SIZE {
                return None;
            }

            let render_pass_descriptor = metal::RenderPassDescriptor::new();
            let color_attachment = render_pass_descriptor
                .color_attachments()
                .object_at(0)
                .unwrap();

            let texture = self.sprite_atlas.metal_texture(texture_id);
            color_attachment.set_texture(Some(&texture));
            color_attachment.set_load_action(metal::MTLLoadAction::Clear);
            color_attachment.set_store_action(metal::MTLStoreAction::Store);
            color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
            let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);
            command_encoder.set_render_pipeline_state(&self.paths_rasterization_pipeline_state);
            command_encoder.set_vertex_buffer(
                PathRasterizationInputIndex::Vertices as u64,
                Some(&self.instances),
                *offset as u64,
            );
            let texture_size = Size {
                width: DevicePixels::from(texture.width()),
                height: DevicePixels::from(texture.height()),
            };
            command_encoder.set_vertex_bytes(
                PathRasterizationInputIndex::AtlasTextureSize as u64,
                mem::size_of_val(&texture_size) as u64,
                &texture_size as *const Size<DevicePixels> as *const _,
            );

            let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };
            unsafe {
                ptr::copy_nonoverlapping(
                    vertices.as_ptr() as *const u8,
                    buffer_contents,
                    vertices_bytes_len,
                );
            }

            command_encoder.draw_primitives(
                metal::MTLPrimitiveType::Triangle,
                0,
                vertices.len() as u64,
            );
            command_encoder.end_encoding();
            *offset = next_offset;
        }

        Some(tiles)
    }

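    /// Copies the shadow instances into the instance buffer and encodes a single
    /// instanced draw. Returns `false` if the batch would overflow the buffer.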
    fn draw_shadows(
        &mut self,
        shadows: &[Shadow],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        if shadows.is_empty() {
            return true;
        }
        align_offset(offset);

        command_encoder.set_render_pipeline_state(&self.shadows_pipeline_state);
        command_encoder.set_vertex_buffer(
            ShadowInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            ShadowInputIndex::Shadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_buffer(
            ShadowInputIndex::Shadows as u64,
            Some(&self.instances),
            *offset as u64,
        );

        command_encoder.set_vertex_bytes(
            ShadowInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let shadow_bytes_len = mem::size_of_val(shadows);
        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };

        let next_offset = *offset + shadow_bytes_len;
        if next_offset > INSTANCE_BUFFER_SIZE {
            return false;
        }

        unsafe {
            ptr::copy_nonoverlapping(
                shadows.as_ptr() as *const u8,
                buffer_contents,
                shadow_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
        true
    }

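    /// Copies the quad instances into the instance buffer and encodes a single
    /// instanced draw. Returns `false` if the batch would overflow the buffer.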
    fn draw_quads(
        &mut self,
        quads: &[Quad],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        if quads.is_empty() {
            return true;
        }
        align_offset(offset);

        command_encoder.set_render_pipeline_state(&self.quads_pipeline_state);
        command_encoder.set_vertex_buffer(
            QuadInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            QuadInputIndex::Quads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_buffer(
            QuadInputIndex::Quads as u64,
            Some(&self.instances),
            *offset as u64,
        );

        command_encoder.set_vertex_bytes(
            QuadInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let quad_bytes_len = mem::size_of_val(quads);
        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };

        let next_offset = *offset + quad_bytes_len;
        if next_offset > INSTANCE_BUFFER_SIZE {
            return false;
        }

        unsafe {
            ptr::copy_nonoverlapping(quads.as_ptr() as *const u8, buffer_contents, quad_bytes_len);
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
        true
    }

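    /// Composites previously rasterized path tiles onto the frame, batching
    /// consecutive paths that share an atlas texture into one instanced draw.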
    fn draw_paths(
        &mut self,
        paths: &[Path<ScaledPixels>],
        tiles_by_path_id: &HashMap<PathId, AtlasTile>,
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        if paths.is_empty() {
            return true;
        }

        command_encoder.set_render_pipeline_state(&self.path_sprites_pipeline_state);
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let mut prev_texture_id = None;
        let mut sprites = SmallVec::<[_; 1]>::new();
        let mut paths_and_tiles = paths
            .iter()
            .map(|path| (path, tiles_by_path_id.get(&path.id).unwrap()))
            .peekable();

        loop {
            if let Some((path, tile)) = paths_and_tiles.peek() {
                if prev_texture_id.map_or(true, |texture_id| texture_id == tile.texture_id) {
                    prev_texture_id = Some(tile.texture_id);
                    let origin = path.bounds.intersect(&path.content_mask.bounds).origin;
                    sprites.push(PathSprite {
                        bounds: Bounds {
                            origin: origin.map(|p| p.floor()),
                            size: tile.bounds.size.map(Into::into),
                        },
                        color: path.color,
                        tile: (*tile).clone(),
                    });
                    paths_and_tiles.next();
                    continue;
                }
            }

            if sprites.is_empty() {
                break;
            } else {
                align_offset(offset);
                let texture_id = prev_texture_id.take().unwrap();
                let texture: metal::Texture = self.sprite_atlas.metal_texture(texture_id);
                let texture_size = size(
                    DevicePixels(texture.width() as i32),
                    DevicePixels(texture.height() as i32),
                );

                command_encoder.set_vertex_buffer(
                    SpriteInputIndex::Sprites as u64,
                    Some(&self.instances),
                    *offset as u64,
                );
                command_encoder.set_vertex_bytes(
                    SpriteInputIndex::AtlasTextureSize as u64,
                    mem::size_of_val(&texture_size) as u64,
                    &texture_size as *const Size<DevicePixels> as *const _,
                );
                command_encoder.set_fragment_buffer(
                    SpriteInputIndex::Sprites as u64,
                    Some(&self.instances),
                    *offset as u64,
                );
                command_encoder
                    .set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));

                let sprite_bytes_len = mem::size_of_val(sprites.as_slice());
                let next_offset = *offset + sprite_bytes_len;
                if next_offset > INSTANCE_BUFFER_SIZE {
                    return false;
                }

                let buffer_contents =
                    unsafe { (self.instances.contents() as *mut u8).add(*offset) };

                unsafe {
                    ptr::copy_nonoverlapping(
                        sprites.as_ptr() as *const u8,
                        buffer_contents,
                        sprite_bytes_len,
                    );
                }

                command_encoder.draw_primitives_instanced(
                    metal::MTLPrimitiveType::Triangle,
                    0,
                    6,
                    sprites.len() as u64,
                );
                *offset = next_offset;
                sprites.clear();
            }
        }
        true
    }

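    /// Copies the underline instances into the instance buffer and encodes a single
    /// instanced draw. Returns `false` if the batch would overflow the buffer.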
    fn draw_underlines(
        &mut self,
        underlines: &[Underline],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        if underlines.is_empty() {
            return true;
        }
        align_offset(offset);

        command_encoder.set_render_pipeline_state(&self.underlines_pipeline_state);
        command_encoder.set_vertex_buffer(
            UnderlineInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            UnderlineInputIndex::Underlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_buffer(
            UnderlineInputIndex::Underlines as u64,
            Some(&self.instances),
            *offset as u64,
        );

        command_encoder.set_vertex_bytes(
            UnderlineInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        let underline_bytes_len = mem::size_of_val(underlines);
        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };

        let next_offset = *offset + underline_bytes_len;
        if next_offset > INSTANCE_BUFFER_SIZE {
            return false;
        }

        unsafe {
            ptr::copy_nonoverlapping(
                underlines.as_ptr() as *const u8,
                buffer_contents,
                underline_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
        true
    }

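    /// Encodes an instanced draw for a batch of monochrome sprites sampled from the
    /// given atlas texture. Returns `false` if the batch would overflow the buffer.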
    fn draw_monochrome_sprites(
        &mut self,
        texture_id: AtlasTextureId,
        sprites: &[MonochromeSprite],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        if sprites.is_empty() {
            return true;
        }
        align_offset(offset);

        let texture = self.sprite_atlas.metal_texture(texture_id);
        let texture_size = size(
            DevicePixels(texture.width() as i32),
            DevicePixels(texture.height() as i32),
        );
        command_encoder.set_render_pipeline_state(&self.monochrome_sprites_pipeline_state);
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::AtlasTextureSize as u64,
            mem::size_of_val(&texture_size) as u64,
            &texture_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_fragment_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));

        let sprite_bytes_len = mem::size_of_val(sprites);
        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };

        let next_offset = *offset + sprite_bytes_len;
        if next_offset > INSTANCE_BUFFER_SIZE {
            return false;
        }

        unsafe {
            ptr::copy_nonoverlapping(
                sprites.as_ptr() as *const u8,
                buffer_contents,
                sprite_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprites.len() as u64,
        );
        *offset = next_offset;
        true
    }

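    /// Encodes an instanced draw for a batch of polychrome (full-color) sprites
    /// sampled from the given atlas texture. Returns `false` on buffer overflow.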
    fn draw_polychrome_sprites(
        &mut self,
        texture_id: AtlasTextureId,
        sprites: &[PolychromeSprite],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        if sprites.is_empty() {
            return true;
        }
        align_offset(offset);

        let texture = self.sprite_atlas.metal_texture(texture_id);
        let texture_size = size(
            DevicePixels(texture.width() as i32),
            DevicePixels(texture.height() as i32),
        );
        command_encoder.set_render_pipeline_state(&self.polychrome_sprites_pipeline_state);
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_vertex_bytes(
            SpriteInputIndex::AtlasTextureSize as u64,
            mem::size_of_val(&texture_size) as u64,
            &texture_size as *const Size<DevicePixels> as *const _,
        );
        command_encoder.set_fragment_buffer(
            SpriteInputIndex::Sprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_fragment_texture(SpriteInputIndex::AtlasTexture as u64, Some(&texture));

        let sprite_bytes_len = mem::size_of_val(sprites);
        let buffer_contents = unsafe { (self.instances.contents() as *mut u8).add(*offset) };

        let next_offset = *offset + sprite_bytes_len;
        if next_offset > INSTANCE_BUFFER_SIZE {
            return false;
        }

        unsafe {
            ptr::copy_nonoverlapping(
                sprites.as_ptr() as *const u8,
                buffer_contents,
                sprite_bytes_len,
            );
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprites.len() as u64,
        );
        *offset = next_offset;
        true
    }

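    /// Draws each surface by wrapping the two planes of its biplanar YCbCr image
    /// buffer in Metal textures via the Core Video texture cache, then encoding one
    /// draw per surface. Returns `false` if the instances would overflow the buffer.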
    fn draw_surfaces(
        &mut self,
        surfaces: &[Surface],
        offset: &mut usize,
        viewport_size: Size<DevicePixels>,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) -> bool {
        command_encoder.set_render_pipeline_state(&self.surfaces_pipeline_state);
        command_encoder.set_vertex_buffer(
            SurfaceInputIndex::Vertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            SurfaceInputIndex::ViewportSize as u64,
            mem::size_of_val(&viewport_size) as u64,
            &viewport_size as *const Size<DevicePixels> as *const _,
        );

        for surface in surfaces {
            let texture_size = size(
                DevicePixels::from(surface.image_buffer.width() as i32),
                DevicePixels::from(surface.image_buffer.height() as i32),
            );

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                media::core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            let y_texture = unsafe {
                self.core_video_texture_cache
                    .create_texture_from_image(
                        surface.image_buffer.as_concrete_TypeRef(),
                        ptr::null(),
                        MTLPixelFormat::R8Unorm,
                        surface.image_buffer.plane_width(0),
                        surface.image_buffer.plane_height(0),
                        0,
                    )
                    .unwrap()
            };
            let cb_cr_texture = unsafe {
                self.core_video_texture_cache
                    .create_texture_from_image(
                        surface.image_buffer.as_concrete_TypeRef(),
                        ptr::null(),
                        MTLPixelFormat::RG8Unorm,
                        surface.image_buffer.plane_width(1),
                        surface.image_buffer.plane_height(1),
                        1,
                    )
                    .unwrap()
            };

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<Surface>();
            if next_offset > INSTANCE_BUFFER_SIZE {
                return false;
            }

            command_encoder.set_vertex_buffer(
                SurfaceInputIndex::Surfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                SurfaceInputIndex::TextureSize as u64,
                mem::size_of_val(&texture_size) as u64,
                &texture_size as *const Size<DevicePixels> as *const _,
            );
            command_encoder.set_fragment_texture(
                SurfaceInputIndex::YTexture as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                SurfaceInputIndex::CbCrTexture as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut SurfaceBounds;
                ptr::write(
                    buffer_contents,
                    SurfaceBounds {
                        bounds: surface.bounds,
                        content_mask: surface.content_mask.clone(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
        true
    }
}

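/// Builds a render pipeline state for the on-screen passes, with alpha blending
/// enabled (source-over for color, additive for alpha).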
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

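/// Like `build_pipeline_state`, but with purely additive blending, so overlapping
/// path geometry accumulates in the atlas target during path rasterization.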
fn build_path_rasterization_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

// Align to multiples of 256 to keep Metal happy.
fn align_offset(offset: &mut usize) {
    *offset = ((*offset + 255) / 256) * 256;
}

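// The following enums give names to the argument indices shared with the
// corresponding Metal shader functions; each variant identifies the vertex or
// fragment buffer/texture slot an input is bound to.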
#[repr(C)]
enum ShadowInputIndex {
    Vertices = 0,
    Shadows = 1,
    ViewportSize = 2,
}

#[repr(C)]
enum QuadInputIndex {
    Vertices = 0,
    Quads = 1,
    ViewportSize = 2,
}

#[repr(C)]
enum UnderlineInputIndex {
    Vertices = 0,
    Underlines = 1,
    ViewportSize = 2,
}

#[repr(C)]
enum SpriteInputIndex {
    Vertices = 0,
    Sprites = 1,
    ViewportSize = 2,
    AtlasTextureSize = 3,
    AtlasTexture = 4,
}

#[repr(C)]
enum SurfaceInputIndex {
    Vertices = 0,
    Surfaces = 1,
    ViewportSize = 2,
    TextureSize = 3,
    YTexture = 4,
    CbCrTexture = 5,
}

#[repr(C)]
enum PathRasterizationInputIndex {
    Vertices = 0,
    AtlasTextureSize = 1,
}

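/// Instance data for compositing one rasterized path tile onto the frame.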
#[derive(Clone, Debug, Eq, PartialEq)]
#[repr(C)]
pub struct PathSprite {
    pub bounds: Bounds<ScaledPixels>,
    pub color: Hsla,
    pub tile: AtlasTile,
}

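/// Per-surface instance data written into the instance buffer by `draw_surfaces`.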
#[derive(Clone, Debug, Eq, PartialEq)]
#[repr(C)]
pub struct SurfaceBounds {
    pub bounds: Bounds<ScaledPixels>,
    pub content_mask: ContentMask<ScaledPixels>,
}