renderer.rs

use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::{
    base::{NO, YES},
    foundation::NSUInteger,
    quartzcore::AutoresizingMask,
};
use core_foundation::base::TCFType;
use foreign_types::ForeignTypeRef;
use log::warn;
use media::core_video::{self, CVMetalTextureCache};
use metal::{CommandQueue, MTLPixelFormat, MTLResourceOptions, NSRange};
use objc::{self, msg_send, sel, sel_impl};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, ptr, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

pub struct Renderer {
    layer: metal::MetalLayer,
    command_queue: CommandQueue,
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    surface_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
    cv_texture_cache: core_video::CVMetalTextureCache,
}

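/// A path that was rasterized into a path atlas during the first pass and is
/// composited into the frame as a sprite during the second pass.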
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

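/// An externally produced video frame; `image_buffer` is expected to be a
/// bi-planar YCbCr buffer (asserted in `render_surfaces`) and is drawn with
/// the surface pipeline.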
pub struct Surface {
    pub bounds: RectF,
    pub image_buffer: core_video::CVImageBuffer,
}

impl Renderer {
    pub fn new(is_opaque: bool, fonts: Arc<dyn platform::FontSystem>) -> Self {
        const PIXEL_FORMAT: MTLPixelFormat = MTLPixelFormat::BGRA8Unorm;

        let device: metal::Device = if let Some(device) = metal::Device::system_default() {
            device
        } else {
            log::error!("unable to access a compatible graphics device");
            std::process::exit(1);
        };

        let layer = metal::MetalLayer::new();
        layer.set_device(&device);
        layer.set_pixel_format(PIXEL_FORMAT);
        layer.set_presents_with_transaction(true);
        layer.set_opaque(is_opaque);
        unsafe {
            let _: () = msg_send![&*layer, setAllowsNextDrawableTimeout: NO];
            let _: () = msg_send![&*layer, setNeedsDisplayOnBoundsChange: YES];
            let _: () = msg_send![
                &*layer,
                setAutoresizingMask: AutoresizingMask::WIDTH_SIZABLE
                    | AutoresizingMask::HEIGHT_SIZABLE
            ];
        }

        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), 1., fonts.clone());
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), 1., fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            PIXEL_FORMAT,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            PIXEL_FORMAT,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            PIXEL_FORMAT,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            PIXEL_FORMAT,
        );
        let surface_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "surface",
            "surface_vertex",
            "surface_fragment",
            PIXEL_FORMAT,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            PIXEL_FORMAT,
        );
        let cv_texture_cache = CVMetalTextureCache::new(device.as_ptr()).unwrap();
        Self {
            layer,
            command_queue: device.new_command_queue(),
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            surface_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
            cv_texture_cache,
        }
    }

    pub fn layer(&self) -> &metal::MetalLayerRef {
        &*self.layer
    }

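    /// Renders a single frame: first rasterizes all paths into the path
    /// atlases, then draws every layer's primitives into the next drawable,
    /// threading one shared write offset through the instance buffer for the
    /// whole frame.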
    pub fn render(&mut self, scene: &Scene) {
        let layer = self.layer.clone();
        let drawable_size = layer.drawable_size();
        let drawable = if let Some(drawable) = layer.next_drawable() {
            drawable
        } else {
            log::error!(
                "failed to retrieve next drawable, drawable size: {:?}",
                drawable_size
            );
            return;
        };
        let command_queue = self.command_queue.clone();
        let command_buffer = command_queue.new_command_buffer();

        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            vec2f(drawable_size.width as f32, drawable_size.height as f32),
            command_buffer,
            drawable.texture(),
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();

        command_buffer.commit();
        command_buffer.wait_until_completed();
        drawable.present();
    }

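    /// First pass: rasterizes every path in the scene into the path atlas
    /// textures, batching consecutive paths that share an atlas into a single
    /// encoder, and returns the sprites used to composite the atlases into
    /// the frame during the second pass.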
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        let alpha = if self.layer.is_opaque() { 1. } else { 0. };
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., alpha));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_surfaces(
                layer.surfaces(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer
            .clip_bounds()
            .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
            * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let corner_radii = shadow.corner_radii * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius_top_left: corner_radii.top_left,
                corner_radius_top_right: corner_radii.top_right,
                corner_radius_bottom_right: corner_radii.bottom_right,
                corner_radius_bottom_left: corner_radii.bottom_left,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                border_top: quad.border.top * scale_factor,
                border_right: quad.border.right * scale_factor,
                border_bottom: quad.border.bottom * scale_factor,
                border_left: quad.border.left * scale_factor,
                border_color: quad.border.color.to_uchar4(),
                corner_radius_top_left: quad.corner_radii.top_left * scale_factor,
                corner_radius_top_right: quad.corner_radii.top_right * scale_factor,
                corner_radius_bottom_right: quad.corner_radii.bottom_right * scale_factor,
                corner_radius_bottom_left: quad.corner_radii.bottom_left * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();

                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radii = image.corner_radii * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: image.border.top * scale_factor,
                    border_right: image.border.right * scale_factor,
                    border_bottom: image.border.bottom * scale_factor,
                    border_left: image.border.left * scale_factor,
                    border_color: image.border.color.to_uchar4(),
                    corner_radius_top_left: corner_radii.top_left,
                    corner_radius_top_right: corner_radii.top_right,
                    corner_radius_bottom_right: corner_radii.bottom_right,
                    corner_radius_bottom_left: corner_radii.bottom_left,
                    grayscale: image.grayscale as u8,
                });
        }

        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius_top_left: 0.,
                        corner_radius_top_right: 0.,
                        corner_radius_bottom_right: 0.,
                        corner_radius_bottom_left: 0.,
                        grayscale: false as u8,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

    fn render_surfaces(
        &mut self,
        surfaces: &[Surface],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if surfaces.is_empty() {
            return;
        }

        command_encoder.set_render_pipeline_state(&self.surface_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for surface in surfaces {
            let origin = surface.bounds.origin() * scale_factor;
            let source_size = vec2i(
                surface.image_buffer.width() as i32,
                surface.image_buffer.height() as i32,
            );
            let target_size = surface.bounds.size() * scale_factor;

            assert_eq!(
                surface.image_buffer.pixel_format_type(),
                core_video::kCVPixelFormatType_420YpCbCr8BiPlanarFullRange
            );

            let y_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::R8Unorm,
                    surface.image_buffer.plane_width(0),
                    surface.image_buffer.plane_height(0),
                    0,
                )
                .unwrap();
            let cb_cr_texture = self
                .cv_texture_cache
                .create_texture_from_image(
                    surface.image_buffer.as_concrete_TypeRef(),
                    ptr::null(),
                    MTLPixelFormat::RG8Unorm,
                    surface.image_buffer.plane_width(1),
                    surface.image_buffer.plane_height(1),
                    1,
                )
                .unwrap();

            align_offset(offset);
            let next_offset = *offset + mem::size_of::<shaders::GPUISurface>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            command_encoder.set_vertex_buffer(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexSurfaces as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISurfaceVertexInputIndex_GPUISurfaceVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [source_size.to_float2()].as_ptr() as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexYAtlas as u64,
                Some(y_texture.as_texture_ref()),
            );
            command_encoder.set_fragment_texture(
                shaders::GPUISurfaceFragmentInputIndex_GPUISurfaceFragmentInputIndexCbCrAtlas
                    as u64,
                Some(cb_cr_texture.as_texture_ref()),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISurface;
                std::ptr::write(
                    buffer_contents,
                    shaders::GPUISurface {
                        origin: origin.to_float2(),
                        target_size: target_size.to_float2(),
                        source_size: source_size.to_float2(),
                    },
                );
            }

            command_encoder.draw_primitives(metal::MTLPrimitiveType::Triangle, 0, 6);
            *offset = next_offset;
        }
    }

    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

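/// Rounds the offset up to the next multiple of 256; Metal expects buffer
/// binding offsets for constant data to be 256-byte aligned on macOS GPUs, so
/// every batch written into the shared instance buffer starts on an aligned
/// boundary.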
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy
    }
}

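/// Builds a pipeline that composites onto the drawable: RGB blends with
/// source-alpha over one-minus-source-alpha, while alpha accumulates
/// additively. The path atlas pipeline below instead blends additively in all
/// channels so overlapping path triangles can accumulate coverage/winding
/// values in the R16Float atlas.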
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

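    // Note: the generated `vector_float2` and `vector_uchar4` aliases behave
    // as plain unsigned integers here, so the conversions below pack each
    // component's bits into the vector with shifts and ors (x/r in the low
    // bits).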
    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.y().to_bits() as vector_float2;
            output <<= 32;
            output |= self.x().to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}