use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, ImageGlyph, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::foundation::NSUInteger;
use log::warn;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] = include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 8192 * 1024; // This is an arbitrary decision. There's probably a more optimal value.

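/// Metal-backed renderer for a GPUI `Scene`.
///
/// Per-frame instance data (shadows, quads, sprites, images, underlines, and
/// path vertices) is written into the single shared `instances` buffer, and
/// each draw binds that buffer at a different 256-byte-aligned offset.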
pub struct Renderer {
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

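/// A path that has been rasterized into a path atlas, remembering which layer
/// it came from so it can be composited back in layer order.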
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

impl Renderer {
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(
            device.clone(),
            vec2i(1024, 768),
            scale_factor,
            fonts.clone(),
        );
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
        }
    }

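    /// Renders a frame in two phases: paths are first rasterized into
    /// offscreen atlas textures, then every layer is drawn into `output`,
    /// sampling those atlases for its path sprites. `offset` tracks how much
    /// of the shared instance buffer has been consumed so far.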
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        self.sprite_cache.set_scale_factor(scene.scale_factor());
        self.image_cache.set_scale_factor(scene.scale_factor());

        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }

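    /// Rasterizes every path in the scene into the path atlases, flushing the
    /// accumulated vertices whenever a path lands in a different atlas than
    /// its predecessor. Returns one `PathSprite` per path so `render_layers`
    /// can composite the results in the right layer.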
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let path_allocation = self.path_atlases.allocate(size.to_i32());
                if path_allocation.is_none() {
                    // Path size was likely zero.
                    warn!("could not allocate path texture of size {:?}", size);
                    continue;
                }
                let (alloc_id, atlas_origin) = path_allocation.unwrap();
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

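    /// Encodes a render pass that clears the given path atlas texture and
    /// draws the accumulated path vertices into it, after copying them into
    /// the shared instance buffer at the current aligned `offset`.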
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

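    /// Draws the whole scene into `output` in a single render pass. For each
    /// layer, shadows, quads, path sprites, underlines, glyph/icon sprites,
    /// and images are encoded in that order, so later primitives draw on top.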
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                layer.image_glyphs(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

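    /// Restricts subsequent draws to the layer's clip bounds via the scissor
    /// rect, defaulting to the whole drawable when the layer has none.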
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer
            .clip_bounds()
            .unwrap_or_else(|| RectF::new(vec2f(0., 0.), drawable_size / scene.scale_factor()))
            * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

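    /// Encodes one instanced draw covering all of the layer's shadows, after
    /// writing a `GPUIShadow` per shadow into the shared instance buffer.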
    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

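    /// Encodes one instanced draw covering all of the layer's quads. Each
    /// instance carries its background color, per-side border widths, border
    /// color, and corner radius, scaled to device pixels.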
    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or_else(Color::transparent_black)
                    .to_uchar4(),
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

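    /// Draws the layer's glyphs and icons. Each is rasterized into (or fetched
    /// from) the sprite cache, grouped by the atlas texture it lives in, and
    /// drawn with one instanced call per atlas.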
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap sprite to pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap sprite to pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            let source_size = (target_size * 2.).to_i32();

            let sprite =
                self.sprite_cache
                    .render_icon(source_size, icon.path.clone(), icon.svg.clone());
            if sprite.is_none() {
                continue;
            }
            let sprite = sprite.unwrap();

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

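    /// Draws the layer's images and image glyphs. Pixel data is uploaded into
    /// the image cache's atlases on demand, and instances are grouped by atlas
    /// so each atlas needs only one instanced draw.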
    fn render_images(
        &mut self,
        images: &[Image],
        image_glyphs: &[ImageGlyph],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() && image_glyphs.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        for image_glyph in image_glyphs {
            let origin = (image_glyph.origin * scale_factor).floor();
            if let Some((alloc_id, atlas_bounds, glyph_origin)) =
                self.image_cache.render_glyph(image_glyph)
            {
                images_by_atlas
                    .entry(alloc_id.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUIImage {
                        origin: (origin + glyph_origin.to_f32()).to_float2(),
                        target_size: atlas_bounds.size().to_float2(),
                        source_size: atlas_bounds.size().to_float2(),
                        atlas_origin: atlas_bounds.origin().to_float2(),
                        border_top: 0.,
                        border_right: 0.,
                        border_bottom: 0.,
                        border_left: 0.,
                        border_color: Default::default(),
                        corner_radius: 0.,
                    });
            } else {
                log::warn!("could not render glyph with id {}", image_glyph.id);
            }
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

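    /// Composites the path sprites belonging to `layer_id`, consuming them
    /// from the iterator produced by `render_path_atlases`. Runs of sprites
    /// that share an atlas are flushed together with a single instanced draw.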
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents =
                    (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

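    /// Issues one instanced draw for `sprite_count` path sprites that all
    /// sample the same path atlas texture.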
    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

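    /// Encodes one instanced draw for the layer's underlines. Squiggly
    /// underlines get a quad three times the underline's thickness so the
    /// wavy shape has room to render.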
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *(buffer_contents.add(ix)) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

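/// Describes the 2048x2048 single-channel (R16Float) textures backing the path
/// atlases: they are render targets while paths are rasterized and are sampled
/// as ordinary textures when the resulting sprites are composited.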
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

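/// Rounds `offset` up to the next multiple of 256 (e.g. 1 -> 256, 257 -> 512);
/// Metal expects buffer binding offsets to be aligned this way.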
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r; // Align to a multiple of 256 to make Metal happy
    }
}

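/// Builds one of the on-screen render pipelines (quads, shadows, sprites,
/// images, underlines): color channels blend with source-alpha /
/// one-minus-source-alpha while alpha accumulates additively, targeting the
/// drawable's pixel format.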
fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

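/// Like `build_pipeline_state`, but with purely additive (one / one) blending.
/// This is the pipeline used when rasterizing paths into the R16Float path
/// atlas textures.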
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

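    /// Packs two 32-bit float components into the integer layout used by the
    /// generated `vector_float2` binding: x in the low 32 bits, y in the high
    /// 32 bits.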
    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            let mut output = self.y().to_bits() as vector_float2;
            output <<= 32;
            output |= self.x().to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

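    /// Packs an RGBA color into a `vector_uchar4`, one byte per channel, with
    /// red in the lowest byte and alpha in the highest.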
    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
}