use super::{atlas::AtlasAllocator, image_cache::ImageCache, sprite_cache::SpriteCache};
use crate::{
    color::Color,
    geometry::{
        rect::RectF,
        vector::{vec2f, vec2i, Vector2F},
    },
    platform,
    scene::{Glyph, Icon, Image, Layer, Quad, Scene, Shadow, Underline},
};
use cocoa::foundation::NSUInteger;
use log::warn;
use metal::{MTLPixelFormat, MTLResourceOptions, NSRange};
use shaders::ToFloat2 as _;
use std::{collections::HashMap, ffi::c_void, iter::Peekable, mem, sync::Arc, vec};

const SHADERS_METALLIB: &[u8] =
    include_bytes!(concat!(env!("OUT_DIR"), "/shaders.metallib"));
const INSTANCE_BUFFER_SIZE: usize = 1024 * 1024; // This is an arbitrary choice; a more optimal value probably exists.

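/// Metal-backed renderer.
///
/// All per-frame instance data (shadows, quads, sprites, images, underlines,
/// and path vertices) is staged through the single shared `instances` buffer:
/// each draw binds that buffer at a 256-byte-aligned offset that advances
/// monotonically over the course of the frame.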
pub struct Renderer {
    sprite_cache: SpriteCache,
    image_cache: ImageCache,
    path_atlases: AtlasAllocator,
    quad_pipeline_state: metal::RenderPipelineState,
    shadow_pipeline_state: metal::RenderPipelineState,
    sprite_pipeline_state: metal::RenderPipelineState,
    image_pipeline_state: metal::RenderPipelineState,
    path_atlas_pipeline_state: metal::RenderPipelineState,
    underline_pipeline_state: metal::RenderPipelineState,
    unit_vertices: metal::Buffer,
    instances: metal::Buffer,
}

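/// A path that has been rasterized into one of the path atlases, along with
/// the layer it belongs to, so it can be composited during the main pass.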
struct PathSprite {
    layer_id: usize,
    atlas_id: usize,
    shader_data: shaders::GPUISprite,
}

impl Renderer {
    pub fn new(
        device: metal::Device,
        pixel_format: metal::MTLPixelFormat,
        scale_factor: f32,
        fonts: Arc<dyn platform::FontSystem>,
    ) -> Self {
        let library = device
            .new_library_with_data(SHADERS_METALLIB)
            .expect("error building metal library");

        // Two triangles covering the unit square; each instanced draw scales
        // and positions this quad in the vertex shader.
        let unit_vertices = [
            (0., 0.).to_float2(),
            (1., 0.).to_float2(),
            (0., 1.).to_float2(),
            (0., 1.).to_float2(),
            (1., 0.).to_float2(),
            (1., 1.).to_float2(),
        ];
        let unit_vertices = device.new_buffer_with_data(
            unit_vertices.as_ptr() as *const c_void,
            (unit_vertices.len() * mem::size_of::<shaders::vector_float2>()) as u64,
            MTLResourceOptions::StorageModeManaged,
        );
        let instances = device.new_buffer(
            INSTANCE_BUFFER_SIZE as u64,
            MTLResourceOptions::StorageModeManaged,
        );

        let sprite_cache = SpriteCache::new(device.clone(), vec2i(1024, 768), scale_factor, fonts);
        let image_cache = ImageCache::new(device.clone(), vec2i(1024, 768));
        let path_atlases =
            AtlasAllocator::new(device.clone(), build_path_atlas_texture_descriptor());
        let quad_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "quad",
            "quad_vertex",
            "quad_fragment",
            pixel_format,
        );
        let shadow_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "shadow",
            "shadow_vertex",
            "shadow_fragment",
            pixel_format,
        );
        let sprite_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "sprite",
            "sprite_vertex",
            "sprite_fragment",
            pixel_format,
        );
        let image_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "image",
            "image_vertex",
            "image_fragment",
            pixel_format,
        );
        let path_atlas_pipeline_state = build_path_atlas_pipeline_state(
            &device,
            &library,
            "path_atlas",
            "path_atlas_vertex",
            "path_atlas_fragment",
            MTLPixelFormat::R16Float,
        );
        let underline_pipeline_state = build_pipeline_state(
            &device,
            &library,
            "underline",
            "underline_vertex",
            "underline_fragment",
            pixel_format,
        );
        Self {
            sprite_cache,
            image_cache,
            path_atlases,
            quad_pipeline_state,
            shadow_pipeline_state,
            sprite_pipeline_state,
            image_pipeline_state,
            path_atlas_pipeline_state,
            underline_pipeline_state,
            unit_vertices,
            instances,
        }
    }

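    /// Renders a scene in two phases: paths are first rasterized into
    /// offscreen path atlases, then each layer is drawn in order into
    /// `output`. Both phases append instance data to the shared `instances`
    /// buffer, whose modified range is flushed once at the end of the frame.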
    pub fn render(
        &mut self,
        scene: &Scene,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let mut offset = 0;

        let path_sprites = self.render_path_atlases(scene, &mut offset, command_buffer);
        self.render_layers(
            scene,
            path_sprites,
            &mut offset,
            drawable_size,
            command_buffer,
            output,
        );
        // The instance buffer uses StorageModeManaged, so the CPU-written
        // range must be flushed before the GPU consumes it.
        self.instances.did_modify_range(NSRange {
            location: 0,
            length: offset as NSUInteger,
        });
        self.image_cache.finish_frame();
    }

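    /// Rasterizes every path in the scene into the path atlases, batching
    /// consecutive paths that land in the same atlas into a single render
    /// pass. Returns one sprite per path so that `render_layers` can later
    /// composite the rasterized paths out of the atlases.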
    fn render_path_atlases(
        &mut self,
        scene: &Scene,
        offset: &mut usize,
        command_buffer: &metal::CommandBufferRef,
    ) -> Vec<PathSprite> {
        self.path_atlases.clear();
        let mut sprites = Vec::new();
        let mut vertices = Vec::<shaders::GPUIPathVertex>::new();
        let mut current_atlas_id = None;
        for (layer_id, layer) in scene.layers().enumerate() {
            for path in layer.paths() {
                let origin = path.bounds.origin() * scene.scale_factor();
                let size = (path.bounds.size() * scene.scale_factor()).ceil();

                let (alloc_id, atlas_origin) = match self.path_atlases.allocate(size.to_i32()) {
                    Some(allocation) => allocation,
                    None => {
                        // Allocation can fail if the path's size was zero.
                        warn!("could not allocate path texture of size {:?}", size);
                        continue;
                    }
                };
                let atlas_origin = atlas_origin.to_f32();
                sprites.push(PathSprite {
                    layer_id,
                    atlas_id: alloc_id.atlas_id,
                    shader_data: shaders::GPUISprite {
                        origin: origin.floor().to_float2(),
                        target_size: size.to_float2(),
                        source_size: size.to_float2(),
                        atlas_origin: atlas_origin.to_float2(),
                        color: path.color.to_uchar4(),
                        compute_winding: 1,
                    },
                });

                // Flush the accumulated vertices whenever the target atlas
                // changes, since each atlas is rendered in its own pass.
                if let Some(current_atlas_id) = current_atlas_id {
                    if alloc_id.atlas_id != current_atlas_id {
                        self.render_paths_to_atlas(
                            offset,
                            &vertices,
                            current_atlas_id,
                            command_buffer,
                        );
                        vertices.clear();
                    }
                }

                current_atlas_id = Some(alloc_id.atlas_id);

                for vertex in &path.vertices {
                    let xy_position =
                        (vertex.xy_position - path.bounds.origin()) * scene.scale_factor();
                    vertices.push(shaders::GPUIPathVertex {
                        xy_position: (atlas_origin + xy_position).to_float2(),
                        st_position: vertex.st_position.to_float2(),
                        clip_rect_origin: atlas_origin.to_float2(),
                        clip_rect_size: size.to_float2(),
                    });
                }
            }
        }

        if let Some(atlas_id) = current_atlas_id {
            self.render_paths_to_atlas(offset, &vertices, atlas_id, command_buffer);
        }

        sprites
    }

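    /// Encodes a render pass that draws the accumulated path vertices into
    /// the atlas texture identified by `atlas_id`, staging the vertex data
    /// through the shared instance buffer.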
    fn render_paths_to_atlas(
        &mut self,
        offset: &mut usize,
        vertices: &[shaders::GPUIPathVertex],
        atlas_id: usize,
        command_buffer: &metal::CommandBufferRef,
    ) {
        align_offset(offset);
        let next_offset = *offset + vertices.len() * mem::size_of::<shaders::GPUIPathVertex>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        color_attachment.set_texture(Some(texture));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));

        let path_atlas_command_encoder =
            command_buffer.new_render_command_encoder(render_pass_descriptor);
        path_atlas_command_encoder.set_render_pipeline_state(&self.path_atlas_pipeline_state);
        path_atlas_command_encoder.set_vertex_buffer(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexVertices as u64,
            Some(&self.instances),
            *offset as u64,
        );
        path_atlas_command_encoder.set_vertex_bytes(
            shaders::GPUIPathAtlasVertexInputIndex_GPUIPathAtlasVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIPathVertex
        };

        for (ix, vertex) in vertices.iter().enumerate() {
            unsafe {
                *buffer_contents.add(ix) = *vertex;
            }
        }

        path_atlas_command_encoder.draw_primitives(
            metal::MTLPrimitiveType::Triangle,
            0,
            vertices.len() as u64,
        );
        path_atlas_command_encoder.end_encoding();
        *offset = next_offset;
    }

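    /// Encodes the main render pass: for each layer, in order, sets the
    /// layer's scissor rect and draws its shadows, quads, paths, underlines,
    /// glyph/icon sprites, and images into `output`.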
    fn render_layers(
        &mut self,
        scene: &Scene,
        path_sprites: Vec<PathSprite>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_buffer: &metal::CommandBufferRef,
        output: &metal::TextureRef,
    ) {
        let render_pass_descriptor = metal::RenderPassDescriptor::new();
        let color_attachment = render_pass_descriptor
            .color_attachments()
            .object_at(0)
            .unwrap();
        color_attachment.set_texture(Some(output));
        color_attachment.set_load_action(metal::MTLLoadAction::Clear);
        color_attachment.set_store_action(metal::MTLStoreAction::Store);
        color_attachment.set_clear_color(metal::MTLClearColor::new(0., 0., 0., 1.));
        let command_encoder = command_buffer.new_render_command_encoder(render_pass_descriptor);

        command_encoder.set_viewport(metal::MTLViewport {
            originX: 0.0,
            originY: 0.0,
            width: drawable_size.x() as f64,
            height: drawable_size.y() as f64,
            znear: 0.0,
            zfar: 1.0,
        });

        let scale_factor = scene.scale_factor();
        let mut path_sprites = path_sprites.into_iter().peekable();
        for (layer_id, layer) in scene.layers().enumerate() {
            self.clip(scene, layer, drawable_size, command_encoder);
            self.render_shadows(
                layer.shadows(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_quads(
                layer.quads(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_path_sprites(
                layer_id,
                &mut path_sprites,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_underlines(
                layer.underlines(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_sprites(
                layer.glyphs(),
                layer.icons(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
            self.render_images(
                layer.images(),
                scale_factor,
                offset,
                drawable_size,
                command_encoder,
            );
        }

        command_encoder.end_encoding();
    }

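    /// Restricts subsequent draws to the layer's clip bounds (or to the whole
    /// drawable if the layer has none) via a scissor rect in device pixels.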
    fn clip(
        &mut self,
        scene: &Scene,
        layer: &Layer,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let clip_bounds = (layer.clip_bounds().unwrap_or(RectF::new(
            vec2f(0., 0.),
            drawable_size / scene.scale_factor(),
        )) * scene.scale_factor())
        .round();
        command_encoder.set_scissor_rect(metal::MTLScissorRect {
            x: clip_bounds.origin_x() as NSUInteger,
            y: clip_bounds.origin_y() as NSUInteger,
            width: clip_bounds.width() as NSUInteger,
            height: clip_bounds.height() as NSUInteger,
        });
    }

    fn render_shadows(
        &mut self,
        shadows: &[Shadow],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if shadows.is_empty() {
            return;
        }

        align_offset(offset);
        let next_offset = *offset + shadows.len() * mem::size_of::<shaders::GPUIShadow>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.shadow_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexShadows as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIShadowInputIndex_GPUIShadowInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIShadow
        };
        for (ix, shadow) in shadows.iter().enumerate() {
            let shape_bounds = shadow.bounds * scale_factor;
            let shader_shadow = shaders::GPUIShadow {
                origin: shape_bounds.origin().to_float2(),
                size: shape_bounds.size().to_float2(),
                corner_radius: shadow.corner_radius * scale_factor,
                sigma: shadow.sigma,
                color: shadow.color.to_uchar4(),
            };
            unsafe {
                *buffer_contents.add(ix) = shader_shadow;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            shadows.len() as u64,
        );
        *offset = next_offset;
    }

    fn render_quads(
        &mut self,
        quads: &[Quad],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if quads.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + quads.len() * mem::size_of::<shaders::GPUIQuad>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.quad_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexQuads as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIQuadInputIndex_GPUIQuadInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIQuad
        };
        for (ix, quad) in quads.iter().enumerate() {
            let bounds = quad.bounds * scale_factor;
            let border_width = quad.border.width * scale_factor;
            let shader_quad = shaders::GPUIQuad {
                origin: bounds.origin().round().to_float2(),
                size: bounds.size().round().to_float2(),
                background_color: quad
                    .background
                    .unwrap_or(Color::transparent_black())
                    .to_uchar4(),
                // The bool-to-f32 casts zero out the width on sides where the
                // border is disabled.
                border_top: border_width * (quad.border.top as usize as f32),
                border_right: border_width * (quad.border.right as usize as f32),
                border_bottom: border_width * (quad.border.bottom as usize as f32),
                border_left: border_width * (quad.border.left as usize as f32),
                border_color: quad.border.color.to_uchar4(),
                corner_radius: quad.corner_radius * scale_factor,
            };
            unsafe {
                *buffer_contents.add(ix) = shader_quad;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            quads.len() as u64,
        );
        *offset = next_offset;
    }

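    /// Draws glyph and icon sprites, grouped by the atlas the sprite cache
    /// placed them in, with one instanced draw call per atlas.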
    fn render_sprites(
        &mut self,
        glyphs: &[Glyph],
        icons: &[Icon],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if glyphs.is_empty() && icons.is_empty() {
            return;
        }

        self.sprite_cache.set_scale_factor(scale_factor);

        let mut sprites_by_atlas = HashMap::new();

        for glyph in glyphs {
            if let Some(sprite) = self.sprite_cache.render_glyph(
                glyph.font_id,
                glyph.font_size,
                glyph.id,
                glyph.origin,
            ) {
                // Snap the sprite to the pixel grid.
                let origin = (glyph.origin * scale_factor).floor() + sprite.offset.to_f32();
                sprites_by_atlas
                    .entry(sprite.atlas_id)
                    .or_insert_with(Vec::new)
                    .push(shaders::GPUISprite {
                        origin: origin.to_float2(),
                        target_size: sprite.size.to_float2(),
                        source_size: sprite.size.to_float2(),
                        atlas_origin: sprite.atlas_origin.to_float2(),
                        color: glyph.color.to_uchar4(),
                        compute_winding: 0,
                    });
            }
        }

        for icon in icons {
            // Snap the sprite to the pixel grid.
            let origin = (icon.bounds.origin() * scale_factor).floor();
            let target_size = (icon.bounds.size() * scale_factor).ceil();
            // Rasterize at twice the target size, so the icon is downsampled
            // when it is composited.
            let source_size = (target_size * 2.).to_i32();

            let sprite = match self
                .sprite_cache
                .render_icon(source_size, icon.path.clone(), icon.svg.clone())
            {
                Some(sprite) => sprite,
                None => continue,
            };

            sprites_by_atlas
                .entry(sprite.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUISprite {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: sprite.size.to_float2(),
                    atlas_origin: sprite.atlas_origin.to_float2(),
                    color: icon.color.to_uchar4(),
                    compute_winding: 0,
                });
        }

        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, sprites) in sprites_by_atlas {
            align_offset(offset);
            let next_offset = *offset + sprites.len() * mem::size_of::<shaders::GPUISprite>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.sprite_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );

            command_encoder.set_fragment_texture(
                shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISprite;
                std::ptr::copy_nonoverlapping(sprites.as_ptr(), buffer_contents, sprites.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                sprites.len() as u64,
            );
            *offset = next_offset;
        }
    }

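    /// Draws images, grouped by the atlas the image cache placed them in,
    /// with one instanced draw call per atlas.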
    fn render_images(
        &mut self,
        images: &[Image],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if images.is_empty() {
            return;
        }

        let mut images_by_atlas = HashMap::new();
        for image in images {
            let origin = image.bounds.origin() * scale_factor;
            let target_size = image.bounds.size() * scale_factor;
            let corner_radius = image.corner_radius * scale_factor;
            let border_width = image.border.width * scale_factor;
            let (alloc_id, atlas_bounds) = self.image_cache.render(&image.data);
            images_by_atlas
                .entry(alloc_id.atlas_id)
                .or_insert_with(Vec::new)
                .push(shaders::GPUIImage {
                    origin: origin.to_float2(),
                    target_size: target_size.to_float2(),
                    source_size: atlas_bounds.size().to_float2(),
                    atlas_origin: atlas_bounds.origin().to_float2(),
                    border_top: border_width * (image.border.top as usize as f32),
                    border_right: border_width * (image.border.right as usize as f32),
                    border_bottom: border_width * (image.border.bottom as usize as f32),
                    border_left: border_width * (image.border.left as usize as f32),
                    border_color: image.border.color.to_uchar4(),
                    corner_radius,
                });
        }

        command_encoder.set_render_pipeline_state(&self.image_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        for (atlas_id, images) in images_by_atlas {
            align_offset(offset);
            let next_offset = *offset + images.len() * mem::size_of::<shaders::GPUIImage>();
            assert!(
                next_offset <= INSTANCE_BUFFER_SIZE,
                "instance buffer exhausted"
            );

            let texture = self.image_cache.atlas_texture(atlas_id).unwrap();
            command_encoder.set_vertex_buffer(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexImages as u64,
                Some(&self.instances),
                *offset as u64,
            );
            command_encoder.set_vertex_bytes(
                shaders::GPUIImageVertexInputIndex_GPUIImageVertexInputIndexAtlasSize as u64,
                mem::size_of::<shaders::vector_float2>() as u64,
                [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                    as *const c_void,
            );
            command_encoder.set_fragment_texture(
                shaders::GPUIImageFragmentInputIndex_GPUIImageFragmentInputIndexAtlas as u64,
                Some(texture),
            );

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUIImage;
                std::ptr::copy_nonoverlapping(images.as_ptr(), buffer_contents, images.len());
            }

            command_encoder.draw_primitives_instanced(
                metal::MTLPrimitiveType::Triangle,
                0,
                6,
                images.len() as u64,
            );
            *offset = next_offset;
        }
    }

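    /// Composites this layer's rasterized paths out of the path atlases.
    /// `sprites` is ordered by layer, so entries are consumed from the front
    /// of the iterator until one belongs to a later layer, and consecutive
    /// sprites sharing an atlas are batched into a single instanced draw.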
    fn render_path_sprites(
        &mut self,
        layer_id: usize,
        sprites: &mut Peekable<vec::IntoIter<PathSprite>>,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        command_encoder.set_render_pipeline_state(&self.sprite_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexViewportSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [drawable_size.to_float2()].as_ptr() as *const c_void,
        );

        let mut atlas_id = None;
        let mut atlas_sprite_count = 0;
        align_offset(offset);

        while let Some(sprite) = sprites.peek() {
            if sprite.layer_id != layer_id {
                break;
            }

            let sprite = sprites.next().unwrap();
            if let Some(atlas_id) = atlas_id.as_mut() {
                if sprite.atlas_id != *atlas_id {
                    self.render_path_sprites_for_atlas(
                        offset,
                        *atlas_id,
                        atlas_sprite_count,
                        command_encoder,
                    );

                    *atlas_id = sprite.atlas_id;
                    atlas_sprite_count = 0;
                    align_offset(offset);
                }
            } else {
                atlas_id = Some(sprite.atlas_id);
            }

            unsafe {
                let buffer_contents = (self.instances.contents() as *mut u8).add(*offset)
                    as *mut shaders::GPUISprite;
                *buffer_contents.add(atlas_sprite_count) = sprite.shader_data;
            }

            atlas_sprite_count += 1;
        }

        if let Some(atlas_id) = atlas_id {
            self.render_path_sprites_for_atlas(
                offset,
                atlas_id,
                atlas_sprite_count,
                command_encoder,
            );
        }
    }

    fn render_path_sprites_for_atlas(
        &mut self,
        offset: &mut usize,
        atlas_id: usize,
        sprite_count: usize,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        let next_offset = *offset + sprite_count * mem::size_of::<shaders::GPUISprite>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexSprites as u64,
            Some(&self.instances),
            *offset as u64,
        );
        let texture = self.path_atlases.texture(atlas_id).unwrap();
        command_encoder.set_fragment_texture(
            shaders::GPUISpriteFragmentInputIndex_GPUISpriteFragmentInputIndexAtlas as u64,
            Some(texture),
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUISpriteVertexInputIndex_GPUISpriteVertexInputIndexAtlasSize as u64,
            mem::size_of::<shaders::vector_float2>() as u64,
            [vec2i(texture.width() as i32, texture.height() as i32).to_float2()].as_ptr()
                as *const c_void,
        );

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            sprite_count as u64,
        );
        *offset = next_offset;
    }

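    /// Draws underlines with a single instanced call. Squiggly underlines get
    /// three times the thickness so the wave has room to oscillate.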
    fn render_underlines(
        &mut self,
        underlines: &[Underline],
        scale_factor: f32,
        offset: &mut usize,
        drawable_size: Vector2F,
        command_encoder: &metal::RenderCommandEncoderRef,
    ) {
        if underlines.is_empty() {
            return;
        }
        align_offset(offset);
        let next_offset = *offset + underlines.len() * mem::size_of::<shaders::GPUIUnderline>();
        assert!(
            next_offset <= INSTANCE_BUFFER_SIZE,
            "instance buffer exhausted"
        );

        command_encoder.set_render_pipeline_state(&self.underline_pipeline_state);
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexVertices as u64,
            Some(&self.unit_vertices),
            0,
        );
        command_encoder.set_vertex_buffer(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUnderlines as u64,
            Some(&self.instances),
            *offset as u64,
        );
        command_encoder.set_vertex_bytes(
            shaders::GPUIUnderlineInputIndex_GPUIUnderlineInputIndexUniforms as u64,
            mem::size_of::<shaders::GPUIUniforms>() as u64,
            [shaders::GPUIUniforms {
                viewport_size: drawable_size.to_float2(),
            }]
            .as_ptr() as *const c_void,
        );

        let buffer_contents = unsafe {
            (self.instances.contents() as *mut u8).add(*offset) as *mut shaders::GPUIUnderline
        };
        for (ix, underline) in underlines.iter().enumerate() {
            let origin = underline.origin * scale_factor;
            let mut height = underline.thickness;
            if underline.squiggly {
                height *= 3.;
            }
            let size = vec2f(underline.width, height) * scale_factor;
            let shader_underline = shaders::GPUIUnderline {
                origin: origin.round().to_float2(),
                size: size.round().to_float2(),
                thickness: underline.thickness * scale_factor,
                color: underline.color.to_uchar4(),
                squiggly: underline.squiggly as u8,
            };
            unsafe {
                *buffer_contents.add(ix) = shader_underline;
            }
        }

        command_encoder.draw_primitives_instanced(
            metal::MTLPrimitiveType::Triangle,
            0,
            6,
            underlines.len() as u64,
        );
        *offset = next_offset;
    }
}

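/// Describes the 2048x2048 single-channel float textures used as path
/// atlases. Each atlas is both a render target (paths are rasterized into it)
/// and shader-readable (layers sample it when compositing).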
fn build_path_atlas_texture_descriptor() -> metal::TextureDescriptor {
    let texture_descriptor = metal::TextureDescriptor::new();
    texture_descriptor.set_width(2048);
    texture_descriptor.set_height(2048);
    texture_descriptor.set_pixel_format(MTLPixelFormat::R16Float);
    texture_descriptor
        .set_usage(metal::MTLTextureUsage::RenderTarget | metal::MTLTextureUsage::ShaderRead);
    texture_descriptor.set_storage_mode(metal::MTLStorageMode::Private);
    texture_descriptor
}

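/// Rounds `offset` up to the next multiple of 256, since Metal requires
/// 256-byte alignment for buffer binding offsets on macOS.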
fn align_offset(offset: &mut usize) {
    let r = *offset % 256;
    if r > 0 {
        *offset += 256 - r;
    }
}
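
// A minimal sketch of `align_offset`'s contract, written as an ordinary unit
// test (hypothetical; not part of the original module): offsets round up to
// the next multiple of 256, and already-aligned offsets, including zero, are
// left untouched.
#[cfg(test)]
mod align_offset_tests {
    use super::align_offset;

    #[test]
    fn rounds_up_to_multiples_of_256() {
        let mut offset = 0;
        align_offset(&mut offset);
        assert_eq!(offset, 0);

        let mut offset = 1;
        align_offset(&mut offset);
        assert_eq!(offset, 256);

        let mut offset = 256;
        align_offset(&mut offset);
        assert_eq!(offset, 256);

        let mut offset = 257;
        align_offset(&mut offset);
        assert_eq!(offset, 512);
    }
}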

fn build_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    // Blend the source over the destination using the source's alpha for
    // color; alpha channels are accumulated additively.
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::SourceAlpha);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::OneMinusSourceAlpha);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

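// Unlike the pipeline above, the path atlas pipeline blends purely additively
// (One/One for both color and alpha): each triangle's contribution
// accumulates in the R16Float atlas, which is how a path's winding number is
// computed.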
fn build_path_atlas_pipeline_state(
    device: &metal::DeviceRef,
    library: &metal::LibraryRef,
    label: &str,
    vertex_fn_name: &str,
    fragment_fn_name: &str,
    pixel_format: metal::MTLPixelFormat,
) -> metal::RenderPipelineState {
    let vertex_fn = library
        .get_function(vertex_fn_name, None)
        .expect("error locating vertex function");
    let fragment_fn = library
        .get_function(fragment_fn_name, None)
        .expect("error locating fragment function");

    let descriptor = metal::RenderPipelineDescriptor::new();
    descriptor.set_label(label);
    descriptor.set_vertex_function(Some(vertex_fn.as_ref()));
    descriptor.set_fragment_function(Some(fragment_fn.as_ref()));
    let color_attachment = descriptor.color_attachments().object_at(0).unwrap();
    color_attachment.set_pixel_format(pixel_format);
    color_attachment.set_blending_enabled(true);
    color_attachment.set_rgb_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_alpha_blend_operation(metal::MTLBlendOperation::Add);
    color_attachment.set_source_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_source_alpha_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_rgb_blend_factor(metal::MTLBlendFactor::One);
    color_attachment.set_destination_alpha_blend_factor(metal::MTLBlendFactor::One);

    device
        .new_render_pipeline_state(&descriptor)
        .expect("could not create render pipeline state")
}

mod shaders {
    #![allow(non_upper_case_globals)]
    #![allow(non_camel_case_types)]
    #![allow(non_snake_case)]

    use crate::{
        color::Color,
        geometry::vector::{Vector2F, Vector2I},
    };

    include!(concat!(env!("OUT_DIR"), "/shaders.rs"));

    pub trait ToFloat2 {
        fn to_float2(&self) -> vector_float2;
    }

    impl ToFloat2 for (f32, f32) {
        fn to_float2(&self) -> vector_float2 {
            // Pack the two f32 bit patterns into a single 64-bit value, with
            // y in the high 32 bits and x in the low 32 bits. `f32::to_bits`
            // already yields a `u32`, so no transmute is needed.
            let mut output = self.1.to_bits() as vector_float2;
            output <<= 32;
            output |= self.0.to_bits() as vector_float2;
            output
        }
    }

    impl ToFloat2 for Vector2F {
        fn to_float2(&self) -> vector_float2 {
            (self.x(), self.y()).to_float2()
        }
    }

    impl ToFloat2 for Vector2I {
        fn to_float2(&self) -> vector_float2 {
            self.to_f32().to_float2()
        }
    }

    impl Color {
        pub fn to_uchar4(&self) -> vector_uchar4 {
            // Pack RGBA into a vector_uchar4 with `r` in the lowest byte.
            let mut vec = self.a as vector_uchar4;
            vec <<= 8;
            vec |= self.b as vector_uchar4;
            vec <<= 8;
            vec |= self.g as vector_uchar4;
            vec <<= 8;
            vec |= self.r as vector_uchar4;
            vec
        }
    }
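
    // A minimal sketch exercising the bit packing above, assuming (as the
    // shifts imply) that the generated `vector_float2` binding is a 64-bit
    // integer type: x lands in the low 32 bits and y in the high 32 bits.
    // The test module is hypothetical and not part of the original file.
    #[cfg(test)]
    mod packing_tests {
        use super::ToFloat2;

        #[test]
        fn packs_x_low_and_y_high() {
            let packed = (1.5f32, -2.0f32).to_float2();
            assert_eq!((packed & 0xFFFF_FFFF) as u32, 1.5f32.to_bits());
            assert_eq!((packed >> 32) as u32, (-2.0f32).to_bits());
        }
    }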
}