use std::cmp::min;
use std::marker::PhantomData;
use std::ptr::NonNull;
use std::slice;

use cast::i32;
use cssparser::Color;
use nalgebra::{storage::Storage, Dim, Matrix};

use crate::color::color_to_rgba;
use crate::drawing_ctx::set_source_color_on_cairo;
use crate::error::*;
use crate::rect::{IRect, Rect};
use crate::surface_utils::srgb;
use crate::util::clamp;

use super::{
    iterators::{PixelRectangle, Pixels},
    AsCairoARGB, CairoARGB, EdgeMode, ImageSurfaceDataExt, Pixel, PixelOps, ToCairoARGB, ToPixel,
};

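/// Interpolation to use when scaling or resampling a surface; converted to a
/// `cairo::Filter` when painting.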
pub enum Interpolation {
    Nearest,
    Smooth,
}

impl From<Interpolation> for cairo::Filter {
    fn from(i: Interpolation) -> cairo::Filter {
        match i {
            Interpolation::Nearest => cairo::Filter::Nearest,
            Interpolation::Smooth => cairo::Filter::Good,
        }
    }
}

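/// Kind of pixel data stored in an `ImageSurface`: sRGB, linear RGB, or a surface
/// whose only meaningful channel is alpha.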
#[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)]
pub enum SurfaceType {
    SRgb,
    LinearRgb,
    AlphaOnly,
}

impl SurfaceType {
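    /// Combines the surface types of two inputs: an alpha-only input takes on the
    /// other input's type, and identical types are kept. Combining sRGB with linear
    /// RGB directly is a programming error and panics.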
    pub fn combine(self, other: SurfaceType) -> SurfaceType {
        match (self, other) {
            (SurfaceType::AlphaOnly, t) => t,
            (t, SurfaceType::AlphaOnly) => t,
            (t1, t2) if t1 == t2 => t1,
            _ => panic!(),
        }
    }
}

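/// Compositing operators used by `compose()`; each variant maps to the corresponding
/// `cairo::Operator`.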
pub enum Operator {
    Over,
    In,
    Out,
    Atop,
    Xor,
    Multiply,
    Screen,
    Darken,
    Lighten,
    Overlay,
    ColorDodge,
    ColorBurn,
    HardLight,
    SoftLight,
    Difference,
    Exclusion,
    HslHue,
    HslSaturation,
    HslColor,
    HslLuminosity,
}

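/// Wrapper for a Cairo image surface in ARGB32 format.
///
/// The type parameter tracks the access state: an `ImageSurface<Exclusive>` can be
/// mutated, while an `ImageSurface<Shared>` is read-only and cheap to share.
///
/// A minimal usage sketch based on the constructors in this module (not taken from the
/// original documentation):
///
/// ```ignore
/// let mut surface = ExclusiveImageSurface::new(100, 100, SurfaceType::SRgb)?;
/// surface.modify(&mut |data, stride| {
///     // write pixels through `data.set_pixel(stride, pixel, x, y)`
/// });
/// let shared: SharedImageSurface = surface.share()?;
/// ```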
#[derive(Debug, Clone)]
pub struct ImageSurface<T> {
    surface: cairo::ImageSurface,

    data_ptr: NonNull<u8>,
    width: i32,
    height: i32,
    stride: isize,

    surface_type: SurfaceType,

    _state: PhantomData<T>,
}

#[derive(Debug, Clone)]
pub struct Shared;

pub type SharedImageSurface = ImageSurface<Shared>;

#[derive(Debug, Clone)]
pub struct Exclusive;

pub type ExclusiveImageSurface = ImageSurface<Exclusive>;

unsafe impl Sync for SharedImageSurface {}

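/// Compile-time selector for the direction of a box-blur pass; `Vertical` blurs each
/// column, `Horizontal` blurs each row.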
pub trait BlurDirection {
    const IS_VERTICAL: bool;
}

pub enum Vertical {}
pub enum Horizontal {}

impl BlurDirection for Vertical {
    const IS_VERTICAL: bool = true;
}

impl BlurDirection for Horizontal {
    const IS_VERTICAL: bool = false;
}

pub trait IsAlphaOnly {
    const IS_ALPHA_ONLY: bool;
}

pub enum AlphaOnly {}
pub enum NotAlphaOnly {}

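/// Iterator over the rows of a `SharedImageSurface`, yielding `&[CairoARGB]` slices.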
pub struct Rows<'a> {
    surface: &'a SharedImageSurface,
    next_row: i32,
}

pub struct RowsMut<'a> {
    data: cairo::ImageSurfaceData<'a>,

    width: i32,
    height: i32,
    stride: i32,

    next_row: i32,
}

impl IsAlphaOnly for AlphaOnly {
    const IS_ALPHA_ONLY: bool = true;
}

impl IsAlphaOnly for NotAlphaOnly {
    const IS_ALPHA_ONLY: bool = false;
}

impl<T> ImageSurface<T> {
    #[inline]
    pub fn width(&self) -> i32 {
        self.width
    }

    #[inline]
    pub fn height(&self) -> i32 {
        self.height
    }

    #[inline]
    pub fn stride(&self) -> isize {
        self.stride
    }
}

impl ImageSurface<Shared> {
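    /// Wraps a `cairo::ImageSurface` in ARGB32 format as a `SharedImageSurface`.
    ///
    /// The surface must have a reference count of 1 and positive dimensions; it is
    /// flushed and its pixel data pointer is cached for direct read access.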
    #[inline]
    pub fn wrap(
        surface: cairo::ImageSurface,
        surface_type: SurfaceType,
    ) -> Result<SharedImageSurface, cairo::Error> {
        assert_eq!(surface.format(), cairo::Format::ARgb32);

        let reference_count =
            unsafe { cairo::ffi::cairo_surface_get_reference_count(surface.to_raw_none()) };
        assert_eq!(reference_count, 1);

        let (width, height) = (surface.width(), surface.height());

        if !(width > 0 && height > 0) {
            return Err(cairo::Error::InvalidSize);
        }

        surface.flush();

        let data_ptr = NonNull::new(unsafe {
            cairo::ffi::cairo_image_surface_get_data(surface.to_raw_none())
        })
        .unwrap();

        let stride = surface.stride() as isize;

        Ok(SharedImageSurface {
            surface,
            data_ptr,
            width,
            height,
            stride,
            surface_type,
            _state: PhantomData,
        })
    }

    #[inline]
    pub fn copy_from_surface(surface: &cairo::ImageSurface) -> Result<Self, cairo::Error> {
        let copy =
            cairo::ImageSurface::create(cairo::Format::ARgb32, surface.width(), surface.height())?;

        {
            let cr = cairo::Context::new(&copy)?;
            cr.set_source_surface(surface, 0f64, 0f64)?;
            cr.paint()?;
        }

        SharedImageSurface::wrap(copy, SurfaceType::SRgb)
    }

    #[inline]
    pub fn empty(width: i32, height: i32, surface_type: SurfaceType) -> Result<Self, cairo::Error> {
        let s = cairo::ImageSurface::create(cairo::Format::ARgb32, width, height)?;

        SharedImageSurface::wrap(s, surface_type)
    }

    #[inline]
    pub fn into_image_surface(self) -> Result<cairo::ImageSurface, cairo::Error> {
        let reference_count =
            unsafe { cairo::ffi::cairo_surface_get_reference_count(self.surface.to_raw_none()) };

        if reference_count == 1 {
            Ok(self.surface)
        } else {
            self.copy_surface(IRect::from_size(self.width, self.height))
        }
    }

    pub fn from_image(
        image: &image::DynamicImage,
        content_type: Option<&str>,
        mime_data: Option<Vec<u8>>,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let rgba_image = image.to_rgba8();

        let width = i32(rgba_image.width()).map_err(|_| cairo::Error::InvalidSize)?;
        let height = i32(rgba_image.height()).map_err(|_| cairo::Error::InvalidSize)?;

        let mut surf = ExclusiveImageSurface::new(width, height, SurfaceType::SRgb)?;

        rgba_image
            .rows()
            .zip(surf.rows_mut())
            .flat_map(|(src_row, dest_row)| src_row.zip(dest_row.iter_mut()))
            .for_each(|(src, dest)| *dest = src.to_pixel().premultiply().to_cairo_argb());

        if let (Some(content_type), Some(bytes)) = (content_type, mime_data) {
            surf.surface.set_mime_data(content_type, bytes)?;
        }

        surf.share()
    }

    #[inline]
    fn is_alpha_only(&self) -> bool {
        self.surface_type == SurfaceType::AlphaOnly
    }

    #[inline]
    pub fn surface_type(&self) -> SurfaceType {
        self.surface_type
    }

    #[inline]
    pub fn get_pixel(&self, x: u32, y: u32) -> Pixel {
        assert!(x < self.width as u32);
        assert!(y < self.height as u32);

        #[allow(clippy::cast_ptr_alignment)]
        let value = unsafe {
            *(self
                .data_ptr
                .as_ptr()
                .offset(y as isize * self.stride + x as isize * 4) as *const u32)
        };

        Pixel::from_u32(value)
    }

    #[inline]
    pub fn get_pixel_by_offset(&self, offset: isize) -> Pixel {
        assert!(offset < self.stride * self.height as isize);

        #[allow(clippy::cast_ptr_alignment)]
        let value = unsafe { *(self.data_ptr.as_ptr().offset(offset) as *const u32) };
        Pixel::from_u32(value)
    }

    #[inline]
    pub fn set_as_source_surface(
        &self,
        cr: &cairo::Context,
        x: f64,
        y: f64,
    ) -> Result<(), cairo::Error> {
        cr.set_source_surface(&self.surface, x, y)
    }

    pub fn to_cairo_pattern(&self) -> cairo::SurfacePattern {
        cairo::SurfacePattern::create(&self.surface)
    }

    fn copy_surface(&self, bounds: IRect) -> Result<cairo::ImageSurface, cairo::Error> {
        let output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        let cr = cairo::Context::new(&output_surface)?;
        let r = cairo::Rectangle::from(bounds);
        cr.rectangle(r.x(), r.y(), r.width(), r.height());
        cr.clip();

        cr.set_source_surface(&self.surface, 0f64, 0f64)?;
        cr.paint()?;

        Ok(output_surface)
    }

    pub fn scale_to(
        &self,
        width: i32,
        height: i32,
        bounds: IRect,
        x: f64,
        y: f64,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let output_surface = cairo::ImageSurface::create(cairo::Format::ARgb32, width, height)?;

        {
            let cr = cairo::Context::new(&output_surface)?;
            let r = cairo::Rectangle::from(bounds);
            cr.rectangle(r.x(), r.y(), r.width(), r.height());
            cr.clip();

            cr.scale(x, y);
            self.set_as_source_surface(&cr, 0.0, 0.0)?;
            cr.paint()?;
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    #[inline]
    pub fn scale(
        &self,
        bounds: IRect,
        x: f64,
        y: f64,
    ) -> Result<(SharedImageSurface, IRect), cairo::Error> {
        let new_width = (f64::from(self.width) * x).ceil() as i32;
        let new_height = (f64::from(self.height) * y).ceil() as i32;
        let new_bounds = bounds.scale(x, y);

        Ok((
            self.scale_to(new_width, new_height, new_bounds, x, y)?,
            new_bounds,
        ))
    }

    pub fn extract_alpha(&self, bounds: IRect) -> Result<SharedImageSurface, cairo::Error> {
        let mut output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        let output_stride = output_surface.stride() as usize;
        {
            let mut output_data = output_surface.data().unwrap();

            for (x, y, Pixel { a, .. }) in Pixels::within(self, bounds) {
                let output_pixel = Pixel {
                    r: 0,
                    g: 0,
                    b: 0,
                    a,
                };
                output_data.set_pixel(output_stride, output_pixel, x, y);
            }
        }

        SharedImageSurface::wrap(output_surface, SurfaceType::AlphaOnly)
    }

    pub fn to_luminance_mask(&self) -> Result<SharedImageSurface, cairo::Error> {
        let bounds = IRect::from_size(self.width, self.height);

        let mut output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        let stride = output_surface.stride() as usize;
        {
            let mut data = output_surface.data().unwrap();

            for (x, y, pixel) in Pixels::within(self, bounds) {
                data.set_pixel(stride, pixel.to_luminance_mask(), x, y);
            }
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    pub fn unpremultiply(&self, bounds: IRect) -> Result<SharedImageSurface, cairo::Error> {
        if self.is_alpha_only() {
            return Ok(self.clone());
        }

        let mut output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        let stride = output_surface.stride() as usize;
        {
            let mut data = output_surface.data().unwrap();

            for (x, y, pixel) in Pixels::within(self, bounds) {
                data.set_pixel(stride, pixel.unpremultiply(), x, y);
            }
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    #[inline]
    pub fn to_linear_rgb(&self, bounds: IRect) -> Result<SharedImageSurface, cairo::Error> {
        match self.surface_type {
            SurfaceType::LinearRgb | SurfaceType::AlphaOnly => Ok(self.clone()),
            _ => srgb::linearize_surface(self, bounds),
        }
    }

    #[inline]
    pub fn to_srgb(&self, bounds: IRect) -> Result<SharedImageSurface, cairo::Error> {
        match self.surface_type {
            SurfaceType::SRgb | SurfaceType::AlphaOnly => Ok(self.clone()),
            _ => srgb::unlinearize_surface(self, bounds),
        }
    }

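    /// Returns a surface with the given convolution kernel applied within `bounds`,
    /// with `target` as the kernel's target (anchor) pixel and `edge_mode` controlling
    /// how pixels outside `bounds` are sampled. Channel sums are clamped to [0, 255];
    /// alpha-only surfaces convolve just the alpha channel.
    ///
    /// A hedged sketch of a call site (not from the original docs), assuming
    /// `EdgeMode::Duplicate` is available:
    ///
    /// ```ignore
    /// // 3×3 mean filter anchored at the kernel's center pixel.
    /// let kernel = nalgebra::Matrix3::from_element(1.0 / 9.0);
    /// let smoothed = surface.convolve(bounds, (1, 1), &kernel, EdgeMode::Duplicate)?;
    /// ```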
    pub fn convolve<R: Dim, C: Dim, S: Storage<f64, R, C>>(
        &self,
        bounds: IRect,
        target: (i32, i32),
        kernel: &Matrix<f64, R, C, S>,
        edge_mode: EdgeMode,
    ) -> Result<SharedImageSurface, cairo::Error> {
        assert!(kernel.nrows() >= 1);
        assert!(kernel.ncols() >= 1);

        let mut output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        let output_stride = output_surface.stride() as usize;
        {
            let mut output_data = output_surface.data().unwrap();

            if self.is_alpha_only() {
                for (x, y, _pixel) in Pixels::within(self, bounds) {
                    let kernel_bounds = IRect::new(
                        x as i32 - target.0,
                        y as i32 - target.1,
                        x as i32 - target.0 + kernel.ncols() as i32,
                        y as i32 - target.1 + kernel.nrows() as i32,
                    );

                    let mut a = 0.0;

                    for (x, y, pixel) in
                        PixelRectangle::within(self, bounds, kernel_bounds, edge_mode)
                    {
                        let kernel_x = (kernel_bounds.x1 - x - 1) as usize;
                        let kernel_y = (kernel_bounds.y1 - y - 1) as usize;
                        let factor = kernel[(kernel_y, kernel_x)];

                        a += f64::from(pixel.a) * factor;
                    }

                    let convert = |x: f64| (clamp(x, 0.0, 255.0) + 0.5) as u8;

                    let output_pixel = Pixel {
                        r: 0,
                        g: 0,
                        b: 0,
                        a: convert(a),
                    };

                    output_data.set_pixel(output_stride, output_pixel, x, y);
                }
            } else {
                for (x, y, _pixel) in Pixels::within(self, bounds) {
                    let kernel_bounds = IRect::new(
                        x as i32 - target.0,
                        y as i32 - target.1,
                        x as i32 - target.0 + kernel.ncols() as i32,
                        y as i32 - target.1 + kernel.nrows() as i32,
                    );

                    let mut r = 0.0;
                    let mut g = 0.0;
                    let mut b = 0.0;
                    let mut a = 0.0;

                    for (x, y, pixel) in
                        PixelRectangle::within(self, bounds, kernel_bounds, edge_mode)
                    {
                        let kernel_x = (kernel_bounds.x1 - x - 1) as usize;
                        let kernel_y = (kernel_bounds.y1 - y - 1) as usize;
                        let factor = kernel[(kernel_y, kernel_x)];

                        r += f64::from(pixel.r) * factor;
                        g += f64::from(pixel.g) * factor;
                        b += f64::from(pixel.b) * factor;
                        a += f64::from(pixel.a) * factor;
                    }

                    let convert = |x: f64| (clamp(x, 0.0, 255.0) + 0.5) as u8;

                    let output_pixel = Pixel {
                        r: convert(r),
                        g: convert(g),
                        b: convert(b),
                        a: convert(a),
                    };

                    output_data.set_pixel(output_stride, output_pixel, x, y);
                }
            }
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

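    /// Performs a single box-blur pass along direction `B`, writing into
    /// `output_surface`. A running sum over a window of `kernel_size` pixels is kept
    /// while sliding along each row or column, and each output pixel is the rounded
    /// average `sum / kernel_size`; `target` selects which pixel inside the window the
    /// result is written to. Rows (or columns) are processed in parallel with rayon.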
    pub fn box_blur_loop<B: BlurDirection, A: IsAlphaOnly>(
        &self,
        output_surface: &mut cairo::ImageSurface,
        bounds: IRect,
        kernel_size: usize,
        target: usize,
    ) {
        assert_ne!(kernel_size, 0);
        assert!(target < kernel_size);
        assert_eq!(self.is_alpha_only(), A::IS_ALPHA_ONLY);

        {
            // Helper for parallel processing: holds a sub-slice of the output pixel
            // data and can be split at a row or column into two non-overlapping parts,
            // similar to slice::split_at_mut().
            struct UnsafeSendPixelData<'a> {
                width: u32,
                height: u32,
                stride: isize,
                ptr: NonNull<u8>,
                _marker: PhantomData<&'a mut ()>,
            }

            unsafe impl<'a> Send for UnsafeSendPixelData<'a> {}

            impl<'a> UnsafeSendPixelData<'a> {
                #[inline]
                unsafe fn new(surface: &mut cairo::ImageSurface) -> Self {
                    assert_eq!(surface.format(), cairo::Format::ARgb32);
                    let ptr = surface.data().unwrap().as_mut_ptr();

                    Self {
                        width: surface.width() as u32,
                        height: surface.height() as u32,
                        stride: surface.stride() as isize,
                        ptr: NonNull::new(ptr).unwrap(),
                        _marker: PhantomData,
                    }
                }

                #[inline]
                fn set_pixel(&mut self, pixel: Pixel, x: u32, y: u32) {
                    assert!(x < self.width);
                    assert!(y < self.height);

                    let value = pixel.to_u32();

                    #[allow(clippy::cast_ptr_alignment)]
                    unsafe {
                        let ptr = self
                            .ptr
                            .as_ptr()
                            .offset(y as isize * self.stride + x as isize * 4)
                            as *mut u32;
                        *ptr = value;
                    }
                }

                #[inline]
                fn split_at_row(self, index: u32) -> (Self, Self) {
                    assert!(index <= self.height);

                    (
                        UnsafeSendPixelData {
                            width: self.width,
                            height: index,
                            stride: self.stride,
                            ptr: self.ptr,
                            _marker: PhantomData,
                        },
                        UnsafeSendPixelData {
                            width: self.width,
                            height: self.height - index,
                            stride: self.stride,
                            ptr: NonNull::new(unsafe {
                                self.ptr.as_ptr().offset(index as isize * self.stride)
                            })
                            .unwrap(),
                            _marker: PhantomData,
                        },
                    )
                }

                #[inline]
                fn split_at_column(self, index: u32) -> (Self, Self) {
                    assert!(index <= self.width);

                    (
                        UnsafeSendPixelData {
                            width: index,
                            height: self.height,
                            stride: self.stride,
                            ptr: self.ptr,
                            _marker: PhantomData,
                        },
                        UnsafeSendPixelData {
                            width: self.width - index,
                            height: self.height,
                            stride: self.stride,
                            ptr: NonNull::new(unsafe {
                                self.ptr.as_ptr().offset(index as isize * 4)
                            })
                            .unwrap(),
                            _marker: PhantomData,
                        },
                    )
                }
            }

            let output_data = unsafe { UnsafeSendPixelData::new(output_surface) };

            let shift = (kernel_size - target) as i32;
            let target = target as i32;

            let kernel_size_f64 = kernel_size as f64;
            let compute = |x: u32| (f64::from(x) / kernel_size_f64 + 0.5) as u8;

            let (main_axis_min, main_axis_max, other_axis_min, other_axis_max) = if B::IS_VERTICAL {
                (bounds.y0, bounds.y1, bounds.x0, bounds.x1)
            } else {
                (bounds.x0, bounds.x1, bounds.y0, bounds.y1)
            };

            let pixel = |i, j| {
                let (x, y) = if B::IS_VERTICAL { (i, j) } else { (j, i) };

                self.get_pixel(x as u32, y as u32)
            };

            let mut output_data = if B::IS_VERTICAL {
                output_data.split_at_column(bounds.x0 as u32).1
            } else {
                output_data.split_at_row(bounds.y0 as u32).1
            };

            rayon::scope(|s| {
                for i in other_axis_min..other_axis_max {
                    let (mut current, remaining) = if B::IS_VERTICAL {
                        output_data.split_at_column(1)
                    } else {
                        output_data.split_at_row(1)
                    };

                    output_data = remaining;

                    s.spawn(move |_| {
                        let mut set_pixel = |j, pixel| {
                            let (x, y) = if B::IS_VERTICAL { (0, j) } else { (j, 0) };
                            current.set_pixel(pixel, x, y);
                        };

                        let mut sum_r = 0;
                        let mut sum_g = 0;
                        let mut sum_b = 0;
                        let mut sum_a = 0;

                        for j in main_axis_min..min(main_axis_max, main_axis_min + shift) {
                            let Pixel { r, g, b, a } = pixel(i, j);

                            if !A::IS_ALPHA_ONLY {
                                sum_r += u32::from(r);
                                sum_g += u32::from(g);
                                sum_b += u32::from(b);
                            }

                            sum_a += u32::from(a);
                        }

                        set_pixel(
                            main_axis_min as u32,
                            Pixel {
                                r: compute(sum_r),
                                g: compute(sum_g),
                                b: compute(sum_b),
                                a: compute(sum_a),
                            },
                        );

                        let start_subtracting_at = main_axis_min + target + 1;

                        let stop_adding_at = main_axis_max - shift + 1;

                        for j in main_axis_min + 1..main_axis_max {
                            if j >= start_subtracting_at {
                                let old_pixel = pixel(i, j - target - 1);

                                if !A::IS_ALPHA_ONLY {
                                    sum_r -= u32::from(old_pixel.r);
                                    sum_g -= u32::from(old_pixel.g);
                                    sum_b -= u32::from(old_pixel.b);
                                }

                                sum_a -= u32::from(old_pixel.a);
                            }

                            if j < stop_adding_at {
                                let new_pixel = pixel(i, j + shift - 1);

                                if !A::IS_ALPHA_ONLY {
                                    sum_r += u32::from(new_pixel.r);
                                    sum_g += u32::from(new_pixel.g);
                                    sum_b += u32::from(new_pixel.b);
                                }

                                sum_a += u32::from(new_pixel.a);
                            }

                            set_pixel(
                                j as u32,
                                Pixel {
                                    r: compute(sum_r),
                                    g: compute(sum_g),
                                    b: compute(sum_b),
                                    a: compute(sum_a),
                                },
                            );
                        }
                    });
                }
            });
        }

        // Mark the surface as dirty so Cairo picks up the pixel data written above.
        unsafe { cairo::ffi::cairo_surface_mark_dirty(output_surface.to_raw_none()) }
    }

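    /// Performs a box blur of `kernel_size` pixels along direction `B` within `bounds`,
    /// with `target` as the offset of the output pixel inside the kernel window.
    ///
    /// A hedged usage sketch (not from the original docs): repeated box blurs in both
    /// directions approximate a Gaussian blur.
    ///
    /// ```ignore
    /// let d = kernel_size;
    /// let h = surface.box_blur::<Horizontal>(bounds, d, d / 2)?;
    /// let hv = h.box_blur::<Vertical>(bounds, d, d / 2)?;
    /// ```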
    #[inline]
    pub fn box_blur<B: BlurDirection>(
        &self,
        bounds: IRect,
        kernel_size: usize,
        target: usize,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let mut output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        if self.is_alpha_only() {
            self.box_blur_loop::<B, AlphaOnly>(&mut output_surface, bounds, kernel_size, target);
        } else {
            self.box_blur_loop::<B, NotAlphaOnly>(&mut output_surface, bounds, kernel_size, target);
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    #[inline]
    pub fn flood(&self, bounds: IRect, color: Color) -> Result<SharedImageSurface, cairo::Error> {
        let output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        let rgba = color_to_rgba(&color);

        if rgba.alpha.unwrap_or(0.0) > 0.0 {
            let cr = cairo::Context::new(&output_surface)?;
            let r = cairo::Rectangle::from(bounds);
            cr.rectangle(r.x(), r.y(), r.width(), r.height());
            cr.clip();

            set_source_color_on_cairo(&cr, &color);
            cr.paint()?;
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    #[inline]
    pub fn offset(
        &self,
        bounds: Rect,
        dx: f64,
        dy: f64,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        if let Some(output_bounds) = bounds.translate((dx, dy)).intersection(&bounds) {
            let cr = cairo::Context::new(&output_surface)?;
            let r = cairo::Rectangle::from(output_bounds);
            cr.rectangle(r.x(), r.y(), r.width(), r.height());
            cr.clip();

            self.set_as_source_surface(&cr, dx, dy)?;
            cr.paint()?;
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    #[inline]
    pub fn paint_image(
        &self,
        bounds: Rect,
        image: &SharedImageSurface,
        rect: Option<Rect>,
        interpolation: Interpolation,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        if rect.is_none() || !rect.unwrap().is_empty() {
            let cr = cairo::Context::new(&output_surface)?;
            let r = cairo::Rectangle::from(bounds);
            cr.rectangle(r.x(), r.y(), r.width(), r.height());
            cr.clip();

            image.set_as_source_surface(&cr, 0f64, 0f64)?;

            if let Some(rect) = rect {
                let mut matrix = cairo::Matrix::new(
                    rect.width() / f64::from(image.width()),
                    0.0,
                    0.0,
                    rect.height() / f64::from(image.height()),
                    rect.x0,
                    rect.y0,
                );
                matrix.invert();

                cr.source().set_matrix(matrix);
                cr.source().set_filter(cairo::Filter::from(interpolation));
            }

            cr.paint()?;
        }

        SharedImageSurface::wrap(output_surface, image.surface_type)
    }

    #[inline]
    pub fn tile(&self, bounds: IRect) -> Result<SharedImageSurface, cairo::Error> {
        assert!(!bounds.is_empty());

        let output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, bounds.width(), bounds.height())?;

        {
            let cr = cairo::Context::new(&output_surface)?;
            self.set_as_source_surface(&cr, f64::from(-bounds.x0), f64::from(-bounds.y0))?;
            cr.paint()?;
        }

        SharedImageSurface::wrap(output_surface, self.surface_type)
    }

    #[inline]
    pub fn paint_image_tiled(
        &self,
        bounds: IRect,
        image: &SharedImageSurface,
        x: i32,
        y: i32,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let output_surface =
            cairo::ImageSurface::create(cairo::Format::ARgb32, self.width, self.height)?;

        {
            let cr = cairo::Context::new(&output_surface)?;

            let ptn = image.to_cairo_pattern();
            ptn.set_extend(cairo::Extend::Repeat);
            let mut mat = cairo::Matrix::identity();
            mat.translate(f64::from(-x), f64::from(-y));
            ptn.set_matrix(mat);

            let r = cairo::Rectangle::from(bounds);
            cr.rectangle(r.x(), r.y(), r.width(), r.height());
            cr.clip();

            cr.set_source(&ptn)?;
            cr.paint()?;
        }

        SharedImageSurface::wrap(output_surface, image.surface_type)
    }

    #[inline]
    pub fn compose(
        &self,
        other: &SharedImageSurface,
        bounds: IRect,
        operator: Operator,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let output_surface = other.copy_surface(bounds)?;

        {
            let cr = cairo::Context::new(&output_surface)?;
            let r = cairo::Rectangle::from(bounds);
            cr.rectangle(r.x(), r.y(), r.width(), r.height());
            cr.clip();

            self.set_as_source_surface(&cr, 0.0, 0.0)?;
            cr.set_operator(operator.into());
            cr.paint()?;
        }

        SharedImageSurface::wrap(
            output_surface,
            self.surface_type.combine(other.surface_type),
        )
    }

    #[inline]
    pub fn compose_arithmetic(
        &self,
        other: &SharedImageSurface,
        bounds: IRect,
        k1: f64,
        k2: f64,
        k3: f64,
        k4: f64,
    ) -> Result<SharedImageSurface, cairo::Error> {
        let mut output_surface = ExclusiveImageSurface::new(
            self.width,
            self.height,
            self.surface_type.combine(other.surface_type),
        )?;

        composite_arithmetic(self, other, &mut output_surface, bounds, k1, k2, k3, k4);

        output_surface.share()
    }

    pub fn rows(&self) -> Rows<'_> {
        Rows {
            surface: self,
            next_row: 0,
        }
    }
}

impl<'a> Iterator for Rows<'a> {
    type Item = &'a [CairoARGB];

    fn next(&mut self) -> Option<Self::Item> {
        if self.next_row == self.surface.height {
            return None;
        }

        let row = self.next_row;

        self.next_row += 1;

        unsafe {
            let row_ptr: *const u8 = self
                .surface
                .data_ptr
                .as_ptr()
                .offset(row as isize * self.surface.stride);
            let row_of_u32: &[u32] =
                slice::from_raw_parts(row_ptr as *const u32, self.surface.width as usize);
            let pixels = row_of_u32.as_cairo_argb();
            assert!(pixels.len() == self.surface.width as usize);
            Some(pixels)
        }
    }
}

impl<'a> Iterator for RowsMut<'a> {
    type Item = &'a mut [CairoARGB];

    fn next(&mut self) -> Option<Self::Item> {
        if self.next_row == self.height {
            return None;
        }

        let row = self.next_row as usize;

        self.next_row += 1;

        unsafe {
            let data_ptr = self.data.as_mut_ptr();
            let row_ptr: *mut u8 = data_ptr.offset(row as isize * self.stride as isize);
            let row_of_u32: &mut [u32] =
                slice::from_raw_parts_mut(row_ptr as *mut u32, self.width as usize);
            let pixels = row_of_u32.as_cairo_argb_mut();
            assert!(pixels.len() == self.width as usize);
            Some(pixels)
        }
    }
}

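/// Composites `surface1` and `surface2` into `output_surface` within `bounds` using the
/// arithmetic operator: for each premultiplied channel,
/// `result = k1 * i1 * i2 + k2 * i1 + k3 * i2 + k4`, with color channels clamped to the
/// computed alpha so the result stays premultiplied.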
#[inline]
pub fn composite_arithmetic(
    surface1: &SharedImageSurface,
    surface2: &SharedImageSurface,
    output_surface: &mut ExclusiveImageSurface,
    bounds: IRect,
    k1: f64,
    k2: f64,
    k3: f64,
    k4: f64,
) {
    output_surface.modify(&mut |data, stride| {
        for (x, y, pixel, pixel_2) in
            Pixels::within(surface1, bounds).map(|(x, y, p)| (x, y, p, surface2.get_pixel(x, y)))
        {
            let i1a = f64::from(pixel.a) / 255f64;
            let i2a = f64::from(pixel_2.a) / 255f64;
            let oa = k1 * i1a * i2a + k2 * i1a + k3 * i2a + k4;
            let oa = clamp(oa, 0f64, 1f64);

            if oa > 0f64 {
                let compute = |i1, i2| {
                    let i1 = f64::from(i1) / 255f64;
                    let i2 = f64::from(i2) / 255f64;

                    let o = k1 * i1 * i2 + k2 * i1 + k3 * i2 + k4;
                    let o = clamp(o, 0f64, oa);

                    ((o * 255f64) + 0.5) as u8
                };

                let output_pixel = Pixel {
                    r: compute(pixel.r, pixel_2.r),
                    g: compute(pixel.g, pixel_2.g),
                    b: compute(pixel.b, pixel_2.b),
                    a: ((oa * 255f64) + 0.5) as u8,
                };

                data.set_pixel(stride, output_pixel, x, y);
            }
        }
    });
}

impl ImageSurface<Exclusive> {
    #[inline]
    pub fn new(
        width: i32,
        height: i32,
        surface_type: SurfaceType,
    ) -> Result<ExclusiveImageSurface, cairo::Error> {
        let surface = cairo::ImageSurface::create(cairo::Format::ARgb32, width, height)?;

        let (width, height) = (surface.width(), surface.height());

        if !(width > 0 && height > 0) {
            return Err(cairo::Error::InvalidSize);
        }

        let data_ptr = NonNull::new(unsafe {
            cairo::ffi::cairo_image_surface_get_data(surface.to_raw_none())
        })
        .unwrap();

        let stride = surface.stride() as isize;

        Ok(ExclusiveImageSurface {
            surface,
            data_ptr,
            width,
            height,
            stride,
            surface_type,
            _state: PhantomData,
        })
    }

    #[inline]
    pub fn share(self) -> Result<SharedImageSurface, cairo::Error> {
        SharedImageSurface::wrap(self.surface, self.surface_type)
    }

    #[inline]
    pub fn data(&mut self) -> cairo::ImageSurfaceData<'_> {
        self.surface.data().unwrap()
    }

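    /// Calls `draw_fn` with this surface's raw pixel data and stride, for direct pixel
    /// manipulation.
    ///
    /// A minimal sketch (hypothetical pixel values, not from the original docs):
    ///
    /// ```ignore
    /// surface.modify(&mut |data, stride| {
    ///     data.set_pixel(stride, Pixel { r: 255, g: 0, b: 0, a: 255 }, 0, 0);
    /// });
    /// ```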
    #[inline]
    pub fn modify(&mut self, draw_fn: &mut dyn FnMut(&mut cairo::ImageSurfaceData<'_>, usize)) {
        let stride = self.stride() as usize;
        let mut data = self.data();

        draw_fn(&mut data, stride)
    }

    #[inline]
    pub fn draw(
        &mut self,
        draw_fn: &mut dyn FnMut(cairo::Context) -> Result<(), InternalRenderingError>,
    ) -> Result<(), InternalRenderingError> {
        let cr = cairo::Context::new(&self.surface)?;
        draw_fn(cr)
    }

    pub fn rows_mut(&mut self) -> RowsMut<'_> {
        let width = self.surface.width();
        let height = self.surface.height();
        let stride = self.surface.stride();

        let data = self.surface.data().unwrap();

        RowsMut {
            width,
            height,
            stride,
            data,
            next_row: 0,
        }
    }
}

impl From<Operator> for cairo::Operator {
    fn from(op: Operator) -> cairo::Operator {
        use cairo::Operator as Cairo;
        use Operator::*;

        match op {
            Over => Cairo::Over,
            In => Cairo::In,
            Out => Cairo::Out,
            Atop => Cairo::Atop,
            Xor => Cairo::Xor,
            Multiply => Cairo::Multiply,
            Screen => Cairo::Screen,
            Darken => Cairo::Darken,
            Lighten => Cairo::Lighten,
            Overlay => Cairo::Overlay,
            ColorDodge => Cairo::ColorDodge,
            ColorBurn => Cairo::ColorBurn,
            HardLight => Cairo::HardLight,
            SoftLight => Cairo::SoftLight,
            Difference => Cairo::Difference,
            Exclusion => Cairo::Exclusion,
            HslHue => Cairo::HslHue,
            HslSaturation => Cairo::HslSaturation,
            HslColor => Cairo::HslColor,
            HslLuminosity => Cairo::HslLuminosity,
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::surface_utils::iterators::Pixels;

    #[test]
    fn test_extract_alpha() {
        const WIDTH: i32 = 32;
        const HEIGHT: i32 = 64;

        let bounds = IRect::new(8, 24, 16, 48);
        let full_bounds = IRect::from_size(WIDTH, HEIGHT);

        let mut surface = ExclusiveImageSurface::new(WIDTH, HEIGHT, SurfaceType::SRgb).unwrap();

        {
            let mut data = surface.data();

            let mut counter = 0u16;
            for x in data.iter_mut() {
                *x = counter as u8;
                counter = (counter + 1) % 256;
            }
        }

        let surface = surface.share().unwrap();
        let alpha = surface.extract_alpha(bounds).unwrap();

        for (x, y, p, pa) in
            Pixels::within(&surface, full_bounds).map(|(x, y, p)| (x, y, p, alpha.get_pixel(x, y)))
        {
            assert_eq!(pa.r, 0);
            assert_eq!(pa.g, 0);
            assert_eq!(pa.b, 0);

            if !bounds.contains(x as i32, y as i32) {
                assert_eq!(pa.a, 0);
            } else {
                assert_eq!(pa.a, p.a);
            }
        }
    }
}