images.rs (15135B)
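//! Image loading and caching helpers for the notedeck UI: fetching images
//! from disk or over HTTP, cropping and rounding profile pictures, decoding
//! animated GIFs into egui textures, and writing results back to the media
//! cache.
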
use egui::{pos2, Color32, ColorImage, Context, Rect, Sense, SizeHint};
use image::codecs::gif::GifDecoder;
use image::imageops::FilterType;
use image::{AnimationDecoder, DynamicImage, FlatSamples, Frame};
use notedeck::{
    Animation, GifStateMap, ImageFrame, Images, LoadableTextureState, MediaCache, MediaCacheType,
    TextureFrame, TextureState, TexturedImage,
};
use poll_promise::Promise;
use std::collections::VecDeque;
use std::io::Cursor;
use std::path::PathBuf;
use std::path::{self, Path};
use std::sync::mpsc;
use std::sync::mpsc::SyncSender;
use std::thread;
use std::time::Duration;
use tokio::fs;

// NOTE(jb55): chatgpt wrote this because I was too dumb to
pub fn aspect_fill(
    ui: &mut egui::Ui,
    sense: Sense,
    texture_id: egui::TextureId,
    aspect_ratio: f32,
) -> egui::Response {
    let frame = ui.available_rect_before_wrap(); // Get the available frame space in the current layout
    let frame_ratio = frame.width() / frame.height();

    let (width, height) = if frame_ratio > aspect_ratio {
        // Frame is wider than the content
        (frame.width(), frame.width() / aspect_ratio)
    } else {
        // Frame is taller than the content
        (frame.height() * aspect_ratio, frame.height())
    };

    let content_rect = Rect::from_min_size(
        frame.min
            + egui::vec2(
                (frame.width() - width) / 2.0,
                (frame.height() - height) / 2.0,
            ),
        egui::vec2(width, height),
    );

    // Set the clipping rectangle to the frame
    //let clip_rect = ui.clip_rect(); // Preserve the original clipping rectangle
    //ui.set_clip_rect(frame);

    let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));

    let (response, painter) = ui.allocate_painter(ui.available_size(), sense);

    // Draw the texture within the calculated rect, potentially clipping it
    painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
    painter.image(texture_id, content_rect, uv, Color32::WHITE);

    // Restore the original clipping rectangle
    //ui.set_clip_rect(clip_rect);
    response
}

#[profiling::function]
pub fn round_image(image: &mut ColorImage) {
    // The radius to the edge of the avatar circle
    let edge_radius = image.size[0] as f32 / 2.0;
    let edge_radius_squared = edge_radius * edge_radius;

    for (pixnum, pixel) in image.pixels.iter_mut().enumerate() {
        // y coordinate
        let uy = pixnum / image.size[0];
        let y = uy as f32;
        let y_offset = edge_radius - y;

        // x coordinate
        let ux = pixnum % image.size[0];
        let x = ux as f32;
        let x_offset = edge_radius - x;

        // The radius to this pixel (may be inside or outside the circle)
        let pixel_radius_squared: f32 = x_offset * x_offset + y_offset * y_offset;

        // If inside of the avatar circle
        if pixel_radius_squared <= edge_radius_squared {
            // square root to find how many pixels we are from the edge
            let pixel_radius: f32 = pixel_radius_squared.sqrt();
            let distance = edge_radius - pixel_radius;

            // If we are within 1 pixel of the edge, we should fade, to
            // antialias the edge of the circle. 1 pixel from the edge should
            // be 100% of the original color, and right on the edge should be
            // 0% of the original color.
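            // Color32 stores premultiplied alpha, so the fade scales all four
            // channels by the same factor rather than only the alpha channel.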
            if distance <= 1.0 {
                *pixel = Color32::from_rgba_premultiplied(
                    (pixel.r() as f32 * distance) as u8,
                    (pixel.g() as f32 * distance) as u8,
                    (pixel.b() as f32 * distance) as u8,
                    (pixel.a() as f32 * distance) as u8,
                );
            }
        } else {
            // Outside of the avatar circle
            *pixel = Color32::TRANSPARENT;
        }
    }
}

#[profiling::function]
fn process_pfp_bitmap(imgtyp: ImageType, mut image: image::DynamicImage) -> ColorImage {
    match imgtyp {
        ImageType::Content => {
            let image_buffer = image.clone().into_rgba8();
            let color_image = ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            );
            color_image
        }
        ImageType::Profile(size) => {
            // Crop square
            let smaller = image.width().min(image.height());

            if image.width() > smaller {
                let excess = image.width() - smaller;
                image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
            } else if image.height() > smaller {
                let excess = image.height() - smaller;
                image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
            }
            let image = image.resize(size, size, FilterType::CatmullRom); // DynamicImage
            let image_buffer = image.into_rgba8(); // RgbaImage (ImageBuffer)
            let mut color_image = ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            );
            round_image(&mut color_image);
            color_image
        }
    }
}

#[profiling::function]
fn parse_img_response(
    response: ehttp::Response,
    imgtyp: ImageType,
) -> Result<ColorImage, notedeck::Error> {
    let content_type = response.content_type().unwrap_or_default();
    let size_hint = match imgtyp {
        ImageType::Profile(size) => SizeHint::Size(size, size),
        ImageType::Content => SizeHint::default(),
    };

    if content_type.starts_with("image/svg") {
        profiling::scope!("load_svg");

        let mut color_image =
            egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
        round_image(&mut color_image);
        Ok(color_image)
    } else if content_type.starts_with("image/") {
        profiling::scope!("load_from_memory");
        let dyn_image = image::load_from_memory(&response.bytes)?;
        Ok(process_pfp_bitmap(imgtyp, dyn_image))
    } else {
        Err(format!("Expected image, found content-type {content_type:?}").into())
    }
}

fn fetch_img_from_disk(
    ctx: &egui::Context,
    url: &str,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let ctx = ctx.clone();
    let url = url.to_owned();
    let path = path.to_owned();

    Promise::spawn_async(async move {
        Some(async_fetch_img_from_disk(ctx, url, &path, cache_type).await)
    })
}

async fn async_fetch_img_from_disk(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Result<TexturedImage, notedeck::Error> {
    match cache_type {
        MediaCacheType::Image => {
            let data = fs::read(path).await?;
            let image_buffer = image::load_from_memory(&data).map_err(notedeck::Error::Image)?;

            let img = buffer_to_color_image(
                image_buffer.as_flat_samples_u8(),
                image_buffer.width(),
                image_buffer.height(),
            );
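            // Upload the decoded image as a single static texture; the URL
            // doubles as egui's debug name for the texture.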
            Ok(TexturedImage::Static(ctx.load_texture(
                &url,
                img,
                Default::default(),
            )))
        }
        MediaCacheType::Gif => {
            let gif_bytes = fs::read(path).await?; // Read entire file into a Vec<u8>
            generate_gif(ctx, url, path, gif_bytes, false, |i| {
                buffer_to_color_image(i.as_flat_samples_u8(), i.width(), i.height())
            })
        }
    }
}

fn generate_gif(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    data: Vec<u8>,
    write_to_disk: bool,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + Copy + 'static,
) -> Result<TexturedImage, notedeck::Error> {
    let decoder = {
        let reader = Cursor::new(data.as_slice());
        GifDecoder::new(reader)?
    };
    let (tex_input, tex_output) = mpsc::sync_channel(4);
    let (maybe_encoder_input, maybe_encoder_output) = if write_to_disk {
        let (inp, out) = mpsc::sync_channel(4);
        (Some(inp), Some(out))
    } else {
        (None, None)
    };

    let mut frames: VecDeque<Frame> = decoder
        .into_frames()
        .collect::<std::result::Result<VecDeque<_>, image::ImageError>>()
        .map_err(|e| notedeck::Error::Generic(e.to_string()))?;

    let first_frame = frames.pop_front().map(|frame| {
        generate_animation_frame(
            &ctx,
            &url,
            0,
            frame,
            maybe_encoder_input.as_ref(),
            process_to_egui,
        )
    });

    let cur_url = url.clone();
    thread::spawn(move || {
        for (index, frame) in frames.into_iter().enumerate() {
            let texture_frame = generate_animation_frame(
                &ctx,
                &cur_url,
                index,
                frame,
                maybe_encoder_input.as_ref(),
                process_to_egui,
            );

            if tex_input.send(texture_frame).is_err() {
                tracing::debug!("AnimationTextureFrame mpsc stopped abruptly");
                break;
            }
        }
    });

    if let Some(encoder_output) = maybe_encoder_output {
        let path = path.to_owned();

        thread::spawn(move || {
            let mut imgs = Vec::new();
            while let Ok(img) = encoder_output.recv() {
                imgs.push(img);
            }

            if let Err(e) = MediaCache::write_gif(&path, &url, imgs) {
                tracing::error!("Could not write gif to disk: {e}");
            }
        });
    }

    first_frame.map_or_else(
        || {
            Err(notedeck::Error::Generic(
                "first frame not found for gif".to_owned(),
            ))
        },
        |first_frame| {
            Ok(TexturedImage::Animated(Animation {
                other_frames: Default::default(),
                receiver: Some(tex_output),
                first_frame,
            }))
        },
    )
}

fn generate_animation_frame(
    ctx: &egui::Context,
    url: &str,
    index: usize,
    frame: image::Frame,
    maybe_encoder_input: Option<&SyncSender<ImageFrame>>,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + 'static,
) -> TextureFrame {
    let delay = Duration::from(frame.delay());
    let img = DynamicImage::ImageRgba8(frame.into_buffer());
    let color_img = process_to_egui(img);

    if let Some(sender) = maybe_encoder_input {
        if let Err(e) = sender.send(ImageFrame {
            delay,
            image: color_img.clone(),
        }) {
            tracing::error!("ImageFrame mpsc unexpectedly closed: {e}");
        }
    }

    TextureFrame {
        delay,
        texture: ctx.load_texture(format!("{url}{index}"), color_img, Default::default()),
    }
}

fn buffer_to_color_image(
    samples: Option<FlatSamples<&[u8]>>,
    width: u32,
    height: u32,
) -> ColorImage {
    // TODO(jb55): remove unwrap here
    let flat_samples = samples.unwrap();
    ColorImage::from_rgba_unmultiplied([width as usize, height as usize], flat_samples.as_slice())
}
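
/// Read a cached media file from disk into memory, mapping any IO error to
/// `notedeck::Error::Generic`.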
pub fn fetch_binary_from_disk(path: PathBuf) -> Result<Vec<u8>, notedeck::Error> {
    std::fs::read(path).map_err(|e| notedeck::Error::Generic(e.to_string()))
}

/// Controls type-specific handling
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    /// Profile Image (size)
    Profile(u32),
    /// Content Image
    Content,
}

pub fn fetch_img(
    img_cache_path: &Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let key = MediaCache::key(url);
    let path = img_cache_path.join(key);

    if path.exists() {
        fetch_img_from_disk(ctx, url, &path, cache_type)
    } else {
        fetch_img_from_net(img_cache_path, ctx, url, imgtyp, cache_type)
    }

    // TODO: fetch image from local cache
}

fn fetch_img_from_net(
    cache_path: &path::Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let (sender, promise) = Promise::new();
    let request = ehttp::Request::get(url);
    let ctx = ctx.clone();
    let cloned_url = url.to_owned();
    let cache_path = cache_path.to_owned();
    ehttp::fetch(request, move |response| {
        let handle = response.map_err(notedeck::Error::Generic).and_then(|resp| {
            match cache_type {
                MediaCacheType::Image => {
                    let img = parse_img_response(resp, imgtyp);
                    img.map(|img| {
                        let texture_handle =
                            ctx.load_texture(&cloned_url, img.clone(), Default::default());

                        // write to disk
                        std::thread::spawn(move || {
                            MediaCache::write(&cache_path, &cloned_url, img)
                        });

                        TexturedImage::Static(texture_handle)
                    })
                }
                MediaCacheType::Gif => {
                    let gif_bytes = resp.bytes;
                    generate_gif(
                        ctx.clone(),
                        cloned_url,
                        &cache_path,
                        gif_bytes,
                        true,
                        move |img| process_pfp_bitmap(imgtyp, img),
                    )
                }
            }
        });

        sender.send(Some(handle)); // send the results back to the UI thread.
        ctx.request_repaint();
    });

    promise
}

pub fn get_render_state<'a>(
    ctx: &Context,
    images: &'a mut Images,
    cache_type: MediaCacheType,
    url: &str,
    img_type: ImageType,
) -> RenderState<'a> {
    let cache = match cache_type {
        MediaCacheType::Image => &mut images.static_imgs,
        MediaCacheType::Gif => &mut images.gifs,
    };

    let cur_state = cache.textures_cache.handle_and_get_or_insert(url, || {
        crate::images::fetch_img(&cache.cache_dir, ctx, url, img_type, cache_type)
    });

    RenderState {
        texture_state: cur_state,
        gifs: &mut images.gif_states,
    }
}

pub struct LoadableRenderState<'a> {
    pub texture_state: LoadableTextureState<'a>,
    pub gifs: &'a mut GifStateMap,
}

pub struct RenderState<'a> {
    pub texture_state: TextureState<'a>,
    pub gifs: &'a mut GifStateMap,
}

pub fn fetch_no_pfp_promise(
    ctx: &Context,
    cache: &MediaCache,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    crate::images::fetch_img(
        &cache.cache_dir,
        ctx,
        notedeck::profile::no_pfp_url(),
        ImageType::Profile(128),
        MediaCacheType::Image,
    )
}
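
// Usage sketch (hypothetical caller, not part of this module): a widget that
// wants to draw the image behind `url` would typically call `get_render_state`
// every frame and then inspect `texture_state` (defined in the notedeck crate)
// to decide whether to paint the texture, show a placeholder while the fetch
// promise is still pending, or surface an error.
//
//     let render_state = crate::images::get_render_state(
//         ui.ctx(),
//         images,
//         MediaCacheType::Image,
//         url,
//         ImageType::Profile(128),
//     );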