Fullscreen MediaViewer refactor

- Moved media related logic into notedeck instead of the ui crate,
  since they pertain to Images/ImageCache based systems

- Made RenderableMedia owned to make it less of a nightmare
  to work with; the perf cost should be negligible

- Added an ImageMetadata cache to Images. This is referenced
  whenever we encounter an image so we don't have to
  redo the work every time

- Replaced our ad-hoc, hand(vibe?)-coded panning and zoom logic
  with the Scene widget, which is explicitly designed for
  this use case

- Extracted and detangled fullscreen media rendering from inside of note
  rendering.  We instead let the application decide what action they
  want to perform when note media is clicked on.

- We add an on_view_media action to MediaAction for the application to
  handle. The Columns app uses this to toggle a FullscreenMedia app
  option bit whenever we get a MediaAction::ViewMedias(urls).

Signed-off-by: William Casarin <jb55@jb55.com>
This commit is contained in:
William Casarin
2025-07-25 13:45:54 -07:00
parent 96ab4ee681
commit 3d18db8fd2
45 changed files with 1284 additions and 1222 deletions

View File

@@ -9,11 +9,13 @@ nostrdb = { workspace = true }
jni = { workspace = true }
url = { workspace = true }
strum = { workspace = true }
blurhash = { workspace = true }
strum_macros = { workspace = true }
dirs = { workspace = true }
enostr = { workspace = true }
nostr = { workspace = true }
egui = { workspace = true }
egui_extras = { workspace = true }
eframe = { workspace = true }
image = { workspace = true }
base32 = { workspace = true }

View File

@@ -1,4 +1,9 @@
use crate::media::gif::ensure_latest_texture_from_cache;
use crate::media::images::ImageType;
use crate::urls::{UrlCache, UrlMimes};
use crate::ImageMetadata;
use crate::ObfuscationType;
use crate::RenderableMedia;
use crate::Result;
use egui::TextureHandle;
use image::{Delay, Frame};
@@ -21,7 +26,7 @@ use tracing::warn;
#[derive(Default)]
pub struct TexturesCache {
cache: hashbrown::HashMap<String, TextureStateInternal>,
pub cache: hashbrown::HashMap<String, TextureStateInternal>,
}
impl TexturesCache {
@@ -141,6 +146,12 @@ pub enum TextureState<'a> {
Loaded(&'a mut TexturedImage),
}
impl<'a> TextureState<'a> {
    /// Whether the texture has finished loading and can be rendered.
    pub fn is_loaded(&self) -> bool {
        matches!(self, Self::Loaded(_))
    }
}
impl<'a> From<&'a mut TextureStateInternal> for TextureState<'a> {
fn from(value: &'a mut TextureStateInternal) -> Self {
match value {
@@ -402,6 +413,8 @@ pub struct Images {
pub static_imgs: MediaCache,
pub gifs: MediaCache,
pub urls: UrlMimes,
/// cached imeta data
pub metadata: HashMap<String, ImageMetadata>,
pub gif_states: GifStateMap,
}
@@ -414,6 +427,7 @@ impl Images {
gifs: MediaCache::new(&path, MediaCacheType::Gif),
urls: UrlMimes::new(UrlCache::new(path.join(UrlCache::rel_dir()))),
gif_states: Default::default(),
metadata: Default::default(),
}
}
@@ -422,6 +436,58 @@ impl Images {
self.gifs.migrate_v0()
}
/// Convenience wrapper around [`Self::find_renderable_media`] that uses
/// our own url-mime cache and imeta metadata cache.
pub fn get_renderable_media(&mut self, url: &str) -> Option<RenderableMedia> {
    Self::find_renderable_media(&mut self.urls, &self.metadata, url)
}
/// Build a [`RenderableMedia`] for `url` without borrowing all of `Images`.
///
/// Returns `None` when the url doesn't point at a supported media mime
/// type. When we have cached imeta for the url, its blurhash becomes the
/// obfuscation placeholder; otherwise the default placeholder is used.
pub fn find_renderable_media(
    urls: &mut UrlMimes,
    imeta: &HashMap<String, ImageMetadata>,
    url: &str,
) -> Option<RenderableMedia> {
    let media_type = crate::urls::supported_mime_hosted_at_url(urls, url)?;

    let obfuscation_type = imeta
        .get(url)
        .map(|meta| ObfuscationType::Blurhash(meta.clone()))
        .unwrap_or(ObfuscationType::Default);

    Some(RenderableMedia {
        url: url.to_string(),
        media_type,
        obfuscation_type,
    })
}
/// Fetch-or-load the texture for `url`, returning it only once loaded.
///
/// Starts a fetch (disk or network) the first time a url is seen. Returns
/// `None` while the image is still pending (or errored) or when the url's
/// mime type is unsupported. For gifs this also advances animation state.
pub fn latest_texture(
    &mut self,
    ui: &mut egui::Ui,
    url: &str,
    img_type: ImageType,
) -> Option<TextureHandle> {
    // which cache (static vs gif) is keyed off the url's mime type
    let cache_type = crate::urls::supported_mime_hosted_at_url(&mut self.urls, url)?;

    // cloned so the fetch closure below doesn't borrow self
    let cache_dir = self.get_cache(cache_type).cache_dir.clone();

    let is_loaded = self
        .get_cache_mut(cache_type)
        .textures_cache
        .handle_and_get_or_insert(url, || {
            crate::media::images::fetch_img(&cache_dir, ui.ctx(), url, img_type, cache_type)
        })
        .is_loaded();

    if !is_loaded {
        return None;
    }

    let cache = match cache_type {
        MediaCacheType::Image => &mut self.static_imgs,
        MediaCacheType::Gif => &mut self.gifs,
    };

    ensure_latest_texture_from_cache(ui, url, &mut self.gif_states, &mut cache.textures_cache)
}
pub fn get_cache(&self, cache_type: MediaCacheType) -> &MediaCache {
match cache_type {
MediaCacheType::Image => &self.static_imgs,
@@ -465,3 +531,35 @@ pub struct GifState {
pub next_frame_time: Option<SystemTime>,
pub last_frame_index: usize,
}
/// A texture paired with the time the next repaint should happen
/// (used for animations; `None` for static images).
pub struct LatestTexture {
    pub texture: TextureHandle,
    pub request_next_repaint: Option<SystemTime>,
}
/// Look up (or start fetching) `url` in the appropriate media cache and
/// return its raw texture state alongside the gif state map.
pub fn get_render_state<'a>(
    ctx: &egui::Context,
    images: &'a mut Images,
    cache_type: MediaCacheType,
    url: &str,
    img_type: ImageType,
) -> RenderState<'a> {
    // static images and gifs live in separate caches
    let cache = match cache_type {
        MediaCacheType::Image => &mut images.static_imgs,
        MediaCacheType::Gif => &mut images.gifs,
    };

    // kicks off a fetch if we've never seen this url before
    let texture_state = cache.textures_cache.handle_and_get_or_insert(url, || {
        crate::media::images::fetch_img(&cache.cache_dir, ctx, url, img_type, cache_type)
    });

    RenderState {
        texture_state,
        gifs: &mut images.gif_states,
    }
}
/// Everything the ui needs to render a single piece of media.
pub struct RenderState<'a> {
    pub texture_state: TextureState<'a>,
    pub gifs: &'a mut GifStateMap,
}

View File

@@ -23,6 +23,7 @@ impl JobPool {
pub fn new(num_threads: usize) -> Self {
let (tx, rx) = mpsc::channel::<Job>();
// TODO(jb55) why not mpmc here !???
let arc_rx = Arc::new(Mutex::new(rx));
for _ in 0..num_threads {
let arc_rx_clone = arc_rx.clone();

153
crates/notedeck/src/jobs.rs Normal file
View File

@@ -0,0 +1,153 @@
use crate::JobPool;
use egui::TextureHandle;
use hashbrown::{hash_map::RawEntryMut, HashMap};
use poll_promise::Promise;
/// Cache of background jobs keyed by [`JobIdOwned`], so each distinct job
/// (e.g. a blurhash decode for a url) is only scheduled once.
#[derive(Default)]
pub struct JobsCache {
    // job id -> pending/completed/errored state
    jobs: HashMap<JobIdOwned, JobState>,
}
/// Lifecycle of a scheduled job.
pub enum JobState {
    /// Still running on the job pool. The inner `Option` exists so the
    /// result can be taken out of the promise by value exactly once.
    Pending(Promise<Option<Result<Job, JobError>>>),
    /// The job finished with an error
    Error(JobError),
    /// The job finished successfully
    Completed(Job),
}
/// Why a job body could not run.
pub enum JobError {
    /// The params handed to the job body didn't match what it expected
    InvalidParameters,
}
/// Borrowed job parameters; converted to [`JobParamsOwned`] before being
/// moved onto the job pool.
#[derive(Debug)]
pub enum JobParams<'a> {
    Blurhash(BlurhashParams<'a>),
}
/// Owned (`'static`) job parameters, safe to move across threads.
#[derive(Debug)]
pub enum JobParamsOwned {
    Blurhash(BlurhashParamsOwned),
}
impl<'a> From<BlurhashParams<'a>> for BlurhashParamsOwned {
fn from(params: BlurhashParams<'a>) -> Self {
BlurhashParamsOwned {
blurhash: params.blurhash.to_owned(),
url: params.url.to_owned(),
ctx: params.ctx.clone(),
}
}
}
impl<'a> From<JobParams<'a>> for JobParamsOwned {
    /// Convert each borrowed param variant into its owned counterpart.
    fn from(params: JobParams<'a>) -> Self {
        match params {
            JobParams::Blurhash(bp) => JobParamsOwned::Blurhash(bp.into()),
        }
    }
}
/// Borrowed inputs for a blurhash decode job.
#[derive(Debug)]
pub struct BlurhashParams<'a> {
    pub blurhash: &'a str,
    pub url: &'a str,
    // needed to upload the decoded pixels as a texture
    pub ctx: &'a egui::Context,
}
/// Owned counterpart of [`BlurhashParams`], movable onto the job pool.
#[derive(Debug)]
pub struct BlurhashParamsOwned {
    pub blurhash: String,
    pub url: String,
    pub ctx: egui::Context,
}
impl JobsCache {
    /// Get the cached state for `jobid`, scheduling the job on `job_pool`
    /// if we have never seen it before.
    ///
    /// On a cache hit, a finished `Pending` promise is collapsed into
    /// `Completed`/`Error` before the state is returned. On a miss,
    /// `run_job` is boxed up with its (owned) params, dispatched to the
    /// pool, and a fresh `Pending` entry is inserted.
    pub fn get_or_insert_with<
        'a,
        F: FnOnce(Option<JobParamsOwned>) -> Result<Job, JobError> + Send + 'static,
    >(
        &'a mut self,
        job_pool: &mut JobPool,
        jobid: &JobId,
        params: Option<JobParams>,
        run_job: F,
    ) -> &'a mut JobState {
        // raw entry lets us look up by the borrowed JobId (via the
        // Equivalent impl below) without allocating a JobIdOwned key
        match self.jobs.raw_entry_mut().from_key(jobid) {
            RawEntryMut::Occupied(entry) => 's: {
                let mut state = entry.into_mut();

                // only a Pending state can make progress
                let JobState::Pending(promise) = &mut state else {
                    break 's state;
                };

                // still running? leave it Pending
                let Some(res) = promise.ready_mut() else {
                    break 's state;
                };

                // the promise wraps the result in an Option so it can be
                // taken out by value exactly once
                let Some(res) = res.take() else {
                    tracing::error!("Failed to take the promise for job: {:?}", jobid);
                    break 's state;
                };

                *state = match res {
                    Ok(j) => JobState::Completed(j),
                    Err(e) => JobState::Error(e),
                };

                state
            }
            RawEntryMut::Vacant(entry) => {
                // params must be owned ('static) to cross the thread boundary
                let owned_params = params.map(JobParams::into);
                let wrapped: Box<dyn FnOnce() -> Option<Result<Job, JobError>> + Send + 'static> =
                    Box::new(move || Some(run_job(owned_params)));

                let promise = Promise::spawn_async(job_pool.schedule(wrapped));
                let (_, state) = entry.insert(jobid.into(), JobState::Pending(promise));
                state
            }
        }
    }

    /// Non-scheduling lookup of a job's current state.
    pub fn get(&self, jobid: &JobId) -> Option<&JobState> {
        self.jobs.get(jobid)
    }
}
impl<'a> From<&JobId<'a>> for JobIdOwned {
    /// Convert a borrowed job id into the owned hash-map key form.
    fn from(jobid: &JobId<'a>) -> Self {
        // single-variant enum, so this pattern is irrefutable
        let JobId::Blurhash(url) = jobid;
        JobIdOwned::Blurhash((*url).to_string())
    }
}
/// Allows hashbrown raw-entry lookups with a borrowed [`JobId`] against
/// keys of type [`JobIdOwned`], without allocating a key first.
impl hashbrown::Equivalent<JobIdOwned> for JobId<'_> {
    fn equivalent(&self, key: &JobIdOwned) -> bool {
        match (self, key) {
            (JobId::Blurhash(a), JobIdOwned::Blurhash(b)) => *a == b.as_str(),
        }
    }
}
/// Owned job id used as the cache's hash-map key.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
enum JobIdOwned {
    Blurhash(String), // image URL
}
/// Borrowed job id used for lookups; must hash consistently with
/// [`JobIdOwned`] for the raw-entry lookups in [`JobsCache`] to work.
#[derive(Debug, Hash)]
pub enum JobId<'a> {
    Blurhash(&'a str), // image URL
}
/// A finished job's output.
pub enum Job {
    /// Decoded blurhash texture; `None` when decoding failed
    Blurhash(Option<TextureHandle>),
}
impl std::fmt::Debug for Job {
    /// Manual impl that prints only the variant name, omitting the
    /// texture payload.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // single-variant enum, so this pattern is irrefutable
        let Job::Blurhash(_) = self;
        write!(f, "Blurhash")
    }
}

View File

@@ -12,6 +12,8 @@ mod frame_history;
pub mod i18n;
mod imgcache;
mod job_pool;
mod jobs;
pub mod media;
mod muted;
pub mod name;
pub mod note;
@@ -47,10 +49,18 @@ pub use filter::{FilterState, FilterStates, UnifiedSubscription};
pub use fonts::NamedFontFamily;
pub use i18n::{CacheStats, FluentArgs, FluentValue, LanguageIdentifier, Localization};
pub use imgcache::{
Animation, GifState, GifStateMap, ImageFrame, Images, LoadableTextureState, MediaCache,
MediaCacheType, TextureFrame, TextureState, TexturedImage, TexturesCache,
get_render_state, Animation, GifState, GifStateMap, ImageFrame, Images, LatestTexture,
LoadableTextureState, MediaCache, MediaCacheType, RenderState, TextureFrame, TextureState,
TexturedImage, TexturesCache,
};
pub use job_pool::JobPool;
pub use jobs::{
BlurhashParams, Job, JobError, JobId, JobParams, JobParamsOwned, JobState, JobsCache,
};
pub use media::{
compute_blurhash, update_imeta_blurhashes, ImageMetadata, ImageType, MediaAction,
ObfuscationType, PixelDimensions, PointDimensions, RenderableMedia,
};
pub use muted::{MuteFun, Muted};
pub use name::NostrName;
pub use note::{

View File

@@ -0,0 +1,90 @@
use crate::{Images, MediaCacheType, TexturedImage};
use poll_promise::Promise;
/// Actions generated by media ui interactions
pub enum MediaAction {
    /// An image was clicked on in a carousel, we have
    /// the opportunity to open into a fullscreen media viewer
    /// with a list of url values
    ViewMedias(Vec<String>),

    /// Insert a freshly started fetch promise for `url` into the
    /// corresponding texture cache
    FetchImage {
        url: String,
        cache_type: MediaCacheType,
        no_pfp_promise: Promise<Option<Result<TexturedImage, crate::Error>>>,
    },

    /// Mark `url`'s texture as fully loaded in the corresponding cache
    DoneLoading {
        url: String,
        cache_type: MediaCacheType,
    },
}
impl std::fmt::Debug for MediaAction {
    /// Manual Debug impl: the promise payload is not `Debug`, so we report
    /// only whether it has resolved.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::ViewMedias(urls) => f.debug_struct("ViewMedias").field("urls", urls).finish(),
            Self::FetchImage {
                url,
                cache_type,
                no_pfp_promise,
            } => f
                // was the stale name "FetchNoPfpImage"; use the actual
                // variant name so debug output matches the enum
                .debug_struct("FetchImage")
                .field("url", url)
                .field("cache_type", cache_type)
                .field("no_pfp_promise ready", &no_pfp_promise.ready().is_some())
                .finish(),
            Self::DoneLoading { url, cache_type } => f
                .debug_struct("DoneLoading")
                .field("url", url)
                .field("cache_type", cache_type)
                .finish(),
        }
    }
}
impl MediaAction {
    /// Handle view media actions
    ///
    /// Invokes `handler` with a copy of the url list when this action is
    /// a [`MediaAction::ViewMedias`]; all other variants are ignored.
    pub fn on_view_media(&self, handler: impl FnOnce(Vec<String>)) {
        if let MediaAction::ViewMedias(urls) = self {
            handler(urls.clone())
        }
    }

    /// Default processing logic for Media Actions. We don't handle ViewMedias here since
    /// this may be app specific ?
    pub fn process_default_media_actions(self, images: &mut Images) {
        match self {
            MediaAction::ViewMedias(_urls) => {
                // NOTE(jb55): don't assume we want to show a fullscreen
                // media viewer we can use on_view_media for that. We
                // also don't want to have a notedeck_ui dependency in
                // the notedeck lib (MediaViewerState)
                //
                // In general our notedeck crate should be pretty
                // agnostic to functionality in general unless it's low
                // level like image rendering.
                //
                //mview_state.set_urls(urls);
            }
            MediaAction::FetchImage {
                url,
                cache_type,
                no_pfp_promise: promise,
            } => {
                // hand the in-flight promise to the matching texture cache
                images
                    .get_cache_mut(cache_type)
                    .textures_cache
                    .insert_pending(&url, promise);
            }
            MediaAction::DoneLoading { url, cache_type } => {
                let cache = match cache_type {
                    MediaCacheType::Image => &mut images.static_imgs,
                    MediaCacheType::Gif => &mut images.gifs,
                };
                // promote the pending entry to loaded
                cache.textures_cache.move_to_loaded(&url);
            }
        }
    }
}

View File

@@ -0,0 +1,191 @@
use std::collections::HashMap;
use nostrdb::Note;
use crate::jobs::{Job, JobError, JobParamsOwned};
/// Image metadata parsed from a nostr "imeta" tag that we care about for
/// rendering blur placeholders.
#[derive(Clone)]
pub struct ImageMetadata {
    pub blurhash: String,
    pub dimensions: Option<PixelDimensions>, // width and height in pixels
}
/// A width/height pair in physical pixels.
#[derive(Clone, Debug)]
pub struct PixelDimensions {
    pub x: u32, // width in pixels
    pub y: u32, // height in pixels
}
impl PixelDimensions {
    /// Convert physical pixels to logical egui points at the given
    /// pixels-per-point scale factor.
    pub fn to_points(&self, ppp: f32) -> PointDimensions {
        let x = self.x as f32 / ppp;
        let y = self.y as f32 / ppp;
        PointDimensions { x, y }
    }
}
/// A width/height pair in logical egui points.
#[derive(Clone, Debug)]
pub struct PointDimensions {
    pub x: f32, // width in points
    pub y: f32, // height in points
}
impl PointDimensions {
    /// Convert logical points to physical pixels for the given ui's
    /// pixels-per-point, rounding to the nearest whole pixel.
    pub fn to_pixels(self, ui: &egui::Ui) -> PixelDimensions {
        let ppp = ui.pixels_per_point();
        PixelDimensions {
            x: (self.x * ppp).round() as u32,
            y: (self.y * ppp).round() as u32,
        }
    }

    /// View these dimensions as an egui vector.
    pub fn to_vec(self) -> egui::Vec2 {
        egui::Vec2::new(self.x, self.y)
    }
}
impl ImageMetadata {
    /// Pick the pixel size to render a blur placeholder at.
    ///
    /// If we know the image's real dimensions we use those, downscaling
    /// uniformly when the height exceeds the available height. Otherwise
    /// we just fill the available space.
    pub fn scaled_pixel_dimensions(
        &self,
        ui: &egui::Ui,
        available_points: PointDimensions,
    ) -> PixelDimensions {
        let max_pixels = available_points.to_pixels(ui);

        let Some(defined_dimensions) = &self.dimensions else {
            return max_pixels;
        };

        if defined_dimensions.x == 0 || defined_dimensions.y == 0 {
            tracing::error!("The blur dimensions should not be zero");
            return max_pixels;
        }

        // NOTE(review): only the height is clamped here — a very wide
        // image can still exceed max_pixels.x. Presumably callers
        // constrain width elsewhere; confirm.
        if defined_dimensions.y <= max_pixels.y {
            return defined_dimensions.clone();
        }

        // scale down uniformly so the height fits
        let scale_factor = (max_pixels.y as f32) / (defined_dimensions.y as f32);
        let max_width_scaled = scale_factor * (defined_dimensions.x as f32);

        PixelDimensions {
            x: (max_width_scaled.round() as u32),
            y: max_pixels.y,
        }
    }
}
/// Find blurhashes in image metadata and update our cache
///
/// Scans every "imeta" tag on the note and records any url/blurhash pair
/// found, keyed by url.
pub fn update_imeta_blurhashes(note: &Note, blurs: &mut HashMap<String, ImageMetadata>) {
    for tag in note.tags() {
        let mut elems = tag.into_iter();

        // only "imeta" tags carry the metadata we want
        let is_imeta = matches!(elems.next().and_then(|s| s.str()), Some("imeta"));
        if !is_imeta {
            continue;
        }

        if let Some((url, meta)) = find_blur(elems) {
            blurs.insert(url, meta);
        }
    }
}
/// Parse a single imeta tag's elements into a (url, metadata) pair.
///
/// Each element is a space-separated "key value" string; we look for
/// "url", "blurhash" and "dim" (e.g. "dim 640x480"). `url` and `blurhash`
/// are required; dimensions are optional.
fn find_blur(tag_iter: nostrdb::TagIter<'_>) -> Option<(String, ImageMetadata)> {
    let mut url = None;
    let mut blurhash = None;
    let mut dims = None;

    for tag_elem in tag_iter {
        let Some(s) = tag_elem.str() else { continue };
        let mut split = s.split_whitespace();
        let Some(first) = split.next() else { continue };
        let Some(second) = split.next() else { continue };

        match first {
            "url" => url = Some(second),
            "blurhash" => blurhash = Some(second),
            "dim" => dims = Some(second),
            _ => {}
        }

        // stop scanning once we have everything we can use
        if url.is_some() && blurhash.is_some() && dims.is_some() {
            break;
        }
    }

    let url = url?;
    let blurhash = blurhash?;

    // "dim" is WxH in pixels; silently dropped if it doesn't parse
    let dimensions = dims.and_then(|d| {
        let mut split = d.split('x');
        let width = split.next()?.parse::<u32>().ok()?;
        let height = split.next()?.parse::<u32>().ok()?;

        Some(PixelDimensions {
            x: width,
            y: height,
        })
    });

    Some((
        url.to_string(),
        ImageMetadata {
            blurhash: blurhash.to_string(),
            dimensions,
        },
    ))
}
/// How media should be obscured before its real texture is ready.
#[derive(Clone)]
pub enum ObfuscationType {
    /// We have imeta metadata, so render its blurhash
    Blurhash(ImageMetadata),
    /// No metadata available; use the default placeholder
    Default,
}
/// Blurhash-decoding job body, run on the job pool.
///
/// Decodes `params`' blurhash at `dims` into a texture. Decoding failures
/// are logged and surfaced as `Ok(Job::Blurhash(None))` rather than an
/// error; only missing params produce a `JobError`.
pub fn compute_blurhash(
    params: Option<JobParamsOwned>,
    dims: PixelDimensions,
) -> Result<Job, JobError> {
    // there is currently only one params variant, hence irrefutable
    #[allow(irrefutable_let_patterns)]
    let Some(JobParamsOwned::Blurhash(params)) = params
    else {
        return Err(JobError::InvalidParameters);
    };

    let maybe_handle = match generate_blurhash_texturehandle(
        &params.ctx,
        &params.blurhash,
        &params.url,
        dims.x,
        dims.y,
    ) {
        Ok(tex) => Some(tex),
        Err(e) => {
            tracing::error!("failed to render blurhash: {e}");
            None
        }
    };

    Ok(Job::Blurhash(maybe_handle))
}
/// Decode `blurhash` into `width`x`height` rgba pixels and upload them as
/// an egui texture named after `url`.
fn generate_blurhash_texturehandle(
    ctx: &egui::Context,
    blurhash: &str,
    url: &str,
    width: u32,
    height: u32,
) -> Result<egui::TextureHandle, crate::Error> {
    // punch factor 1.0 == decode with contrast as-is
    let bytes = blurhash::decode(blurhash, width, height, 1.0)
        .map_err(|e| crate::Error::Generic(e.to_string()))?;

    let img = egui::ColorImage::from_rgba_unmultiplied([width as usize, height as usize], &bytes);
    Ok(ctx.load_texture(url, img, Default::default()))
}

View File

@@ -0,0 +1,121 @@
use std::{
sync::mpsc::TryRecvError,
time::{Instant, SystemTime},
};
use crate::{GifState, GifStateMap, TextureState, TexturedImage, TexturesCache};
use egui::TextureHandle;
/// Look up `url` in the textures cache and, when it has finished loading,
/// return the current texture (advancing gif animation state as needed).
pub fn ensure_latest_texture_from_cache(
    ui: &egui::Ui,
    url: &str,
    gifs: &mut GifStateMap,
    textures: &mut TexturesCache,
) -> Option<TextureHandle> {
    let internal = textures.cache.get_mut(url)?;
    match TextureState::from(internal) {
        TextureState::Loaded(img) => Some(ensure_latest_texture(ui, url, gifs, img)),
        _ => None,
    }
}
/// Return the texture to draw for `img` right now.
///
/// Static images just return their handle. For animations this drains any
/// newly decoded frames from the background decoder, advances to the next
/// frame once the current one's delay has elapsed (wrapping to frame 0
/// only after decoding has finished), records the new playback state in
/// `gifs`, and requests a repaint so the animation keeps moving.
pub fn ensure_latest_texture(
    ui: &egui::Ui,
    url: &str,
    gifs: &mut GifStateMap,
    img: &mut TexturedImage,
) -> TextureHandle {
    match img {
        TexturedImage::Static(handle) => handle.clone(),
        TexturedImage::Animated(animation) => {
            // drain frames the decoder thread produced since last render
            if let Some(receiver) = &animation.receiver {
                loop {
                    match receiver.try_recv() {
                        Ok(frame) => animation.other_frames.push(frame),
                        Err(TryRecvError::Empty) => {
                            break;
                        }
                        Err(TryRecvError::Disconnected) => {
                            // decoder finished; all frames are in
                            animation.receiver = None;
                            break;
                        }
                    }
                }
            }

            let now = Instant::now();
            let (texture, maybe_new_state, request_next_repaint) = match gifs.get(url) {
                Some(prev_state) => {
                    let should_advance =
                        now - prev_state.last_frame_rendered >= prev_state.last_frame_duration;

                    if should_advance {
                        // keep marching forward while decoding, or wrap to
                        // frame 0 once the last decoded frame was shown
                        let maybe_new_index = if animation.receiver.is_some()
                            || prev_state.last_frame_index < animation.num_frames() - 1
                        {
                            prev_state.last_frame_index + 1
                        } else {
                            0
                        };

                        match animation.get_frame(maybe_new_index) {
                            Some(frame) => {
                                let next_frame_time = SystemTime::now().checked_add(frame.delay);
                                (
                                    &frame.texture,
                                    Some(GifState {
                                        last_frame_rendered: now,
                                        last_frame_duration: frame.delay,
                                        next_frame_time,
                                        last_frame_index: maybe_new_index,
                                    }),
                                    next_frame_time,
                                )
                            }
                            None => {
                                // desired frame not decoded yet; stay put
                                let (tex, state) =
                                    match animation.get_frame(prev_state.last_frame_index) {
                                        Some(frame) => (&frame.texture, None),
                                        None => (&animation.first_frame.texture, None),
                                    };
                                (tex, state, prev_state.next_frame_time)
                            }
                        }
                    } else {
                        // current frame's delay has not elapsed yet
                        let (tex, state) = match animation.get_frame(prev_state.last_frame_index) {
                            Some(frame) => (&frame.texture, None),
                            None => (&animation.first_frame.texture, None),
                        };
                        (tex, state, prev_state.next_frame_time)
                    }
                }
                // first time rendering this gif: start at the first frame
                None => (
                    &animation.first_frame.texture,
                    Some(GifState {
                        last_frame_rendered: now,
                        last_frame_duration: animation.first_frame.delay,
                        next_frame_time: None,
                        last_frame_index: 0,
                    }),
                    None,
                ),
            };

            if let Some(new_state) = maybe_new_state {
                gifs.insert(url.to_owned(), new_state);
            }

            if let Some(req) = request_next_repaint {
                tracing::trace!("requesting repaint for {url} after {req:?}");
                // 24fps for gif is fine
                ui.ctx()
                    .request_repaint_after(std::time::Duration::from_millis(41));
            }

            texture.clone()
        }
    }
}

View File

@@ -0,0 +1,475 @@
use crate::{Animation, ImageFrame, MediaCache, MediaCacheType, TextureFrame, TexturedImage};
use egui::{pos2, Color32, ColorImage, Context, Rect, Sense, SizeHint};
use image::codecs::gif::GifDecoder;
use image::imageops::FilterType;
use image::{AnimationDecoder, DynamicImage, FlatSamples, Frame};
use poll_promise::Promise;
use std::collections::VecDeque;
use std::io::Cursor;
use std::path::PathBuf;
use std::path::{self, Path};
use std::sync::mpsc;
use std::sync::mpsc::SyncSender;
use std::thread;
use std::time::Duration;
use tokio::fs;
// NOTE(jb55): chatgpt wrote this because I was too dumb to
/// Render `texture_id` into the available space with an aspect-fill
/// strategy: the content keeps `aspect_ratio`, is scaled to completely
/// cover the frame, and is centered (overflow hangs off the edges —
/// nothing is clipped here).
///
/// Returns the response of the allocated painter region.
pub fn aspect_fill(
    ui: &mut egui::Ui,
    sense: Sense,
    texture_id: egui::TextureId,
    aspect_ratio: f32,
) -> egui::Response {
    // Available layout space we need to cover
    let frame = ui.available_rect_before_wrap();
    let frame_ratio = frame.width() / frame.height();

    let (width, height) = if frame_ratio > aspect_ratio {
        // Frame is wider than the content: match width, overflow height
        (frame.width(), frame.width() / aspect_ratio)
    } else {
        // Frame is taller than the content: match height, overflow width
        (frame.height() * aspect_ratio, frame.height())
    };

    // Center the (possibly overflowing) content rect within the frame
    let content_rect = Rect::from_min_size(
        frame.min
            + egui::vec2(
                (frame.width() - width) / 2.0,
                (frame.height() - height) / 2.0,
            ),
        egui::vec2(width, height),
    );

    // Full texture uv
    let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));

    let (response, painter) = ui.allocate_painter(ui.available_size(), sense);

    // Paint a window-colored backing, then the texture itself
    painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
    painter.image(texture_id, content_rect, uv, Color32::WHITE);

    response
}
/// Mask an avatar image to a circle in place, antialiasing the boundary.
///
/// Pixels outside the inscribed circle become transparent; pixels within
/// one pixel of the edge are faded proportionally to their distance from it.
#[profiling::function]
pub fn round_image(image: &mut ColorImage) {
    let width = image.size[0];
    // Radius to the edge of the inscribed avatar circle
    let radius = width as f32 / 2.0;
    let radius_squared = radius * radius;

    for (idx, pixel) in image.pixels.iter_mut().enumerate() {
        // Recover 2d offsets from the flat pixel index
        let dx = radius - (idx % width) as f32;
        let dy = radius - (idx / width) as f32;

        // Squared distance from the circle's center to this pixel
        let dist_squared = dx * dx + dy * dy;

        if dist_squared > radius_squared {
            // Outside the avatar circle entirely
            *pixel = Color32::TRANSPARENT;
            continue;
        }

        // How far inside the circle's edge this pixel sits
        let edge_distance = radius - dist_squared.sqrt();

        // Fade the outermost one-pixel ring for a smooth antialiased
        // edge: 100% of the original color one pixel inside, 0% right on
        // the edge.
        if edge_distance <= 1.0 {
            *pixel = Color32::from_rgba_premultiplied(
                (pixel.r() as f32 * edge_distance) as u8,
                (pixel.g() as f32 * edge_distance) as u8,
                (pixel.b() as f32 * edge_distance) as u8,
                (pixel.a() as f32 * edge_distance) as u8,
            );
        }
    }
}
/// If the image's longest dimension is greater than max_edge, downscale
/// it uniformly so that dimension equals `max_edge`; otherwise return the
/// image untouched.
fn resize_image_if_too_big(
    image: image::DynamicImage,
    max_edge: u32,
    filter: FilterType,
) -> image::DynamicImage {
    let longest = image.width().max(image.height());
    if longest <= max_edge {
        return image;
    }

    // uniform scale preserving the aspect ratio
    let scale = max_edge as f32 / longest as f32;
    let target_w = (image.width() as f32 * scale).round() as u32;
    let target_h = (image.height() as f32 * scale).round() as u32;
    image.resize(target_w, target_h, filter)
}
///
/// Process an image, resizing so we don't blow up video memory or even crash
///
/// For profile pictures, make them round and small to fit the size hint
/// For everything else, either:
///
/// - resize to the size hint
/// - keep the size if the longest dimension is less than MAX_IMG_LENGTH
/// - resize if any larger, using [`resize_image_if_too_big`]
///
#[profiling::function]
fn process_image(imgtyp: ImageType, mut image: image::DynamicImage) -> ColorImage {
    const MAX_IMG_LENGTH: u32 = 512;
    const FILTER_TYPE: FilterType = FilterType::CatmullRom;

    match imgtyp {
        ImageType::Content(size_hint) => {
            let image = match size_hint {
                None => resize_image_if_too_big(image, MAX_IMG_LENGTH, FILTER_TYPE),
                Some((w, h)) => image.resize(w, h, FILTER_TYPE),
            };
            let image_buffer = image.into_rgba8();
            ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            )
        }
        ImageType::Profile(size) => {
            // Crop square: trim the longer axis symmetrically about center
            let smaller = image.width().min(image.height());

            if image.width() > smaller {
                let excess = image.width() - smaller;
                image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
            } else if image.height() > smaller {
                let excess = image.height() - smaller;
                image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
            }
            let image = image.resize(size, size, FilterType::CatmullRom); // DynamicImage
            let image_buffer = image.into_rgba8(); // RgbaImage (ImageBuffer)
            let mut color_image = ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            );
            // avatars are displayed round
            round_image(&mut color_image);
            color_image
        }
    }
}
/// Decode an http image response body into a [`ColorImage`].
///
/// Svg responses are rasterized at the size hint; other `image/*` bodies
/// go through [`process_image`]. Anything else is an error.
#[profiling::function]
fn parse_img_response(
    response: ehttp::Response,
    imgtyp: ImageType,
) -> Result<ColorImage, crate::Error> {
    let content_type = response.content_type().unwrap_or_default();
    // svg rasterization needs a target size up front
    let size_hint = match imgtyp {
        ImageType::Profile(size) => SizeHint::Size(size, size),
        ImageType::Content(Some((w, h))) => SizeHint::Size(w, h),
        ImageType::Content(None) => SizeHint::default(),
    };

    if content_type.starts_with("image/svg") {
        profiling::scope!("load_svg");

        let mut color_image =
            egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
        // NOTE(review): svgs are rounded here even for content images —
        // confirm that's intended
        round_image(&mut color_image);
        Ok(color_image)
    } else if content_type.starts_with("image/") {
        profiling::scope!("load_from_memory");
        let dyn_image = image::load_from_memory(&response.bytes)?;
        Ok(process_image(imgtyp, dyn_image))
    } else {
        Err(format!("Expected image, found content-type {content_type:?}").into())
    }
}
/// Spawn an async task that loads a cached image/gif from `path` and
/// uploads it as a texture.
fn fetch_img_from_disk(
    ctx: &egui::Context,
    url: &str,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    // own everything the async task needs
    let (ctx, url, path) = (ctx.clone(), url.to_owned(), path.to_owned());

    Promise::spawn_async(async move {
        let result = async_fetch_img_from_disk(ctx, url, &path, cache_type).await;
        Some(result)
    })
}
/// Load a previously cached media file from `path` and turn it into a
/// static texture (images) or animation (gifs).
async fn async_fetch_img_from_disk(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Result<TexturedImage, crate::Error> {
    match cache_type {
        MediaCacheType::Image => {
            let data = fs::read(path).await?;
            let image_buffer = image::load_from_memory(&data).map_err(crate::Error::Image)?;

            let img = buffer_to_color_image(
                image_buffer.as_flat_samples_u8(),
                image_buffer.width(),
                image_buffer.height(),
            );
            // texture debug name is the media url
            Ok(TexturedImage::Static(ctx.load_texture(
                &url,
                img,
                Default::default(),
            )))
        }
        MediaCacheType::Gif => {
            let gif_bytes = fs::read(path).await?; // Read entire file into a Vec<u8>
            // already cached on disk, so don't write it again
            generate_gif(ctx, url, path, gif_bytes, false, |i| {
                buffer_to_color_image(i.as_flat_samples_u8(), i.width(), i.height())
            })
        }
    }
}
/// Decode gif `data` into an [`Animation`], streaming all but the first
/// frame from a background thread.
///
/// The first frame is decoded synchronously so there is something to show
/// immediately; the rest arrive through the animation's receiver channel.
/// When `write_to_disk` is set, decoded frames are also forwarded to a
/// second thread that writes the gif into the media cache at `path`.
fn generate_gif(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    data: Vec<u8>,
    write_to_disk: bool,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + Copy + 'static,
) -> Result<TexturedImage, crate::Error> {
    let decoder = {
        let reader = Cursor::new(data.as_slice());
        GifDecoder::new(reader)?
    };
    // bounded channels so decoding can't run unboundedly ahead of the ui
    let (tex_input, tex_output) = mpsc::sync_channel(4);
    let (maybe_encoder_input, maybe_encoder_output) = if write_to_disk {
        let (inp, out) = mpsc::sync_channel(4);
        (Some(inp), Some(out))
    } else {
        (None, None)
    };

    let mut frames: VecDeque<Frame> = decoder
        .into_frames()
        .collect::<std::result::Result<VecDeque<_>, image::ImageError>>()
        .map_err(|e| crate::Error::Generic(e.to_string()))?;

    // decode frame 0 eagerly so the caller can render right away
    let first_frame = frames.pop_front().map(|frame| {
        generate_animation_frame(
            &ctx,
            &url,
            0,
            frame,
            maybe_encoder_input.as_ref(),
            process_to_egui,
        )
    });

    let cur_url = url.clone();
    thread::spawn(move || {
        // NOTE(review): enumerate restarts at 0 here, so the second
        // frame's texture debug name collides with the first frame's
        // ("{url}0"); harmless if egui texture names need not be unique,
        // but worth confirming.
        for (index, frame) in frames.into_iter().enumerate() {
            let texture_frame = generate_animation_frame(
                &ctx,
                &cur_url,
                index,
                frame,
                maybe_encoder_input.as_ref(),
                process_to_egui,
            );
            if tex_input.send(texture_frame).is_err() {
                tracing::debug!("AnimationTextureFrame mpsc stopped abruptly");
                break;
            }
        }
    });

    if let Some(encoder_output) = maybe_encoder_output {
        let path = path.to_owned();

        // collect every decoded frame, then write the gif to disk
        thread::spawn(move || {
            let mut imgs = Vec::new();

            while let Ok(img) = encoder_output.recv() {
                imgs.push(img);
            }

            if let Err(e) = MediaCache::write_gif(&path, &url, imgs) {
                tracing::error!("Could not write gif to disk: {e}");
            }
        });
    }

    first_frame.map_or_else(
        || {
            Err(crate::Error::Generic(
                "first frame not found for gif".to_owned(),
            ))
        },
        |first_frame| {
            Ok(TexturedImage::Animated(Animation {
                other_frames: Default::default(),
                receiver: Some(tex_output),
                first_frame,
            }))
        },
    )
}
/// Decode a single gif frame into a texture, optionally forwarding the
/// processed frame to the disk-encoder channel.
fn generate_animation_frame(
    ctx: &egui::Context,
    url: &str,
    index: usize,
    frame: image::Frame,
    maybe_encoder_input: Option<&SyncSender<ImageFrame>>,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + 'static,
) -> TextureFrame {
    let delay = Duration::from(frame.delay());
    let img = DynamicImage::ImageRgba8(frame.into_buffer());
    let color_img = process_to_egui(img);

    // mirror the frame to the gif-encoder thread when writing to disk
    if let Some(sender) = maybe_encoder_input {
        if let Err(e) = sender.send(ImageFrame {
            delay,
            image: color_img.clone(),
        }) {
            tracing::error!("ImageFrame mpsc unexpectedly closed: {e}");
        }
    }

    TextureFrame {
        delay,
        // texture debug name is the url plus the frame index
        texture: ctx.load_texture(format!("{url}{index}"), color_img, Default::default()),
    }
}
/// Convert raw 8-bit rgba flat samples into an egui [`ColorImage`].
///
/// Panics if `samples` is `None`, i.e. the decoded image was not an
/// 8-bit buffer; callers in this module only pass rgba8 buffers.
fn buffer_to_color_image(
    samples: Option<FlatSamples<&[u8]>>,
    width: u32,
    height: u32,
) -> ColorImage {
    // an explicit expect instead of the previous bare unwrap (per the old
    // TODO) so the panic message states the violated invariant
    let flat_samples = samples.expect("expected an 8-bit rgba image buffer");
    ColorImage::from_rgba_unmultiplied([width as usize, height as usize], flat_samples.as_slice())
}
pub fn fetch_binary_from_disk(path: PathBuf) -> Result<Vec<u8>, crate::Error> {
std::fs::read(path).map_err(|e| crate::Error::Generic(e.to_string()))
}
/// Controls type-specific handling
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    /// Profile Image (size)
    ///
    /// Cropped square, resized to `size`, and rounded into a circle
    /// (see `process_image`).
    Profile(u32),
    /// Content Image with optional size hint
    Content(Option<(u32, u32)>),
}
/// Fetch media for `url`, preferring the on-disk cache and falling back
/// to the network when no cached copy exists.
pub fn fetch_img(
    img_cache_path: &Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    // cache files are keyed by a hash of the url
    let cached_path = img_cache_path.join(MediaCache::key(url));

    if cached_path.exists() {
        fetch_img_from_disk(ctx, url, &cached_path, cache_type)
    } else {
        fetch_img_from_net(img_cache_path, ctx, url, imgtyp, cache_type)
    }
}
/// Fetch `url` over http, decode it, upload it as a texture, and write
/// the decoded result into the disk cache.
///
/// The returned promise resolves from the ehttp callback; a repaint is
/// requested afterwards so the ui picks the result up.
fn fetch_img_from_net(
    cache_path: &path::Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    let (sender, promise) = Promise::new();
    let request = ehttp::Request::get(url);
    // owned copies for the fetch callback
    let ctx = ctx.clone();
    let cloned_url = url.to_owned();
    let cache_path = cache_path.to_owned();

    ehttp::fetch(request, move |response| {
        let handle = response.map_err(crate::Error::Generic).and_then(|resp| {
            match cache_type {
                MediaCacheType::Image => {
                    let img = parse_img_response(resp, imgtyp);
                    img.map(|img| {
                        let texture_handle =
                            ctx.load_texture(&cloned_url, img.clone(), Default::default());

                        // write to disk
                        std::thread::spawn(move || {
                            MediaCache::write(&cache_path, &cloned_url, img)
                        });

                        TexturedImage::Static(texture_handle)
                    })
                }
                MediaCacheType::Gif => {
                    let gif_bytes = resp.bytes;
                    // gif decoding handles its own disk write
                    // (write_to_disk: true)
                    generate_gif(
                        ctx.clone(),
                        cloned_url,
                        &cache_path,
                        gif_bytes,
                        true,
                        move |img| process_image(imgtyp, img),
                    )
                }
            }
        });

        sender.send(Some(handle)); // send the results back to the UI thread.
        ctx.request_repaint();
    });

    promise
}
/// Start fetching the fallback "no profile picture" image into `cache`.
pub fn fetch_no_pfp_promise(
    ctx: &Context,
    cache: &MediaCache,
) -> Promise<Option<Result<TexturedImage, crate::Error>>> {
    let url = crate::profile::no_pfp_url();

    // fallback pfps are small static profile images
    crate::media::images::fetch_img(
        &cache.cache_dir,
        ctx,
        url,
        ImageType::Profile(128),
        MediaCacheType::Image,
    )
}

View File

@@ -0,0 +1 @@

View File

@@ -0,0 +1,14 @@
pub mod action;
pub mod blur;
pub mod gif;
pub mod images;
pub mod imeta;
pub mod renderable;
pub use action::MediaAction;
pub use blur::{
compute_blurhash, update_imeta_blurhashes, ImageMetadata, ObfuscationType, PixelDimensions,
PointDimensions,
};
pub use images::ImageType;
pub use renderable::RenderableMedia;

View File

@@ -0,0 +1,9 @@
use super::ObfuscationType;
use crate::MediaCacheType;
/// Media that is prepared for rendering. Use [`Images::get_renderable_media`] to get these
pub struct RenderableMedia {
    // the media's url
    pub url: String,
    // which cache (static image vs gif) this media belongs to
    pub media_type: MediaCacheType,
    // placeholder to show until the real texture is ready
    pub obfuscation_type: ObfuscationType,
}

View File

@@ -1,8 +1,7 @@
use super::context::ContextSelection;
use crate::{zaps::NoteZapTargetOwned, Images, MediaCacheType, TexturedImage};
use crate::{zaps::NoteZapTargetOwned, MediaAction};
use egui::Vec2;
use enostr::{NoteId, Pubkey};
use poll_promise::Promise;
#[derive(Debug)]
pub struct ScrollInfo {
@@ -61,62 +60,3 @@ pub struct ZapTargetAmount {
pub target: NoteZapTargetOwned,
pub specified_msats: Option<u64>, // if None use default amount
}
pub enum MediaAction {
FetchImage {
url: String,
cache_type: MediaCacheType,
no_pfp_promise: Promise<Option<Result<TexturedImage, crate::Error>>>,
},
DoneLoading {
url: String,
cache_type: MediaCacheType,
},
}
impl std::fmt::Debug for MediaAction {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::FetchImage {
url,
cache_type,
no_pfp_promise,
} => f
.debug_struct("FetchNoPfpImage")
.field("url", url)
.field("cache_type", cache_type)
.field("no_pfp_promise ready", &no_pfp_promise.ready().is_some())
.finish(),
Self::DoneLoading { url, cache_type } => f
.debug_struct("DoneLoading")
.field("url", url)
.field("cache_type", cache_type)
.finish(),
}
}
}
impl MediaAction {
pub fn process(self, images: &mut Images) {
match self {
MediaAction::FetchImage {
url,
cache_type,
no_pfp_promise: promise,
} => {
images
.get_cache_mut(cache_type)
.textures_cache
.insert_pending(&url, promise);
}
MediaAction::DoneLoading { url, cache_type } => {
let cache = match cache_type {
MediaCacheType::Image => &mut images.static_imgs,
MediaCacheType::Gif => &mut images.gifs,
};
cache.textures_cache.move_to_loaded(&url);
}
}
}
}

View File

@@ -1,7 +1,7 @@
mod action;
mod context;
pub use action::{MediaAction, NoteAction, ScrollInfo, ZapAction, ZapTargetAmount};
pub use action::{NoteAction, ScrollInfo, ZapAction, ZapTargetAmount};
pub use context::{BroadcastContext, ContextSelection, NoteContextSelection};
use crate::Accounts;