Fullscreen MediaViewer refactor

- Moved media related logic into notedeck instead of the ui crate,
  since they pertain to Images/ImageCache based systems

- Made RenderableMedia owned to make it less of a nightmare
  to work with; the perf cost should be negligible

- Added a ImageMetadata cache to Images. This is referenced
  whenever we encounter an image so we don't have to
  redo the work all of the time

- Replaced our ad-hoc, hand(vibe?)-coded panning and zoom logic
  with the Scene widget, which is explicitly designed for
  this use case

- Extracted and detangled fullscreen media rendering from inside of note
  rendering.  We instead let the application decide what action they
  want to perform when note media is clicked on.

- We add an on_view_media action to MediaAction for the application to
  handle. The Columns app uses this to toggle a FullscreenMedia app
  option bit whenever we get a MediaAction::ViewMedia(urls).

Signed-off-by: William Casarin <jb55@jb55.com>
This commit is contained in:
William Casarin
2025-07-25 13:45:54 -07:00
parent 96ab4ee681
commit 3d18db8fd2
45 changed files with 1284 additions and 1222 deletions

View File

@@ -1,194 +0,0 @@
use std::collections::HashMap;
use nostrdb::Note;
use crate::jobs::{Job, JobError, JobParamsOwned};
/// Blurhash placeholder data extracted from a note's "imeta" tag.
#[derive(Clone)]
pub struct Blur<'a> {
    /// The blurhash string to decode into a placeholder texture
    pub blurhash: &'a str,
    /// Declared media dimensions, if the imeta tag had a "dim" entry
    pub dimensions: Option<PixelDimensions>, // width and height in pixels
}
/// A width/height pair measured in physical pixels.
#[derive(Clone, Debug)]
pub struct PixelDimensions {
    /// width in pixels
    pub x: u32,
    /// height in pixels
    pub y: u32,
}
impl PixelDimensions {
    /// Convert physical pixels into logical points by dividing out the
    /// pixels-per-point (display scale) factor `ppp`.
    pub fn to_points(&self, ppp: f32) -> PointDimensions {
        let x = self.x as f32 / ppp;
        let y = self.y as f32 / ppp;
        PointDimensions { x, y }
    }
}
/// A width/height pair measured in logical (dpi-independent) points.
#[derive(Clone, Debug)]
pub struct PointDimensions {
    /// width in points
    pub x: f32,
    /// height in points
    pub y: f32,
}
impl PointDimensions {
    /// Convert logical points to physical pixels using the ui's
    /// pixels-per-point factor, rounding to the nearest whole pixel.
    pub fn to_pixels(self, ui: &egui::Ui) -> PixelDimensions {
        let scale = ui.pixels_per_point();
        let x = (self.x * scale).round() as u32;
        let y = (self.y * scale).round() as u32;
        PixelDimensions { x, y }
    }

    /// View these dimensions as an egui vector.
    pub fn to_vec(self) -> egui::Vec2 {
        egui::vec2(self.x, self.y)
    }
}
impl Blur<'_> {
    /// Pick the pixel size to render the blurhash placeholder at.
    ///
    /// Declared imeta dimensions are used as-is when their height fits in
    /// `available_points`, and scaled down proportionally when too tall.
    /// Missing or degenerate (zero) dimensions fall back to the full
    /// available space.
    pub fn scaled_pixel_dimensions(
        &self,
        ui: &egui::Ui,
        available_points: PointDimensions,
    ) -> PixelDimensions {
        let available = available_points.to_pixels(ui);

        let declared = match &self.dimensions {
            Some(dims) => dims,
            None => return available,
        };

        if declared.x == 0 || declared.y == 0 {
            tracing::error!("The blur dimensions should not be zero");
            return available;
        }

        if declared.y <= available.y {
            return declared.clone();
        }

        // Too tall: shrink so the height fits, preserving aspect ratio.
        let scale_factor = available.y as f32 / declared.y as f32;
        let scaled_width = scale_factor * declared.x as f32;
        PixelDimensions {
            x: scaled_width.round() as u32,
            y: available.y,
        }
    }
}
/// Collect every blurhash declared in the note's "imeta" tags, keyed by
/// the media url it belongs to.
pub fn imeta_blurhashes<'a>(note: &'a Note) -> HashMap<&'a str, Blur<'a>> {
    let mut blurs = HashMap::new();

    for tag in note.tags() {
        let mut elems = tag.into_iter();

        // only inspect tags whose first element is "imeta"
        let is_imeta = elems
            .next()
            .and_then(|e| e.str())
            .map_or(false, |s| s == "imeta");
        if !is_imeta {
            continue;
        }

        if let Some((url, blur)) = find_blur(elems) {
            blurs.insert(url, blur);
        }
    }

    blurs
}
fn find_blur(tag_iter: nostrdb::TagIter) -> Option<(&str, Blur)> {
let mut url = None;
let mut blurhash = None;
let mut dims = None;
for tag_elem in tag_iter {
let Some(s) = tag_elem.str() else { continue };
let mut split = s.split_whitespace();
let Some(first) = split.next() else { continue };
let Some(second) = split.next() else { continue };
match first {
"url" => url = Some(second),
"blurhash" => blurhash = Some(second),
"dim" => dims = Some(second),
_ => {}
}
if url.is_some() && blurhash.is_some() && dims.is_some() {
break;
}
}
let url = url?;
let blurhash = blurhash?;
let dimensions = dims.and_then(|d| {
let mut split = d.split('x');
let width = split.next()?.parse::<u32>().ok()?;
let height = split.next()?.parse::<u32>().ok()?;
Some(PixelDimensions {
x: width,
y: height,
})
});
Some((
url,
Blur {
blurhash,
dimensions,
},
))
}
/// The kind of placeholder used when media is obfuscated.
#[derive(Clone)]
pub enum ObfuscationType<'a> {
    /// Render a decoded blurhash as the placeholder
    Blurhash(Blur<'a>),
    /// No blurhash available; use the default obfuscation
    Default,
}
pub(crate) fn compute_blurhash(
params: Option<JobParamsOwned>,
dims: PixelDimensions,
) -> Result<Job, JobError> {
#[allow(irrefutable_let_patterns)]
let Some(JobParamsOwned::Blurhash(params)) = params
else {
return Err(JobError::InvalidParameters);
};
let maybe_handle = match generate_blurhash_texturehandle(
&params.ctx,
&params.blurhash,
&params.url,
dims.x,
dims.y,
) {
Ok(tex) => Some(tex),
Err(e) => {
tracing::error!("failed to render blurhash: {e}");
None
}
};
Ok(Job::Blurhash(maybe_handle))
}
/// Decode `blurhash` into raw RGBA at `width` x `height` and upload it as
/// an egui texture named after `url`.
fn generate_blurhash_texturehandle(
    ctx: &egui::Context,
    blurhash: &str,
    url: &str,
    width: u32,
    height: u32,
) -> notedeck::Result<egui::TextureHandle> {
    let rgba = blurhash::decode(blurhash, width, height, 1.0)
        .map_err(|e| notedeck::Error::Generic(e.to_string()))?;

    let size = [width as usize, height as usize];
    let image = egui::ColorImage::from_rgba_unmultiplied(size, &rgba);

    Ok(ctx.load_texture(url, image, Default::default()))
}

View File

@@ -1,126 +0,0 @@
use std::{
sync::mpsc::TryRecvError,
time::{Instant, SystemTime},
};
use egui::TextureHandle;
use notedeck::{GifState, GifStateMap, TexturedImage};
/// The texture to draw this frame, plus an optional deadline by which the
/// caller should schedule another repaint (used to keep gifs animating).
///
/// NOTE(review): the name looks like a typo for `LatestTexture`; renaming
/// would change the public API, so it is left as-is here.
pub struct LatextTexture<'a> {
    /// texture to draw this frame
    pub texture: &'a TextureHandle,
    /// when `Some`, the caller should request a repaint (see `handle_repaint`)
    pub request_next_repaint: Option<SystemTime>,
}
/// This is necessary because other repaint calls can effectively steal our repaint request.
/// So we must keep on requesting to repaint at our desired time to ensure our repaint goes through.
/// See [`egui::Context::request_repaint_after`]
pub fn handle_repaint<'a>(ui: &egui::Ui, latest: LatextTexture<'a>) -> &'a TextureHandle {
    if latest.request_next_repaint.is_some() {
        // 24fps for gif is fine
        let frame_budget = std::time::Duration::from_millis(41);
        ui.ctx().request_repaint_after(frame_budget);
    }
    latest.texture
}
/// Resolve the texture to draw for `url` this frame.
///
/// Static images return their single texture. For animated images this
/// first drains any newly decoded frames from the decoder thread, then
/// advances the per-url [`GifState`] once the current frame's delay has
/// elapsed.
#[must_use = "caller should pass the return value to `gif::handle_repaint`"]
pub fn retrieve_latest_texture<'a>(
    url: &str,
    gifs: &'a mut GifStateMap,
    cached_image: &'a mut TexturedImage,
) -> LatextTexture<'a> {
    match cached_image {
        // static images never need a follow-up repaint
        TexturedImage::Static(texture) => LatextTexture {
            texture,
            request_next_repaint: None,
        },
        TexturedImage::Animated(animation) => {
            // Drain whatever frames the background decoder has produced
            // since last call; a disconnect means decoding has finished,
            // so drop the receiver.
            if let Some(receiver) = &animation.receiver {
                loop {
                    match receiver.try_recv() {
                        Ok(frame) => animation.other_frames.push(frame),
                        Err(TryRecvError::Empty) => {
                            break;
                        }
                        Err(TryRecvError::Disconnected) => {
                            animation.receiver = None;
                            break;
                        }
                    }
                }
            }

            let now = Instant::now();

            // Decide which frame to show. Tuple is:
            // (texture, new state to store (if any), next repaint deadline)
            let (texture, maybe_new_state, request_next_repaint) = match gifs.get(url) {
                Some(prev_state) => {
                    // has the current frame been on screen long enough?
                    let should_advance =
                        now - prev_state.last_frame_rendered >= prev_state.last_frame_duration;

                    if should_advance {
                        // advance to the next frame; only wrap back to 0
                        // once decoding has completed (receiver is gone)
                        let maybe_new_index = if animation.receiver.is_some()
                            || prev_state.last_frame_index < animation.num_frames() - 1
                        {
                            prev_state.last_frame_index + 1
                        } else {
                            0
                        };

                        match animation.get_frame(maybe_new_index) {
                            Some(frame) => {
                                let next_frame_time = SystemTime::now().checked_add(frame.delay);
                                (
                                    &frame.texture,
                                    Some(GifState {
                                        last_frame_rendered: now,
                                        last_frame_duration: frame.delay,
                                        next_frame_time,
                                        last_frame_index: maybe_new_index,
                                    }),
                                    next_frame_time,
                                )
                            }
                            None => {
                                // desired frame isn't decoded yet: keep
                                // showing the current frame (or the first
                                // frame as a last resort), state unchanged
                                let (tex, state) =
                                    match animation.get_frame(prev_state.last_frame_index) {
                                        Some(frame) => (&frame.texture, None),
                                        None => (&animation.first_frame.texture, None),
                                    };
                                (tex, state, prev_state.next_frame_time)
                            }
                        }
                    } else {
                        // not time to advance yet; re-show the current frame
                        let (tex, state) = match animation.get_frame(prev_state.last_frame_index) {
                            Some(frame) => (&frame.texture, None),
                            None => (&animation.first_frame.texture, None),
                        };
                        (tex, state, prev_state.next_frame_time)
                    }
                }
                // first time this url is drawn: seed state with frame 0
                None => (
                    &animation.first_frame.texture,
                    Some(GifState {
                        last_frame_rendered: now,
                        last_frame_duration: animation.first_frame.delay,
                        next_frame_time: None,
                        last_frame_index: 0,
                    }),
                    None,
                ),
            };

            if let Some(new_state) = maybe_new_state {
                gifs.insert(url.to_owned(), new_state);
            }

            if let Some(req) = request_next_repaint {
                tracing::trace!("requesting repaint for {url} after {req:?}");
            }

            LatextTexture {
                texture,
                request_next_repaint,
            }
        }
    }
}

View File

@@ -1,510 +1 @@
use egui::{pos2, Color32, ColorImage, Context, Rect, Sense, SizeHint};
use image::codecs::gif::GifDecoder;
use image::imageops::FilterType;
use image::{AnimationDecoder, DynamicImage, FlatSamples, Frame};
use notedeck::{
Animation, GifStateMap, ImageFrame, Images, LoadableTextureState, MediaCache, MediaCacheType,
TextureFrame, TextureState, TexturedImage,
};
use poll_promise::Promise;
use std::collections::VecDeque;
use std::io::Cursor;
use std::path::PathBuf;
use std::path::{self, Path};
use std::sync::mpsc;
use std::sync::mpsc::SyncSender;
use std::thread;
use std::time::Duration;
use tokio::fs;
// NOTE(jb55): chatgpt wrote this because I was too dumb to
/// Draw `texture_id` so it completely covers the available frame
/// ("aspect fill"), overflowing on one axis when the aspect ratios differ.
pub fn aspect_fill(
    ui: &mut egui::Ui,
    sense: Sense,
    texture_id: egui::TextureId,
    aspect_ratio: f32,
) -> egui::Response {
    // available layout space and its aspect ratio
    let frame = ui.available_rect_before_wrap();
    let frame_ratio = frame.width() / frame.height();

    // scale the content up until it covers the frame on both axes
    let (width, height) = if frame_ratio > aspect_ratio {
        // Frame is wider than the content
        (frame.width(), frame.width() / aspect_ratio)
    } else {
        // Frame is taller than the content
        (frame.height() * aspect_ratio, frame.height())
    };

    // center the (possibly oversized) content rect inside the frame
    let offset = egui::vec2(
        (frame.width() - width) / 2.0,
        (frame.height() - height) / 2.0,
    );
    let content_rect = Rect::from_min_size(frame.min + offset, egui::vec2(width, height));

    let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));
    let (response, painter) = ui.allocate_painter(ui.available_size(), sense);

    // fill with the window color first, then paint the texture over it
    painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
    painter.image(texture_id, content_rect, uv, Color32::WHITE);

    response
}
/// Mask a square avatar image into a circle, in place.
///
/// Pixels outside the inscribed circle become transparent; pixels within
/// one pixel of the edge are faded linearly to antialias the boundary.
#[profiling::function]
pub fn round_image(image: &mut ColorImage) {
    // The radius to the edge of of the avatar circle
    let edge_radius = image.size[0] as f32 / 2.0;
    let edge_radius_squared = edge_radius * edge_radius;
    let width = image.size[0];

    for (pixnum, pixel) in image.pixels.iter_mut().enumerate() {
        // recover (x, y) from the flat pixel index
        let y = (pixnum / width) as f32;
        let x = (pixnum % width) as f32;

        // offset of this pixel from the circle center
        let y_offset = edge_radius - y;
        let x_offset = edge_radius - x;

        // squared distance from the center (may be inside or outside)
        let pixel_radius_squared: f32 = x_offset * x_offset + y_offset * y_offset;

        if pixel_radius_squared > edge_radius_squared {
            // Outside of the avatar circle
            *pixel = Color32::TRANSPARENT;
            continue;
        }

        // Within 1 pixel of the edge we fade to antialias the circle:
        // 1px inside keeps 100% of the color, exactly on the edge keeps 0%.
        let distance = edge_radius - pixel_radius_squared.sqrt();
        if distance <= 1.0 {
            *pixel = Color32::from_rgba_premultiplied(
                (pixel.r() as f32 * distance) as u8,
                (pixel.g() as f32 * distance) as u8,
                (pixel.b() as f32 * distance) as u8,
                (pixel.a() as f32 * distance) as u8,
            );
        }
    }
}
/// If the image's longest dimension is greater than `max_edge`, downscale
/// proportionally so that dimension equals `max_edge`; otherwise return
/// the image untouched.
fn resize_image_if_too_big(
    image: image::DynamicImage,
    max_edge: u32,
    filter: FilterType,
) -> image::DynamicImage {
    let (w, h) = (image.width(), image.height());
    let longest = w.max(h);

    if longest <= max_edge {
        return image;
    }

    let scale = max_edge as f32 / longest as f32;
    let new_w = (w as f32 * scale).round() as u32;
    let new_h = (h as f32 * scale).round() as u32;
    image.resize(new_w, new_h, filter)
}
///
/// Process an image, resizing so we don't blow up video memory or even crash
///
/// For profile pictures, make them round and small to fit the size hint
/// For everything else, either:
///
/// - resize to the size hint
/// - keep the size if the longest dimension is less than MAX_IMG_LENGTH
/// - resize if any larger, using [`resize_image_if_too_big`]
///
#[profiling::function]
fn process_image(imgtyp: ImageType, mut image: image::DynamicImage) -> ColorImage {
    const MAX_IMG_LENGTH: u32 = 512;
    const FILTER_TYPE: FilterType = FilterType::CatmullRom;

    // convert a processed DynamicImage into an egui ColorImage
    fn to_color_image(image: image::DynamicImage) -> ColorImage {
        let buf = image.into_rgba8();
        ColorImage::from_rgba_unmultiplied(
            [buf.width() as usize, buf.height() as usize],
            buf.as_flat_samples().as_slice(),
        )
    }

    match imgtyp {
        ImageType::Content(size_hint) => {
            let resized = match size_hint {
                None => resize_image_if_too_big(image, MAX_IMG_LENGTH, FILTER_TYPE),
                Some((w, h)) => image.resize(w, h, FILTER_TYPE),
            };
            to_color_image(resized)
        }
        ImageType::Profile(size) => {
            // center-crop to a square before scaling down
            let side = image.width().min(image.height());
            if image.width() > side {
                let excess = image.width() - side;
                image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
            } else if image.height() > side {
                let excess = image.height() - side;
                image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
            }

            let mut color_image = to_color_image(image.resize(size, size, FILTER_TYPE));
            // profile pictures are rendered as circles
            round_image(&mut color_image);
            color_image
        }
    }
}
/// Decode an HTTP response body into a [`ColorImage`], dispatching on the
/// content-type header (svg vs raster); anything else is an error.
#[profiling::function]
fn parse_img_response(
    response: ehttp::Response,
    imgtyp: ImageType,
) -> Result<ColorImage, notedeck::Error> {
    let content_type = response.content_type().unwrap_or_default();

    // rasterization size for svgs
    let size_hint = match imgtyp {
        ImageType::Profile(size) => SizeHint::Size(size, size),
        ImageType::Content(Some((w, h))) => SizeHint::Size(w, h),
        ImageType::Content(None) => SizeHint::default(),
    };

    if content_type.starts_with("image/svg") {
        profiling::scope!("load_svg");
        let mut color_image =
            egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
        round_image(&mut color_image);
        return Ok(color_image);
    }

    if content_type.starts_with("image/") {
        profiling::scope!("load_from_memory");
        let dyn_image = image::load_from_memory(&response.bytes)?;
        return Ok(process_image(imgtyp, dyn_image));
    }

    Err(format!("Expected image, found content-type {content_type:?}").into())
}
/// Spawn an async task loading a cached media file from `path`, returning
/// a promise the UI can poll.
fn fetch_img_from_disk(
    ctx: &egui::Context,
    url: &str,
    path: &path::Path,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    // owned copies so the async task can outlive the caller's borrows
    let ctx = ctx.clone();
    let url = url.to_owned();
    let path = path.to_owned();

    Promise::spawn_async(async move {
        let res = async_fetch_img_from_disk(ctx, url, &path, cache_type).await;
        Some(res)
    })
}
async fn async_fetch_img_from_disk(
ctx: egui::Context,
url: String,
path: &path::Path,
cache_type: MediaCacheType,
) -> Result<TexturedImage, notedeck::Error> {
match cache_type {
MediaCacheType::Image => {
let data = fs::read(path).await?;
let image_buffer = image::load_from_memory(&data).map_err(notedeck::Error::Image)?;
let img = buffer_to_color_image(
image_buffer.as_flat_samples_u8(),
image_buffer.width(),
image_buffer.height(),
);
Ok(TexturedImage::Static(ctx.load_texture(
&url,
img,
Default::default(),
)))
}
MediaCacheType::Gif => {
let gif_bytes = fs::read(path).await?; // Read entire file into a Vec<u8>
generate_gif(ctx, url, path, gif_bytes, false, |i| {
buffer_to_color_image(i.as_flat_samples_u8(), i.width(), i.height())
})
}
}
}
/// Decode gif `data` into a [`TexturedImage::Animated`].
///
/// The first frame is decoded and uploaded synchronously so there is
/// something to draw immediately; the remaining frames are processed on a
/// background thread and streamed to the UI through a bounded channel.
/// When `write_to_disk` is set, processed frames are also forwarded to a
/// second thread that re-encodes them into the media cache at `path`.
fn generate_gif(
    ctx: egui::Context,
    url: String,
    path: &path::Path,
    data: Vec<u8>,
    write_to_disk: bool,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + Copy + 'static,
) -> Result<TexturedImage, notedeck::Error> {
    let decoder = {
        let reader = Cursor::new(data.as_slice());
        GifDecoder::new(reader)?
    };

    // frames flow decoder thread -> UI; bounded to limit memory use
    let (tex_input, tex_output) = mpsc::sync_channel(4);

    // optional second channel feeding the disk-encoder thread
    let (maybe_encoder_input, maybe_encoder_output) = if write_to_disk {
        let (inp, out) = mpsc::sync_channel(4);
        (Some(inp), Some(out))
    } else {
        (None, None)
    };

    // decode all frames up front; a corrupt frame fails the whole gif
    let mut frames: VecDeque<Frame> = decoder
        .into_frames()
        .collect::<std::result::Result<VecDeque<_>, image::ImageError>>()
        .map_err(|e| notedeck::Error::Generic(e.to_string()))?;

    // upload the first frame on this thread so rendering can start now
    let first_frame = frames.pop_front().map(|frame| {
        generate_animation_frame(
            &ctx,
            &url,
            0,
            frame,
            maybe_encoder_input.as_ref(),
            process_to_egui,
        )
    });

    let cur_url = url.clone();
    thread::spawn(move || {
        // NOTE(review): `enumerate` restarts at 0 here, so the second frame
        // shares the first frame's debug texture name "{url}0"; egui texture
        // names need not be unique, so this appears cosmetic — confirm.
        for (index, frame) in frames.into_iter().enumerate() {
            let texture_frame = generate_animation_frame(
                &ctx,
                &cur_url,
                index,
                frame,
                maybe_encoder_input.as_ref(),
                process_to_egui,
            );
            if tex_input.send(texture_frame).is_err() {
                tracing::debug!("AnimationTextureFrame mpsc stopped abruptly");
                break;
            }
        }
    });

    if let Some(encoder_output) = maybe_encoder_output {
        let path = path.to_owned();
        thread::spawn(move || {
            // collect every processed frame, then write the gif to the cache
            let mut imgs = Vec::new();
            while let Ok(img) = encoder_output.recv() {
                imgs.push(img);
            }
            if let Err(e) = MediaCache::write_gif(&path, &url, imgs) {
                tracing::error!("Could not write gif to disk: {e}");
            }
        });
    }

    first_frame.map_or_else(
        || {
            Err(notedeck::Error::Generic(
                "first frame not found for gif".to_owned(),
            ))
        },
        |first_frame| {
            Ok(TexturedImage::Animated(Animation {
                other_frames: Default::default(),
                receiver: Some(tex_output),
                first_frame,
            }))
        },
    )
}
/// Process one decoded gif frame into an uploaded [`TextureFrame`],
/// optionally forwarding a copy to the disk-encoder thread.
fn generate_animation_frame(
    ctx: &egui::Context,
    url: &str,
    index: usize,
    frame: image::Frame,
    maybe_encoder_input: Option<&SyncSender<ImageFrame>>,
    process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + 'static,
) -> TextureFrame {
    let delay = Duration::from(frame.delay());
    let color_img = process_to_egui(DynamicImage::ImageRgba8(frame.into_buffer()));

    // hand the processed frame to the encoder, if one is listening
    if let Some(sender) = maybe_encoder_input {
        let img_frame = ImageFrame {
            delay,
            image: color_img.clone(),
        };
        if let Err(e) = sender.send(img_frame) {
            tracing::error!("ImageFrame mpsc unexpectedly closed: {e}");
        }
    }

    // each frame's texture gets a unique-ish debug name: "<url><index>"
    let texture = ctx.load_texture(format!("{url}{index}"), color_img, Default::default());
    TextureFrame { delay, texture }
}
fn buffer_to_color_image(
samples: Option<FlatSamples<&[u8]>>,
width: u32,
height: u32,
) -> ColorImage {
// TODO(jb55): remove unwrap here
let flat_samples = samples.unwrap();
ColorImage::from_rgba_unmultiplied([width as usize, height as usize], flat_samples.as_slice())
}
pub fn fetch_binary_from_disk(path: PathBuf) -> Result<Vec<u8>, notedeck::Error> {
std::fs::read(path).map_err(|e| notedeck::Error::Generic(e.to_string()))
}
/// Controls type-specific handling
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    /// Profile Image (size). Processing center-crops to a square, resizes
    /// to `size`, and rounds the corners into a circle.
    Profile(u32),
    /// Content Image with optional (width, height) size hint
    Content(Option<(u32, u32)>),
}
/// Load the media at `url`, preferring the on-disk cache and falling back
/// to a network fetch (which also populates the cache).
pub fn fetch_img(
    img_cache_path: &Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    // the on-disk cache filename is derived from the url
    let path = img_cache_path.join(MediaCache::key(url));

    if path.exists() {
        fetch_img_from_disk(ctx, url, &path, cache_type)
    } else {
        fetch_img_from_net(img_cache_path, ctx, url, imgtyp, cache_type)
    }
}
/// Download `url`, decode it into a texture, and write the result to the
/// on-disk media cache.
///
/// Returns immediately with a [`Promise`]; the ehttp callback fulfills it
/// from a background thread and requests a repaint so the UI picks up the
/// finished texture.
fn fetch_img_from_net(
    cache_path: &path::Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
    cache_type: MediaCacheType,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let (sender, promise) = Promise::new();
    let request = ehttp::Request::get(url);
    // owned copies for the 'static fetch callback
    let ctx = ctx.clone();
    let cloned_url = url.to_owned();
    let cache_path = cache_path.to_owned();
    ehttp::fetch(request, move |response| {
        let handle = response.map_err(notedeck::Error::Generic).and_then(|resp| {
            match cache_type {
                MediaCacheType::Image => {
                    let img = parse_img_response(resp, imgtyp);
                    img.map(|img| {
                        let texture_handle =
                            ctx.load_texture(&cloned_url, img.clone(), Default::default());

                        // write to disk on its own thread so the fetch
                        // callback isn't blocked on I/O
                        std::thread::spawn(move || {
                            MediaCache::write(&cache_path, &cloned_url, img)
                        });

                        TexturedImage::Static(texture_handle)
                    })
                }
                MediaCacheType::Gif => {
                    let gif_bytes = resp.bytes;
                    // generate_gif handles its own disk write (true)
                    generate_gif(
                        ctx.clone(),
                        cloned_url,
                        &cache_path,
                        gif_bytes,
                        true,
                        move |img| process_image(imgtyp, img),
                    )
                }
            }
        });

        sender.send(Some(handle)); // send the results back to the UI thread.
        ctx.request_repaint();
    });
    promise
}
/// Look up (or start fetching) the texture for `url` and bundle it with
/// the gif-state map needed to render it this frame.
pub fn get_render_state<'a>(
    ctx: &Context,
    images: &'a mut Images,
    cache_type: MediaCacheType,
    url: &str,
    img_type: ImageType,
) -> RenderState<'a> {
    // pick the cache matching the media type
    let cache = match cache_type {
        MediaCacheType::Image => &mut images.static_imgs,
        MediaCacheType::Gif => &mut images.gifs,
    };

    // kicks off a fetch if the url isn't already cached or in flight
    let texture_state = cache.textures_cache.handle_and_get_or_insert(url, || {
        crate::images::fetch_img(&cache.cache_dir, ctx, url, img_type, cache_type)
    });

    RenderState {
        texture_state,
        gifs: &mut images.gif_states,
    }
}
/// Texture state (possibly still loading) plus the gif bookkeeping needed
/// to render it.
pub struct LoadableRenderState<'a> {
    pub texture_state: LoadableTextureState<'a>,
    pub gifs: &'a mut GifStateMap,
}
/// Everything needed to render a cached media item this frame; produced
/// by `get_render_state`.
pub struct RenderState<'a> {
    pub texture_state: TextureState<'a>,
    pub gifs: &'a mut GifStateMap,
}
/// Start fetching the fallback "no profile picture" image as a 128px
/// profile texture.
pub fn fetch_no_pfp_promise(
    ctx: &Context,
    cache: &MediaCache,
) -> Promise<Option<Result<TexturedImage, notedeck::Error>>> {
    let url = notedeck::profile::no_pfp_url();
    crate::images::fetch_img(
        &cache.cache_dir,
        ctx,
        url,
        ImageType::Profile(128),
        MediaCacheType::Image,
    )
}

View File

@@ -1,153 +0,0 @@
use egui::TextureHandle;
use hashbrown::{hash_map::RawEntryMut, HashMap};
use notedeck::JobPool;
use poll_promise::Promise;
/// Cache of in-flight and finished background jobs, keyed by owned job id.
#[derive(Default)]
pub struct JobsCache {
    // owned keys; lookups use borrowed `JobId` via hashbrown::Equivalent
    jobs: HashMap<JobIdOwned, JobState>,
}
/// Lifecycle of a scheduled job.
pub enum JobState {
    /// Still running on the job pool; the promise resolves exactly once
    Pending(Promise<Option<Result<Job, JobError>>>),
    /// Finished with an error
    Error(JobError),
    /// Finished successfully
    Completed(Job),
}
/// Why a job failed to run.
pub enum JobError {
    /// The params handed to the job didn't match the variant it expected
    InvalidParameters,
}
/// Borrowed job parameters, passed at schedule time and converted to
/// owned form before crossing threads.
#[derive(Debug)]
pub enum JobParams<'a> {
    Blurhash(BlurhashParams<'a>),
}
/// Owned counterpart of [`JobParams`], safe to move into the job closure.
#[derive(Debug)]
pub enum JobParamsOwned {
    Blurhash(BlurhashParamsOwned),
}
impl<'a> From<BlurhashParams<'a>> for BlurhashParamsOwned {
fn from(params: BlurhashParams<'a>) -> Self {
BlurhashParamsOwned {
blurhash: params.blurhash.to_owned(),
url: params.url.to_owned(),
ctx: params.ctx.clone(),
}
}
}
impl<'a> From<JobParams<'a>> for JobParamsOwned {
    fn from(params: JobParams<'a>) -> Self {
        // single-variant enum today, so the pattern is irrefutable;
        // switch back to a match when more param kinds are added
        let JobParams::Blurhash(bp) = params;
        JobParamsOwned::Blurhash(bp.into())
    }
}
/// Borrowed inputs for a blurhash decode job.
#[derive(Debug)]
pub struct BlurhashParams<'a> {
    /// blurhash string to decode
    pub blurhash: &'a str,
    /// media url; used as the texture name
    pub url: &'a str,
    /// egui context used to upload the decoded texture
    pub ctx: &'a egui::Context,
}
/// Owned counterpart of [`BlurhashParams`]; see that type for field
/// meanings.
#[derive(Debug)]
pub struct BlurhashParamsOwned {
    pub blurhash: String,
    pub url: String,
    pub ctx: egui::Context,
}
impl JobsCache {
    /// Look up the job for `jobid`, scheduling it on `job_pool` if absent.
    ///
    /// On a hit, a still-`Pending` entry is polled: if its promise has
    /// resolved, the state is upgraded in place to `Completed`/`Error`.
    /// On a miss, `run_job` is boxed together with owned copies of
    /// `params`, scheduled, and the new `Pending` state is stored.
    pub fn get_or_insert_with<
        'a,
        F: FnOnce(Option<JobParamsOwned>) -> Result<Job, JobError> + Send + 'static,
    >(
        &'a mut self,
        job_pool: &mut JobPool,
        jobid: &JobId,
        params: Option<JobParams>,
        run_job: F,
    ) -> &'a mut JobState {
        // raw entry API lets us probe with the borrowed JobId and only
        // allocate an owned key on insert
        match self.jobs.raw_entry_mut().from_key(jobid) {
            RawEntryMut::Occupied(entry) => 's: {
                let mut state = entry.into_mut();

                // only pending jobs can make progress
                let JobState::Pending(promise) = &mut state else {
                    break 's state;
                };

                // promise not resolved yet: leave it pending
                let Some(res) = promise.ready_mut() else {
                    break 's state;
                };

                // the payload is an Option so it can be taken exactly once
                let Some(res) = res.take() else {
                    tracing::error!("Failed to take the promise for job: {:?}", jobid);
                    break 's state;
                };

                // upgrade in place so future lookups see the final state
                *state = match res {
                    Ok(j) => JobState::Completed(j),
                    Err(e) => JobState::Error(e),
                };

                state
            }
            RawEntryMut::Vacant(entry) => {
                // copy borrowed params so the job closure can be 'static
                let owned_params = params.map(JobParams::into);
                let wrapped: Box<dyn FnOnce() -> Option<Result<Job, JobError>> + Send + 'static> =
                    Box::new(move || Some(run_job(owned_params)));
                let promise = Promise::spawn_async(job_pool.schedule(wrapped));
                let (_, state) = entry.insert(jobid.into(), JobState::Pending(promise));
                state
            }
        }
    }

    /// Non-scheduling lookup of a job's current state.
    pub fn get(&self, jobid: &JobId) -> Option<&JobState> {
        self.jobs.get(jobid)
    }
}
impl<'a> From<&JobId<'a>> for JobIdOwned {
    /// Allocate an owned key from a borrowed job id (used on cache insert).
    fn from(jobid: &JobId<'a>) -> Self {
        match jobid {
            JobId::Blurhash(url) => JobIdOwned::Blurhash((*url).to_owned()),
        }
    }
}
impl hashbrown::Equivalent<JobIdOwned> for JobId<'_> {
    /// Lets borrowed `JobId`s be used as lookup keys against owned ids
    /// without allocating.
    fn equivalent(&self, key: &JobIdOwned) -> bool {
        match (self, key) {
            (JobId::Blurhash(borrowed), JobIdOwned::Blurhash(owned)) => owned == borrowed,
        }
    }
}
/// Owned job key stored in the cache map.
#[derive(Debug, PartialEq, Eq, Clone, Hash)]
enum JobIdOwned {
    Blurhash(String), // image URL
}
/// Borrowed job key used for lookups; converted to [`JobIdOwned`] only on
/// insert.
#[derive(Debug, Hash)]
pub enum JobId<'a> {
    Blurhash(&'a str), // image URL
}
/// A finished job's output.
pub enum Job {
    /// Decoded blurhash texture; `None` when rendering failed
    Blurhash(Option<TextureHandle>),
}
impl std::fmt::Debug for Job {
    /// The texture payload isn't useful to print; show only the variant.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let name = match self {
            Job::Blurhash(_) => "Blurhash",
        };
        f.write_str(name)
    }
}

View File

@@ -1,13 +1,11 @@
pub mod anim;
pub mod app_images;
pub mod blur;
pub mod colors;
pub mod constants;
pub mod context_menu;
pub mod gif;
pub mod icons;
pub mod images;
pub mod jobs;
pub mod media;
pub mod mention;
pub mod note;
pub mod profile;

View File

@@ -0,0 +1,3 @@
mod viewer;
pub use viewer::{MediaViewer, MediaViewerState};

View File

@@ -0,0 +1,232 @@
/// Spiral layout for media galleries
use egui::{pos2, vec2, Color32, Rect, Sense, TextureId, Vec2};
/// An image to be laid out in the spiral gallery.
#[derive(Clone, Copy, Debug)]
pub struct ImageItem {
    /// the already-uploaded texture to draw
    pub texture: TextureId,
    pub ar: f32, // width / height (must be > 0)
}
/// A laid-out image: its texture plus the rect it occupies in gallery
/// space (top-left normalized to the origin by `layout_spiral`).
///
/// Made `pub` with `pub` fields: `layout_spiral` is `pub` and returns
/// `Vec<Placed>`, so a private type here trips the `private_interfaces`
/// lint and leaves callers unable to use the layout result.
#[derive(Clone, Debug)]
pub struct Placed {
    pub texture: TextureId,
    pub rect: Rect,
}
/// Tuning knobs for the spiral layout.
#[derive(Clone, Copy, Debug)]
pub struct LayoutParams {
    /// spacing between images, in points (clamped to >= 0)
    pub gutter: f32,
    /// minimum height of a row strip
    pub h_min: f32,
    /// maximum height of a row strip
    pub h_max: f32,
    /// minimum width of a column strip
    pub w_min: f32,
    /// maximum width of a column strip
    pub w_max: f32,
    /// when true, the first image is placed centered as the spiral seed
    pub seed_center: bool,
}
/// Lay `images` out in a rectangular spiral: alternating column and row
/// strips are appended around a growing bounding box (right column, top
/// row, left column, bottom row, repeating).
///
/// Returns the placed rects (shifted so the bounding box's top-left is at
/// the origin) and the total layout size.
pub fn layout_spiral(images: &[ImageItem], params: LayoutParams) -> (Vec<Placed>, Vec2) {
    if images.is_empty() {
        return (Vec::new(), vec2(0.0, 0.0));
    }

    let eps = f32::EPSILON;
    // sanitize params so later math can't divide by zero or invert ranges
    let g = params.gutter.max(0.0);
    let h_min = params.h_min.max(1.0);
    let h_max = params.h_max.max(h_min);
    let w_min = params.w_min.max(1.0);
    let w_max = params.w_max.max(w_min);

    let mut placed = Vec::with_capacity(images.len());

    // Build around origin; normalize at the end.
    let mut x_min = 0.0f32;
    let mut x_max = 0.0f32;
    let mut y_min = 0.0f32;
    let mut y_max = 0.0f32;

    // dir: 0 right-col, 1 top-row, 2 left-col, 3 bottom-row
    let mut dir = 0usize;
    let mut i = 0usize;

    // Optional seed: center a single image
    if params.seed_center && i < images.len() {
        let ar = images[i].ar.max(eps);
        // seed height is the midpoint of the allowed height range
        let h = ((h_min + h_max) * 0.5).clamp(h_min, h_max);
        let w = ar * h;
        let rect = Rect::from_center_size(pos2(0.0, 0.0), vec2(w, h));
        placed.push(Placed { texture: images[i].texture, rect });
        x_min = rect.min.x;
        x_max = rect.max.x;
        y_min = rect.min.y;
        y_max = rect.max.y;
        i += 1;
        dir = 1; // start by adding a row above
    } else {
        // ensure non-empty bbox for the first strip
        x_min = 0.0; x_max = 1.0; y_min = 0.0; y_max = 1.0;
    }

    // --- helpers -------------------------------------------------------------

    // Choose how many items fit and the strip size S (W for column, H for row).
    //
    // `weight` maps an image to its length contribution per unit of strip
    // size: 1/ar for columns (height per unit width), ar for rows.
    fn choose_k<F: Fn(&ImageItem) -> f32>(
        images: &[ImageItem],
        L: f32,
        g: f32,
        s_min: f32,
        s_max: f32,
        weight: F,
    ) -> (usize, f32) {
        // prefix sums of weights (sum over first k items)
        let mut pref = Vec::with_capacity(images.len() + 1);
        pref.push(0.0);
        for im in images {
            pref.push(pref.last().copied().unwrap_or(0.0) + weight(im));
        }
        let k_max = images.len().max(1);
        let mut chosen_k = 1usize;
        let mut chosen_s = f32::NAN;
        for k in 1..=k_max {
            // length left after gutters, and the strip size that makes the
            // first k items span it exactly
            let L_eff = (L - g * (k as f32 - 1.0)).max(1.0);
            let sum_w = pref[k].max(f32::EPSILON);
            let s = (L_eff / sum_w).max(1.0);
            if s > s_max && k < k_max {
                continue; // too big; add one more to thin the strip
            }
            if s < s_min {
                // prefer one fewer if possible
                if k > 1 {
                    let k2 = k - 1;
                    let L_eff2 = (L - g * (k2 as f32 - 1.0)).max(1.0);
                    let sum_w2 = pref[k2].max(f32::EPSILON);
                    chosen_k = k2;
                    chosen_s = (L_eff2 / sum_w2).max(1.0);
                } else {
                    chosen_k = 1;
                    chosen_s = s_min;
                }
                return (chosen_k, chosen_s);
            }
            return (k, s); // within bounds
        }
        // Fell through: use k_max and clamp
        //
        // NOTE(review): every loop iteration either `continue`s or returns,
        // and `continue` is guarded by `k < k_max`, so this fallback looks
        // unreachable — at k == k_max an over-large `s` is returned
        // unclamped above instead. Confirm whether that clamp was intended.
        let L_eff = (L - g * (k_max as f32 - 1.0)).max(1.0);
        let sum_w = pref[k_max].max(f32::EPSILON);
        let s = (L_eff / sum_w).clamp(s_min, s_max);
        (k_max, s)
    }

    // Place a column (top→bottom). Returns the new right/left edge.
    fn place_column(
        placed: &mut Vec<Placed>,
        strip: &[ImageItem],
        W: f32,
        x: f32,
        y_top: f32,
        g: f32,
    ) -> f32 {
        let mut y = y_top;
        for (idx, im) in strip.iter().enumerate() {
            // height follows from the fixed column width and aspect ratio
            let h = (W / im.ar.max(f32::EPSILON)).max(1.0);
            let rect = Rect::from_min_size(pos2(x, y), vec2(W, h));
            placed.push(Placed { texture: im.texture, rect });
            y += h;
            if idx + 1 != strip.len() { y += g; }
        }
        x + W
    }

    // Place a row (left→right). Returns the new top/bottom edge.
    fn place_row(
        placed: &mut Vec<Placed>,
        strip: &[ImageItem],
        H: f32,
        x_left: f32,
        y: f32,
        g: f32,
    ) -> f32 {
        let mut x = x_left;
        for (idx, im) in strip.iter().enumerate() {
            // width follows from the fixed row height and aspect ratio
            let w = (im.ar.max(f32::EPSILON) * H).max(1.0);
            let rect = Rect::from_min_size(pos2(x, y), vec2(w, H));
            placed.push(Placed { texture: im.texture, rect });
            x += w;
            if idx + 1 != strip.len() { x += g; }
        }
        y + H
    }

    // --- main loop -----------------------------------------------------------
    while i < images.len() {
        let remaining = &images[i..];
        if dir % 2 == 0 {
            // COLUMN (dir 0: right, 2: left)
            let L = (y_max - y_min).max(1.0);
            let (k, W) = choose_k(
                remaining,
                L, g, w_min, w_max,
                |im| 1.0 / im.ar.max(f32::EPSILON),
            );
            let x = if dir == 0 { x_max + g } else { x_min - g - W };
            let new_edge = place_column(&mut placed, &remaining[..k], W, x, y_min, g);
            if dir == 0 { x_max = new_edge; } else { x_min = x; }
            i += k;
        } else {
            // ROW (dir 1: top, 3: bottom)
            let L = (x_max - x_min).max(1.0);
            let (k, H) = choose_k(
                remaining,
                L, g, h_min, h_max,
                |im| im.ar.max(f32::EPSILON),
            );
            let y = if dir == 1 { y_max + g } else { y_min - g - H };
            let new_edge = place_row(&mut placed, &remaining[..k], H, x_min, y, g);
            if dir == 1 { y_max = new_edge; } else { y_min = y; }
            i += k;
        }
        dir = (dir + 1) % 4;
    }

    // Normalize so bbox top-left is (0,0)
    let shift = vec2(-x_min, -y_min);
    for p in &mut placed {
        p.rect = p.rect.translate(shift);
    }
    let total_size = vec2(x_max - x_min, y_max - y_min);
    (placed, total_size)
}
/// Render `images` in a spiral layout inside a two-axis scroll area.
///
/// Computes the layout with [`layout_spiral`], allocates the total extent,
/// outlines it, then paints every image at its placed rect.
pub fn spiral_gallery(ui: &mut egui::Ui, images: &[ImageItem], params: LayoutParams) {
    use egui::{ScrollArea, Stroke};

    let (placed, size) = layout_spiral(images, params);

    ScrollArea::both()
        .auto_shrink([false, false])
        .show(ui, |ui| {
            let (rect, _resp) = ui.allocate_exact_size(size, Sense::hover());
            let painter = ui.painter_at(rect);

            // allocate_exact_size returns a rect of exactly `size`, so we
            // outline `rect` directly instead of rebuilding it from
            // `rect.min` + `size` as before
            painter.rect_stroke(rect, 0.0, Stroke::new(1.0, Color32::DARK_GRAY));

            // full-texture uv for every image
            let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));
            for p in &placed {
                // translate from layout space into screen space
                let r = p.rect.translate(rect.min.to_vec2());
                painter.image(p.texture, r, uv, Color32::WHITE);
            }
        });
}

View File

@@ -0,0 +1,118 @@
use egui::{pos2, Color32, Rect};
use notedeck::{ImageType, Images};
/// State used in the MediaViewer ui widget.
///
#[derive(Default)]
pub struct MediaViewerState {
    /// urls of the media items to display
    pub urls: Vec<String>,
}
/// A panning, scrolling, optionally fullscreen, and tiling media viewer
pub struct MediaViewer<'a> {
    state: &'a MediaViewerState,
    // render inside a fullscreen window instead of inline
    fullscreen: bool,
}
impl<'a> MediaViewer<'a> {
    /// Create a non-fullscreen viewer over the given state.
    pub fn new(state: &'a MediaViewerState) -> Self {
        Self {
            state,
            fullscreen: false,
        }
    }

    /// Builder-style toggle for fullscreen rendering.
    pub fn fullscreen(mut self, enable: bool) -> Self {
        self.fullscreen = enable;
        self
    }

    /// Render the viewer. In fullscreen mode the content is hosted in a
    /// borderless, frameless window covering the whole screen; otherwise
    /// it renders inline in the current ui.
    pub fn ui(&self, images: &mut Images, ui: &mut egui::Ui) {
        if self.fullscreen {
            egui::Window::new("Media Viewer")
                .title_bar(false)
                .fixed_size(ui.ctx().screen_rect().size())
                .fixed_pos(ui.ctx().screen_rect().min)
                .frame(egui::Frame::NONE)
                .show(ui.ctx(), |ui| self.ui_content(images, ui));
        } else {
            self.ui_content(images, ui);
        }
    }

    /// Draw the dimmed background and the pannable/zoomable Scene,
    /// persisting the scene rect in egui temp data between frames.
    fn ui_content(&self, images: &mut Images, ui: &mut egui::Ui) {
        let avail_rect = ui.available_rect_before_wrap();

        // TODO: id_salt
        let id = ui.id().with("media_viewer");
        let mut scene_rect = ui.ctx().data(|d| d.get_temp(id)).unwrap_or(avail_rect);
        let prev = scene_rect;

        // Draw background
        ui.painter()
            .rect_filled(avail_rect, 0.0, egui::Color32::from_black_alpha(128));

        egui::Scene::new()
            .zoom_range(0.0..=10.0) // enhance 🔬
            .show(ui, &mut scene_rect, |ui| {
                self.render_image_tiles(images, ui);
            });

        // only write back when the user actually panned/zoomed
        if scene_rect != prev {
            ui.ctx().data_mut(|d| d.insert_temp(id, scene_rect));
        }
    }

    ///
    /// Tile a scene with images.
    ///
    /// TODO(jb55): Let's improve image tiling over time, spiraling outward. We
    /// should have a way to click "next" and have the scene smoothly transition and
    /// focus on the next image
    fn render_image_tiles(&self, images: &mut Images, ui: &mut egui::Ui) {
        // (the dead commented-out click-to-popup code that lived here has
        // been removed; click handling is the application's job now)
        for url in &self.state.urls {
            // fetch image texture
            let Some(texture) = images.latest_texture(ui, url, ImageType::Content(None)) else {
                continue;
            };

            // the area the next image will be put in.
            let mut img_rect = ui.available_rect_before_wrap();
            if !ui.is_rect_visible(img_rect) {
                // just stop rendering images if we're going out of the scene
                // basic culling when we have lots of images
                break;
            }

            // size the rect to the texture's native dimensions
            let size = texture.size_vec2();
            img_rect.set_height(size.y);
            img_rect.set_width(size.x);

            let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));

            // Paint image
            ui.painter()
                .image(texture.id(), img_rect, uv, Color32::WHITE);
            ui.advance_cursor_after_rect(img_rect);
        }
    }
}

View File

@@ -1,19 +1,15 @@
use std::cell::OnceCell;
use crate::{
blur::imeta_blurhashes,
jobs::JobsCache,
note::{NoteAction, NoteOptions, NoteResponse, NoteView},
secondary_label,
};
use notedeck::{JobsCache, RenderableMedia};
use egui::{Color32, Hyperlink, RichText};
use nostrdb::{BlockType, Mention, Note, NoteKey, Transaction};
use tracing::warn;
use notedeck::{IsFollowing, NoteCache, NoteContext};
use super::media::{find_renderable_media, image_carousel, RenderableMedia};
use super::media::image_carousel;
use notedeck::{update_imeta_blurhashes, IsFollowing, NoteCache, NoteContext};
pub struct NoteContents<'a, 'd> {
note_context: &'a mut NoteContext<'d>,
@@ -127,11 +123,11 @@ pub fn render_note_preview(
#[allow(clippy::too_many_arguments)]
#[profiling::function]
pub fn render_note_contents(
pub fn render_note_contents<'a>(
ui: &mut egui::Ui,
note_context: &mut NoteContext,
txn: &Transaction,
note: &Note,
note: &'a Note,
options: NoteOptions,
jobs: &mut JobsCache,
) -> NoteResponse {
@@ -152,7 +148,6 @@ pub fn render_note_contents(
}
let mut supported_medias: Vec<RenderableMedia> = vec![];
let blurhashes = OnceCell::new();
let response = ui.horizontal_wrapped(|ui| {
let blocks = if let Ok(blocks) = note_context.ndb.get_blocks_by_key(txn, note_key) {
@@ -223,15 +218,15 @@ pub fn render_note_contents(
let mut found_supported = || -> bool {
let url = block.as_str();
let blurs = blurhashes.get_or_init(|| imeta_blurhashes(note));
if !note_context.img_cache.metadata.contains_key(url) {
update_imeta_blurhashes(note, &mut note_context.img_cache.metadata);
}
let Some(media_type) =
find_renderable_media(&mut note_context.img_cache.urls, blurs, url)
else {
let Some(media) = note_context.img_cache.get_renderable_media(url) else {
return false;
};
supported_medias.push(media_type);
supported_medias.push(media);
true
};
@@ -311,6 +306,7 @@ pub fn render_note_contents(
.key
.pubkey
.bytes();
let trusted_media = is_self
|| note_context
.accounts

View File

@@ -1,24 +1,22 @@
use std::{collections::HashMap, path::Path};
use std::path::Path;
use egui::{
Button, Color32, Context, CornerRadius, FontId, Image, Response, RichText, Sense,
TextureHandle, UiBuilder, Window,
};
use egui::{Button, Color32, Context, CornerRadius, FontId, Image, Response, TextureHandle};
use notedeck::{
fonts::get_font_size, note::MediaAction, show_one_error_message, supported_mime_hosted_at_url,
tr, GifState, GifStateMap, Images, JobPool, Localization, MediaCache, MediaCacheType,
NotedeckTextStyle, TexturedImage, TexturesCache, UrlMimes,
compute_blurhash, fonts::get_font_size, show_one_error_message, tr, BlurhashParams,
GifStateMap, Images, Job, JobId, JobParams, JobPool, JobState, JobsCache, Localization,
MediaAction, MediaCacheType, NotedeckTextStyle, ObfuscationType, PointDimensions,
RenderableMedia, TexturedImage, TexturesCache,
};
use crate::{
app_images,
blur::{compute_blurhash, Blur, ObfuscationType, PointDimensions},
colors::PINK,
gif::{handle_repaint, retrieve_latest_texture},
images::{fetch_no_pfp_promise, get_render_state, ImageType},
jobs::{BlurhashParams, Job, JobId, JobParams, JobState, JobsCache},
AnimationHelper, PulseAlpha,
};
use notedeck::media::gif::ensure_latest_texture;
use notedeck::media::images::{fetch_no_pfp_promise, ImageType};
use crate::{app_images, AnimationHelper, PulseAlpha};
/// Actions emitted by the media view for the caller to handle.
pub enum MediaViewAction {
    /// Used to handle escape presses when the media viewer is open
    EscapePressed,
}
#[allow(clippy::too_many_arguments)]
pub(crate) fn image_carousel(
@@ -36,7 +34,6 @@ pub(crate) fn image_carousel(
let height = 360.0;
let width = ui.available_width();
let show_popup = get_show_popup(ui, popup_id(carousel_id));
let mut action = None;
//let has_touch_screen = ui.ctx().input(|i| i.has_touch_screen());
@@ -46,6 +43,7 @@ pub(crate) fn image_carousel(
.id_salt(carousel_id)
.show(ui, |ui| {
ui.horizontal(|ui| {
let mut media_action: Option<(usize, MediaUIAction)> = None;
for (i, media) in medias.iter().enumerate() {
let RenderableMedia {
url,
@@ -57,7 +55,6 @@ pub(crate) fn image_carousel(
MediaCacheType::Image => &mut img_cache.static_imgs,
MediaCacheType::Gif => &mut img_cache.gifs,
};
let media_state = get_content_media_render_state(
ui,
job_pool,
@@ -68,7 +65,7 @@ pub(crate) fn image_carousel(
url,
*media_type,
&cache.cache_dir,
blur_type.clone(),
blur_type,
);
if let Some(cur_action) = render_media(
@@ -79,43 +76,25 @@ pub(crate) fn image_carousel(
height,
i18n,
) {
// clicked the media, lets set the active index
if let MediaUIAction::Clicked = cur_action {
set_show_popup(ui, popup_id(carousel_id), true);
set_selected_index(ui, selection_id(carousel_id), i);
}
action = cur_action.to_media_action(
ui.ctx(),
url,
*media_type,
cache,
ImageType::Content(Some((width as u32, height as u32))),
);
media_action = Some((i, cur_action));
}
}
if let Some((i, media_action)) = &media_action {
action = media_action.to_media_action(
ui.ctx(),
medias,
*i,
img_cache,
ImageType::Content(Some((width as u32, height as u32))),
);
}
})
.response
})
.inner
});
if show_popup {
if medias.is_empty() {
return None;
};
let current_image_index = update_selected_image_index(ui, carousel_id, medias.len() as i32);
show_full_screen_media(
ui,
medias,
current_image_index,
img_cache,
carousel_id,
i18n,
);
}
action
}
@@ -130,146 +109,55 @@ impl MediaUIAction {
pub fn to_media_action(
&self,
ctx: &egui::Context,
url: &str,
cache_type: MediaCacheType,
cache: &mut MediaCache,
medias: &[RenderableMedia],
selected: usize,
img_cache: &Images,
img_type: ImageType,
) -> Option<MediaAction> {
match self {
MediaUIAction::Clicked => {
tracing::debug!("{} clicked", url);
None
}
MediaUIAction::Clicked => Some(MediaAction::ViewMedias(
medias.iter().map(|m| m.url.to_owned()).collect(),
)),
MediaUIAction::Unblur => Some(MediaAction::FetchImage {
url: url.to_owned(),
cache_type,
no_pfp_promise: crate::images::fetch_img(
MediaUIAction::Unblur => {
let url = &medias[selected].url;
let cache = img_cache.get_cache(medias[selected].media_type);
let cache_type = cache.cache_type;
let no_pfp_promise = notedeck::media::images::fetch_img(
&cache.cache_dir,
ctx,
url,
img_type,
cache_type,
),
}),
);
Some(MediaAction::FetchImage {
url: url.to_owned(),
cache_type,
no_pfp_promise,
})
}
MediaUIAction::Error => {
if !matches!(img_type, ImageType::Profile(_)) {
return None;
};
let cache = img_cache.get_cache(medias[selected].media_type);
let cache_type = cache.cache_type;
Some(MediaAction::FetchImage {
url: url.to_owned(),
url: medias[selected].url.to_owned(),
cache_type,
no_pfp_promise: fetch_no_pfp_promise(ctx, cache),
})
}
MediaUIAction::DoneLoading => Some(MediaAction::DoneLoading {
url: url.to_owned(),
cache_type,
url: medias[selected].url.to_owned(),
cache_type: img_cache.get_cache(medias[selected].media_type).cache_type,
}),
}
}
}
/// Pop up a borderless, screen-covering window showing `medias[index]`.
///
/// Bails out silently (renders nothing) if the media's texture has not
/// finished loading yet.
fn show_full_screen_media(
    ui: &mut egui::Ui,
    medias: &[RenderableMedia],
    index: usize,
    img_cache: &mut Images,
    carousel_id: egui::Id,
    i18n: &mut Localization,
) {
    let screen = ui.ctx().screen_rect();

    Window::new("image_popup")
        .title_bar(false)
        .fixed_size(screen.size())
        .fixed_pos(screen.min)
        .frame(egui::Frame::NONE)
        .show(ui.ctx(), |ui| {
            ui.centered_and_justified(|ui| 's: {
                let image_url = medias[index].url;
                let media_type = medias[index].media_type;

                tracing::trace!(
                    "show_full_screen_media using img {} @ {} for carousel_id {:?}",
                    image_url,
                    index,
                    carousel_id
                );

                let render_state = get_render_state(
                    ui.ctx(),
                    img_cache,
                    media_type,
                    image_url,
                    ImageType::Content(None),
                );

                // Nothing to draw until the texture is fully loaded.
                let notedeck::TextureState::Loaded(textured_image) = render_state.texture_state
                else {
                    break 's;
                };

                render_full_screen_media(
                    ui,
                    medias.len(),
                    index,
                    textured_image,
                    render_state.gifs,
                    image_url,
                    carousel_id,
                    i18n,
                );
            })
        });
}
/// Remember which carousel image is currently selected.
fn set_selected_index(ui: &mut egui::Ui, sel_id: egui::Id, index: usize) {
    ui.data_mut(|d| d.insert_temp(sel_id, index));
}
/// Currently selected carousel image, defaulting to the first one.
fn get_selected_index(ui: &egui::Ui, selection_id: egui::Id) -> usize {
    ui.data(|d| d.get_temp(selection_id).unwrap_or_default())
}
/// Checks to see if we have any left/right key presses and updates the carousel index
///
/// Also honors one-shot "next_image"/"prev_image" flags left in temp data
/// (e.g. by swipe gestures), clearing them once consumed. Returns the
/// (possibly unchanged) selected index.
fn update_selected_image_index(ui: &mut egui::Ui, carousel_id: egui::Id, num_urls: i32) -> usize {
    if num_urls <= 1 {
        // Single image: nothing to navigate.
        return 0;
    }

    let next_id = carousel_id.with("next_image");
    let prev_id = carousel_id.with("prev_image");
    let (next_flag, prev_flag): (bool, bool) = ui.data(|data| {
        (
            data.get_temp(next_id).unwrap_or_default(),
            data.get_temp(prev_id).unwrap_or_default(),
        )
    });

    // Right arrow / vim "L" / pending swipe-left flag advances.
    if next_flag
        || ui.input(|i| i.key_pressed(egui::Key::ArrowRight) || i.key_pressed(egui::Key::L))
    {
        let ind = select_next_media(ui, carousel_id, num_urls, 1);
        tracing::debug!("carousel selecting right {}/{}", ind + 1, num_urls);
        if next_flag {
            ui.data_mut(|data| data.remove_temp::<bool>(next_id));
        }
        return ind;
    }

    // Left arrow / vim "H" / pending swipe-right flag goes back.
    if prev_flag
        || ui.input(|i| i.key_pressed(egui::Key::ArrowLeft) || i.key_pressed(egui::Key::H))
    {
        let ind = select_next_media(ui, carousel_id, num_urls, -1);
        tracing::debug!("carousel selecting left {}/{}", ind + 1, num_urls);
        if prev_flag {
            ui.data_mut(|data| data.remove_temp::<bool>(prev_id));
        }
        return ind;
    }

    get_selected_index(ui, selection_id(carousel_id))
}
#[allow(clippy::too_many_arguments)]
pub fn get_content_media_render_state<'a>(
ui: &mut egui::Ui,
@@ -281,11 +169,11 @@ pub fn get_content_media_render_state<'a>(
url: &'a str,
cache_type: MediaCacheType,
cache_dir: &Path,
obfuscation_type: ObfuscationType<'a>,
obfuscation_type: &'a ObfuscationType,
) -> MediaRenderState<'a> {
let render_type = if media_trusted {
cache.handle_and_get_or_insert_loadable(url, || {
crate::images::fetch_img(
notedeck::media::images::fetch_img(
cache_dir,
ui.ctx(),
url,
@@ -332,7 +220,7 @@ pub fn get_content_media_render_state<'a>(
fn get_obfuscated<'a>(
ui: &mut egui::Ui,
url: &str,
obfuscation_type: ObfuscationType<'a>,
obfuscation_type: &'a ObfuscationType,
job_pool: &'a mut JobPool,
jobs: &'a mut JobsCache,
height: f32,
@@ -342,7 +230,7 @@ fn get_obfuscated<'a>(
};
let params = BlurhashParams {
blurhash: renderable_blur.blurhash,
blurhash: &renderable_blur.blurhash,
url,
ctx: ui.ctx(),
};
@@ -379,336 +267,6 @@ fn get_obfuscated<'a>(
ObfuscatedTexture::Blur(texture_handle)
}
// simple selector memory
//
// Step the carousel selection by `direction` (+1/-1), wrapping at both
// ends, and persist the new index only when it actually changed.
fn select_next_media(
    ui: &mut egui::Ui,
    carousel_id: egui::Id,
    num_urls: i32,
    direction: i32,
) -> usize {
    let sel_id = selection_id(carousel_id);
    let current = get_selected_index(ui, sel_id) as i32;

    let mut next = current + direction;
    if next >= num_urls {
        next = 0;
    } else if next < 0 {
        next = num_urls - 1;
    }

    if next != current {
        set_selected_index(ui, sel_id, next as usize);
    }

    next as usize
}
/// Render one media item fullscreen with zoom (scroll wheel), pan (drag),
/// swipe-to-navigate, a URL top bar, and a dot-pager bottom bar.
///
/// Zoom, pan, swipe accumulation, and the open/closed popup flag are all
/// persisted in egui temp memory keyed off `carousel_id`, so this function
/// must run every frame while the popup is open.
///
/// * `num_urls` / `index` — how many medias the carousel holds and which one
///   is shown; the dot bar only appears when `num_urls > 1`.
/// * `renderable_media` / `gifs` — texture source; gifs are advanced via
///   `retrieve_latest_texture`.
/// * `image_url` — shown in the top bar and copied to the clipboard on click.
#[allow(clippy::too_many_arguments)]
fn render_full_screen_media(
    ui: &mut egui::Ui,
    num_urls: usize,
    index: usize,
    renderable_media: &mut TexturedImage,
    gifs: &mut HashMap<String, GifState>,
    image_url: &str,
    carousel_id: egui::Id,
    i18n: &mut Localization,
) {
    const TOP_BAR_HEIGHT: f32 = 30.0;
    const BOTTOM_BAR_HEIGHT: f32 = 60.0;

    let screen_rect = ui.ctx().screen_rect();
    let screen_size = screen_rect.size();

    // Escape key closes popup
    if ui.input(|i| i.key_pressed(egui::Key::Escape)) {
        ui.ctx().memory_mut(|mem| {
            mem.data.insert_temp(carousel_id.with("show_popup"), false);
        });
    }

    // Draw background
    ui.painter()
        .rect_filled(screen_rect, 0.0, Color32::from_black_alpha(230));
    // Clicking the dimmed backdrop (outside the image) dismisses the popup
    // — checked at the bottom of this function.
    let background_response = ui.interact(
        screen_rect,
        carousel_id.with("background"),
        egui::Sense::click(),
    );

    // Zoom & pan state
    let zoom_id = carousel_id.with("zoom_level");
    let pan_id = carousel_id.with("pan_offset");
    let mut zoom: f32 = ui
        .ctx()
        .memory(|mem| mem.data.get_temp(zoom_id).unwrap_or(1.0));
    let mut pan_offset = ui
        .ctx()
        .memory(|mem| mem.data.get_temp(pan_id).unwrap_or(egui::Vec2::ZERO));

    // Handle scroll to zoom
    if ui.input(|i| i.pointer.hover_pos()).is_some() {
        let scroll_delta = ui.input(|i| i.smooth_scroll_delta);
        if scroll_delta.y != 0.0 {
            // 5% per scroll step, clamped to [0.1, 5.0].
            let zoom_factor = if scroll_delta.y > 0.0 { 1.05 } else { 0.95 };
            zoom = (zoom * zoom_factor).clamp(0.1, 5.0);
            // At or below 1x the whole image fits, so panning is reset.
            if zoom <= 1.0 {
                pan_offset = egui::Vec2::ZERO;
            }
            ui.ctx().memory_mut(|mem| {
                mem.data.insert_temp(zoom_id, zoom);
                mem.data.insert_temp(pan_id, pan_offset);
            });
        }
    }

    // Fetch image
    let texture = handle_repaint(
        ui,
        retrieve_latest_texture(image_url, gifs, renderable_media),
    );
    let texture_size = texture.size_vec2();

    // Top bar: shows the URL; any interaction with it keeps the popup open.
    let topbar_rect = egui::Rect::from_min_max(
        screen_rect.min + egui::vec2(0.0, 0.0),
        screen_rect.min + egui::vec2(screen_size.x, TOP_BAR_HEIGHT),
    );
    let topbar_response = ui.interact(
        topbar_rect,
        carousel_id.with("topbar"),
        egui::Sense::click(),
    );
    let mut keep_popup_open = false;
    if topbar_response.clicked() {
        keep_popup_open = true;
    }

    ui.allocate_new_ui(
        UiBuilder::new()
            .max_rect(topbar_rect)
            .layout(egui::Layout::top_down(egui::Align::RIGHT)),
        |ui| {
            let color = ui.style().visuals.noninteractive().fg_stroke.color;
            ui.add_space(10.0);
            ui.horizontal(|ui| {
                let label_reponse = ui
                    .label(RichText::new(image_url).color(color).small())
                    .on_hover_text(image_url);
                // Clicking (or even hovering) the URL label copies it and
                // keeps the popup alive.
                if label_reponse.double_clicked()
                    || label_reponse.clicked()
                    || label_reponse.hovered()
                {
                    keep_popup_open = true;
                    ui.ctx().copy_text(image_url.to_owned());
                }
            });
        },
    );

    // Calculate available rect for image
    let image_rect = egui::Rect::from_min_max(
        screen_rect.min + egui::vec2(0.0, TOP_BAR_HEIGHT),
        screen_rect.max - egui::vec2(0.0, BOTTOM_BAR_HEIGHT),
    );
    let image_area_size = image_rect.size();

    // Fit-to-area scale (never upscale past native size), then apply zoom.
    let scale = (image_area_size.x / texture_size.x)
        .min(image_area_size.y / texture_size.y)
        .min(1.0);
    let scaled_size = texture_size * scale * zoom;
    let visible_width = scaled_size.x.min(image_area_size.x);
    let visible_height = scaled_size.y.min(image_area_size.y);
    // Max pan is half the overflow in each axis; zero when the image fits.
    let max_pan_x = ((scaled_size.x - visible_width) / 2.0).max(0.0);
    let max_pan_y = ((scaled_size.y - visible_height) / 2.0).max(0.0);
    pan_offset.x = if max_pan_x > 0.0 {
        pan_offset.x.clamp(-max_pan_x, max_pan_x)
    } else {
        0.0
    };
    pan_offset.y = if max_pan_y > 0.0 {
        pan_offset.y.clamp(-max_pan_y, max_pan_y)
    } else {
        0.0
    };
    let render_rect = egui::Rect::from_center_size(
        image_rect.center(),
        egui::vec2(visible_width, visible_height),
    );

    // Compute UVs for zoom & pan
    // Zoom/pan are expressed by sampling a sub-rectangle of the texture
    // rather than by scaling the painted rect.
    let uv_min = egui::pos2(
        0.5 - (visible_width / scaled_size.x) / 2.0 + pan_offset.x / scaled_size.x,
        0.5 - (visible_height / scaled_size.y) / 2.0 + pan_offset.y / scaled_size.y,
    );
    let uv_max = egui::pos2(
        uv_min.x + visible_width / scaled_size.x,
        uv_min.y + visible_height / scaled_size.y,
    );

    // Paint image
    ui.painter().image(
        texture.id(),
        render_rect,
        egui::Rect::from_min_max(uv_min, uv_max),
        Color32::WHITE,
    );

    // image actions
    let response = ui.interact(
        render_rect,
        carousel_id.with("img"),
        Sense::click_and_drag(),
    );

    // Accumulated drag distance for swipe detection, persisted across frames.
    let swipe_accum_id = carousel_id.with("swipe_accum");
    let mut swipe_delta = ui.ctx().memory(|mem| {
        mem.data
            .get_temp::<egui::Vec2>(swipe_accum_id)
            .unwrap_or(egui::Vec2::ZERO)
    });

    // Handle pan via drag
    if response.dragged() {
        let delta = response.drag_delta();
        swipe_delta += delta;
        ui.ctx().memory_mut(|mem| {
            mem.data.insert_temp(swipe_accum_id, swipe_delta);
        });
        // Dragging right moves the viewport left, hence the subtraction.
        pan_offset -= delta;
        pan_offset.x = pan_offset.x.clamp(-max_pan_x, max_pan_x);
        pan_offset.y = pan_offset.y.clamp(-max_pan_y, max_pan_y);
        ui.ctx()
            .memory_mut(|mem| mem.data.insert_temp(pan_id, pan_offset));
    }

    // Double click to reset
    if response.double_clicked() {
        zoom = 1.0;
        pan_offset = egui::Vec2::ZERO;
        ui.ctx().memory_mut(|mem| {
            mem.data.insert_temp(pan_id, pan_offset);
            mem.data.insert_temp(zoom_id, zoom);
        });
    }

    // When the drag ends, a mostly-horizontal swipe past the threshold
    // queues next/prev navigation for update_selected_image_index.
    let swipe_threshold = 50.0;
    if response.drag_stopped() {
        if swipe_delta.x.abs() > swipe_threshold && swipe_delta.y.abs() < swipe_threshold {
            if swipe_delta.x < 0.0 {
                ui.ctx().data_mut(|data| {
                    keep_popup_open = true;
                    data.insert_temp(carousel_id.with("next_image"), true);
                });
            } else if swipe_delta.x > 0.0 {
                ui.ctx().data_mut(|data| {
                    keep_popup_open = true;
                    data.insert_temp(carousel_id.with("prev_image"), true);
                });
            }
        }
        ui.ctx().memory_mut(|mem| {
            mem.data.remove::<egui::Vec2>(swipe_accum_id);
        });
    }

    // bottom bar
    // Dot pager: one clickable dot per media, active one highlighted.
    if num_urls > 1 {
        let bottom_rect = egui::Rect::from_min_max(
            screen_rect.max - egui::vec2(screen_size.x, BOTTOM_BAR_HEIGHT),
            screen_rect.max,
        );
        let full_response = ui.interact(
            bottom_rect,
            carousel_id.with("bottom_bar"),
            egui::Sense::click(),
        );
        if full_response.clicked() {
            keep_popup_open = true;
        }
        let mut clicked_index: Option<usize> = None;
        #[allow(deprecated)]
        ui.allocate_ui_at_rect(bottom_rect, |ui| {
            let dot_radius = 7.0;
            let dot_spacing = 20.0;
            let color_active = PINK;
            let color_inactive: Color32 = ui.style().visuals.widgets.inactive.bg_fill;
            let center = bottom_rect.center();
            for i in 0..num_urls {
                // Dots are centered as a group around the bar's midpoint.
                let distance = egui::vec2(
                    (i as f32 - (num_urls as f32 - 1.0) / 2.0) * dot_spacing,
                    0.0,
                );
                let pos = center + distance;
                let circle_color = if i == index {
                    color_active
                } else {
                    color_inactive
                };
                let circle_rect = egui::Rect::from_center_size(
                    pos,
                    egui::vec2(dot_radius * 2.0, dot_radius * 2.0),
                );
                let resp = ui.interact(circle_rect, carousel_id.with(i), egui::Sense::click());
                ui.painter().circle_filled(pos, dot_radius, circle_color);
                if i != index && resp.hovered() {
                    ui.painter()
                        .circle_stroke(pos, dot_radius + 2.0, (1.0, PINK));
                }
                if resp.clicked() {
                    keep_popup_open = true;
                    if i != index {
                        clicked_index = Some(i);
                    }
                }
            }
        });
        if let Some(new_index) = clicked_index {
            ui.ctx().data_mut(|data| {
                data.insert_temp(selection_id(carousel_id), new_index);
            });
        }
    }

    // Decide whether the popup stays open: any positive interaction keeps
    // it, clicking outside the image closes it.
    if keep_popup_open || response.clicked() {
        ui.data_mut(|data| {
            data.insert_temp(carousel_id.with("show_popup"), true);
        });
    } else if background_response.clicked() || response.clicked_elsewhere() {
        ui.data_mut(|data| {
            data.insert_temp(carousel_id.with("show_popup"), false);
        });
    }

    copy_link(i18n, image_url, &response);
}
fn copy_link(i18n: &mut Localization, url: &str, img_resp: &Response) {
img_resp.context_menu(|ui| {
if ui
@@ -905,12 +463,6 @@ fn render_default_blur_bg(ui: &mut egui::Ui, height: f32, url: &str, shimmer: bo
rect
}
/// A piece of note media we know how to render, plus how to obfuscate it
/// until it is trusted/loaded.
pub(crate) struct RenderableMedia<'a> {
    // Media URL, borrowed from the note content blocks.
    url: &'a str,
    // Which media cache this belongs to (static image vs gif).
    media_type: MediaCacheType,
    // Blurhash placeholder when the note's imeta provides one, otherwise
    // the default obfuscation.
    obfuscation_type: ObfuscationType<'a>,
}
pub enum MediaRenderState<'a> {
ActualImage(&'a mut TexturedImage),
Transitioning {
@@ -927,14 +479,15 @@ pub enum ObfuscatedTexture<'a> {
Default,
}
/*
pub(crate) fn find_renderable_media<'a>(
urls: &mut UrlMimes,
blurhashes: &'a HashMap<&'a str, Blur<'a>>,
imeta: &'a HashMap<String, ImageMetadata>,
url: &'a str,
) -> Option<RenderableMedia<'a>> {
) -> Option<RenderableMedia> {
let media_type = supported_mime_hosted_at_url(urls, url)?;
let obfuscation_type = match blurhashes.get(url) {
let obfuscation_type = match imeta.get(url) {
Some(blur) => ObfuscationType::Blurhash(blur.clone()),
None => ObfuscationType::Default,
};
@@ -945,28 +498,7 @@ pub(crate) fn find_renderable_media<'a>(
obfuscation_type,
})
}
#[inline]
fn selection_id(carousel_id: egui::Id) -> egui::Id {
carousel_id.with("sel")
}
/// get the popup carousel window state
#[inline]
fn get_show_popup(ui: &egui::Ui, popup_id: egui::Id) -> bool {
ui.data(|data| data.get_temp(popup_id).unwrap_or(false))
}
/// set the popup carousel window state
#[inline]
fn set_show_popup(ui: &mut egui::Ui, popup_id: egui::Id, show_popup: bool) {
ui.data_mut(|data| data.insert_temp(popup_id, show_popup));
}
#[inline]
fn popup_id(carousel_id: egui::Id) -> egui::Id {
carousel_id.with("show_popup")
}
*/
fn render_success_media(
ui: &mut egui::Ui,
@@ -976,8 +508,8 @@ fn render_success_media(
height: f32,
i18n: &mut Localization,
) -> Response {
let texture = handle_repaint(ui, retrieve_latest_texture(url, gifs, tex));
let img = texture_to_image(texture, height);
let texture = ensure_latest_texture(ui, url, gifs, tex);
let img = texture_to_image(&texture, height);
let img_resp = ui.add(Button::image(img).frame(false));
copy_link(i18n, url, &img_resp);

View File

@@ -4,7 +4,6 @@ pub mod media;
pub mod options;
pub mod reply_description;
use crate::jobs::JobsCache;
use crate::{app_images, secondary_label};
use crate::{
profile::name::one_line_display_name_widget, widgets::x_button, ProfilePic, ProfilePreview,
@@ -14,13 +13,14 @@ use crate::{
pub use contents::{render_note_contents, render_note_preview, NoteContents};
pub use context::NoteContextButton;
use notedeck::get_current_wallet;
use notedeck::note::MediaAction;
use notedeck::note::ZapTargetAmount;
use notedeck::ui::is_narrow;
use notedeck::Accounts;
use notedeck::GlobalWallet;
use notedeck::Images;
use notedeck::JobsCache;
use notedeck::Localization;
use notedeck::MediaAction;
pub use options::NoteOptions;
pub use reply_description::reply_desc;

View File

@@ -2,8 +2,8 @@ use egui::{Label, RichText, Sense};
use nostrdb::{NoteReply, Transaction};
use super::NoteOptions;
use crate::{jobs::JobsCache, note::NoteView, Mention};
use notedeck::{tr, NoteAction, NoteContext};
use crate::{note::NoteView, Mention};
use notedeck::{tr, JobsCache, NoteAction, NoteContext};
// Rich text segment types for internationalized rendering
#[derive(Debug, Clone)]

View File

@@ -113,7 +113,7 @@ pub fn banner(ui: &mut egui::Ui, banner_url: Option<&str>, height: f32) -> egui:
banner_url
.and_then(|url| banner_texture(ui, url))
.map(|texture| {
crate::images::aspect_fill(
notedeck::media::images::aspect_fill(
ui,
egui::Sense::hover(),
texture.id,

View File

@@ -1,8 +1,9 @@
use crate::gif::{handle_repaint, retrieve_latest_texture};
use crate::images::{fetch_no_pfp_promise, get_render_state, ImageType};
use egui::{vec2, InnerResponse, Sense, Stroke, TextureHandle};
use notedeck::note::MediaAction;
use notedeck::get_render_state;
use notedeck::media::gif::ensure_latest_texture;
use notedeck::media::images::{fetch_no_pfp_promise, ImageType};
use notedeck::MediaAction;
use notedeck::{show_one_error_message, supported_mime_hosted_at_url, Images};
pub struct ProfilePic<'cache, 'url> {
@@ -140,12 +141,9 @@ fn render_pfp(
)
}
notedeck::TextureState::Loaded(textured_image) => {
let texture_handle = handle_repaint(
ui,
retrieve_latest_texture(url, cur_state.gifs, textured_image),
);
let texture_handle = ensure_latest_texture(ui, url, cur_state.gifs, textured_image);
egui::InnerResponse::new(None, pfp_image(ui, texture_handle, ui_size, border, sense))
egui::InnerResponse::new(None, pfp_image(ui, &texture_handle, ui_size, border, sense))
}
}
}