ui crate and chrome sidebar

Signed-off-by: William Casarin <jb55@jb55.com>
William Casarin
2025-03-26 12:18:44 -07:00
parent 415a052602
commit 9c9b4199f5
38 changed files with 902 additions and 750 deletions

@@ -0,0 +1,140 @@
use egui::{Pos2, Rect, Response, Sense};
/*
pub fn hover_expand(
ui: &mut egui::Ui,
id: egui::Id,
size: f32,
expand_size: f32,
anim_speed: f32,
) -> (egui::Rect, f32, egui::Response) {
// Allocate space for the profile picture with a fixed size
let default_size = size + expand_size;
let (rect, response) =
ui.allocate_exact_size(egui::vec2(default_size, default_size), egui::Sense::click());
let val = ui
.ctx()
.animate_bool_with_time(id, response.hovered(), anim_speed);
let size = size + val * expand_size;
(rect, size, response)
}
pub fn hover_expand_small(ui: &mut egui::Ui, id: egui::Id) -> (egui::Rect, f32, egui::Response) {
let size = 10.0;
let expand_size = 5.0;
let anim_speed = 0.05;
hover_expand(ui, id, size, expand_size, anim_speed)
}
*/
pub static ICON_EXPANSION_MULTIPLE: f32 = 1.2;
pub static ANIM_SPEED: f32 = 0.05;
pub struct AnimationHelper {
rect: Rect,
center: Pos2,
response: Response,
animation_progress: f32,
expansion_multiple: f32,
}
impl AnimationHelper {
pub fn new(
ui: &mut egui::Ui,
animation_name: impl std::hash::Hash,
max_size: egui::Vec2,
) -> Self {
let id = ui.id().with(animation_name);
let (rect, response) = ui.allocate_exact_size(max_size, Sense::click());
let animation_progress =
ui.ctx()
.animate_bool_with_time(id, response.hovered(), ANIM_SPEED);
Self {
rect,
center: rect.center(),
response,
animation_progress,
expansion_multiple: ICON_EXPANSION_MULTIPLE,
}
}
pub fn no_animation(ui: &mut egui::Ui, size: egui::Vec2) -> Self {
let (rect, response) = ui.allocate_exact_size(size, Sense::hover());
Self {
rect,
center: rect.center(),
response,
animation_progress: 0.0,
expansion_multiple: ICON_EXPANSION_MULTIPLE,
}
}
pub fn new_from_rect(
ui: &mut egui::Ui,
animation_name: impl std::hash::Hash,
animation_rect: egui::Rect,
) -> Self {
let id = ui.id().with(animation_name);
let response = ui.allocate_rect(animation_rect, Sense::click());
let animation_progress =
ui.ctx()
.animate_bool_with_time(id, response.hovered(), ANIM_SPEED);
Self {
rect: animation_rect,
center: animation_rect.center(),
response,
animation_progress,
expansion_multiple: ICON_EXPANSION_MULTIPLE,
}
}
pub fn scale_1d_pos(&self, min_object_size: f32) -> f32 {
let max_object_size = min_object_size * self.expansion_multiple;
if self.response.is_pointer_button_down_on() {
min_object_size
} else {
min_object_size + ((max_object_size - min_object_size) * self.animation_progress)
}
}
pub fn scale_radius(&self, min_diameter: f32) -> f32 {
self.scale_1d_pos((min_diameter - 1.0) / 2.0)
}
pub fn get_animation_rect(&self) -> egui::Rect {
self.rect
}
pub fn center(&self) -> Pos2 {
self.rect.center()
}
pub fn take_animation_response(self) -> egui::Response {
self.response
}
    /// Scale a minimum (x, y) offset from the center out to its current animated position
pub fn scale_from_center(&self, x_min: f32, y_min: f32) -> Pos2 {
Pos2::new(
self.center.x + self.scale_1d_pos(x_min),
self.center.y + self.scale_1d_pos(y_min),
)
}
pub fn scale_pos_from_center(&self, min_pos: Pos2) -> Pos2 {
self.scale_from_center(min_pos.x, min_pos.y)
}
    /// Scale between an explicit min and max size, for cases where the max isn't a fixed multiple of the min
pub fn scale_1d_pos_min_max(&self, min_object_size: f32, max_object_size: f32) -> f32 {
min_object_size + ((max_object_size - min_object_size) * self.animation_progress)
}
}
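
// Illustrative sketch (not part of this commit): how a hover-expanding icon
// might use AnimationHelper. The 24x24 allocation and the 8.0 base radius are
// hypothetical; only `new`, `scale_1d_pos`, `center` and
// `take_animation_response` come from the impl above.
pub fn hover_dot_example(ui: &mut egui::Ui) -> egui::Response {
    let helper = AnimationHelper::new(ui, "hover_dot_example", egui::vec2(24.0, 24.0));
    // 8.0 at rest, 8.0 * ICON_EXPANSION_MULTIPLE while hovered, back to 8.0 on press
    let radius = helper.scale_1d_pos(8.0);
    ui.painter()
        .circle_filled(helper.center(), radius, ui.visuals().weak_text_color());
    helper.take_animation_response()
}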

@@ -0,0 +1,6 @@
use egui::Color32;
pub const ALMOST_WHITE: Color32 = Color32::from_rgb(0xFA, 0xFA, 0xFA);
pub const MID_GRAY: Color32 = Color32::from_rgb(0xbd, 0xbd, 0xbd);
pub const PINK: Color32 = Color32::from_rgb(0xE4, 0x5A, 0xC9);
pub const TEAL: Color32 = Color32::from_rgb(0x77, 0xDC, 0xE1);

View File

@@ -0,0 +1,122 @@
use std::{
sync::mpsc::TryRecvError,
time::{Instant, SystemTime},
};
use egui::TextureHandle;
use notedeck::{GifState, GifStateMap, TexturedImage};
pub struct LatextTexture<'a> {
pub texture: &'a TextureHandle,
pub request_next_repaint: Option<SystemTime>,
}
/// This is necessary because other repaint calls can effectively steal our repaint request.
/// So we must keep requesting a repaint at our desired time to ensure ours goes through.
/// See [`egui::Context::request_repaint_after`]
pub fn handle_repaint<'a>(ui: &egui::Ui, latest: LatextTexture<'a>) -> &'a TextureHandle {
if let Some(repaint) = latest.request_next_repaint {
if let Ok(dur) = repaint.duration_since(SystemTime::now()) {
ui.ctx().request_repaint_after(dur);
}
}
latest.texture
}
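
// Illustrative sketch (not part of this commit): callers are expected to chain
// `retrieve_latest_texture` straight into `handle_repaint` every frame so the
// repaint request keeps being re-issued until the next gif frame is due. The
// function and parameter names below are hypothetical.
pub fn show_media_example(
    ui: &mut egui::Ui,
    url: &str,
    gifs: &mut GifStateMap,
    media: &mut TexturedImage,
) {
    let texture = handle_repaint(ui, retrieve_latest_texture(url, gifs, media));
    ui.add(egui::Image::new(texture));
}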
#[must_use = "caller should pass the return value to `gif::handle_repaint`"]
pub fn retrieve_latest_texture<'a>(
url: &str,
gifs: &'a mut GifStateMap,
cached_image: &'a mut TexturedImage,
) -> LatextTexture<'a> {
match cached_image {
TexturedImage::Static(texture) => LatextTexture {
texture,
request_next_repaint: None,
},
TexturedImage::Animated(animation) => {
if let Some(receiver) = &animation.receiver {
loop {
match receiver.try_recv() {
Ok(frame) => animation.other_frames.push(frame),
Err(TryRecvError::Empty) => {
break;
}
Err(TryRecvError::Disconnected) => {
animation.receiver = None;
break;
}
}
}
}
let now = Instant::now();
let (texture, maybe_new_state, request_next_repaint) = match gifs.get(url) {
Some(prev_state) => {
let should_advance =
now - prev_state.last_frame_rendered >= prev_state.last_frame_duration;
if should_advance {
let maybe_new_index = if animation.receiver.is_some()
|| prev_state.last_frame_index < animation.num_frames() - 1
{
prev_state.last_frame_index + 1
} else {
0
};
match animation.get_frame(maybe_new_index) {
Some(frame) => {
let next_frame_time = SystemTime::now().checked_add(frame.delay);
(
&frame.texture,
Some(GifState {
last_frame_rendered: now,
last_frame_duration: frame.delay,
next_frame_time,
last_frame_index: maybe_new_index,
}),
next_frame_time,
)
}
None => {
let (tex, state) =
match animation.get_frame(prev_state.last_frame_index) {
Some(frame) => (&frame.texture, None),
None => (&animation.first_frame.texture, None),
};
(tex, state, prev_state.next_frame_time)
}
}
} else {
let (tex, state) = match animation.get_frame(prev_state.last_frame_index) {
Some(frame) => (&frame.texture, None),
None => (&animation.first_frame.texture, None),
};
(tex, state, prev_state.next_frame_time)
}
}
None => (
&animation.first_frame.texture,
Some(GifState {
last_frame_rendered: now,
last_frame_duration: animation.first_frame.delay,
next_frame_time: None,
last_frame_index: 0,
}),
None,
),
};
if let Some(new_state) = maybe_new_state {
gifs.insert(url.to_owned(), new_state);
}
LatextTexture {
texture,
request_next_repaint,
}
}
}
}

@@ -0,0 +1,488 @@
use crate::ProfilePic;
use egui::{pos2, Color32, ColorImage, Rect, Sense, SizeHint};
use image::codecs::gif::GifDecoder;
use image::imageops::FilterType;
use image::{AnimationDecoder, DynamicImage, FlatSamples, Frame};
use notedeck::{
Animation, GifStateMap, ImageFrame, Images, MediaCache, MediaCacheType, TextureFrame,
TexturedImage,
};
use poll_promise::Promise;
use std::collections::VecDeque;
use std::io::Cursor;
use std::path;
use std::path::PathBuf;
use std::sync::mpsc;
use std::sync::mpsc::SyncSender;
use std::thread;
use std::time::Duration;
use tokio::fs;
// NOTE(jb55): chatgpt wrote this because I was too dumb to
pub fn aspect_fill(
ui: &mut egui::Ui,
sense: Sense,
texture_id: egui::TextureId,
aspect_ratio: f32,
) -> egui::Response {
let frame = ui.available_rect_before_wrap(); // Get the available frame space in the current layout
let frame_ratio = frame.width() / frame.height();
let (width, height) = if frame_ratio > aspect_ratio {
// Frame is wider than the content
(frame.width(), frame.width() / aspect_ratio)
} else {
// Frame is taller than the content
(frame.height() * aspect_ratio, frame.height())
};
let content_rect = Rect::from_min_size(
frame.min
+ egui::vec2(
(frame.width() - width) / 2.0,
(frame.height() - height) / 2.0,
),
egui::vec2(width, height),
);
// Set the clipping rectangle to the frame
//let clip_rect = ui.clip_rect(); // Preserve the original clipping rectangle
//ui.set_clip_rect(frame);
let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));
let (response, painter) = ui.allocate_painter(ui.available_size(), sense);
// Draw the texture within the calculated rect, potentially clipping it
painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
painter.image(texture_id, content_rect, uv, Color32::WHITE);
// Restore the original clipping rectangle
//ui.set_clip_rect(clip_rect);
response
}
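// Worked example (illustrative numbers): with a 300x200 frame (frame_ratio = 1.5)
// and a square texture (aspect_ratio = 1.0), the first branch yields
// (width, height) = (300.0, 300.0); content_rect is then offset by (0.0, -50.0)
// from frame.min, so the image covers the frame and is cropped evenly at the
// top and bottom.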
#[profiling::function]
pub fn round_image(image: &mut ColorImage) {
// The radius to the edge of the avatar circle
let edge_radius = image.size[0] as f32 / 2.0;
let edge_radius_squared = edge_radius * edge_radius;
for (pixnum, pixel) in image.pixels.iter_mut().enumerate() {
// y coordinate
let uy = pixnum / image.size[0];
let y = uy as f32;
let y_offset = edge_radius - y;
// x coordinate
let ux = pixnum % image.size[0];
let x = ux as f32;
let x_offset = edge_radius - x;
// The radius to this pixel (may be inside or outside the circle)
let pixel_radius_squared: f32 = x_offset * x_offset + y_offset * y_offset;
// If inside of the avatar circle
if pixel_radius_squared <= edge_radius_squared {
// square root to find how many pixels we are from the edge
let pixel_radius: f32 = pixel_radius_squared.sqrt();
let distance = edge_radius - pixel_radius;
// If we are within 1 pixel of the edge, we should fade, to
// antialias the edge of the circle. 1 pixel from the edge should
// be 100% of the original color, and right on the edge should be
// 0% of the original color.
if distance <= 1.0 {
*pixel = Color32::from_rgba_premultiplied(
(pixel.r() as f32 * distance) as u8,
(pixel.g() as f32 * distance) as u8,
(pixel.b() as f32 * distance) as u8,
(pixel.a() as f32 * distance) as u8,
);
}
} else {
// Outside of the avatar circle
*pixel = Color32::TRANSPARENT;
}
}
}
#[profiling::function]
fn process_pfp_bitmap(imgtyp: ImageType, mut image: image::DynamicImage) -> ColorImage {
match imgtyp {
ImageType::Content => {
let image_buffer = image.clone().into_rgba8();
let color_image = ColorImage::from_rgba_unmultiplied(
[
image_buffer.width() as usize,
image_buffer.height() as usize,
],
image_buffer.as_flat_samples().as_slice(),
);
color_image
}
ImageType::Profile(size) => {
// Crop square
let smaller = image.width().min(image.height());
if image.width() > smaller {
let excess = image.width() - smaller;
image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
} else if image.height() > smaller {
let excess = image.height() - smaller;
image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
}
let image = image.resize(size, size, FilterType::CatmullRom); // DynamicImage
let image_buffer = image.into_rgba8(); // RgbaImage (ImageBuffer)
let mut color_image = ColorImage::from_rgba_unmultiplied(
[
image_buffer.width() as usize,
image_buffer.height() as usize,
],
image_buffer.as_flat_samples().as_slice(),
);
round_image(&mut color_image);
color_image
}
}
}
#[profiling::function]
fn parse_img_response(
response: ehttp::Response,
imgtyp: ImageType,
) -> Result<ColorImage, notedeck::Error> {
let content_type = response.content_type().unwrap_or_default();
let size_hint = match imgtyp {
ImageType::Profile(size) => SizeHint::Size(size, size),
ImageType::Content => SizeHint::default(),
};
if content_type.starts_with("image/svg") {
profiling::scope!("load_svg");
let mut color_image =
egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
round_image(&mut color_image);
Ok(color_image)
} else if content_type.starts_with("image/") {
profiling::scope!("load_from_memory");
let dyn_image = image::load_from_memory(&response.bytes)?;
Ok(process_pfp_bitmap(imgtyp, dyn_image))
} else {
Err(format!("Expected image, found content-type {:?}", content_type).into())
}
}
fn fetch_img_from_disk(
ctx: &egui::Context,
url: &str,
path: &path::Path,
cache_type: MediaCacheType,
) -> Promise<Result<TexturedImage, notedeck::Error>> {
let ctx = ctx.clone();
let url = url.to_owned();
let path = path.to_owned();
Promise::spawn_async(async move {
match cache_type {
MediaCacheType::Image => {
let data = fs::read(path).await?;
let image_buffer =
image::load_from_memory(&data).map_err(notedeck::Error::Image)?;
let img = buffer_to_color_image(
image_buffer.as_flat_samples_u8(),
image_buffer.width(),
image_buffer.height(),
);
Ok(TexturedImage::Static(ctx.load_texture(
&url,
img,
Default::default(),
)))
}
MediaCacheType::Gif => {
let gif_bytes = fs::read(path.clone()).await?; // Read entire file into a Vec<u8>
generate_gif(ctx, url, &path, gif_bytes, false, |i| {
buffer_to_color_image(i.as_flat_samples_u8(), i.width(), i.height())
})
}
}
})
}
fn generate_gif(
ctx: egui::Context,
url: String,
path: &path::Path,
data: Vec<u8>,
write_to_disk: bool,
process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + Copy + 'static,
) -> Result<TexturedImage, notedeck::Error> {
let decoder = {
let reader = Cursor::new(data.as_slice());
GifDecoder::new(reader)?
};
let (tex_input, tex_output) = mpsc::sync_channel(4);
let (maybe_encoder_input, maybe_encoder_output) = if write_to_disk {
let (inp, out) = mpsc::sync_channel(4);
(Some(inp), Some(out))
} else {
(None, None)
};
let mut frames: VecDeque<Frame> = decoder
.into_frames()
.collect::<std::result::Result<VecDeque<_>, image::ImageError>>()
.map_err(|e| notedeck::Error::Generic(e.to_string()))?;
let first_frame = frames.pop_front().map(|frame| {
generate_animation_frame(
&ctx,
&url,
0,
frame,
maybe_encoder_input.as_ref(),
process_to_egui,
)
});
let cur_url = url.clone();
thread::spawn(move || {
for (index, frame) in frames.into_iter().enumerate() {
let texture_frame = generate_animation_frame(
&ctx,
&cur_url,
index,
frame,
maybe_encoder_input.as_ref(),
process_to_egui,
);
if tex_input.send(texture_frame).is_err() {
tracing::error!("AnimationTextureFrame mpsc stopped abruptly");
break;
}
}
});
if let Some(encoder_output) = maybe_encoder_output {
let path = path.to_owned();
thread::spawn(move || {
let mut imgs = Vec::new();
while let Ok(img) = encoder_output.recv() {
imgs.push(img);
}
if let Err(e) = MediaCache::write_gif(&path, &url, imgs) {
tracing::error!("Could not write gif to disk: {e}");
}
});
}
first_frame.map_or_else(
|| {
Err(notedeck::Error::Generic(
"first frame not found for gif".to_owned(),
))
},
|first_frame| {
Ok(TexturedImage::Animated(Animation {
other_frames: Default::default(),
receiver: Some(tex_output),
first_frame,
}))
},
)
}
fn generate_animation_frame(
ctx: &egui::Context,
url: &str,
index: usize,
frame: image::Frame,
maybe_encoder_input: Option<&SyncSender<ImageFrame>>,
process_to_egui: impl Fn(DynamicImage) -> ColorImage + Send + 'static,
) -> TextureFrame {
let delay = Duration::from(frame.delay());
let img = DynamicImage::ImageRgba8(frame.into_buffer());
let color_img = process_to_egui(img);
if let Some(sender) = maybe_encoder_input {
if let Err(e) = sender.send(ImageFrame {
delay,
image: color_img.clone(),
}) {
tracing::error!("ImageFrame mpsc unexpectedly closed: {e}");
}
}
TextureFrame {
delay,
texture: ctx.load_texture(format!("{}{}", url, index), color_img, Default::default()),
}
}
fn buffer_to_color_image(
samples: Option<FlatSamples<&[u8]>>,
width: u32,
height: u32,
) -> ColorImage {
// TODO(jb55): remove unwrap here
let flat_samples = samples.unwrap();
ColorImage::from_rgba_unmultiplied([width as usize, height as usize], flat_samples.as_slice())
}
pub fn fetch_binary_from_disk(path: PathBuf) -> Result<Vec<u8>, notedeck::Error> {
std::fs::read(path).map_err(|e| notedeck::Error::Generic(e.to_string()))
}
/// Controls type-specific handling
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
/// Profile Image (size)
Profile(u32),
/// Content Image
Content,
}
pub fn fetch_img(
img_cache: &MediaCache,
ctx: &egui::Context,
url: &str,
imgtyp: ImageType,
cache_type: MediaCacheType,
) -> Promise<Result<TexturedImage, notedeck::Error>> {
let key = MediaCache::key(url);
let path = img_cache.cache_dir.join(key);
if path.exists() {
fetch_img_from_disk(ctx, url, &path, cache_type)
} else {
fetch_img_from_net(&img_cache.cache_dir, ctx, url, imgtyp, cache_type)
}
// TODO: fetch image from local cache
}
fn fetch_img_from_net(
cache_path: &path::Path,
ctx: &egui::Context,
url: &str,
imgtyp: ImageType,
cache_type: MediaCacheType,
) -> Promise<Result<TexturedImage, notedeck::Error>> {
let (sender, promise) = Promise::new();
let request = ehttp::Request::get(url);
let ctx = ctx.clone();
let cloned_url = url.to_owned();
let cache_path = cache_path.to_owned();
ehttp::fetch(request, move |response| {
let handle = response.map_err(notedeck::Error::Generic).and_then(|resp| {
match cache_type {
MediaCacheType::Image => {
let img = parse_img_response(resp, imgtyp);
img.map(|img| {
let texture_handle =
ctx.load_texture(&cloned_url, img.clone(), Default::default());
// write to disk
std::thread::spawn(move || {
MediaCache::write(&cache_path, &cloned_url, img)
});
TexturedImage::Static(texture_handle)
})
}
MediaCacheType::Gif => {
let gif_bytes = resp.bytes;
generate_gif(
ctx.clone(),
cloned_url,
&cache_path,
gif_bytes,
true,
move |img| process_pfp_bitmap(imgtyp, img),
)
}
}
});
sender.send(handle); // send the results back to the UI thread.
ctx.request_repaint();
});
promise
}
#[allow(clippy::too_many_arguments)]
pub fn render_images(
ui: &mut egui::Ui,
images: &mut Images,
url: &str,
img_type: ImageType,
cache_type: MediaCacheType,
show_waiting: impl FnOnce(&mut egui::Ui),
show_error: impl FnOnce(&mut egui::Ui, String),
show_success: impl FnOnce(&mut egui::Ui, &str, &mut TexturedImage, &mut GifStateMap),
) -> egui::Response {
let cache = match cache_type {
MediaCacheType::Image => &mut images.static_imgs,
MediaCacheType::Gif => &mut images.gifs,
};
render_media_cache(
ui,
cache,
&mut images.gif_states,
url,
img_type,
cache_type,
show_waiting,
show_error,
show_success,
)
}
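
// Illustrative sketch (not part of this commit): a content-image caller wired up
// with the three callbacks. The function name, spinner, and error label are
// hypothetical; the closure signatures match `render_images` above.
pub fn show_content_image_example(
    ui: &mut egui::Ui,
    images: &mut Images,
    url: &str,
) -> egui::Response {
    render_images(
        ui,
        images,
        url,
        ImageType::Content,
        MediaCacheType::Image,
        |ui| {
            // still fetching or decoding
            ui.spinner();
        },
        |ui, err| {
            ui.colored_label(ui.visuals().error_fg_color, err);
        },
        |ui, url, media, gif_states| {
            let texture = crate::gif::handle_repaint(
                ui,
                crate::gif::retrieve_latest_texture(url, gif_states, media),
            );
            ui.add(egui::Image::new(texture));
        },
    )
}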
#[allow(clippy::too_many_arguments)]
fn render_media_cache(
ui: &mut egui::Ui,
cache: &mut MediaCache,
gif_states: &mut GifStateMap,
url: &str,
img_type: ImageType,
cache_type: MediaCacheType,
show_waiting: impl FnOnce(&mut egui::Ui),
show_error: impl FnOnce(&mut egui::Ui, String),
show_success: impl FnOnce(&mut egui::Ui, &str, &mut TexturedImage, &mut GifStateMap),
) -> egui::Response {
let m_cached_promise = cache.map().get(url);
if m_cached_promise.is_none() {
let res = crate::images::fetch_img(cache, ui.ctx(), url, img_type, cache_type.clone());
cache.map_mut().insert(url.to_owned(), res);
}
egui::Frame::NONE
.show(ui, |ui| {
match cache.map_mut().get_mut(url).and_then(|p| p.ready_mut()) {
None => show_waiting(ui),
Some(Err(err)) => {
let err = err.to_string();
let no_pfp = crate::images::fetch_img(
cache,
ui.ctx(),
ProfilePic::no_pfp_url(),
ImageType::Profile(128),
cache_type,
);
cache.map_mut().insert(url.to_owned(), no_pfp);
show_error(ui, err)
}
Some(Ok(renderable_media)) => show_success(ui, url, renderable_media, gif_states),
}
})
.response
}

@@ -0,0 +1,8 @@
mod anim;
pub mod colors;
pub mod gif;
pub mod images;
pub mod profile;
pub use anim::AnimationHelper;
pub use profile::ProfilePic;

@@ -0,0 +1,17 @@
use nostrdb::ProfileRecord;
pub mod picture;
pub use picture::ProfilePic;
pub fn get_profile_url<'a>(profile: Option<&ProfileRecord<'a>>) -> &'a str {
unwrap_profile_url(profile.and_then(|pr| pr.record().profile().and_then(|p| p.picture())))
}
pub fn unwrap_profile_url(maybe_url: Option<&str>) -> &str {
if let Some(url) = maybe_url {
url
} else {
ProfilePic::no_pfp_url()
}
}

@@ -0,0 +1,146 @@
use crate::gif::{handle_repaint, retrieve_latest_texture};
use crate::images::{render_images, ImageType};
use egui::{vec2, Sense, Stroke, TextureHandle};
use notedeck::{supported_mime_hosted_at_url, Images};
pub struct ProfilePic<'cache, 'url> {
cache: &'cache mut Images,
url: &'url str,
size: f32,
border: Option<Stroke>,
}
impl egui::Widget for ProfilePic<'_, '_> {
fn ui(self, ui: &mut egui::Ui) -> egui::Response {
render_pfp(ui, self.cache, self.url, self.size, self.border)
}
}
impl<'cache, 'url> ProfilePic<'cache, 'url> {
pub fn new(cache: &'cache mut Images, url: &'url str) -> Self {
let size = Self::default_size() as f32;
ProfilePic {
cache,
url,
size,
border: None,
}
}
pub fn border_stroke(ui: &egui::Ui) -> Stroke {
Stroke::new(4.0, ui.visuals().panel_fill)
}
pub fn from_profile(
cache: &'cache mut Images,
profile: &nostrdb::ProfileRecord<'url>,
) -> Option<Self> {
profile
.record()
.profile()
.and_then(|p| p.picture())
.map(|url| ProfilePic::new(cache, url))
}
#[inline]
pub fn default_size() -> i8 {
38
}
#[inline]
pub fn medium_size() -> i8 {
32
}
#[inline]
pub fn small_size() -> i8 {
24
}
#[inline]
pub fn no_pfp_url() -> &'static str {
"https://damus.io/img/no-profile.svg"
}
#[inline]
pub fn size(mut self, size: f32) -> Self {
self.size = size;
self
}
#[inline]
pub fn border(mut self, stroke: Stroke) -> Self {
self.border = Some(stroke);
self
}
}
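
// Illustrative sketch (not part of this commit): since ProfilePic implements
// egui::Widget, a caller holding an Images cache can render an avatar like
// this; the function and variable names are hypothetical.
pub fn show_avatar_example(
    ui: &mut egui::Ui,
    images: &mut Images,
    profile: &nostrdb::ProfileRecord<'_>,
) {
    let border = ProfilePic::border_stroke(ui);
    if let Some(pfp) = ProfilePic::from_profile(images, profile) {
        ui.add(pfp.size(ProfilePic::medium_size() as f32).border(border));
    }
}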
#[profiling::function]
fn render_pfp(
ui: &mut egui::Ui,
img_cache: &mut Images,
url: &str,
ui_size: f32,
border: Option<Stroke>,
) -> egui::Response {
// We will want to downsample these so it's not blurry on hi res displays
let img_size = 128u32;
let cache_type = supported_mime_hosted_at_url(&mut img_cache.urls, url)
.unwrap_or(notedeck::MediaCacheType::Image);
render_images(
ui,
img_cache,
url,
ImageType::Profile(img_size),
cache_type,
|ui| {
paint_circle(ui, ui_size, border);
},
|ui, _| {
paint_circle(ui, ui_size, border);
},
|ui, url, renderable_media, gifs| {
let texture_handle =
handle_repaint(ui, retrieve_latest_texture(url, gifs, renderable_media));
pfp_image(ui, texture_handle, ui_size, border);
},
)
}
#[profiling::function]
fn pfp_image(
ui: &mut egui::Ui,
img: &TextureHandle,
size: f32,
border: Option<Stroke>,
) -> egui::Response {
let (rect, response) = ui.allocate_at_least(vec2(size, size), Sense::hover());
if let Some(stroke) = border {
draw_bg_border(ui, rect.center(), size, stroke);
}
ui.put(rect, egui::Image::new(img).max_width(size));
response
}
fn paint_circle(ui: &mut egui::Ui, size: f32, border: Option<Stroke>) -> egui::Response {
let (rect, response) = ui.allocate_at_least(vec2(size, size), Sense::hover());
if let Some(stroke) = border {
draw_bg_border(ui, rect.center(), size, stroke);
}
ui.painter()
.circle_filled(rect.center(), size / 2.0, ui.visuals().weak_text_color());
response
}
fn draw_bg_border(ui: &mut egui::Ui, center: egui::Pos2, size: f32, stroke: Stroke) {
let border_size = size + (stroke.width * 2.0);
ui.painter()
.circle_filled(center, border_size / 2.0, stroke.color);
}