split notedeck into crates

This splits notedeck into crates, separating the browser chrome and
individual apps:

* notedeck: binary file, browser chrome
* notedeck_columns: our columns app
* enostr: same as before

We still need to do more work to cleanly separate the chrome apis
from the app apis. Soon I will create notedeck-notebook to see what
makes sense to be shared between the apps.

Some obvious ones that come to mind:

1. ImageCache

We will likely want to move this to the notedeck crate, as most apps
will want some kind of image cache. In web browsers, web pages do not
need to worry about this, so we will likely have to do something similar

2. Ndb

Since NdbRef is thread-safe and Ndb is an Arc<NdbRef>, it can be safely
copied to each app. This will simplify things. In the future we might
want to create an abstraction over this? Maybe each app shouldn't have
access to the same database... we assume the data in DBs are all public
anyways, but if we have unwrapped giftwraps that could be a problem.

3. RelayPool / Subscription Manager

The browser should probably maintain these. Then apps can use Ken's
high-level subscription manager API and not have to worry about
connection pool details.

4. Accounts

Accounts and key management should be handled by the chrome. Apps should
only have a simple signer interface.

That's all for now, just something to think about!

Signed-off-by: William Casarin <jb55@jb55.com>
This commit is contained in:
William Casarin
2024-12-11 02:53:05 -08:00
parent 10cbdf15f0
commit 74c5f0c748
156 changed files with 194 additions and 252 deletions

View File

@@ -0,0 +1,58 @@
[package]
name = "notedeck_columns"
version = "0.2.0"
authors = ["William Casarin <jb55@jb55.com>"]
edition = "2021"
#rust-version = "1.60"
license = "GPL-3.0-or-later"  # was "GPLv3" — not a valid SPDX expression for cargo; confirm -only vs -or-later
description = "A tweetdeck-style notedeck app"
[lib]
crate-type = ["lib", "cdylib"]
[dependencies]
base32 = { workspace = true }
bitflags = { workspace = true }
dirs = { workspace = true }
eframe = { workspace = true }
egui = { workspace = true }
egui_extras = { workspace = true }
egui_nav = { workspace = true }
egui_tabs = { workspace = true }
egui_virtual_list = { workspace = true }
ehttp = { workspace = true }
enostr = { workspace = true }
env_logger = { workspace = true }
hex = { workspace = true }
image = { workspace = true }
indexmap = { workspace = true }
log = { workspace = true }
nostrdb = { workspace = true }
open = { workspace = true }
poll-promise = { workspace = true }
puffin = { workspace = true, optional = true }
puffin_egui = { workspace = true, optional = true }
reqwest = { workspace = true }
serde = { workspace = true }
serde_derive = { workspace = true }
serde_json = { workspace = true }
strum = { workspace = true }
strum_macros = { workspace = true }
tokio = { workspace = true, features = ["macros", "rt-multi-thread", "fs"] }
tracing = { workspace = true }
tracing-appender = { workspace = true }
tracing-subscriber = { workspace = true }
url = { workspace = true }
urlencoding = { workspace = true }
uuid = { workspace = true }
[dev-dependencies]
tempfile = "3.13.0"
[target.'cfg(target_os = "macos")'.dependencies]
security-framework = "2.11.0"
[features]
default = []
profiling = ["puffin", "puffin_egui", "eframe/puffin"]

Binary file not shown.

After

Width:  |  Height:  |  Size: 18 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 51 KiB

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 266 KiB

View File

@@ -0,0 +1,261 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<svg
width="1024"
height="1024"
viewBox="0 0 1024 1024"
fill="none"
version="1.1"
id="svg21"
sodipodi:docname="damus-app-icon.svg"
inkscape:version="1.4 (e7c3feb1, 2024-10-09)"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview21"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:showpageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:deskcolor="#d1d1d1"
inkscape:zoom="0.2800591"
inkscape:cx="724.847"
inkscape:cy="724.847"
inkscape:window-width="1104"
inkscape:window-height="771"
inkscape:window-x="222"
inkscape:window-y="38"
inkscape:window-maximized="0"
inkscape:current-layer="svg21" />
<g
id="g22"
transform="translate(-6.40822,-11.4789)">
<g
filter="url(#filter0_dii_3010_339)"
id="g1"
transform="translate(96.40822,102.9789)"
inkscape:label="logo-bg">
<path
fill-rule="evenodd"
clip-rule="evenodd"
d="m 834,256.627 c 0,-9.782 0.004,-19.565 -0.056,-29.348 -0.049,-8.241 -0.144,-16.48 -0.368,-24.717 -0.484,-17.953 -1.543,-36.06 -4.736,-53.813 C 825.602,130.741 820.314,113.98 811.981,97.6166 803.789,81.5337 793.088,66.8168 780.32,54.0578 767.555,41.2989 752.834,30.6049 736.746,22.4179 720.366,14.0829 703.587,8.79697 685.558,5.55998 667.803,2.37199 649.691,1.315 631.738,0.829997 623.495,0.606998 615.253,0.512998 607.008,0.461998 597.22,0.401999 587.432,0.406999 577.644,0.406999 L 463.997,0 h -85 L 267.361,0.406999 c -9.807,0 -19.614,-0.005 -29.421,0.054999 -8.262,0.051 -16.52,0.145 -24.779,0.367999 C 195.167,1.315 177.014,2.37299 159.217,5.56498 141.164,8.80097 124.36,14.0849 107.958,22.4139 91.8354,30.6019 77.0825,41.2969 64.2906,54.0578 51.5007,66.8158 40.7798,81.5297 32.5728,97.6096 24.2169,113.981 18.9189,130.752 15.673,148.77 c -3.196,17.746 -4.255,35.847 -4.742,53.792 -0.222,8.238 -0.318,16.477 -0.368,24.717 -0.06,9.784 -0.563,21.937 -0.563,31.72 l 0.003,110.09 -0.003,85.909 0.508,112.429 c 0,9.796 -0.004,19.592 0.055,29.388 0.05,8.252 0.146,16.502 0.369,24.751 0.486,17.976 1.547,36.109 4.746,53.886 3.2449,18.032 8.5419,34.817 16.8908,51.201 8.208,16.106 18.9309,30.842 31.7218,43.619 12.7909,12.777 27.5398,23.485 43.6594,31.684 16.412,8.346 33.224,13.639 51.288,16.88 17.789,3.193 35.936,4.252 53.923,4.737 8.259,0.223 16.518,0.318 24.78,0.368 9.807,0.06 19.613,0.056 29.42,0.056 L 380.006,824 h 85.211 l 112.427,-0.004 c 9.788,0 19.576,0.005 29.364,-0.055 8.245,-0.05 16.487,-0.145 24.73,-0.368 17.96,-0.486 36.078,-1.546 53.841,-4.741 18.018,-3.241 34.789,-8.532 51.16,-16.873 16.092,-8.198 30.815,-18.908 43.581,-31.687 12.766,-12.775 23.466,-27.509 31.658,-43.612 8.338,-16.392 13.626,-33.185 16.866,-51.229 3.19,-17.77 4.248,-35.896 4.733,-53.865 0.223,-8.25 0.318,-16.5 0.367,-24.751 0.061,-9.796 0.056,-19.592 0.056,-29.388 0,0 -0.006,-110.444 -0.006,-112.429 v -85.999 c 0,-1.466 0.006,-112.372 0.006,-112.372 z"
fill="url(#paint0_linear_3010_339)"
id="path1"
style="fill:url(#paint0_linear_3010_339)" />
</g>
<g
id="g21"
inkscape:label="center-logo"
transform="translate(96.40822,99.978896)">
<path
d="M 343.319,671.664 C 240,748.442 240,152 240,152 c 206.638,45.258 413.278,90.517 413.276,189.925 -0.003,99.409 -206.637,252.961 -309.957,329.739 z"
fill="url(#paint1_linear_3010_339)"
stroke="#ffffff"
stroke-width="30.3537"
id="path2"
style="fill:url(#paint1_linear_3010_339)" />
<path
d="m 240.68,255.493 135.608,68.759 -36.29,-143.247 z"
fill="#ffffff"
fill-opacity="0.325424"
stroke="#ffffff"
stroke-width="6.07075"
id="path3" />
<path
d="M 374.627,322.975 361.121,455.329 249.025,343.233 Z"
fill="#ffffff"
fill-opacity="0.274576"
stroke="#ffffff"
stroke-width="6.07075"
id="path4" />
<path
d="M 373.276,323.65 461.738,210.879 540.07,330.403 Z"
fill="#ffffff"
fill-opacity="0.379661"
stroke="#ffffff"
stroke-width="6.07075"
id="path5" />
<path
d="M 374.626,324.326 548.172,491.794 539.393,330.403 Z"
fill="#ffffff"
fill-opacity="0.447458"
stroke="#ffffff"
stroke-width="6.07075"
id="path6" />
<path
d="M 360.445,454.654 548.847,493.145 375.301,324.326 Z"
fill="#ffffff"
fill-opacity="0.20678"
stroke="#ffffff"
stroke-width="6.07075"
id="path7" />
<path
d="m 360.446,454.654 -86.435,99.941 189.752,22.959 z"
fill="#ffffff"
fill-opacity="0.244068"
stroke="#ffffff"
stroke-width="6.07075"
id="path8" />
<path
d="m 540.069,330.403 90.487,71.579 -39.841,-140.457 z"
fill="#ffffff"
fill-opacity="0.216949"
stroke="#ffffff"
stroke-width="6.07075"
id="path9" />
<path
d="m 360.702,460.732 c 3.356,0 6.077,-2.721 6.077,-6.078 0,-3.356 -2.721,-6.077 -6.077,-6.077 -3.357,0 -6.078,2.721 -6.078,6.077 0,3.357 2.721,6.078 6.078,6.078 z"
fill="#ffffff"
id="path10" />
<path
d="m 374.882,329.728 c 3.357,0 6.078,-2.721 6.078,-6.078 0,-3.356 -2.721,-6.077 -6.078,-6.077 -3.356,0 -6.077,2.721 -6.077,6.077 0,3.357 2.721,6.078 6.077,6.078 z"
fill="#ffffff"
id="path11" />
<path
d="m 539.905,336.225 c 3.356,0 6.077,-2.721 6.077,-6.077 0,-3.357 -2.721,-6.078 -6.077,-6.078 -3.357,0 -6.078,2.721 -6.078,6.078 0,3.356 2.721,6.077 6.078,6.077 z"
fill="#ffffff"
id="path12" />
</g>
</g>
<defs
id="defs21">
<filter
id="filter0_dii_3010_339"
x="0"
y="-3"
width="844"
height="847"
filterUnits="userSpaceOnUse"
color-interpolation-filters="sRGB">
<feFlood
flood-opacity="0"
result="BackgroundImageFix"
id="feFlood12" />
<feColorMatrix
in="SourceAlpha"
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"
result="hardAlpha"
id="feColorMatrix12" />
<feOffset
dy="10"
id="feOffset12" />
<feGaussianBlur
stdDeviation="5"
id="feGaussianBlur12" />
<feColorMatrix
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.3 0"
id="feColorMatrix13" />
<feBlend
mode="normal"
in2="BackgroundImageFix"
result="effect1_dropShadow_3010_339"
id="feBlend13" />
<feBlend
mode="normal"
in="SourceGraphic"
in2="effect1_dropShadow_3010_339"
result="shape"
id="feBlend14" />
<feColorMatrix
in="SourceAlpha"
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"
result="hardAlpha"
id="feColorMatrix14" />
<feOffset
dy="4"
id="feOffset14" />
<feGaussianBlur
stdDeviation="1"
id="feGaussianBlur14" />
<feComposite
in2="hardAlpha"
operator="arithmetic"
k2="-1"
k3="1"
id="feComposite14"
k1="0"
k4="0" />
<feColorMatrix
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.24 0"
id="feColorMatrix15" />
<feBlend
mode="normal"
in2="shape"
result="effect2_innerShadow_3010_339"
id="feBlend15" />
<feColorMatrix
in="SourceAlpha"
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 127 0"
result="hardAlpha"
id="feColorMatrix16" />
<feOffset
dy="-3"
id="feOffset16" />
<feGaussianBlur
stdDeviation="2"
id="feGaussianBlur16" />
<feComposite
in2="hardAlpha"
operator="arithmetic"
k2="-1"
k3="1"
id="feComposite16"
k1="0"
k4="0" />
<feColorMatrix
type="matrix"
values="0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0.25 0"
id="feColorMatrix17" />
<feBlend
mode="normal"
in2="effect2_innerShadow_3010_339"
result="effect3_innerShadow_3010_339"
id="feBlend17" />
</filter>
<linearGradient
id="paint0_linear_3010_339"
x1="42.405701"
y1="800.86902"
x2="803.62"
y2="23.1313"
gradientUnits="userSpaceOnUse">
<stop
stop-color="#1C55FF"
id="stop17" />
<stop
offset="0.5"
stop-color="#7F35AB"
id="stop18" />
<stop
offset="1"
stop-color="#FF0BD6"
id="stop19" />
</linearGradient>
<linearGradient
id="paint1_linear_3010_339"
x1="224.823"
y1="410.40201"
x2="668.45203"
y2="410.40201"
gradientUnits="userSpaceOnUse"
gradientTransform="translate(0,3)">
<stop
stop-color="#0DE8FF"
stop-opacity="0.780822"
id="stop20" />
<stop
offset="1"
stop-color="#D600FC"
stop-opacity="0.954338"
id="stop21" />
</linearGradient>
</defs>
</svg>

After

Width:  |  Height:  |  Size: 9.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 286 KiB

View File

@@ -0,0 +1,184 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="256mm"
height="256mm"
viewBox="0 0 256 256"
version="1.1"
id="svg5"
inkscape:version="1.2.1 (9c6d41e410, 2022-07-14)"
sodipodi:docname="damus.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview7"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:blackoutopacity="0.0"
inkscape:document-units="mm"
showgrid="false"
inkscape:zoom="0.5946522"
inkscape:cx="406.11975"
inkscape:cy="491.88416"
inkscape:window-width="1920"
inkscape:window-height="1060"
inkscape:window-x="0"
inkscape:window-y="20"
inkscape:window-maximized="0"
inkscape:current-layer="svg5"
inkscape:showpageshadow="2"
inkscape:deskcolor="#d1d1d1" />
<defs
id="defs2">
<linearGradient
inkscape:collect="always"
id="linearGradient39361">
<stop
style="stop-color:#0de8ff;stop-opacity:0.78082192;"
offset="0"
id="stop39357" />
<stop
style="stop-color:#d600fc;stop-opacity:0.95433789;"
offset="1"
id="stop39359" />
</linearGradient>
<inkscape:path-effect
effect="bspline"
id="path-effect255"
is_visible="true"
lpeversion="1"
weight="33.333333"
steps="2"
helper_size="0"
apply_no_weight="true"
apply_with_weight="true"
only_selected="false" />
<linearGradient
inkscape:collect="always"
id="linearGradient2119">
<stop
style="stop-color:#1c55ff;stop-opacity:1;"
offset="0"
id="stop2115" />
<stop
style="stop-color:#7f35ab;stop-opacity:1;"
offset="0.5"
id="stop2123" />
<stop
style="stop-color:#ff0bd6;stop-opacity:1;"
offset="1"
id="stop2117" />
</linearGradient>
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient2119"
id="linearGradient2121"
x1="10.067794"
y1="248.81357"
x2="246.56145"
y2="7.1864405"
gradientUnits="userSpaceOnUse" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient39361"
id="linearGradient39367"
x1="62.104473"
y1="128.78963"
x2="208.25758"
y2="128.78963"
gradientUnits="userSpaceOnUse" />
</defs>
<g
inkscape:label="Background"
inkscape:groupmode="layer"
id="layer1"
sodipodi:insensitive="true">
<rect
style="fill:url(#linearGradient2121);fill-opacity:1;stroke-width:0.264583"
id="rect61"
width="256"
height="256"
x="-5.3875166e-08"
y="-1.0775033e-07"
ry="0"
inkscape:label="Gradient"
sodipodi:insensitive="true" />
</g>
<g
id="g407"
inkscape:label="Logo">
<g
id="layer2"
inkscape:label="LogoStroke"
style="display:inline">
<path
style="fill:url(#linearGradient39367);fill-opacity:1;stroke:#ffffff;stroke-width:10;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 101.1429,213.87373 C 67.104473,239.1681 67.104473,42.67112 67.104473,42.67112 135.18122,57.58146 203.25844,72.491904 203.25758,105.24181 c -8.6e-4,32.74991 -68.07625,83.33755 -102.11468,108.63192 z"
id="path253" />
</g>
<g
inkscape:groupmode="layer"
id="layer3"
inkscape:label="Poly">
<path
style="fill:#ffffff;fill-opacity:0.325424;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 67.32839,76.766948 112.00424,99.41949 100.04873,52.226693 Z"
id="path4648" />
<path
style="fill:#ffffff;fill-opacity:0.274576;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 111.45696,98.998695 107.00758,142.60261 70.077729,105.67276 Z"
id="path9299" />
<path
style="fill:#ffffff;fill-opacity:0.379661;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 111.01202,99.221164 29.14343,-37.15232 25.80641,39.377006 z"
id="path9301" />
<path
style="fill:#ffffff;fill-opacity:0.447458;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 111.45696,99.443631 57.17452,55.172309 -2.89209,-53.17009 z"
id="path9368" />
<path
style="fill:#ffffff;fill-opacity:0.20678;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 106.78511,142.38015 62.06884,12.68073 -57.17452,-55.617249 z"
id="path9370" />
<path
style="fill:#ffffff;fill-opacity:0.244068;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 106.78511,142.38015 -28.47603,32.9254 62.51378,7.56395 z"
id="path9372" />
<path
style="fill:#ffffff;fill-opacity:0.216949;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 165.96186,101.44585 195.7727,125.02756 182.64703,78.754017 Z"
id="path9374" />
</g>
<g
inkscape:groupmode="layer"
id="layer4"
inkscape:label="Vertices">
<circle
style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:4;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
id="path27764"
cx="106.86934"
cy="142.38014"
r="2.0022209" />
<circle
style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:4;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
id="circle28773"
cx="111.54119"
cy="99.221161"
r="2.0022209" />
<circle
style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:4;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
id="circle29091"
cx="165.90784"
cy="101.36163"
r="2.0022209" />
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 6.4 KiB

View File

@@ -0,0 +1,334 @@
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<!-- Created with Inkscape (http://www.inkscape.org/) -->
<svg
width="256mm"
height="256mm"
viewBox="0 0 256 256"
version="1.1"
id="svg5"
inkscape:version="1.3.2 (091e20ef0f, 2023-11-25)"
sodipodi:docname="damus_rounded.svg"
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape"
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd"
xmlns:xlink="http://www.w3.org/1999/xlink"
xmlns="http://www.w3.org/2000/svg"
xmlns:svg="http://www.w3.org/2000/svg">
<sodipodi:namedview
id="namedview7"
pagecolor="#ffffff"
bordercolor="#000000"
borderopacity="0.25"
inkscape:pageshadow="2"
inkscape:pageopacity="0.0"
inkscape:pagecheckerboard="0"
inkscape:blackoutopacity="0.0"
inkscape:document-units="mm"
showgrid="false"
inkscape:zoom="0.5946522"
inkscape:cx="405.27892"
inkscape:cy="543.17465"
inkscape:window-width="1920"
inkscape:window-height="1080"
inkscape:window-x="0"
inkscape:window-y="0"
inkscape:window-maximized="0"
inkscape:current-layer="layer1"
inkscape:showpageshadow="2"
inkscape:deskcolor="#d1d1d1" />
<defs
id="defs2">
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect9"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,80,0,1 @ F,0,1,1,0,80,0,1 @ F,0,0,1,0,80,0,1 @ F,0,0,1,0,80,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect8"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect7"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect6"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect5"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect4"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect3"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect2"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<inkscape:path-effect
effect="fillet_chamfer"
id="path-effect1"
is_visible="true"
lpeversion="1"
nodesatellites_param="F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1 @ F,0,0,1,0,0,0,1"
radius="0"
unit="px"
method="auto"
mode="F"
chamfer_steps="1"
flexible="false"
use_knot_distance="true"
apply_no_radius="true"
apply_with_radius="true"
only_selected="false"
hide_knots="false" />
<linearGradient
inkscape:collect="always"
id="linearGradient39361">
<stop
style="stop-color:#0de8ff;stop-opacity:0.78082192;"
offset="0"
id="stop39357" />
<stop
style="stop-color:#d600fc;stop-opacity:0.95433789;"
offset="1"
id="stop39359" />
</linearGradient>
<inkscape:path-effect
effect="bspline"
id="path-effect255"
is_visible="true"
lpeversion="1"
weight="33.333333"
steps="2"
helper_size="0"
apply_no_weight="true"
apply_with_weight="true"
only_selected="false" />
<linearGradient
inkscape:collect="always"
id="linearGradient2119">
<stop
style="stop-color:#1c55ff;stop-opacity:1;"
offset="0"
id="stop2115" />
<stop
style="stop-color:#7f35ab;stop-opacity:1;"
offset="0.5"
id="stop2123" />
<stop
style="stop-color:#ff0bd6;stop-opacity:1;"
offset="1"
id="stop2117" />
</linearGradient>
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient2119"
id="linearGradient2121"
x1="10.067794"
y1="248.81357"
x2="246.56145"
y2="7.1864405"
gradientUnits="userSpaceOnUse" />
<linearGradient
inkscape:collect="always"
xlink:href="#linearGradient39361"
id="linearGradient39367"
x1="62.104473"
y1="128.78963"
x2="208.25758"
y2="128.78963"
gradientUnits="userSpaceOnUse" />
</defs>
<g
inkscape:label="Background"
inkscape:groupmode="layer"
id="layer1"
sodipodi:insensitive="true">
<path
id="rect61"
style="fill:url(#linearGradient2121);stroke-width:0.264583;opacity:1"
inkscape:label="Gradient"
d="m 80,-1.0775033e-7 h 96 A 80,80 45 0 1 256,80 v 96 a 80,80 135 0 1 -80,80 H 80 A 80,80 45 0 1 -5.3875166e-8,176 V 80 A 80,80 135 0 1 80,-1.0775033e-7 Z"
inkscape:path-effect="#path-effect9"
inkscape:original-d="M -5.3875166e-8,-1.0775033e-7 H 256 V 256 H -5.3875166e-8 Z" />
</g>
<g
id="g407"
inkscape:label="Logo">
<g
id="layer2"
inkscape:label="LogoStroke"
style="display:inline">
<path
style="fill:url(#linearGradient39367);fill-opacity:1;stroke:#ffffff;stroke-width:10;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 101.1429,213.87373 C 67.104473,239.1681 67.104473,42.67112 67.104473,42.67112 135.18122,57.58146 203.25844,72.491904 203.25758,105.24181 c -8.6e-4,32.74991 -68.07625,83.33755 -102.11468,108.63192 z"
id="path253" />
</g>
<g
inkscape:groupmode="layer"
id="layer3"
inkscape:label="Poly">
<path
style="fill:#ffffff;fill-opacity:0.325424;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 67.32839,76.766948 112.00424,99.41949 100.04873,52.226693 Z"
id="path4648" />
<path
style="fill:#ffffff;fill-opacity:0.274576;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 111.45696,98.998695 107.00758,142.60261 70.077729,105.67276 Z"
id="path9299" />
<path
style="fill:#ffffff;fill-opacity:0.379661;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 111.01202,99.221164 29.14343,-37.15232 25.80641,39.377006 z"
id="path9301" />
<path
style="fill:#ffffff;fill-opacity:0.447458;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 111.45696,99.443631 57.17452,55.172309 -2.89209,-53.17009 z"
id="path9368" />
<path
style="fill:#ffffff;fill-opacity:0.20678;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 106.78511,142.38015 62.06884,12.68073 -57.17452,-55.617249 z"
id="path9370" />
<path
style="fill:#ffffff;fill-opacity:0.244068;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="m 106.78511,142.38015 -28.47603,32.9254 62.51378,7.56395 z"
id="path9372" />
<path
style="fill:#ffffff;fill-opacity:0.216949;stroke:#ffffff;stroke-width:2;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
d="M 165.96186,101.44585 195.7727,125.02756 182.64703,78.754017 Z"
id="path9374" />
</g>
<g
inkscape:groupmode="layer"
id="layer4"
inkscape:label="Vertices">
<circle
style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:4;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
id="path27764"
cx="106.86934"
cy="142.38014"
r="2.0022209" />
<circle
style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:4;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
id="circle28773"
cx="111.54119"
cy="99.221161"
r="2.0022209" />
<circle
style="fill:#ffffff;fill-opacity:1;stroke:none;stroke-width:4;stroke-miterlimit:4;stroke-dasharray:none;stroke-opacity:1"
id="circle29091"
cx="165.90784"
cy="101.36163"
r="2.0022209" />
</g>
</g>
</svg>

After

Width:  |  Height:  |  Size: 11 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 15 KiB

Binary file not shown.

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 806 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 554 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 4.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.3 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 340 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 912 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.2 KiB

View File

@@ -0,0 +1 @@
<svg width="14" height="15" viewBox="0 0 14 15" fill="none" xmlns="http://www.w3.org/2000/svg"> <path d="M1 4.2C1 3.07989 1 2.51984 1.21799 2.09202C1.40973 1.71569 1.71569 1.40973 2.09202 1.21799C2.51984 1 3.07989 1 4.2 1H9.8C10.9201 1 11.4801 1 11.908 1.21799C12.2843 1.40973 12.5903 1.71569 12.782 2.09202C13 2.51984 13 3.07989 13 4.2V7.8C13 8.92013 13 9.48013 12.782 9.908C12.5903 10.2843 12.2843 10.5903 11.908 10.782C11.4801 11 10.9201 11 9.8 11H8.12247C7.70647 11 7.49847 11 7.29947 11.0409C7.12293 11.0771 6.95213 11.137 6.79167 11.219C6.6108 11.3114 6.44833 11.4413 6.12347 11.7012L4.53317 12.9735C4.25578 13.1954 4.11709 13.3063 4.00036 13.3065C3.89885 13.3066 3.80281 13.2604 3.73949 13.1811C3.66667 13.0899 3.66667 12.9123 3.66667 12.557V11C3.04669 11 2.73669 11 2.48236 10.9319C1.79218 10.7469 1.25308 10.2078 1.06815 9.51767C1 9.26333 1 8.95333 1 8.33333V4.2V4.2Z" stroke="white" stroke-width="1.5" stroke-linecap="round" stroke-linejoin="round"/> </svg>

After

Width:  |  Height:  |  Size: 969 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 808 B

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.1 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.0 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.4 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 3.2 KiB

View File

@@ -0,0 +1,28 @@
{
"name": "egui Template PWA",
"short_name": "egui-template-pwa",
"icons": [
{
"src": "./icon-256.png",
"sizes": "256x256",
"type": "image/png"
},
{
"src": "./maskable_icon_x512.png",
"sizes": "512x512",
"type": "image/png",
"purpose": "any maskable"
},
{
"src": "./icon-1024.png",
"sizes": "1024x1024",
"type": "image/png"
}
],
"lang": "en-US",
"id": "/index.html",
"start_url": "./index.html",
"display": "standalone",
"background_color": "white",
"theme_color": "white"
}

View File

@@ -0,0 +1,25 @@
/* Service worker: pre-caches the application shell at install time and
   serves cached responses when the network is unavailable (offline PWA). */
var cacheName = 'egui-template-pwa';
// NOTE(review): these filenames look like eframe-template leftovers —
// confirm they match the js/wasm artifacts this project actually builds,
// otherwise the offline cache will 404 on install.
var filesToCache = [
  './',
  './index.html',
  './eframe_template.js',
  './eframe_template_bg.wasm',
];

/* Start the service worker and cache all of the app's content */
self.addEventListener('install', function (e) {
  e.waitUntil(
    caches.open(cacheName).then(function (cache) {
      return cache.addAll(filesToCache);
    })
  );
});

/* Serve cached content when offline */
self.addEventListener('fetch', function (e) {
  e.respondWith(
    caches.match(e.request).then(function (response) {
      // Prefer the cache; fall back to the network for anything uncached.
      return response || fetch(e.request);
    })
  );
});

View File

@@ -0,0 +1,10 @@
use std::process::Command;
/// Build script: embeds the current git commit hash into the binary as the
/// `GIT_COMMIT_HASH` environment variable. Silently does nothing when git is
/// unavailable or the command fails (e.g. building from a source tarball).
fn main() {
    let result = Command::new("git").args(["rev-parse", "HEAD"]).output();
    if let Ok(output) = result {
        if !output.status.success() {
            return;
        }
        let commit = String::from_utf8_lossy(&output.stdout);
        println!("cargo:rustc-env=GIT_COMMIT_HASH={}", commit.trim());
    }
}

View File

@@ -0,0 +1,20 @@
/// Returns the largest index `<= index` that lies on a UTF-8 character
/// boundary of `s`, clamping to `s.len()` when `index` is past the end.
///
/// Mirrors the (currently unstable) `str::floor_char_boundary` from the
/// standard library.
#[inline]
pub fn floor_char_boundary(s: &str, index: usize) -> usize {
    if index >= s.len() {
        s.len()
    } else {
        // A UTF-8 character occupies at most 4 bytes, so a character
        // boundary is always found within the 4-byte window ending at
        // `index`.
        let lower_bound = index.saturating_sub(3);
        let new_index = s.as_bytes()[lower_bound..=index]
            .iter()
            .rposition(|b| is_utf8_char_boundary(*b));

        // `&str` guarantees valid UTF-8, so the window above must contain a
        // boundary; `expect` documents that invariant and avoids the
        // previous `unsafe { unwrap_unchecked() }`.
        lower_bound + new_index.expect("char boundary within 4 bytes of index")
    }
}

/// True when `c` is the first byte of a UTF-8 sequence (or an ASCII byte),
/// i.e. not a continuation byte of the form `0b10xxxxxx`.
#[inline]
fn is_utf8_char_boundary(c: u8) -> bool {
    // Bit magic equivalent to: c < 128 || c >= 192
    (c as i8) >= -0x40
}

View File

@@ -0,0 +1,682 @@
use std::cmp::Ordering;
use std::collections::{BTreeMap, BTreeSet};
use std::sync::Arc;
use url::Url;
use uuid::Uuid;
use enostr::{ClientMessage, FilledKeypair, FullKeypair, Keypair, RelayPool};
use nostrdb::{Filter, Ndb, Note, NoteKey, Subscription, Transaction};
use crate::app::get_active_columns_mut;
use crate::decks::DecksCache;
use crate::{
imgcache::ImageCache,
login_manager::AcquireKeyState,
muted::Muted,
route::Route,
storage::{KeyStorageResponse, KeyStorageType},
ui::{
account_login_view::{AccountLoginResponse, AccountLoginView},
accounts::{AccountsView, AccountsViewResponse},
},
unknowns::SingleUnkIdAction,
unknowns::UnknownIds,
user_account::UserAccount,
};
use tracing::{debug, error, info};
mod route;
pub use route::{AccountsAction, AccountsRoute, AccountsRouteResponse};
/// Per-account relay-list state: tracks the user's NIP-65 relay list
/// (kind 10002) in nostrdb and mirrors it with a remote subscription on
/// the relay pool.
pub struct AccountRelayData {
    filter: Filter,           // NIP-65 (kind 10002) filter for this account
    subid: String,            // id of the remote relay-pool subscription
    sub: Option<Subscription>, // local ndb subscription, when active
    local: BTreeSet<String>, // used locally but not advertised
    advertised: BTreeSet<String>, // advertised via NIP-65
}
impl AccountRelayData {
    /// Bootstrap relay-list state for `pubkey`: query ndb for any stored
    /// NIP-65 (kind 10002) relay list, subscribe locally for future changes,
    /// and register a matching remote subscription on the relay pool.
    pub fn new(ndb: &Ndb, pool: &mut RelayPool, pubkey: &[u8; 32]) -> Self {
        // Filter matching only the newest NIP-65 relay list for this user.
        let relay_filter = Filter::new()
            .authors([pubkey])
            .kinds([10002])
            .limit(1)
            .build();

        // Watch the local database for relay-list updates.
        let local_sub = ndb
            .subscribe(&[relay_filter.clone()])
            .expect("ndb relay list subscription");

        // Immediately check whether a relay list is already stored locally.
        let txn = Transaction::new(ndb).expect("transaction");
        let limit = relay_filter.limit().unwrap_or(crate::filter::default_limit()) as i32;
        let note_keys: Vec<NoteKey> = ndb
            .query(&txn, &[relay_filter.clone()], limit)
            .expect("query user relays results")
            .iter()
            .map(|qr| qr.note_key)
            .collect();
        let relays = Self::harvest_nip65_relays(ndb, &txn, &note_keys);
        debug!(
            "pubkey {}: initial relays {:?}",
            hex::encode(pubkey),
            relays
        );

        // Unique id used for future remote relay subscriptions.
        let subid = Uuid::new_v4().to_string();

        // Register the same filter with the currently connected relays.
        pool.subscribe(subid.clone(), vec![relay_filter.clone()]);

        AccountRelayData {
            filter: relay_filter,
            subid,
            sub: Some(local_sub),
            local: BTreeSet::new(),
            advertised: relays.into_iter().collect(),
        }
    }

    /// Normalize a relay url (e.g. trailing slashes) so equivalent urls do
    /// not show up as duplicates. Unparseable input is returned unchanged.
    pub fn canonicalize_url(url: &str) -> String {
        Url::parse(url)
            .map(|parsed| parsed.to_string())
            .unwrap_or_else(|_| url.to_owned())
    }

    /// Collect relay urls from the "r" tags of the given NIP-65 notes,
    /// canonicalizing each url.
    fn harvest_nip65_relays(ndb: &Ndb, txn: &Transaction, nks: &[NoteKey]) -> Vec<String> {
        let mut relays = Vec::new();
        for nk in nks {
            let Ok(note) = ndb.get_note_by_key(txn, *nk) else {
                continue;
            };
            for tag in note.tags() {
                match tag.get(0).and_then(|t| t.variant().str()) {
                    Some("r") => {
                        if let Some(url) = tag.get(1).and_then(|f| f.variant().str()) {
                            relays.push(Self::canonicalize_url(url));
                        }
                    }
                    // "alt" tags are expected on relay lists; ignore for now.
                    Some("alt") => {}
                    Some(x) => {
                        error!("harvest_nip65_relays: unexpected tag type: {}", x);
                    }
                    None => {
                        error!("harvest_nip65_relays: invalid tag");
                    }
                }
            }
        }
        relays
    }
}
/// Per-account mute state: the NIP-51 (kind 10000) filter/subscription
/// used to track the user's mute list, plus the parsed `Muted` set.
pub struct AccountMutedData {
    filter: Filter,
    subid: String,
    sub: Option<Subscription>,
    // Arc so mute closures can share it cheaply with the UI
    muted: Arc<Muted>,
}
impl AccountMutedData {
    /// Build the mute-list state for `pubkey`: subscribe locally (ndb) and
    /// remotely (relay pool) to the account's NIP-51 mute list (kind 10000)
    /// and seed `muted` from whatever is already in the local database.
    pub fn new(ndb: &Ndb, pool: &mut RelayPool, pubkey: &[u8; 32]) -> Self {
        // Construct a filter for the user's NIP-51 muted list
        let filter = Filter::new()
            .authors([pubkey])
            .kinds([10000])
            .limit(1)
            .build();

        // Local ndb subscription
        let ndbsub = ndb
            .subscribe(&[filter.clone()])
            .expect("ndb muted subscription");

        // Query the ndb immediately to see if the user's muted list is already there
        let txn = Transaction::new(ndb).expect("transaction");
        let lim = filter.limit().unwrap_or(crate::filter::default_limit()) as i32;
        let nks = ndb
            .query(&txn, &[filter.clone()], lim)
            .expect("query user muted results")
            .iter()
            .map(|qr| qr.note_key)
            .collect::<Vec<NoteKey>>();
        let muted = Self::harvest_nip51_muted(ndb, &txn, &nks);
        debug!("pubkey {}: initial muted {:?}", hex::encode(pubkey), muted);

        // Id for future remote relay subscriptions
        let subid = Uuid::new_v4().to_string();

        // Add remote subscription to existing relays
        pool.subscribe(subid.clone(), vec![filter.clone()]);

        AccountMutedData {
            filter,
            subid,
            sub: Some(ndbsub),
            muted: Arc::new(muted),
        }
    }

    /// Collect muted pubkeys ("p"), hashtags ("t"), words ("word") and
    /// threads ("e") from the NIP-51 notes in `nks`.
    fn harvest_nip51_muted(ndb: &Ndb, txn: &Transaction, nks: &[NoteKey]) -> Muted {
        let mut muted = Muted::default();
        for nk in nks.iter() {
            if let Ok(note) = ndb.get_note_by_key(txn, *nk) {
                for tag in note.tags() {
                    match tag.get(0).and_then(|t| t.variant().str()) {
                        Some("p") => {
                            if let Some(id) = tag.get(1).and_then(|f| f.variant().id()) {
                                muted.pubkeys.insert(*id);
                            }
                        }
                        Some("t") => {
                            if let Some(str) = tag.get(1).and_then(|f| f.variant().str()) {
                                muted.hashtags.insert(str.to_string());
                            }
                        }
                        Some("word") => {
                            if let Some(str) = tag.get(1).and_then(|f| f.variant().str()) {
                                muted.words.insert(str.to_string());
                            }
                        }
                        Some("e") => {
                            if let Some(id) = tag.get(1).and_then(|f| f.variant().id()) {
                                muted.threads.insert(*id);
                            }
                        }
                        Some("alt") => {
                            // maybe we can ignore these?
                        }
                        Some(x) => error!("query_nip51_muted: unexpected tag: {}", x),
                        None => error!(
                            "query_nip51_muted: bad tag value: {:?}",
                            tag.get_unchecked(0).variant()
                        ),
                    }
                }
            }
        }
        muted
    }
}
/// Locally-subscribed data for a single account: its NIP-65 relay list
/// and NIP-51 mute list.
pub struct AccountData {
    relay: AccountRelayData,
    muted: AccountMutedData,
}
/// The interface for managing the user's accounts.
/// Represents all user-facing operations related to account management.
pub struct Accounts {
    currently_selected_account: Option<usize>,
    accounts: Vec<UserAccount>,
    key_store: KeyStorageType,
    account_data: BTreeMap<[u8; 32], AccountData>,
    // when non-empty, these relays replace all account/bootstrap relays
    forced_relays: BTreeSet<String>,
    // fallback relays used when no account advertises any
    bootstrap_relays: BTreeSet<String>,
    // true until the first relay-configuration pass has run
    needs_relay_config: bool,
}
/// Result of rendering an account view: an optional switch/remove action
/// plus an unknown-id action that the caller MUST process.
#[must_use = "You must call process_login_action on this to handle unknown ids"]
pub struct RenderAccountAction {
    pub accounts_action: Option<AccountsAction>,
    pub unk_id_action: SingleUnkIdAction,
}
impl RenderAccountAction {
    // Simple wrapper around processing the unknown action, to avoid exposing
    // too much internal logic. This allows us to have a must_use on our
    // LoginAction type, otherwise the SingleUnkIdAction's must_use will
    // be lost when returned in the login action
    pub fn process_action(&mut self, ids: &mut UnknownIds, ndb: &Ndb, txn: &Transaction) {
        self.unk_id_action.process_action(ids, ndb, txn);
    }
}
/// Render account management views from a route.
///
/// Draws either the account list or the add-account view for column
/// `col` and translates the UI response into a [`RenderAccountAction`].
#[allow(clippy::too_many_arguments)]
pub fn render_accounts_route(
    ui: &mut egui::Ui,
    ndb: &Ndb,
    col: usize,
    img_cache: &mut ImageCache,
    accounts: &mut Accounts,
    decks: &mut DecksCache,
    login_state: &mut AcquireKeyState,
    route: AccountsRoute,
) -> RenderAccountAction {
    let resp = match route {
        AccountsRoute::Accounts => AccountsView::new(ndb, accounts, img_cache)
            .ui(ui)
            .inner
            .map(AccountsRouteResponse::Accounts),

        AccountsRoute::AddAccount => AccountLoginView::new(login_state)
            .ui(ui)
            .inner
            .map(AccountsRouteResponse::AddAccount),
    };

    // Nothing was clicked this frame
    let Some(resp) = resp else {
        return RenderAccountAction {
            accounts_action: None,
            unk_id_action: SingleUnkIdAction::no_action(),
        };
    };

    match resp {
        AccountsRouteResponse::Accounts(response) => RenderAccountAction {
            accounts_action: process_accounts_view_response(accounts, decks, col, response),
            unk_id_action: SingleUnkIdAction::no_action(),
        },

        AccountsRouteResponse::AddAccount(response) => {
            let action = process_login_view_response(accounts, decks, response);
            // reset the login form and pop back to the previous view
            *login_state = Default::default();
            get_active_columns_mut(accounts, decks)
                .column_mut(col)
                .router_mut()
                .go_back();
            action
        }
    }
}
/// Translate an accounts-view response into an optional account action,
/// routing to the login view when the user asked to add an account.
pub fn process_accounts_view_response(
    accounts: &mut Accounts,
    decks: &mut DecksCache,
    col: usize,
    response: AccountsViewResponse,
) -> Option<AccountsAction> {
    let router = get_active_columns_mut(accounts, decks)
        .column_mut(col)
        .router_mut();

    match response {
        AccountsViewResponse::RemoveAccount(index) => {
            let acc_sel = AccountsAction::Remove(index);
            info!("account selection: {:?}", acc_sel);
            Some(acc_sel)
        }
        AccountsViewResponse::SelectAccount(index) => {
            let acc_sel = AccountsAction::Switch(index);
            info!("account selection: {:?}", acc_sel);
            Some(acc_sel)
        }
        AccountsViewResponse::RouteToLogin => {
            router.route_to(Route::add_account());
            None
        }
    }
}
impl Accounts {
    /// Load accounts and the selected-account index from the key store,
    /// canonicalizing the forced/bootstrap relay url lists.
    pub fn new(key_store: KeyStorageType, forced_relays: Vec<String>) -> Self {
        let accounts = if let KeyStorageResponse::ReceivedResult(res) = key_store.get_keys() {
            res.unwrap_or_default()
        } else {
            Vec::new()
        };

        let currently_selected_account = get_selected_index(&accounts, &key_store);
        let account_data = BTreeMap::new();
        let forced_relays: BTreeSet<String> = forced_relays
            .into_iter()
            .map(|u| AccountRelayData::canonicalize_url(&u))
            .collect();
        let bootstrap_relays = [
            "wss://relay.damus.io",
            // "wss://pyramid.fiatjaf.com", // Uncomment if needed
            "wss://nos.lol",
            "wss://nostr.wine",
            "wss://purplepag.es",
        ]
        .iter()
        .map(|&url| url.to_string())
        .map(|u| AccountRelayData::canonicalize_url(&u))
        .collect();

        Accounts {
            currently_selected_account,
            accounts,
            key_store,
            account_data,
            forced_relays,
            bootstrap_relays,
            needs_relay_config: true,
        }
    }

    pub fn get_accounts(&self) -> &Vec<UserAccount> {
        &self.accounts
    }

    pub fn get_account(&self, ind: usize) -> Option<&UserAccount> {
        self.accounts.get(ind)
    }

    pub fn find_account(&self, pk: &[u8; 32]) -> Option<&UserAccount> {
        self.accounts.iter().find(|acc| acc.pubkey.bytes() == pk)
    }

    /// Remove the account at `index`, fixing up the selected-account
    /// index so the selection stays on the same logical account.
    pub fn remove_account(&mut self, index: usize) {
        if let Some(account) = self.accounts.get(index) {
            let _ = self.key_store.remove_key(account);
            self.accounts.remove(index);

            if let Some(selected_index) = self.currently_selected_account {
                match selected_index.cmp(&index) {
                    Ordering::Greater => {
                        // selection shifts down by one
                        self.select_account(selected_index - 1);
                    }
                    Ordering::Equal => {
                        if self.accounts.is_empty() {
                            // If no accounts remain, clear the selection
                            self.clear_selected_account();
                        } else if index >= self.accounts.len() {
                            // If the removed account was the last one, select the new last account
                            self.select_account(self.accounts.len() - 1);
                        } else {
                            // Otherwise, select the account at the same position
                            self.select_account(index);
                        }
                    }
                    Ordering::Less => {}
                }
            }
        }
    }

    /// Whether `pubkey` is already stored, and if so at which index and
    /// whether we hold its secret key.
    fn contains_account(&self, pubkey: &[u8; 32]) -> Option<ContainsAccount> {
        for (index, account) in self.accounts.iter().enumerate() {
            let has_pubkey = account.pubkey.bytes() == pubkey;
            let has_nsec = account.secret_key.is_some();
            if has_pubkey {
                return Some(ContainsAccount { has_nsec, index });
            }
        }

        None
    }

    #[must_use = "UnknownIdAction's must be handled. Use .process_unknown_id_action()"]
    pub fn add_account(&mut self, account: Keypair) -> RenderAccountAction {
        let pubkey = account.pubkey;
        let switch_to_index = if let Some(contains_acc) = self.contains_account(pubkey.bytes()) {
            // already present: only replace if the new keypair upgrades us
            // from npub-only to having the secret key
            if account.secret_key.is_some() && !contains_acc.has_nsec {
                info!(
                    "user provided nsec, but we already have npub {}. Upgrading to nsec",
                    pubkey
                );
                let _ = self.key_store.add_key(&account);

                self.accounts[contains_acc.index] = account;
            } else {
                info!("already have account, not adding {}", pubkey);
            }
            contains_acc.index
        } else {
            info!("adding new account {}", pubkey);
            let _ = self.key_store.add_key(&account);
            self.accounts.push(account);
            self.accounts.len() - 1
        };

        RenderAccountAction {
            accounts_action: Some(AccountsAction::Switch(switch_to_index)),
            unk_id_action: SingleUnkIdAction::pubkey(pubkey),
        }
    }

    pub fn num_accounts(&self) -> usize {
        self.accounts.len()
    }

    pub fn get_selected_account_index(&self) -> Option<usize> {
        self.currently_selected_account
    }

    /// The selected account's full keypair, or the first account that
    /// has a secret key if the selected one doesn't.
    pub fn selected_or_first_nsec(&self) -> Option<FilledKeypair<'_>> {
        self.get_selected_account()
            .and_then(|kp| kp.to_full())
            .or_else(|| self.accounts.iter().find_map(|a| a.to_full()))
    }

    // NOTE(review): could be simplified to
    // `self.currently_selected_account.and_then(|i| self.get_account(i))`
    pub fn get_selected_account(&self) -> Option<&UserAccount> {
        if let Some(account_index) = self.currently_selected_account {
            if let Some(account) = self.get_account(account_index) {
                Some(account)
            } else {
                None
            }
        } else {
            None
        }
    }

    pub fn select_account(&mut self, index: usize) {
        if let Some(account) = self.accounts.get(index) {
            self.currently_selected_account = Some(index);
            self.key_store.select_key(Some(account.pubkey));
        }
    }

    pub fn clear_selected_account(&mut self) {
        self.currently_selected_account = None;
        self.key_store.select_key(None);
    }

    /// A note-mute predicate derived from the selected account's mute
    /// list; mutes nothing when there is no selection or no mute data.
    pub fn mutefun(&self) -> Box<dyn Fn(&Note) -> bool> {
        if let Some(index) = self.currently_selected_account {
            if let Some(account) = self.accounts.get(index) {
                let pubkey = account.pubkey.bytes();
                if let Some(account_data) = self.account_data.get(pubkey) {
                    let muted = Arc::clone(&account_data.muted.muted);
                    return Box::new(move |note: &Note| muted.is_muted(note));
                }
            }
        }

        Box::new(|_: &Note| false)
    }

    /// Send every account's relay-list and mute-list filters to a newly
    /// opened relay.
    pub fn send_initial_filters(&mut self, pool: &mut RelayPool, relay_url: &str) {
        for data in self.account_data.values() {
            pool.send_to(
                &ClientMessage::req(data.relay.subid.clone(), vec![data.relay.filter.clone()]),
                relay_url,
            );
            pool.send_to(
                &ClientMessage::req(data.muted.subid.clone(), vec![data.muted.filter.clone()]),
                relay_url,
            );
        }
    }

    // Returns added and removed accounts
    fn delta_accounts(&self) -> (Vec<[u8; 32]>, Vec<[u8; 32]>) {
        let mut added = Vec::new();
        for pubkey in self.accounts.iter().map(|a| a.pubkey.bytes()) {
            if !self.account_data.contains_key(pubkey) {
                added.push(*pubkey);
            }
        }
        let mut removed = Vec::new();
        for pubkey in self.account_data.keys() {
            if self.contains_account(pubkey).is_none() {
                removed.push(*pubkey);
            }
        }
        (added, removed)
    }

    /// Set up relay-list and mute-list subscriptions for a new account.
    fn handle_added_account(&mut self, ndb: &Ndb, pool: &mut RelayPool, pubkey: &[u8; 32]) {
        debug!("handle_added_account {}", hex::encode(pubkey));

        // Create the user account data
        let new_account_data = AccountData {
            relay: AccountRelayData::new(ndb, pool, pubkey),
            muted: AccountMutedData::new(ndb, pool, pubkey),
        };
        self.account_data.insert(*pubkey, new_account_data);
    }

    fn handle_removed_account(&mut self, pubkey: &[u8; 32]) {
        debug!("handle_removed_account {}", hex::encode(pubkey));
        // FIXME - we need to unsubscribe here
        self.account_data.remove(pubkey);
    }

    /// Poll the local ndb subscriptions for new relay-list or mute-list
    /// notes; returns true when any account's data changed.
    fn poll_for_updates(&mut self, ndb: &Ndb) -> bool {
        let mut changed = false;
        for (pubkey, data) in &mut self.account_data {
            if let Some(sub) = data.relay.sub {
                let nks = ndb.poll_for_notes(sub, 1);
                if !nks.is_empty() {
                    let txn = Transaction::new(ndb).expect("txn");
                    let relays = AccountRelayData::harvest_nip65_relays(ndb, &txn, &nks);
                    debug!(
                        "pubkey {}: updated relays {:?}",
                        hex::encode(pubkey),
                        relays
                    );
                    data.relay.advertised = relays.into_iter().collect();
                    changed = true;
                }
            }
            if let Some(sub) = data.muted.sub {
                let nks = ndb.poll_for_notes(sub, 1);
                if !nks.is_empty() {
                    let txn = Transaction::new(ndb).expect("txn");
                    let muted = AccountMutedData::harvest_nip51_muted(ndb, &txn, &nks);
                    debug!("pubkey {}: updated muted {:?}", hex::encode(pubkey), muted);
                    data.muted.muted = Arc::new(muted);
                    changed = true;
                }
            }
        }
        changed
    }

    /// Reconcile the pool with the desired relay set. Priority:
    /// forced relays > account (local + advertised) relays > bootstrap.
    fn update_relay_configuration(
        &mut self,
        pool: &mut RelayPool,
        wakeup: impl Fn() + Send + Sync + Clone + 'static,
    ) {
        // If forced relays are set use them only
        let mut desired_relays = self.forced_relays.clone();

        // Compose the desired relay lists from the accounts
        if desired_relays.is_empty() {
            for data in self.account_data.values() {
                desired_relays.extend(data.relay.local.iter().cloned());
                desired_relays.extend(data.relay.advertised.iter().cloned());
            }
        }

        // If no relays are specified at this point use the bootstrap list
        if desired_relays.is_empty() {
            desired_relays = self.bootstrap_relays.clone();
        }

        debug!("current relays: {:?}", pool.urls());
        debug!("desired relays: {:?}", desired_relays);

        let add: BTreeSet<String> = desired_relays.difference(&pool.urls()).cloned().collect();
        let sub: BTreeSet<String> = pool.urls().difference(&desired_relays).cloned().collect();
        if !add.is_empty() {
            debug!("configuring added relays: {:?}", add);
            let _ = pool.add_urls(add, wakeup);
        }
        if !sub.is_empty() {
            debug!("removing unwanted relays: {:?}", sub);
            pool.remove_urls(&sub);
        }

        debug!("current relays: {:?}", pool.urls());
    }

    /// Per-frame update: track account additions/removals, poll for
    /// relay/mute list updates, and reconfigure the pool when needed.
    pub fn update(&mut self, ndb: &Ndb, pool: &mut RelayPool, ctx: &egui::Context) {
        // IMPORTANT - This function is called in the UI update loop,
        // make sure it is fast when idle

        // On the initial update the relays need config even if nothing changes below
        let mut relays_changed = self.needs_relay_config;

        let ctx2 = ctx.clone();
        let wakeup = move || {
            ctx2.request_repaint();
        };

        // Were any accounts added or removed?
        let (added, removed) = self.delta_accounts();
        for pk in added {
            self.handle_added_account(ndb, pool, &pk);
            relays_changed = true;
        }
        for pk in removed {
            self.handle_removed_account(&pk);
            relays_changed = true;
        }

        // Did any accounts receive updates (ie NIP-65 relay lists)
        relays_changed = self.poll_for_updates(ndb) || relays_changed;

        // If needed, update the relay configuration
        if relays_changed {
            self.update_relay_configuration(pool, wakeup);
            self.needs_relay_config = false;
        }
    }
}
fn get_selected_index(accounts: &[UserAccount], keystore: &KeyStorageType) -> Option<usize> {
match keystore.get_selected_key() {
KeyStorageResponse::ReceivedResult(Ok(Some(pubkey))) => {
return accounts.iter().position(|account| account.pubkey == pubkey);
}
KeyStorageResponse::ReceivedResult(Err(e)) => error!("Error getting selected key: {}", e),
KeyStorageResponse::Waiting | KeyStorageResponse::ReceivedResult(Ok(None)) => {}
};
None
}
/// Handle the login view's response: create or import a keypair, add it
/// as an account, and give the new account a default deck.
pub fn process_login_view_response(
    manager: &mut Accounts,
    decks: &mut DecksCache,
    response: AccountLoginResponse,
) -> RenderAccountAction {
    let keypair = match response {
        AccountLoginResponse::CreateNew => FullKeypair::generate().to_keypair(),
        AccountLoginResponse::LoginWith(keypair) => keypair,
    };

    // remember the pubkey before the keypair is moved into the manager
    let pubkey = keypair.pubkey;
    let action = manager.add_account(keypair);
    decks.add_deck_default(pubkey);
    action
}
/// Lookup result for an already-stored account: its index and whether
/// we hold its secret key (nsec).
#[derive(Default)]
struct ContainsAccount {
    pub has_nsec: bool,
    pub index: usize,
}

View File

@@ -0,0 +1,19 @@
use super::{AccountLoginResponse, AccountsViewResponse};
use serde::{Deserialize, Serialize};
/// UI response produced by rendering one of the account routes.
pub enum AccountsRouteResponse {
    Accounts(AccountsViewResponse),
    AddAccount(AccountLoginResponse),
}
/// Which account-management view to render.
#[derive(Debug, Eq, PartialEq, Clone, Copy, Serialize, Deserialize)]
pub enum AccountsRoute {
    Accounts,
    AddAccount,
}
/// An account-list mutation requested by the UI.
#[derive(Debug)]
pub enum AccountsAction {
    /// Make the account at this index the selected one
    Switch(usize),
    /// Delete the account at this index
    Remove(usize),
}

View File

@@ -0,0 +1,163 @@
use crate::{
column::Columns,
muted::MuteFun,
note::NoteRef,
notecache::NoteCache,
notes_holder::{NotesHolder, NotesHolderStorage},
profile::Profile,
route::{Route, Router},
thread::Thread,
};
use enostr::{NoteId, Pubkey, RelayPool};
use nostrdb::{Ndb, Transaction};
/// A user-initiated action on a rendered note.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum NoteAction {
    /// Compose a reply to this note
    Reply(NoteId),
    /// Quote-repost this note
    Quote(NoteId),
    /// Navigate to this note's thread
    OpenThread(NoteId),
    /// Navigate to this author's profile
    OpenProfile(Pubkey),
}
/// Notes freshly fetched for the notes-holder identified by `id`.
pub struct NewNotes {
    pub id: [u8; 32],
    pub notes: Vec<NoteRef>,
}
/// Deferred side effect produced by executing a [`NoteAction`].
pub enum NotesHolderResult {
    NewNotes(NewNotes),
}
/// open_thread is called when a note is selected and we need to navigate
/// to a thread. It is responsible for managing the subscription and
/// making sure the thread is up to date. In a sense, it's a model for
/// the thread view. We don't have a concept of model/view/controller etc
/// in egui, but this is the closest thing to that.
#[allow(clippy::too_many_arguments)]
fn open_thread(
    ndb: &Ndb,
    txn: &Transaction,
    router: &mut Router<Route>,
    note_cache: &mut NoteCache,
    pool: &mut RelayPool,
    threads: &mut NotesHolderStorage<Thread>,
    selected_note: &[u8; 32],
    is_muted: &MuteFun,
) -> Option<NotesHolderResult> {
    router.route_to(Route::thread(NoteId::new(selected_note.to_owned())));

    // threads are keyed by their root note, so resolve the root first
    let root_id = crate::note::root_note_id_from_selected_id(ndb, note_cache, txn, selected_note);
    Thread::open(ndb, note_cache, txn, pool, threads, root_id, is_muted)
}
impl NoteAction {
    /// Perform the action, returning a [`NotesHolderResult`] when a thread
    /// or profile view produced new notes that still need processing.
    #[allow(clippy::too_many_arguments)]
    pub fn execute(
        self,
        ndb: &Ndb,
        router: &mut Router<Route>,
        threads: &mut NotesHolderStorage<Thread>,
        profiles: &mut NotesHolderStorage<Profile>,
        note_cache: &mut NoteCache,
        pool: &mut RelayPool,
        txn: &Transaction,
        is_muted: &MuteFun,
    ) -> Option<NotesHolderResult> {
        match self {
            // composing routes produce no holder result
            NoteAction::Reply(note_id) => {
                router.route_to(Route::reply(note_id));
                None
            }

            NoteAction::Quote(note_id) => {
                router.route_to(Route::quote(note_id));
                None
            }

            NoteAction::OpenThread(note_id) => open_thread(
                ndb,
                txn,
                router,
                note_cache,
                pool,
                threads,
                note_id.bytes(),
                is_muted,
            ),

            NoteAction::OpenProfile(pubkey) => {
                router.route_to(Route::profile(pubkey));
                Profile::open(
                    ndb,
                    note_cache,
                    txn,
                    pool,
                    profiles,
                    pubkey.bytes(),
                    is_muted,
                )
            }
        }
    }

    /// Execute the NoteAction and process the NotesHolderResult
    #[allow(clippy::too_many_arguments)]
    pub fn execute_and_process_result(
        self,
        ndb: &Ndb,
        columns: &mut Columns,
        col: usize,
        threads: &mut NotesHolderStorage<Thread>,
        profiles: &mut NotesHolderStorage<Profile>,
        note_cache: &mut NoteCache,
        pool: &mut RelayPool,
        txn: &Transaction,
        is_muted: &MuteFun,
    ) {
        let router = columns.column_mut(col).router_mut();
        let result = self.execute(
            ndb, router, threads, profiles, note_cache, pool, txn, is_muted,
        );
        if let Some(br) = result {
            br.process(ndb, note_cache, txn, threads, is_muted);
        }
    }
}
impl NotesHolderResult {
    /// Wrap freshly fetched notes for the holder identified by `id`.
    pub fn new_notes(notes: Vec<NoteRef>, id: [u8; 32]) -> Self {
        NotesHolderResult::NewNotes(NewNotes::new(notes, id))
    }

    /// Apply the result to the matching holder in `storage` so the next
    /// render sees the new notes.
    pub fn process<N: NotesHolder>(
        &self,
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        txn: &Transaction,
        storage: &mut NotesHolderStorage<N>,
        is_muted: &MuteFun,
    ) {
        // single-variant enum: the binding is irrefutable
        let NotesHolderResult::NewNotes(new_notes) = self;

        // update the thread for next render if we have new notes
        let holder = storage
            .notes_holder_mutated(ndb, note_cache, txn, &new_notes.id, is_muted)
            .get_ptr();
        new_notes.process(holder);
    }
}
impl NewNotes {
    pub fn new(notes: Vec<NoteRef>, id: [u8; 32]) -> Self {
        Self { notes, id }
    }

    /// Simple helper for processing a NewThreadNotes result: insert/merge
    /// the notes into the holder's view.
    pub fn process<N: NotesHolder>(&self, thread: &mut N) {
        // threads are chronological, ie reversed from the default
        // reverse-chronological ordering
        thread.get_view().insert(&self.notes, /* reversed = */ true);
    }
}

View File

@@ -0,0 +1,843 @@
use crate::{
accounts::Accounts,
app_creation::setup_cc,
app_size_handler::AppSizeHandler,
args::Args,
column::Columns,
decks::{Decks, DecksCache, FALLBACK_PUBKEY},
draft::Drafts,
filter::FilterState,
frame_history::FrameHistory,
imgcache::ImageCache,
nav,
notecache::NoteCache,
notes_holder::NotesHolderStorage,
profile::Profile,
storage::{self, DataPath, DataPathType, Directory, FileKeyStorage, KeyStorageType},
subscriptions::{SubKind, Subscriptions},
support::Support,
thread::Thread,
timeline::{self, Timeline},
ui::{self, DesktopSidePanel},
unknowns::UnknownIds,
view_state::ViewState,
Result,
};
use enostr::{ClientMessage, Keypair, Pubkey, RelayEvent, RelayMessage, RelayPool};
use uuid::Uuid;
use egui::{Context, Frame, Style};
use egui_extras::{Size, StripBuilder};
use nostrdb::{Config, Ndb, Transaction};
use std::collections::HashMap;
use std::path::Path;
use std::time::Duration;
use tracing::{error, info, trace, warn};
/// App lifecycle state: one-time setup runs on the first frame while
/// still `Initializing`, then we stay `Initialized`.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum DamusState {
    Initializing,
    Initialized,
}
/// The columns application state: relay pool, local nostr database,
/// caches, accounts, and all per-deck/column UI state.
///
/// NOTE(review): the previous comment claimed Serialize/Deserialize
/// derives; persistence is actually handled explicitly (see `storage`
/// and `AppSizeHandler` usage below).
pub struct Damus {
    state: DamusState,

    pub note_cache: NoteCache,
    pub pool: RelayPool,

    pub decks_cache: DecksCache,
    pub ndb: Ndb,
    pub view_state: ViewState,
    pub unknown_ids: UnknownIds,
    pub drafts: Drafts,
    pub threads: NotesHolderStorage<Thread>,
    pub profiles: NotesHolderStorage<Profile>,
    pub img_cache: ImageCache,
    pub accounts: Accounts,
    pub subscriptions: Subscriptions,
    pub app_rect_handler: AppSizeHandler,
    pub support: Support,

    frame_history: crate::frame_history::FrameHistory,

    pub path: DataPath,
    // TODO: make these bitflags
    pub debug: bool,
    pub since_optimize: bool,
    pub textmode: bool,
}
/// Handle vim-style hjkl column-selection keys on this frame's input.
fn handle_key_events(input: &egui::InputState, _pixels_per_point: f32, columns: &mut Columns) {
    for event in &input.raw.events {
        if let egui::Event::Key {
            key, pressed: true, ..
        } = event
        {
            match key {
                egui::Key::J => {
                    columns.select_down();
                }
                egui::Key::K => {
                    columns.select_up();
                }
                egui::Key::H => {
                    columns.select_left();
                }
                egui::Key::L => {
                    // BUG FIX: this arm previously called select_left(),
                    // duplicating H; vim-style `l` moves the selection right.
                    columns.select_right();
                }
                _ => {}
            }
        }
    }
}
/// Per-frame event pump: handle key input, keep relay connections alive,
/// drain pending relay events, then poll ready timelines for new notes.
fn try_process_event(damus: &mut Damus, ctx: &egui::Context) -> Result<()> {
    let ppp = ctx.pixels_per_point();
    let current_columns = get_active_columns_mut(&damus.accounts, &mut damus.decks_cache);
    ctx.input(|i| handle_key_events(i, ppp, current_columns));

    let ctx2 = ctx.clone();
    let wakeup = move || {
        ctx2.request_repaint();
    };
    damus.pool.keepalive_ping(wakeup);

    // NOTE: we don't use the while let loop due to borrow issues
    #[allow(clippy::while_let_loop)]
    loop {
        let ev = if let Some(ev) = damus.pool.try_recv() {
            ev.into_owned()
        } else {
            break;
        };

        match (&ev.event).into() {
            RelayEvent::Opened => {
                // a relay came up: (re)send our account and timeline filters
                damus
                    .accounts
                    .send_initial_filters(&mut damus.pool, &ev.relay);

                timeline::send_initial_timeline_filters(
                    &damus.ndb,
                    damus.since_optimize,
                    get_active_columns_mut(&damus.accounts, &mut damus.decks_cache),
                    &mut damus.subscriptions,
                    &mut damus.pool,
                    &ev.relay,
                );
            }
            // TODO: handle reconnects
            RelayEvent::Closed => warn!("{} connection closed", &ev.relay),
            RelayEvent::Error(e) => error!("{}: {}", &ev.relay, e),
            RelayEvent::Other(msg) => trace!("other event {:?}", &msg),
            RelayEvent::Message(msg) => process_message(damus, &ev.relay, &msg),
        }
    }

    let current_columns = get_active_columns_mut(&damus.accounts, &mut damus.decks_cache);
    let n_timelines = current_columns.timelines().len();
    for timeline_ind in 0..n_timelines {
        let is_ready = {
            let timeline = &mut current_columns.timelines[timeline_ind];
            timeline::is_timeline_ready(
                &damus.ndb,
                &mut damus.pool,
                &mut damus.note_cache,
                timeline,
                &damus.accounts.mutefun(),
            )
        };

        if is_ready {
            let txn = Transaction::new(&damus.ndb).expect("txn");

            if let Err(err) = Timeline::poll_notes_into_view(
                timeline_ind,
                current_columns.timelines_mut(),
                &damus.ndb,
                &txn,
                &mut damus.unknown_ids,
                &mut damus.note_cache,
                &damus.accounts.mutefun(),
            ) {
                error!("poll_notes_into_view: {err}");
            }
        } else {
            // TODO: show loading?
        }
    }

    if damus.unknown_ids.ready_to_send() {
        unknown_id_send(damus);
    }

    Ok(())
}
/// Request all currently-unknown ids (profiles/notes) from the relays,
/// then clear the pending set.
fn unknown_id_send(damus: &mut Damus) {
    let filter = damus.unknown_ids.filter().expect("filter");
    info!(
        "Getting {} unknown ids from relays",
        damus.unknown_ids.ids().len()
    );
    damus.unknown_ids.clear();
    damus
        .pool
        .send(&ClientMessage::req("unknownids".to_string(), filter));
}
/// Enable puffin profiling scopes (compiled in only with the
/// "profiling" feature).
#[cfg(feature = "profiling")]
fn setup_profiling() {
    puffin::set_scopes_on(true); // tell puffin to collect data
}
/// Top-level per-frame update: refresh account relay/mute state, run
/// one-time initialization, process relay events, and persist app size.
fn update_damus(damus: &mut Damus, ctx: &egui::Context) {
    damus.accounts.update(&damus.ndb, &mut damus.pool, ctx); // update user relay and mute lists

    match damus.state {
        DamusState::Initializing => {
            #[cfg(feature = "profiling")]
            setup_profiling();

            damus.state = DamusState::Initialized;
            // this lets our eose handler know to close unknownids right away
            damus
                .subscriptions()
                .insert("unknownids".to_string(), SubKind::OneShot);
            if let Err(err) = timeline::setup_initial_nostrdb_subs(
                &damus.ndb,
                &mut damus.note_cache,
                &mut damus.decks_cache,
                &damus.accounts.mutefun(),
            ) {
                warn!("update_damus init: {err}");
            }
        }
        DamusState::Initialized => (),
    };

    if let Err(err) = try_process_event(damus, ctx) {
        error!("error processing event: {}", err);
    }

    damus.app_rect_handler.try_save_app_size(ctx);
}
/// Ingest a raw nostr event json string into the local database.
fn process_event(damus: &mut Damus, _subid: &str, event: &str) {
    #[cfg(feature = "profiling")]
    puffin::profile_function!();

    //info!("processing event {}", event);
    if damus.ndb.process_event(event).is_err() {
        error!("error processing event {}", event);
    }
}
/// Handle an EOSE (end of stored events) for subscription `subid` from
/// `relay_url`, dispatching on what kind of subscription it was.
fn handle_eose(damus: &mut Damus, subid: &str, relay_url: &str) -> Result<()> {
    let sub_kind = if let Some(sub_kind) = damus.subscriptions().get(subid) {
        sub_kind
    } else {
        let n_subids = damus.subscriptions().len();
        warn!(
            "got unknown eose subid {}, {} tracked subscriptions",
            subid, n_subids
        );
        return Ok(());
    };

    match *sub_kind {
        SubKind::Timeline(_) => {
            // eose on timeline? whatevs
        }
        SubKind::Initial => {
            let txn = Transaction::new(&damus.ndb)?;
            UnknownIds::update(
                &txn,
                &mut damus.unknown_ids,
                get_active_columns(&damus.accounts, &damus.decks_cache),
                &damus.ndb,
                &mut damus.note_cache,
            );
            // this is possible if this is the first time
            if damus.unknown_ids.ready_to_send() {
                unknown_id_send(damus);
            }
        }

        // oneshot subs just close when they're done
        SubKind::OneShot => {
            let msg = ClientMessage::close(subid.to_string());
            damus.pool.send_to(&msg, relay_url);
        }

        SubKind::FetchingContactList(timeline_uid) => {
            let timeline = if let Some(tl) =
                get_active_columns_mut(&damus.accounts, &mut damus.decks_cache)
                    .find_timeline_mut(timeline_uid)
            {
                tl
            } else {
                error!(
                    "timeline uid:{} not found for FetchingContactList",
                    timeline_uid
                );
                return Ok(());
            };

            let filter_state = timeline.filter.get(relay_url);

            // If this request was fetching a contact list, our filter
            // state should be "FetchingRemote". We look at the local
            // subscription for that filter state and get the subscription id
            let local_sub = if let FilterState::FetchingRemote(unisub) = filter_state {
                unisub.local
            } else {
                // TODO: we could have multiple contact list results, we need
                // to check to see if this one is newer and use that instead
                warn!(
                    "Expected timeline to have FetchingRemote state but was {:?}",
                    timeline.filter
                );
                return Ok(());
            };

            info!(
                "got contact list from {}, updating filter_state to got_remote",
                relay_url
            );

            // We take the subscription id and pass it to the new state of
            // "GotRemote". This will let future frames know that it can try
            // to look for the contact list in nostrdb.
            timeline
                .filter
                .set_relay_state(relay_url.to_string(), FilterState::got_remote(local_sub));
        }
    }

    Ok(())
}
/// Dispatch a single relay protocol message to the right handler.
fn process_message(damus: &mut Damus, relay: &str, msg: &RelayMessage) {
    match msg {
        RelayMessage::Event(subid, ev) => process_event(damus, subid, ev),
        RelayMessage::Eose(sid) => {
            if let Err(err) = handle_eose(damus, sid, relay) {
                error!("error handling eose: {}", err);
            }
        }
        RelayMessage::OK(cr) => info!("OK {:?}", cr),
        RelayMessage::Notice(msg) => warn!("Notice from {}: {}", relay, msg),
    }
}
/// Render the whole app, choosing the mobile or desktop layout based on
/// the available width.
fn render_damus(damus: &mut Damus, ctx: &Context) {
    let narrow = ui::is_narrow(ctx);
    if narrow {
        render_damus_mobile(ctx, damus);
    } else {
        render_damus_desktop(ctx, damus);
    }

    // keep polling relays at roughly 1fps even without user input
    ctx.request_repaint_after(Duration::from_secs(1));

    #[cfg(feature = "profiling")]
    puffin_egui::profiler_window(ctx);
}
/*
fn determine_key_storage_type() -> KeyStorageType {
#[cfg(target_os = "macos")]
{
KeyStorageType::MacOS
}
#[cfg(target_os = "linux")]
{
KeyStorageType::Linux
}
#[cfg(not(any(target_os = "macos", target_os = "linux")))]
{
KeyStorageType::None
}
}
*/
impl Damus {
    /// Called once before the first frame.
    ///
    /// Parses CLI args, sets up storage paths, the nostr database, key
    /// storage, accounts and the deck/column configuration.
    pub fn new<P: AsRef<Path>>(ctx: &egui::Context, data_path: P, args: Vec<String>) -> Self {
        // arg parsing
        let parsed_args = Args::parse(&args);
        let is_mobile = parsed_args.is_mobile.unwrap_or(ui::is_compiled_as_mobile());

        // Some people have been running notedeck in debug, let's catch that!
        if !cfg!(test) && cfg!(debug_assertions) && !parsed_args.debug {
            println!("--- WELCOME TO DAMUS NOTEDECK! ---");
            println!("It looks like are running notedeck in debug mode, unless you are a developer, this is not likely what you want.");
            println!("If you are a developer, run `cargo run -- --debug` to skip this message.");
            println!("For everyone else, try again with `cargo run --release`. Enjoy!");
            println!("---------------------------------");
            panic!();
        }

        setup_cc(ctx, is_mobile, parsed_args.light);

        // resolve data and database directories (CLI overrides win)
        let data_path = parsed_args
            .datapath
            .unwrap_or(data_path.as_ref().to_str().expect("db path ok").to_string());
        let path = DataPath::new(&data_path);
        let dbpath_str = parsed_args
            .dbpath
            .unwrap_or_else(|| path.path(DataPathType::Db).to_str().unwrap().to_string());

        let _ = std::fs::create_dir_all(&dbpath_str);

        let imgcache_dir = path.path(DataPathType::Cache).join(ImageCache::rel_dir());
        let _ = std::fs::create_dir_all(imgcache_dir.clone());

        // lmdb map size
        let mapsize = if cfg!(target_os = "windows") {
            // 16 Gib on windows because it actually creates the file
            1024usize * 1024usize * 1024usize * 16usize
        } else {
            // 1 TiB for everything else since its just virtually mapped
            1024usize * 1024usize * 1024usize * 1024usize
        };

        let config = Config::new().set_ingester_threads(4).set_mapsize(mapsize);

        let keystore = if parsed_args.use_keystore {
            let keys_path = path.path(DataPathType::Keys);
            let selected_key_path = path.path(DataPathType::SelectedKey);
            KeyStorageType::FileSystem(FileKeyStorage::new(
                Directory::new(keys_path),
                Directory::new(selected_key_path),
            ))
        } else {
            KeyStorageType::None
        };

        let mut accounts = Accounts::new(keystore, parsed_args.relays);

        let num_keys = parsed_args.keys.len();

        let mut unknown_ids = UnknownIds::default();
        let ndb = Ndb::new(&dbpath_str, &config).expect("ndb");

        // add any CLI-provided keys as accounts
        {
            let txn = Transaction::new(&ndb).expect("txn");
            for key in parsed_args.keys {
                info!("adding account: {}", key.pubkey);
                accounts
                    .add_account(key)
                    .process_action(&mut unknown_ids, &ndb, &txn);
            }
        }

        if num_keys != 0 {
            accounts.select_account(0);
        }

        // AccountManager will setup the pool on first update
        let pool = RelayPool::new();

        let account = accounts
            .get_selected_account()
            .as_ref()
            .map(|a| a.pubkey.bytes());

        // Deck configuration precedence:
        // CLI columns > saved decks cache > deprecated columns file > demo
        let decks_cache = if !parsed_args.columns.is_empty() {
            info!("DecksCache: loading from command line arguments");
            let mut columns: Columns = Columns::new();
            for col in parsed_args.columns {
                if let Some(timeline) = col.into_timeline(&ndb, account) {
                    columns.add_new_timeline_column(timeline);
                }
            }

            columns_to_decks_cache(columns, account)
        } else if let Some(decks_cache) = storage::load_decks_cache(&path, &ndb) {
            info!(
                "DecksCache: loading from disk {}",
                crate::storage::DECKS_CACHE_FILE
            );
            decks_cache
        } else if let Some(cols) = storage::deserialize_columns(&path, &ndb, account) {
            info!(
                "DecksCache: loading from disk at depreciated location {}",
                crate::storage::COLUMNS_FILE
            );
            columns_to_decks_cache(cols, account)
        } else {
            info!("DecksCache: creating new with demo configuration");
            let mut cache = DecksCache::new_with_demo_config(&ndb);
            for account in accounts.get_accounts() {
                cache.add_deck_default(account.pubkey);
            }
            set_demo(&mut cache, &ndb, &mut accounts, &mut unknown_ids);

            cache
        };

        let debug = parsed_args.debug;

        let app_rect_handler = AppSizeHandler::new(&path);
        let support = Support::new(&path);

        Self {
            pool,
            debug,
            unknown_ids,
            subscriptions: Subscriptions::default(),
            since_optimize: parsed_args.since_optimize,
            threads: NotesHolderStorage::default(),
            profiles: NotesHolderStorage::default(),
            drafts: Drafts::default(),
            state: DamusState::Initializing,
            img_cache: ImageCache::new(imgcache_dir),
            note_cache: NoteCache::default(),
            textmode: parsed_args.textmode,
            ndb,
            accounts,
            frame_history: FrameHistory::default(),
            view_state: ViewState::default(),
            path,
            app_rect_handler,
            support,
            decks_cache,
        }
    }
/// Mutable access to the relay connection pool.
pub fn pool_mut(&mut self) -> &mut RelayPool {
    &mut self.pool
}

/// The nostrdb handle.
pub fn ndb(&self) -> &Ndb {
    &self.ndb
}

/// Mutable access to in-progress note drafts.
pub fn drafts_mut(&mut self) -> &mut Drafts {
    &mut self.drafts
}

/// Mutable access to the image cache.
pub fn img_cache_mut(&mut self) -> &mut ImageCache {
    &mut self.img_cache
}

/// Account manager (read-only).
pub fn accounts(&self) -> &Accounts {
    &self.accounts
}

/// Account manager (mutable).
pub fn accounts_mut(&mut self) -> &mut Accounts {
    &mut self.accounts
}

/// Mutable access to transient view state.
pub fn view_state_mut(&mut self) -> &mut ViewState {
    &mut self.view_state
}

/// Columns of the currently active deck (mutable).
pub fn columns_mut(&mut self) -> &mut Columns {
    get_active_columns_mut(&self.accounts, &mut self.decks_cache)
}

/// Columns of the currently active deck (read-only).
pub fn columns(&self) -> &Columns {
    get_active_columns(&self.accounts, &self.decks_cache)
}

/// Generate a relay subscription id for `kind`.
///
/// In debug mode the id is the `Debug` rendering of `kind`, which makes
/// subscriptions easy to identify in logs; otherwise a random UUIDv4
/// string is used.
pub fn gen_subid(&self, kind: &SubKind) -> String {
    if self.debug {
        format!("{:?}", kind)
    } else {
        Uuid::new_v4().to_string()
    }
}
/// Build a minimal `Damus` instance for tests: no keystore, no accounts,
/// no relays, default decks, and an ndb opened under `data_path`.
pub fn mock<P: AsRef<Path>>(data_path: P) -> Self {
    let path = DataPath::new(&data_path);

    // Make sure the image cache directory exists before handing it to the cache.
    let img_cache_dir = path.path(DataPathType::Cache).join(ImageCache::rel_dir());
    let _ = std::fs::create_dir_all(img_cache_dir.clone());

    // Open the database with a small ingester pool; tests don't need throughput.
    let config = Config::new().set_ingester_threads(2);
    let ndb = Ndb::new(
        path.path(DataPathType::Db)
            .to_str()
            .expect("db path should be ok"),
        &config,
    )
    .expect("ndb");

    // These borrow `path`, so build them before `path` is moved into Self.
    let app_rect_handler = AppSizeHandler::new(&path);
    let support = Support::new(&path);

    Self {
        debug: true,
        unknown_ids: UnknownIds::default(),
        subscriptions: Subscriptions::default(),
        since_optimize: true,
        threads: NotesHolderStorage::default(),
        profiles: NotesHolderStorage::default(),
        drafts: Drafts::default(),
        state: DamusState::Initializing,
        pool: RelayPool::new(),
        img_cache: ImageCache::new(img_cache_dir),
        note_cache: NoteCache::default(),
        textmode: false,
        ndb,
        accounts: Accounts::new(KeyStorageType::None, vec![]),
        frame_history: FrameHistory::default(),
        view_state: ViewState::default(),
        path,
        app_rect_handler,
        support,
        decks_cache: DecksCache::default(),
    }
}
/// Mutable access to the live subscription-id → kind map.
pub fn subscriptions(&mut self) -> &mut HashMap<String, SubKind> {
    &mut self.subscriptions.subs
}

/// Note cache (mutable).
pub fn note_cache_mut(&mut self) -> &mut NoteCache {
    &mut self.note_cache
}

/// Tracker of ids we've seen but don't yet have data for (mutable).
pub fn unknown_ids_mut(&mut self) -> &mut UnknownIds {
    &mut self.unknown_ids
}

/// Thread note storage (read-only).
pub fn threads(&self) -> &NotesHolderStorage<Thread> {
    &self.threads
}

/// Thread note storage (mutable).
pub fn threads_mut(&mut self) -> &mut NotesHolderStorage<Thread> {
    &mut self.threads
}

/// Note cache (read-only).
pub fn note_cache(&self) -> &NoteCache {
    &self.note_cache
}
}
/*
fn circle_icon(ui: &mut egui::Ui, openness: f32, response: &egui::Response) {
let stroke = ui.style().interact(&response).fg_stroke;
let radius = egui::lerp(2.0..=3.0, openness);
ui.painter()
.circle_filled(response.rect.center(), radius, stroke.color);
}
*/
/// Render the single-column mobile layout; persist the deck layout if the
/// rendered nav reports a change.
fn render_damus_mobile(ctx: &egui::Context, app: &mut Damus) {
    #[cfg(feature = "profiling")]
    puffin::profile_function!();

    main_panel(&ctx.style(), ui::is_narrow(ctx)).show(ctx, |ui| {
        let has_columns = !app.columns().columns().is_empty();
        if has_columns && nav::render_nav(0, app, ui).process_render_nav_response(app) {
            storage::save_decks_cache(&app.path, &app.decks_cache);
        }
    });
}
/// Build the central panel frame; narrow (mobile) layouts reserve 50px at
/// the top, everything else uses zero margins.
fn main_panel(style: &Style, narrow: bool) -> egui::CentralPanel {
    let top_margin = if narrow { 50.0 } else { 0.0 };

    egui::CentralPanel::default().frame(Frame {
        inner_margin: egui::Margin {
            top: top_margin,
            left: 0.0,
            right: 0.0,
            bottom: 0.0,
        },
        fill: style.visuals.panel_fill,
        ..Default::default()
    })
}
/// Render the desktop layout. When the columns no longer fit at the minimum
/// width, each panel is fixed-size inside a horizontal scroll area;
/// otherwise panels share the remaining width evenly.
fn render_damus_desktop(ctx: &egui::Context, app: &mut Damus) {
    #[cfg(feature = "profiling")]
    puffin::profile_function!();

    let min_width = 320.0;
    let num_cols = get_active_columns(&app.accounts, &app.decks_cache).num_columns() as f32;
    let calc_panel_width = (ctx.screen_rect().width() / num_cols) - 30.0;

    let need_scroll = calc_panel_width < min_width;
    let panel_sizes = if need_scroll {
        Size::exact(min_width)
    } else {
        Size::remainder()
    };

    main_panel(&ctx.style(), ui::is_narrow(ctx)).show(ctx, |ui| {
        ui.spacing_mut().item_spacing.x = 0.0;
        if need_scroll {
            egui::ScrollArea::horizontal().show(ui, |ui| {
                timelines_view(ui, panel_sizes, app);
            });
        } else {
            timelines_view(ui, panel_sizes, app);
        }
    });
}
/// Render the desktop strip layout: one cell for the side panel, then one
/// cell per column. Side-panel and nav responses are collected during
/// rendering and applied afterwards (they need `&mut app`, which the cell
/// closures are still borrowing); the deck layout is saved to disk if any
/// action reported a change.
fn timelines_view(ui: &mut egui::Ui, sizes: Size, app: &mut Damus) {
    StripBuilder::new(ui)
        .size(Size::exact(ui::side_panel::SIDE_PANEL_WIDTH))
        .sizes(
            sizes,
            get_active_columns(&app.accounts, &app.decks_cache).num_columns(),
        )
        .clip(true)
        .horizontal(|mut strip| {
            // Deferred action from clicking the side panel.
            let mut side_panel_action: Option<nav::SwitchingAction> = None;
            strip.cell(|ui| {
                let rect = ui.available_rect_before_wrap();
                let side_panel = DesktopSidePanel::new(
                    &app.ndb,
                    &mut app.img_cache,
                    app.accounts.get_selected_account(),
                    &app.decks_cache,
                )
                .show(ui);

                // Both primary and secondary clicks activate a panel entry.
                if side_panel.response.clicked() || side_panel.response.secondary_clicked() {
                    if let Some(action) = DesktopSidePanel::perform_action(
                        &mut app.decks_cache,
                        &app.accounts,
                        &mut app.support,
                        side_panel.action,
                    ) {
                        side_panel_action = Some(action);
                    }
                }

                // vertical sidebar line
                ui.painter().vline(
                    rect.right(),
                    rect.y_range(),
                    ui.visuals().widgets.noninteractive.bg_stroke,
                );
            });

            let mut save_cols = false;
            if let Some(action) = side_panel_action {
                save_cols = save_cols || action.process(app);
            }

            let num_cols = app.columns().num_columns();
            let mut responses = Vec::with_capacity(num_cols);
            for col_index in 0..num_cols {
                strip.cell(|ui| {
                    let rect = ui.available_rect_before_wrap();
                    responses.push(nav::render_nav(col_index, app, ui));

                    // vertical line
                    ui.painter().vline(
                        rect.right(),
                        rect.y_range(),
                        ui.visuals().widgets.noninteractive.bg_stroke,
                    );
                });

                //strip.cell(|ui| timeline::timeline_view(ui, app, timeline_ind));
            }

            // Process nav responses only after all columns rendered.
            for response in responses {
                let save = response.process_render_nav_response(app);
                save_cols = save_cols || save;
            }

            if save_cols {
                storage::save_decks_cache(&app.path, &app.decks_cache);
            }
        });
}
impl eframe::App for Damus {
    /// Called by the frame work to save state before shutdown.
    /// Currently a no-op: state persistence is handled explicitly elsewhere
    /// (decks cache, app size) rather than via eframe storage.
    fn save(&mut self, _storage: &mut dyn eframe::Storage) {
        //eframe::set_value(storage, eframe::APP_KEY, self);
    }

    /// Called each time the UI needs repainting, which may be many times per second.
    /// Put your widgets into a `SidePanel`, `TopPanel`, `CentralPanel`, `Window` or `Area`.
    fn update(&mut self, ctx: &egui::Context, frame: &mut eframe::Frame) {
        // Frame timing is recorded before doing any work this frame.
        self.frame_history
            .on_new_frame(ctx.input(|i| i.time), frame.info().cpu_usage);

        #[cfg(feature = "profiling")]
        puffin::GlobalProfiler::lock().new_frame();

        // Process state/network first, then draw.
        update_damus(self, ctx);
        render_damus(self, ctx);
    }
}
/// Columns of the active deck for the currently selected account.
pub fn get_active_columns<'a>(accounts: &Accounts, decks_cache: &'a DecksCache) -> &'a Columns {
    let decks = get_decks(accounts, decks_cache);
    decks.active().columns()
}
/// Decks for the selected account, falling back to the cache's fallback
/// pubkey when no account is selected.
pub fn get_decks<'a>(accounts: &Accounts, decks_cache: &'a DecksCache) -> &'a Decks {
    let key = match accounts.get_selected_account() {
        Some(acc) => &acc.pubkey,
        None => decks_cache.get_fallback_pubkey(),
    };
    decks_cache.decks(key)
}
/// Mutable columns of the active deck for the currently selected account.
pub fn get_active_columns_mut<'a>(
    accounts: &Accounts,
    decks_cache: &'a mut DecksCache,
) -> &'a mut Columns {
    let decks = get_decks_mut(accounts, decks_cache);
    decks.active_mut().columns_mut()
}
/// Mutable decks for the selected account, or the fallback decks when no
/// account is selected.
pub fn get_decks_mut<'a>(accounts: &Accounts, decks_cache: &'a mut DecksCache) -> &'a mut Decks {
    match accounts.get_selected_account() {
        Some(acc) => decks_cache.decks_mut(&acc.pubkey),
        None => decks_cache.fallback_mut(),
    }
}
/// Set up the demo configuration: register the cache's fallback pubkey as a
/// pubkey-only (read-only) account and select it.
pub fn set_demo(
    decks_cache: &mut DecksCache,
    ndb: &Ndb,
    accounts: &mut Accounts,
    unk_ids: &mut UnknownIds,
) {
    let txn = Transaction::new(ndb).expect("txn");
    accounts
        .add_account(Keypair::only_pubkey(*decks_cache.get_fallback_pubkey()))
        .process_action(unk_ids, ndb, &txn);
    // Select the account just added — presumably appended at the end.
    // TODO(review): confirm add_account always appends.
    accounts.select_account(accounts.num_accounts() - 1);
}
fn columns_to_decks_cache(cols: Columns, key: Option<&[u8; 32]>) -> DecksCache {
let mut account_to_decks: HashMap<Pubkey, Decks> = Default::default();
let decks = Decks::new(crate::decks::Deck::new_with_columns(
crate::decks::Deck::default().icon,
"My Deck".to_owned(),
cols,
));
let account = if let Some(key) = key {
Pubkey::new(*key)
} else {
FALLBACK_PUBKEY()
};
account_to_decks.insert(account, decks);
DecksCache::new(account_to_decks)
}

View File

@@ -0,0 +1,83 @@
use crate::{
app_size_handler::AppSizeHandler,
app_style::{add_custom_style, dark_mode, light_mode},
fonts::setup_fonts,
storage::DataPath,
};
use eframe::NativeOptions;
//pub const UI_SCALE_FACTOR: f32 = 0.2;
/// Build eframe native options: frameless fullsize window, app icon, and the
/// last saved window size (if one was persisted).
pub fn generate_native_options(paths: DataPath) -> NativeOptions {
    // Decode the PNG app icon once and share the Arc; previously the icon
    // was decoded twice (once for the window builder, once for the viewport).
    let icon = std::sync::Arc::new(eframe::icon_data::from_png_bytes(app_icon()).expect("icon"));
    let viewport_icon = icon.clone();

    let window_builder = Box::new(move |builder: egui::ViewportBuilder| {
        let builder = builder
            .with_fullsize_content_view(true)
            .with_titlebar_shown(false)
            .with_title_shown(false)
            .with_icon(icon.clone());

        // Restore the persisted window size when available.
        if let Some(window_size) = AppSizeHandler::new(&paths).get_app_size() {
            builder.with_inner_size(window_size)
        } else {
            builder
        }
    });

    eframe::NativeOptions {
        window_builder: Some(window_builder),
        viewport: egui::ViewportBuilder::default().with_icon(viewport_icon),
        ..Default::default()
    }
}
/// Build native options whose viewport builder is post-processed by the
/// given modifier function.
fn generate_native_options_with_builder_modifiers(
    apply_builder_modifiers: fn(egui::ViewportBuilder) -> egui::ViewportBuilder,
) -> NativeOptions {
    // A plain fn pointer already implements the builder-hook closure trait,
    // so it can be boxed directly.
    eframe::NativeOptions {
        window_builder: Some(Box::new(apply_builder_modifiers)),
        ..Default::default()
    }
}
/// The app icon PNG, embedded at compile time.
/// The array length is the exact byte size of `assets/damus-app-icon.png`;
/// `include_bytes!` fails to compile if the file size changes.
pub fn app_icon() -> &'static [u8; 271986] {
    std::include_bytes!("../assets/damus-app-icon.png")
}
/// Native options for the desktop "mobile emulator" mode: a frameless
/// 405x915 window (portrait phone aspect) with the app icon.
pub fn generate_mobile_emulator_native_options() -> eframe::NativeOptions {
    generate_native_options_with_builder_modifiers(|builder| {
        builder
            .with_fullsize_content_view(true)
            .with_titlebar_shown(false)
            .with_title_shown(false)
            .with_inner_size([405.0, 915.0])
            .with_icon(eframe::icon_data::from_png_bytes(app_icon()).expect("icon"))
    })
}
/// One-time egui context setup: fonts, image loaders, theme visuals, and
/// the custom text styles.
pub fn setup_cc(ctx: &egui::Context, is_mobile: bool, light: bool) {
    setup_fonts(ctx);
    egui_extras::install_image_loaders(ctx);

    let visuals = if light {
        light_mode()
    } else {
        dark_mode(is_mobile)
    };
    ctx.set_visuals(visuals);

    ctx.all_styles_mut(|style| add_custom_style(is_mobile, style));
}

View File

@@ -0,0 +1,85 @@
use std::time::{Duration, Instant};
use egui::Context;
use tracing::info;
use crate::storage::{write_file, DataPath, DataPathType, Directory};
/// Persists the application's window size to disk (debounced) and restores
/// it on startup.
pub struct AppSizeHandler {
    /// Settings directory the size file lives in.
    directory: Directory,
    /// Last size written this session; acts as an in-memory cache.
    saved_size: Option<egui::Vec2>,
    /// Time of the last write, used to debounce disk I/O.
    last_saved: Instant,
}

/// File (inside the settings directory) the size is serialized to as JSON.
static FILE_NAME: &str = "app_size.json";
/// Minimum interval between size writes.
static DELAY: Duration = Duration::from_millis(500);
impl AppSizeHandler {
    /// Create a handler writing into the settings directory under `path`.
    /// `last_saved` is backdated by `DELAY` so the first save is immediate.
    pub fn new(path: &DataPath) -> Self {
        Self {
            directory: Directory::new(path.path(DataPathType::Setting)),
            saved_size: None,
            last_saved: Instant::now() - DELAY,
        }
    }

    /// Persist the current window size if it changed.
    ///
    /// There doesn't seem to be a way to check if the user is resizing the
    /// window, so we only write at most once per `DELAY` to avoid spamming io.
    pub fn try_save_app_size(&mut self, ctx: &Context) {
        if self.last_saved.elapsed() < DELAY {
            return;
        }
        internal_try_save_app_size(&self.directory, &mut self.saved_size, ctx);
        self.last_saved = Instant::now();
    }

    /// The persisted window size: the in-memory copy when present,
    /// otherwise parsed from disk; `None` if neither is available.
    pub fn get_app_size(&self) -> Option<egui::Vec2> {
        if let Some(size) = self.saved_size {
            return Some(size);
        }
        match self.directory.get_file(FILE_NAME.to_owned()) {
            Ok(contents) => serde_json::from_str::<egui::Vec2>(&contents).ok(),
            Err(_) => {
                info!("Could not find {}", FILE_NAME);
                None
            }
        }
    }
}
/// Save the current screen size unless it matches the size we last saved.
fn internal_try_save_app_size(
    interactor: &Directory,
    maybe_saved_size: &mut Option<egui::Vec2>,
    ctx: &Context,
) {
    let cur_size = ctx.input(|i| i.screen_rect.size());

    // Skip the write only when we already saved exactly this size.
    let unchanged = matches!(maybe_saved_size, Some(saved) if *saved == cur_size);
    if !unchanged {
        try_save_size(interactor, cur_size, maybe_saved_size);
    }
}
/// Serialize `cur_size` as JSON and write it to the size file; on success,
/// remember it in `maybe_saved_size`. Failures are silently ignored.
fn try_save_size(
    interactor: &Directory,
    cur_size: egui::Vec2,
    maybe_saved_size: &mut Option<egui::Vec2>,
) {
    let serialized_rect = match serde_json::to_string(&cur_size) {
        Ok(json) => json,
        Err(_) => return,
    };

    if write_file(&interactor.file_path, FILE_NAME.to_owned(), &serialized_rect).is_ok() {
        info!("wrote size {}", cur_size);
        *maybe_saved_size = Some(cur_size);
    }
}

View File

@@ -0,0 +1,240 @@
use crate::{
colors::{desktop_dark_color_theme, light_color_theme, mobile_dark_color_theme, ColorTheme},
fonts::NamedFontFamily,
ui::is_narrow,
};
use egui::{
epaint::Shadow,
style::{Interaction, Selection, WidgetVisuals, Widgets},
Button, FontFamily, FontId, Rounding, Stroke, Style, TextStyle, Ui, Visuals,
};
use strum::IntoEnumIterator;
use strum_macros::EnumIter;
/// Corner rounding applied to widget visuals across themes.
const WIDGET_ROUNDING: Rounding = Rounding::same(8.0);

/// Visuals for the light theme.
pub fn light_mode() -> Visuals {
    create_themed_visuals(light_color_theme(), Visuals::light())
}

/// Visuals for the dark theme; mobile uses its own darker palette.
pub fn dark_mode(mobile: bool) -> Visuals {
    create_themed_visuals(
        if mobile {
            mobile_dark_color_theme()
        } else {
            desktop_dark_color_theme()
        },
        Visuals::dark(),
    )
}
pub fn user_requested_visuals_change(
oled: bool,
cur_darkmode: bool,
ui: &mut Ui,
) -> Option<Visuals> {
if cur_darkmode {
if ui
.add(Button::new("").frame(false))
.on_hover_text("Switch to light mode")
.clicked()
{
return Some(light_mode());
}
} else if ui
.add(Button::new("🌙").frame(false))
.on_hover_text("Switch to dark mode")
.clicked()
{
return Some(dark_mode(oled));
}
None
}
/// Create custom text sizes for any FontSizes
pub fn add_custom_style(is_mobile: bool, style: &mut Style) {
let font_size = if is_mobile {
mobile_font_size
} else {
desktop_font_size
};
style.text_styles = NotedeckTextStyle::iter()
.map(|text_style| {
(
text_style.text_style(),
FontId::new(font_size(&text_style), text_style.font_family()),
)
})
.collect();
style.interaction = Interaction {
tooltip_delay: 0.1,
show_tooltips_only_when_still: false,
..Interaction::default()
};
#[cfg(debug_assertions)]
{
style.debug.show_interactive_widgets = true;
style.debug.debug_on_hover_with_all_modifiers = true;
}
}
/// Font size (in points) for each text style on desktop.
pub fn desktop_font_size(text_style: &NotedeckTextStyle) -> f32 {
    match text_style {
        NotedeckTextStyle::Heading => 48.0,
        NotedeckTextStyle::Heading2 => 24.0,
        NotedeckTextStyle::Heading3 => 20.0,
        NotedeckTextStyle::Heading4 => 14.0,
        NotedeckTextStyle::Body => 16.0,
        NotedeckTextStyle::Monospace | NotedeckTextStyle::Button => 13.0,
        NotedeckTextStyle::Small => 12.0,
        NotedeckTextStyle::Tiny => 10.0,
    }
}
/// Font size (in points) for each text style on mobile.
// TODO: tweak text sizes for optimal mobile viewing
pub fn mobile_font_size(text_style: &NotedeckTextStyle) -> f32 {
    match text_style {
        NotedeckTextStyle::Heading => 48.0,
        NotedeckTextStyle::Heading2 => 24.0,
        NotedeckTextStyle::Heading3 => 20.0,
        NotedeckTextStyle::Heading4 => 14.0,
        NotedeckTextStyle::Body
        | NotedeckTextStyle::Monospace
        | NotedeckTextStyle::Button => 13.0,
        NotedeckTextStyle::Small => 12.0,
        NotedeckTextStyle::Tiny => 10.0,
    }
}
/// Font size for `text_style`, selected by the context's current width
/// (narrow contexts get the mobile sizes).
pub fn get_font_size(ctx: &egui::Context, text_style: &NotedeckTextStyle) -> f32 {
    match is_narrow(ctx) {
        true => mobile_font_size(text_style),
        false => desktop_font_size(text_style),
    }
}
/// The text styles used throughout notedeck, mapped onto egui text styles
/// (see `text_style`) and fonts (see `font_family`).
#[derive(Copy, Clone, Eq, PartialEq, Debug, EnumIter)]
pub enum NotedeckTextStyle {
    Heading,
    Heading2,
    Heading3,
    Heading4,
    Body,
    Monospace,
    Button,
    Small,
    Tiny,
}
impl NotedeckTextStyle {
    /// The corresponding egui `TextStyle`; custom sizes use named styles.
    pub fn text_style(&self) -> TextStyle {
        match *self {
            Self::Heading => TextStyle::Heading,
            Self::Heading2 => TextStyle::Name("Heading2".into()),
            Self::Heading3 => TextStyle::Name("Heading3".into()),
            Self::Heading4 => TextStyle::Name("Heading4".into()),
            Self::Body => TextStyle::Body,
            Self::Monospace => TextStyle::Monospace,
            Self::Button => TextStyle::Button,
            Self::Small => TextStyle::Small,
            Self::Tiny => TextStyle::Name("Tiny".into()),
        }
    }

    /// The font family for this style: monospace for `Monospace`,
    /// proportional for everything else.
    pub fn font_family(&self) -> FontFamily {
        match *self {
            Self::Monospace => FontFamily::Monospace,
            Self::Heading
            | Self::Heading2
            | Self::Heading3
            | Self::Heading4
            | Self::Body
            | Self::Button
            | Self::Small
            | Self::Tiny => FontFamily::Proportional,
        }
    }
}
/// Build egui `Visuals` from a `ColorTheme`, starting from `default`
/// (egui's light or dark visuals) and overriding colors, rounding, and the
/// window shadow/stroke. Fields not covered by the theme fall through to
/// `default` via struct update syntax.
pub fn create_themed_visuals(theme: ColorTheme, default: Visuals) -> Visuals {
    Visuals {
        hyperlink_color: theme.hyperlink_color,
        override_text_color: Some(theme.text_color),
        panel_fill: theme.panel_fill,
        selection: Selection {
            bg_fill: theme.selection_color,
            stroke: Stroke {
                width: 1.0,
                color: theme.selection_color,
            },
        },
        warn_fg_color: theme.warn_fg_color,
        widgets: Widgets {
            noninteractive: WidgetVisuals {
                bg_fill: theme.noninteractive_bg_fill,
                weak_bg_fill: theme.noninteractive_weak_bg_fill,
                bg_stroke: Stroke {
                    width: 1.0,
                    color: theme.noninteractive_bg_stroke_color,
                },
                fg_stroke: Stroke {
                    width: 1.0,
                    color: theme.noninteractive_fg_stroke_color,
                },
                rounding: WIDGET_ROUNDING,
                ..default.widgets.noninteractive
            },
            inactive: WidgetVisuals {
                bg_fill: theme.inactive_bg_fill,
                weak_bg_fill: theme.inactive_weak_bg_fill,
                bg_stroke: Stroke {
                    width: 1.0,
                    color: theme.inactive_bg_stroke_color,
                },
                rounding: WIDGET_ROUNDING,
                ..default.widgets.inactive
            },
            hovered: WidgetVisuals {
                rounding: WIDGET_ROUNDING,
                ..default.widgets.hovered
            },
            active: WidgetVisuals {
                rounding: WIDGET_ROUNDING,
                ..default.widgets.active
            },
            open: WidgetVisuals {
                ..default.widgets.open
            },
        },
        extreme_bg_color: theme.extreme_bg_color,
        error_fg_color: theme.err_fg_color,
        window_rounding: Rounding::same(8.0),
        window_fill: theme.window_fill,
        // Soft drop shadow under floating windows.
        window_shadow: Shadow {
            offset: [0.0, 8.0].into(),
            blur: 24.0,
            spread: 0.0,
            color: egui::Color32::from_rgba_unmultiplied(0x6D, 0x6D, 0x6D, 0x14),
        },
        window_stroke: Stroke {
            width: 1.0,
            color: theme.window_stroke_color,
        },
        image_loading_spinners: false,
        ..default
    }
}
/// Default pixel size for deck icons.
pub static DECK_ICON_SIZE: f32 = 24.0;

/// `FontId` for rendering a deck icon glyph at `size` in the emoji font.
pub fn deck_icon_font_sized(size: f32) -> FontId {
    egui::FontId::new(size, emoji_font_family())
}

/// The named emoji font family registered during font setup.
pub fn emoji_font_family() -> FontFamily {
    egui::FontFamily::Name(NamedFontFamily::Emoji.as_str().into())
}

View File

@@ -0,0 +1,323 @@
use crate::filter::FilterState;
use crate::timeline::{PubkeySource, Timeline, TimelineKind};
use enostr::{Filter, Keypair, Pubkey, SecretKey};
use nostrdb::Ndb;
use tracing::{debug, error, info};
/// Parsed command-line options for the columns app. See `Args::parse` for
/// the flags that populate each field.
pub struct Args {
    /// Columns requested via `-c`/`--column`/`--filter`/`--filter-file`.
    pub columns: Vec<ArgColumn>,
    /// Relay urls from `-r`/`--relay`.
    pub relays: Vec<String>,
    /// `Some(true)` when `--mobile` was passed; `None` means auto-detect.
    pub is_mobile: Option<bool>,
    /// Keypairs from `--pub`/`--npub` (pubkey-only) and `--sec`/`--nsec`.
    pub keys: Vec<Keypair>,
    /// Disabled by `--no-since-optimize`.
    pub since_optimize: bool,
    /// `--light` / `--dark` theme selection.
    pub light: bool,
    pub debug: bool,
    pub textmode: bool,
    /// Disabled by `--no-keystore`.
    pub use_keystore: bool,
    /// Database directory override (`--dbpath`).
    pub dbpath: Option<String>,
    /// Data directory override (`--datapath`).
    pub datapath: Option<String>,
}
impl Args {
pub fn parse(args: &[String]) -> Self {
let mut res = Args {
columns: vec![],
relays: vec![],
is_mobile: None,
keys: vec![],
light: false,
since_optimize: true,
debug: false,
textmode: false,
use_keystore: true,
dbpath: None,
datapath: None,
};
let mut i = 0;
let len = args.len();
while i < len {
let arg = &args[i];
if arg == "--mobile" {
res.is_mobile = Some(true);
} else if arg == "--light" {
res.light = true;
} else if arg == "--dark" {
res.light = false;
} else if arg == "--debug" {
res.debug = true;
} else if arg == "--textmode" {
res.textmode = true;
} else if arg == "--pub" || arg == "--npub" {
i += 1;
let pubstr = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("sec argument missing?");
continue;
};
if let Ok(pk) = Pubkey::parse(pubstr) {
res.keys.push(Keypair::only_pubkey(pk));
} else {
error!(
"failed to parse {} argument. Make sure to use hex or npub.",
arg
);
}
} else if arg == "--sec" || arg == "--nsec" {
i += 1;
let secstr = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("sec argument missing?");
continue;
};
if let Ok(sec) = SecretKey::parse(secstr) {
res.keys.push(Keypair::from_secret(sec));
} else {
error!(
"failed to parse {} argument. Make sure to use hex or nsec.",
arg
);
}
} else if arg == "--no-since-optimize" {
res.since_optimize = false;
} else if arg == "--filter" {
i += 1;
let filter = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("filter argument missing?");
continue;
};
if let Ok(filter) = Filter::from_json(filter) {
res.columns.push(ArgColumn::Generic(vec![filter]));
} else {
error!("failed to parse filter '{}'", filter);
}
} else if arg == "--dbpath" {
i += 1;
let path = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("dbpath argument missing?");
continue;
};
res.dbpath = Some(path.clone());
} else if arg == "--datapath" {
i += 1;
let path = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("datapath argument missing?");
continue;
};
res.datapath = Some(path.clone());
} else if arg == "-r" || arg == "--relay" {
i += 1;
let relay = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("relay argument missing?");
continue;
};
res.relays.push(relay.clone());
} else if arg == "--column" || arg == "-c" {
i += 1;
let column_name = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("column argument missing");
continue;
};
if let Some(rest) = column_name.strip_prefix("contacts:") {
if let Ok(pubkey) = Pubkey::parse(rest) {
info!("contact column for user {}", pubkey.hex());
res.columns
.push(ArgColumn::Timeline(TimelineKind::contact_list(
PubkeySource::Explicit(pubkey),
)))
} else {
error!("error parsing contacts pubkey {}", rest);
continue;
}
} else if column_name == "contacts" {
res.columns
.push(ArgColumn::Timeline(TimelineKind::contact_list(
PubkeySource::DeckAuthor,
)))
} else if let Some(notif_pk_str) = column_name.strip_prefix("notifications:") {
if let Ok(pubkey) = Pubkey::parse(notif_pk_str) {
info!("got notifications column for user {}", pubkey.hex());
res.columns
.push(ArgColumn::Timeline(TimelineKind::notifications(
PubkeySource::Explicit(pubkey),
)))
} else {
error!("error parsing notifications pubkey {}", notif_pk_str);
continue;
}
} else if column_name == "notifications" {
debug!("got notification column for default user");
res.columns
.push(ArgColumn::Timeline(TimelineKind::notifications(
PubkeySource::DeckAuthor,
)))
} else if column_name == "profile" {
debug!("got profile column for default user");
res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
PubkeySource::DeckAuthor,
)))
} else if column_name == "universe" {
debug!("got universe column");
res.columns
.push(ArgColumn::Timeline(TimelineKind::Universe))
} else if let Some(profile_pk_str) = column_name.strip_prefix("profile:") {
if let Ok(pubkey) = Pubkey::parse(profile_pk_str) {
info!("got profile column for user {}", pubkey.hex());
res.columns.push(ArgColumn::Timeline(TimelineKind::profile(
PubkeySource::Explicit(pubkey),
)))
} else {
error!("error parsing profile pubkey {}", profile_pk_str);
continue;
}
}
} else if arg == "--filter-file" || arg == "-f" {
i += 1;
let filter_file = if let Some(next_arg) = args.get(i) {
next_arg
} else {
error!("filter file argument missing?");
continue;
};
let data = if let Ok(data) = std::fs::read(filter_file) {
data
} else {
error!("failed to read filter file '{}'", filter_file);
continue;
};
if let Some(filter) = std::str::from_utf8(&data)
.ok()
.and_then(|s| Filter::from_json(s).ok())
{
res.columns.push(ArgColumn::Generic(vec![filter]));
} else {
error!("failed to parse filter in '{}'", filter_file);
}
} else if arg == "--no-keystore" {
res.use_keystore = false;
}
i += 1;
}
res
}
}
/// A way to define columns from the commandline. Can be column kinds or
/// generic queries
#[derive(Debug)]
pub enum ArgColumn {
    /// A named timeline kind (contacts, notifications, profile, universe).
    Timeline(TimelineKind),
    /// A raw filter query (from `--filter` / `--filter-file`).
    Generic(Vec<Filter>),
}
impl ArgColumn {
    /// Convert this commandline column spec into a `Timeline`.
    ///
    /// Generic filters are always ready; timeline kinds may need `user`
    /// (the selected account's pubkey) and can fail, returning `None`.
    pub fn into_timeline(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Timeline> {
        match self {
            ArgColumn::Generic(filters) => Some(Timeline::new(
                TimelineKind::Generic,
                FilterState::ready(filters),
            )),
            ArgColumn::Timeline(tk) => tk.into_timeline(ndb, user),
        }
    }
}
#[cfg(test)]
mod tests {
    use crate::app::Damus;
    use std::path::{Path, PathBuf};

    /// Create a unique temporary directory for test data.
    fn create_tmp_dir() -> PathBuf {
        tempfile::TempDir::new()
            .expect("tmp path")
            .path()
            .to_path_buf()
    }

    /// Best-effort recursive removal of a test directory.
    fn rmrf(path: impl AsRef<Path>) {
        let _ = std::fs::remove_dir_all(path);
    }

    /// Ensure dbpath actually sets the dbpath correctly.
    #[tokio::test]
    async fn test_dbpath() {
        let datapath = create_tmp_dir();
        let dbpath = create_tmp_dir();
        let args: Vec<String> = [
            "--datapath",
            datapath.to_str().unwrap(),
            "--dbpath",
            dbpath.to_str().unwrap(),
        ]
        .iter()
        .map(|s| s.to_string())
        .collect();

        let ctx = egui::Context::default();
        let _app = Damus::new(&ctx, &datapath, args);

        // LMDB files should land in dbpath, not under datapath/db.
        assert!(dbpath.join("data.mdb").exists());
        assert!(dbpath.join("lock.mdb").exists());
        assert!(!datapath.join("db").exists());

        rmrf(datapath);
        rmrf(dbpath);
    }

    #[tokio::test]
    async fn test_column_args() {
        let tmpdir = create_tmp_dir();
        let npub = "npub1xtscya34g58tk0z605fvr788k263gsu6cy9x0mhnm87echrgufzsevkk5s";
        let args: Vec<String> = [
            "--no-keystore",
            "--pub",
            npub,
            "-c",
            "notifications",
            "-c",
            "contacts",
        ]
        .iter()
        .map(|s| s.to_string())
        .collect();

        let ctx = egui::Context::default();
        let app = Damus::new(&ctx, &tmpdir, args);
        assert_eq!(app.columns().columns().len(), 2);

        // Both columns should be routed to concrete timelines, in the order
        // they were given on the commandline.
        let tl1 = app.columns().column(0).router().top().timeline_id();
        let tl2 = app.columns().column(1).router().top().timeline_id();
        assert!(tl1.is_some());
        assert!(tl2.is_some());

        let timelines = app.columns().timelines();
        assert!(timelines[0].kind.is_notifications());
        assert!(timelines[1].kind.is_contacts());

        rmrf(tmpdir);
    }
}

View File

@@ -0,0 +1,115 @@
use egui::Color32;
// Brand / accent colors
pub const PURPLE: Color32 = Color32::from_rgb(0xCC, 0x43, 0xC5);
const PURPLE_ALT: Color32 = Color32::from_rgb(0x82, 0x56, 0xDD);
// TODO: This should not be exposed publicly
pub const PINK: Color32 = Color32::from_rgb(0xE4, 0x5A, 0xC9);
//pub const DARK_BG: Color32 = egui::Color32::from_rgb(40, 44, 52);
pub const GRAY_SECONDARY: Color32 = Color32::from_rgb(0x8A, 0x8A, 0x8A);
const BLACK: Color32 = Color32::from_rgb(0x00, 0x00, 0x00);
// Status colors (errors / warnings)
const RED_700: Color32 = Color32::from_rgb(0xC7, 0x37, 0x5A);
const ORANGE_700: Color32 = Color32::from_rgb(0xF6, 0xB1, 0x4A);

// BACKGROUNDS
const SEMI_DARKER_BG: Color32 = Color32::from_rgb(0x39, 0x39, 0x39);
const DARKER_BG: Color32 = Color32::from_rgb(0x1F, 0x1F, 0x1F);
const DARK_BG: Color32 = Color32::from_rgb(0x2C, 0x2C, 0x2C);
const DARK_ISH_BG: Color32 = Color32::from_rgb(0x25, 0x25, 0x25);
const SEMI_DARK_BG: Color32 = Color32::from_rgb(0x44, 0x44, 0x44);

// Light-theme grays (percentages are approximate lightness)
const LIGHTER_GRAY: Color32 = Color32::from_rgb(0xf8, 0xf8, 0xf8);
const LIGHT_GRAY: Color32 = Color32::from_rgb(0xc8, 0xc8, 0xc8); // 78%
pub const MID_GRAY: Color32 = Color32::from_rgb(0xbd, 0xbd, 0xbd);
const DARKER_GRAY: Color32 = Color32::from_rgb(0xa5, 0xa5, 0xa5); // 65%
const EVEN_DARKER_GRAY: Color32 = Color32::from_rgb(0x89, 0x89, 0x89); // 54%
pub const ALMOST_WHITE: Color32 = Color32::from_rgb(0xFA, 0xFA, 0xFA);
/// The color slots a theme must fill; consumed by `create_themed_visuals`
/// to build egui `Visuals`.
pub struct ColorTheme {
    // VISUALS
    pub panel_fill: Color32,
    pub extreme_bg_color: Color32,
    pub text_color: Color32,
    pub err_fg_color: Color32,
    pub warn_fg_color: Color32,
    pub hyperlink_color: Color32,
    pub selection_color: Color32,

    // WINDOW
    pub window_fill: Color32,
    pub window_stroke_color: Color32,

    // NONINTERACTIVE WIDGET
    pub noninteractive_bg_fill: Color32,
    pub noninteractive_weak_bg_fill: Color32,
    pub noninteractive_bg_stroke_color: Color32,
    pub noninteractive_fg_stroke_color: Color32,

    // INACTIVE WIDGET
    pub inactive_bg_stroke_color: Color32,
    pub inactive_bg_fill: Color32,
    pub inactive_weak_bg_fill: Color32,
}
/// The dark palette used on desktop.
pub fn desktop_dark_color_theme() -> ColorTheme {
    ColorTheme {
        // VISUALS
        panel_fill: DARKER_BG,
        extreme_bg_color: DARK_ISH_BG,
        text_color: Color32::WHITE,
        err_fg_color: RED_700,
        warn_fg_color: ORANGE_700,
        hyperlink_color: PURPLE,
        selection_color: PURPLE_ALT,

        // WINDOW
        window_fill: DARK_ISH_BG,
        window_stroke_color: DARK_BG,

        // NONINTERACTIVE WIDGET
        noninteractive_bg_fill: DARK_ISH_BG,
        noninteractive_weak_bg_fill: DARK_BG,
        noninteractive_bg_stroke_color: SEMI_DARKER_BG,
        noninteractive_fg_stroke_color: GRAY_SECONDARY,

        // INACTIVE WIDGET
        inactive_bg_stroke_color: SEMI_DARKER_BG,
        // was a duplicated literal `from_rgb(0x25, 0x25, 0x25)` — identical
        // to the DARK_ISH_BG constant, so use the constant
        inactive_bg_fill: DARK_ISH_BG,
        inactive_weak_bg_fill: SEMI_DARK_BG,
    }
}
pub fn mobile_dark_color_theme() -> ColorTheme {
ColorTheme {
panel_fill: Color32::BLACK,
noninteractive_weak_bg_fill: Color32::from_rgb(0x1F, 0x1F, 0x1F),
..desktop_dark_color_theme()
}
}
/// The light palette.
pub fn light_color_theme() -> ColorTheme {
    ColorTheme {
        // VISUALS
        panel_fill: Color32::WHITE,
        extreme_bg_color: LIGHTER_GRAY,
        text_color: BLACK,
        err_fg_color: RED_700,
        warn_fg_color: ORANGE_700,
        hyperlink_color: PURPLE,
        selection_color: PURPLE_ALT,

        // WINDOW
        window_fill: Color32::WHITE,
        window_stroke_color: DARKER_GRAY,

        // NONINTERACTIVE WIDGET
        noninteractive_bg_fill: Color32::WHITE,
        noninteractive_weak_bg_fill: LIGHTER_GRAY,
        noninteractive_bg_stroke_color: LIGHT_GRAY,
        noninteractive_fg_stroke_color: GRAY_SECONDARY,

        // INACTIVE WIDGET
        inactive_bg_stroke_color: EVEN_DARKER_GRAY,
        inactive_bg_fill: LIGHT_GRAY,
        inactive_weak_bg_fill: EVEN_DARKER_GRAY,
    }
}

View File

@@ -0,0 +1,224 @@
use crate::route::{Route, Router};
use crate::timeline::{Timeline, TimelineId};
use indexmap::IndexMap;
use std::iter::Iterator;
use std::sync::atomic::{AtomicU32, Ordering};
use tracing::warn;
/// A single deck column: just a navigation router over `Route`s.
#[derive(Clone)]
pub struct Column {
    router: Router<Route>,
}

impl Column {
    /// Create a column whose router starts with the given route stack.
    pub fn new(routes: Vec<Route>) -> Self {
        let router = Router::new(routes);
        Column { router }
    }

    pub fn router(&self) -> &Router<Route> {
        &self.router
    }

    pub fn router_mut(&mut self) -> &mut Router<Route> {
        &mut self.router
    }
}
/// The set of columns in a deck, plus the timelines they display.
/// Columns and timelines are keyed by ids handed out from `UIDS`.
#[derive(Default)]
pub struct Columns {
    /// Columns are simply routers into settings, timelines, etc
    columns: IndexMap<u32, Column>,

    /// Timeline state is not tied to routing logic separately, so that
    /// different columns can navigate to and from settings to timelines,
    /// etc.
    pub timelines: IndexMap<u32, Timeline>,

    /// The selected column for key navigation
    selected: i32,
}

/// Process-wide counter handing out unique column/timeline ids.
static UIDS: AtomicU32 = AtomicU32::new(0);
impl Columns {
    pub fn new() -> Self {
        Columns::default()
    }

    /// Add a new column routed to `timeline`, storing the timeline under
    /// the same id as the column.
    pub fn add_new_timeline_column(&mut self, timeline: Timeline) {
        let id = Self::get_new_id();
        let routes = vec![Route::timeline(timeline.id)];
        self.timelines.insert(id, timeline);
        self.columns.insert(id, Column::new(routes));
    }

    /// Replace column `col`'s current route with `timeline`, storing the
    /// timeline under that column's id.
    pub fn add_timeline_to_column(&mut self, col: usize, timeline: Timeline) {
        let col_id = self.get_column_id_at_index(col);
        self.column_mut(col)
            .router_mut()
            .route_to_replaced(Route::timeline(timeline.id));
        self.timelines.insert(col_id, timeline);
    }

    /// Open a fresh column showing the "add column" picker.
    pub fn new_column_picker(&mut self) {
        self.add_column(Column::new(vec![Route::AddColumn(
            crate::ui::add_column::AddColumnRoute::Base,
        )]));
    }
    /// Create one column from a sequence of intermediary routes, storing
    /// any timelines encountered along the way.
    ///
    /// NOTE(review): every Timeline route here is inserted under the same
    /// `id`, so a later timeline would overwrite an earlier one in
    /// `self.timelines` — confirm callers pass at most one Timeline route.
    pub fn insert_intermediary_routes(&mut self, intermediary_routes: Vec<IntermediaryRoute>) {
        let id = Self::get_new_id();

        let routes = intermediary_routes
            .into_iter()
            .map(|r| match r {
                IntermediaryRoute::Timeline(timeline) => {
                    let route = Route::timeline(timeline.id);
                    self.timelines.insert(id, timeline);
                    route
                }
                IntermediaryRoute::Route(route) => route,
            })
            .collect();

        self.columns.insert(id, Column::new(routes));
    }

    /// Hand out the next process-wide unique id.
    fn get_new_id() -> u32 {
        UIDS.fetch_add(1, Ordering::Relaxed)
    }
pub fn add_column_at(&mut self, column: Column, index: u32) {
self.columns.insert(index, column);
}
pub fn add_column(&mut self, column: Column) {
self.columns.insert(Self::get_new_id(), column);
}
pub fn columns_mut(&mut self) -> Vec<&mut Column> {
self.columns.values_mut().collect()
}
pub fn num_columns(&self) -> usize {
self.columns.len()
}
// Get the first router in the columns if there are columns present.
// Otherwise, create a new column picker and return the router
pub fn get_first_router(&mut self) -> &mut Router<Route> {
if self.columns.is_empty() {
self.new_column_picker();
}
self.columns
.get_index_mut(0)
.expect("There should be at least one column")
.1
.router_mut()
}
pub fn timeline_mut(&mut self, timeline_ind: usize) -> &mut Timeline {
self.timelines
.get_index_mut(timeline_ind)
.expect("expected index to be in bounds")
.1
}
pub fn column(&self, ind: usize) -> &Column {
self.columns
.get_index(ind)
.expect("Expected index to be in bounds")
.1
}
pub fn columns(&self) -> Vec<&Column> {
self.columns.values().collect()
}
pub fn get_column_id_at_index(&self, ind: usize) -> u32 {
*self
.columns
.get_index(ind)
.expect("expected index to be within bounds")
.0
}
pub fn selected(&mut self) -> &mut Column {
self.columns
.get_index_mut(self.selected as usize)
.expect("Expected selected index to be in bounds")
.1
}
pub fn timelines_mut(&mut self) -> Vec<&mut Timeline> {
self.timelines.values_mut().collect()
}
pub fn timelines(&self) -> Vec<&Timeline> {
self.timelines.values().collect()
}
pub fn find_timeline_mut(&mut self, id: TimelineId) -> Option<&mut Timeline> {
self.timelines_mut().into_iter().find(|tl| tl.id == id)
}
pub fn find_timeline(&self, id: TimelineId) -> Option<&Timeline> {
self.timelines().into_iter().find(|tl| tl.id == id)
}
pub fn column_mut(&mut self, ind: usize) -> &mut Column {
self.columns
.get_index_mut(ind)
.expect("Expected index to be in bounds")
.1
}
pub fn find_timeline_for_column_index(&self, ind: usize) -> Option<&Timeline> {
let col_id = self.get_column_id_at_index(ind);
self.timelines.get(&col_id)
}
pub fn select_down(&mut self) {
warn!("todo: implement select_down");
}
pub fn select_up(&mut self) {
warn!("todo: implement select_up");
}
pub fn select_left(&mut self) {
if self.selected - 1 < 0 {
return;
}
self.selected -= 1;
}
pub fn select_right(&mut self) {
if self.selected + 1 >= self.columns.len() as i32 {
return;
}
self.selected += 1;
}
pub fn delete_column(&mut self, index: usize) {
if let Some((key, _)) = self.columns.get_index_mut(index) {
self.timelines.shift_remove(key);
}
self.columns.shift_remove_index(index);
if self.columns.is_empty() {
self.new_column_picker();
}
}
}
/// Routes used when constructing a column's initial route stack.
pub enum IntermediaryRoute {
    /// A timeline route; the timeline itself gets registered with the column.
    Timeline(Timeline),
    /// Any other plain route.
    Route(Route),
}
/// Column-level actions emitted by the UI for the caller to apply.
pub enum ColumnsAction {
    // Switch(usize), TODO: could use for keyboard selection
    /// Remove the column at the given index.
    Remove(usize),
}

View File

@@ -0,0 +1,64 @@
use crate::{app_style::emoji_font_family, decks::Deck};
/// State for UI creating/editing deck
pub struct DeckState {
    /// Deck name as entered by the user.
    pub deck_name: String,
    /// Icon glyph currently chosen for the deck, if any.
    pub selected_glyph: Option<char>,
    /// Whether the glyph picker is currently shown.
    pub selecting_glyph: bool,
    /// Show a "title is required" warning.
    pub warn_no_title: bool,
    /// Show an "icon is required" warning.
    pub warn_no_icon: bool,
    // Lazily-computed list of glyphs available in the emoji font;
    // populated on first call to `get_glyph_options`.
    glyph_options: Option<Vec<char>>,
}
impl DeckState {
    /// Overwrite this state with an existing deck's name and icon.
    pub fn load(&mut self, deck: &Deck) {
        self.selected_glyph = Some(deck.icon);
        self.deck_name = deck.name.clone();
    }

    /// Build a fresh state pre-filled from `deck`.
    pub fn from_deck(deck: &Deck) -> Self {
        Self {
            deck_name: deck.name.clone(),
            selected_glyph: Some(deck.icon),
            ..Default::default()
        }
    }

    /// Reset to the default (empty) editing state.
    pub fn clear(&mut self) {
        *self = Self::default();
    }

    /// Lazily compute and cache the glyphs available in the emoji font.
    pub fn get_glyph_options(&mut self, ui: &egui::Ui) -> &Vec<char> {
        let family = emoji_font_family();
        self.glyph_options
            .get_or_insert_with(|| available_characters(ui, family))
    }
}
impl Default for DeckState {
fn default() -> Self {
Self {
deck_name: Default::default(),
selected_glyph: Default::default(),
selecting_glyph: true,
warn_no_icon: Default::default(),
warn_no_title: Default::default(),
glyph_options: Default::default(),
}
}
}
/// Enumerate the printable characters provided by `family`.
fn available_characters(ui: &egui::Ui, family: egui::FontFamily) -> Vec<char> {
    // The size is arbitrary: we only need the glyph inventory, not metrics.
    let font_id = egui::FontId::new(10.0, family);
    ui.fonts(|fonts| {
        fonts
            .lock()
            .fonts
            .font(&font_id)
            .characters()
            .iter()
            .filter(|(chr, _sizes)| !chr.is_whitespace() && !chr.is_ascii_control())
            .map(|(chr, _sizes)| *chr)
            .collect()
    })
}

View File

@@ -0,0 +1,326 @@
use std::collections::{hash_map::ValuesMut, HashMap};
use enostr::Pubkey;
use nostrdb::Ndb;
use tracing::{error, info};
use crate::{
accounts::AccountsRoute,
column::{Column, Columns},
route::Route,
timeline::{self, Timeline, TimelineKind},
ui::{add_column::AddColumnRoute, configure_deck::ConfigureDeckResponse},
};
/// Pubkey used to key decks when no account applies.
/// The hex literal is a valid 32-byte key, so the unwrap cannot fail.
pub static FALLBACK_PUBKEY: fn() -> Pubkey = || {
    Pubkey::from_hex("aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe").unwrap()
};
/// Deck-level actions requested by the UI.
pub enum DecksAction {
    /// Make the deck at the given index active.
    Switch(usize),
    /// Request removal of the deck at the given index.
    Removing(usize),
}
/// Per-account collections of decks, with a fallback entry used when an
/// account has no decks of its own.
pub struct DecksCache {
    account_to_decks: HashMap<Pubkey, Decks>,
    // Key of the entry served by `fallback()`; expected to be present in
    // the map (`fallback()` panics otherwise).
    fallback_pubkey: Pubkey,
}
impl Default for DecksCache {
    /// A cache containing only the fallback account's default decks.
    fn default() -> Self {
        let mut account_to_decks: HashMap<Pubkey, Decks> = HashMap::new();
        account_to_decks.insert(FALLBACK_PUBKEY(), Decks::default());
        Self::new(account_to_decks)
    }
}
impl DecksCache {
    pub fn new(account_to_decks: HashMap<Pubkey, Decks>) -> Self {
        let fallback_pubkey = FALLBACK_PUBKEY();

        Self {
            account_to_decks,
            fallback_pubkey,
        }
    }

    /// Cache pre-populated with the demo decks for the fallback pubkey.
    pub fn new_with_demo_config(ndb: &Ndb) -> Self {
        let mut account_to_decks: HashMap<Pubkey, Decks> = Default::default();
        let fallback_pubkey = FALLBACK_PUBKEY();
        account_to_decks.insert(fallback_pubkey, demo_decks(fallback_pubkey, ndb));
        DecksCache::new(account_to_decks)
    }

    /// Decks for `key`, falling back to the shared fallback decks when
    /// the account has none.
    pub fn decks(&self, key: &Pubkey) -> &Decks {
        self.account_to_decks
            .get(key)
            .unwrap_or_else(|| self.fallback())
    }

    /// Mutable decks for `key`, creating a default set on first access.
    pub fn decks_mut(&mut self, key: &Pubkey) -> &mut Decks {
        self.account_to_decks.entry(*key).or_default()
    }

    /// The fallback decks. Panics if the fallback entry is missing,
    /// which would be a construction bug.
    pub fn fallback(&self) -> &Decks {
        self.account_to_decks
            .get(&self.fallback_pubkey)
            .unwrap_or_else(|| panic!("fallback deck not found"))
    }

    pub fn fallback_mut(&mut self) -> &mut Decks {
        self.account_to_decks
            .get_mut(&self.fallback_pubkey)
            .unwrap_or_else(|| panic!("fallback deck not found"))
    }

    /// Replace `key`'s decks with a fresh default set.
    pub fn add_deck_default(&mut self, key: Pubkey) {
        let decks = Decks::default();
        // capture the length before the move so we don't have to re-look
        // the entry up just to log it
        let num_decks = decks.decks.len();
        self.account_to_decks.insert(key, decks);
        info!(
            "Adding new default deck for {:?}. New decks size is {}",
            key, num_decks
        );
    }

    /// Replace `key`'s decks with `decks`.
    pub fn add_decks(&mut self, key: Pubkey, decks: Decks) {
        let num_decks = decks.decks.len();
        self.account_to_decks.insert(key, decks);
        info!(
            "Adding new deck for {:?}. New decks size is {}",
            key, num_decks
        );
    }

    /// Append `deck` to `key`'s decks, creating the deck list on first use.
    pub fn add_deck(&mut self, key: Pubkey, deck: Deck) {
        match self.account_to_decks.entry(key) {
            std::collections::hash_map::Entry::Occupied(mut entry) => {
                let decks = entry.get_mut();
                decks.add_deck(deck);
                info!(
                    "Created new deck for {:?}. New number of decks is {}",
                    key,
                    decks.decks.len()
                );
            }
            std::collections::hash_map::Entry::Vacant(entry) => {
                info!("Created first deck for {:?}", key);
                entry.insert(Decks::new(deck));
            }
        }
    }

    /// Drop all decks for `key`.
    pub fn remove_for(&mut self, key: &Pubkey) {
        info!("Removing decks for {:?}", key);
        self.account_to_decks.remove(key);
    }

    pub fn get_fallback_pubkey(&self) -> &Pubkey {
        &self.fallback_pubkey
    }

    pub fn get_all_decks_mut(&mut self) -> ValuesMut<Pubkey, Decks> {
        self.account_to_decks.values_mut()
    }

    pub fn get_mapping(&self) -> &HashMap<Pubkey, Decks> {
        &self.account_to_decks
    }
}
/// An ordered list of decks with exactly one active deck.
pub struct Decks {
    // Index into `decks` of the currently shown deck; kept in bounds by
    // all mutating methods.
    active_deck: usize,
    // Pending UI request to remove the deck at this index, if any.
    removal_request: Option<usize>,
    decks: Vec<Deck>,
}
impl Default for Decks {
    /// A deck list containing a single default deck.
    fn default() -> Self {
        Self::new(Deck::default())
    }
}
impl Decks {
pub fn new(deck: Deck) -> Self {
let decks = vec![deck];
Decks {
active_deck: 0,
removal_request: None,
decks,
}
}
pub fn from_decks(active_deck: usize, decks: Vec<Deck>) -> Self {
Self {
active_deck,
removal_request: None,
decks,
}
}
pub fn active(&self) -> &Deck {
self.decks
.get(self.active_deck)
.expect("active_deck index was invalid")
}
pub fn active_mut(&mut self) -> &mut Deck {
self.decks
.get_mut(self.active_deck)
.expect("active_deck index was invalid")
}
pub fn decks(&self) -> &Vec<Deck> {
&self.decks
}
pub fn decks_mut(&mut self) -> &mut Vec<Deck> {
&mut self.decks
}
pub fn add_deck(&mut self, deck: Deck) {
self.decks.push(deck);
}
pub fn active_index(&self) -> usize {
self.active_deck
}
pub fn set_active(&mut self, index: usize) {
if index < self.decks.len() {
self.active_deck = index;
} else {
error!(
"requested deck change that is invalid. decks len: {}, requested index: {}",
self.decks.len(),
index
);
}
}
pub fn remove_deck(&mut self, index: usize) {
if index < self.decks.len() {
if self.decks.len() > 1 {
self.decks.remove(index);
let info_prefix = format!("Removed deck at index {}", index);
match index.cmp(&self.active_deck) {
std::cmp::Ordering::Less => {
info!(
"{}. The active deck was index {}, now it is {}",
info_prefix,
self.active_deck,
self.active_deck - 1
);
self.active_deck -= 1
}
std::cmp::Ordering::Greater => {
info!(
"{}. Active deck remains at index {}.",
info_prefix, self.active_deck
)
}
std::cmp::Ordering::Equal => {
if index != 0 {
info!(
"{}. Active deck was index {}, now it is {}",
info_prefix,
self.active_deck,
self.active_deck - 1
);
self.active_deck -= 1;
} else {
info!(
"{}. Active deck remains at index {}.",
info_prefix, self.active_deck
)
}
}
}
self.removal_request = None;
} else {
error!("attempted unsucessfully to remove the last deck for this account");
}
} else {
error!("index was out of bounds");
}
}
}
/// A named, icon-tagged collection of columns.
pub struct Deck {
    /// Single-character icon shown for the deck.
    pub icon: char,
    /// User-visible deck name.
    pub name: String,
    columns: Columns,
}
impl Default for Deck {
fn default() -> Self {
let mut columns = Columns::default();
columns.new_column_picker();
Self {
icon: '🇩',
name: String::from("Default Deck"),
columns,
}
}
}
impl Deck {
    /// New deck starting with a single column picker.
    pub fn new(icon: char, name: String) -> Self {
        let mut columns = Columns::default();
        columns.new_column_picker();
        Self::new_with_columns(icon, name, columns)
    }

    /// New deck wrapping pre-existing columns.
    pub fn new_with_columns(icon: char, name: String, columns: Columns) -> Self {
        Self {
            icon,
            name,
            columns,
        }
    }

    pub fn columns(&self) -> &Columns {
        &self.columns
    }

    pub fn columns_mut(&mut self) -> &mut Columns {
        &mut self.columns
    }

    /// Apply name/icon changes coming from the deck-configuration UI.
    pub fn edit(&mut self, changes: ConfigureDeckResponse) {
        self.icon = changes.icon;
        self.name = changes.name;
    }
}
/// Build the demo decks: one deck with a column-picker/accounts column,
/// the demo account's contact-list timeline (when it can be created from
/// ndb), and an "#introductions" hashtag timeline.
pub fn demo_decks(demo_pubkey: Pubkey, ndb: &Ndb) -> Decks {
    let deck = {
        let mut columns = Columns::default();
        columns.add_column(Column::new(vec![
            Route::AddColumn(AddColumnRoute::Base),
            Route::Accounts(AccountsRoute::Accounts),
        ]));

        // contact-list timeline may fail to build (e.g. no contact list in
        // the local db); in that case it is simply skipped
        if let Some(timeline) =
            TimelineKind::contact_list(timeline::PubkeySource::Explicit(demo_pubkey))
                .into_timeline(ndb, Some(demo_pubkey.bytes()))
        {
            columns.add_new_timeline_column(timeline);
        }

        columns.add_new_timeline_column(Timeline::hashtag("introductions".to_string()));

        Deck {
            icon: '🇩',
            name: String::from("Demo Deck"),
            columns,
        }
    };

    Decks::new(deck)
}

View File

@@ -0,0 +1,46 @@
use crate::ui::note::PostType;
use std::collections::HashMap;
/// An in-progress post's text buffer.
#[derive(Default)]
pub struct Draft {
    pub buffer: String,
}
/// All draft buffers: one per replied-to note, one per quoted note,
/// and a single compose draft.
#[derive(Default)]
pub struct Drafts {
    // keyed by the id of the note being replied to
    replies: HashMap<[u8; 32], Draft>,
    // keyed by the id of the note being quoted
    quotes: HashMap<[u8; 32], Draft>,
    compose: Draft,
}
impl Drafts {
pub fn compose_mut(&mut self) -> &mut Draft {
&mut self.compose
}
pub fn get_from_post_type(&mut self, post_type: &PostType) -> &mut Draft {
match post_type {
PostType::New => self.compose_mut(),
PostType::Quote(note_id) => self.quote_mut(note_id.bytes()),
PostType::Reply(note_id) => self.reply_mut(note_id.bytes()),
}
}
pub fn reply_mut(&mut self, id: &[u8; 32]) -> &mut Draft {
self.replies.entry(*id).or_default()
}
pub fn quote_mut(&mut self, id: &[u8; 32]) -> &mut Draft {
self.quotes.entry(*id).or_default()
}
}
impl Draft {
pub fn new() -> Self {
Draft::default()
}
pub fn clear(&mut self) {
self.buffer = "".to_string();
}
}

View File

@@ -0,0 +1,126 @@
use std::{fmt, io};
/// Errors that can occur while building nostr filters.
#[derive(Debug, Clone, Copy, Eq, PartialEq)]
pub enum FilterError {
    /// A contact list yielded no authors or hashtags to filter on.
    EmptyContactList,
}
/// Subscription bookkeeping errors.
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub enum SubscriptionError {
    /// No active subscriptions were found when one was expected.
    //#[error("No active subscriptions")]
    NoActive,

    /// When a timeline has an unexpected number
    /// of active subscriptions. Should only happen if there
    /// is a bug in notedeck
    //#[error("Unexpected subscription count")]
    UnexpectedSubscriptionCount(i32),
}
impl Error {
    /// Shorthand for an unexpected-subscription-count error.
    pub fn unexpected_sub_count(c: i32) -> Self {
        Self::SubscriptionError(SubscriptionError::UnexpectedSubscriptionCount(c))
    }

    /// Shorthand for the "no active subscription" error.
    pub fn no_active_sub() -> Self {
        Self::SubscriptionError(SubscriptionError::NoActive)
    }
}
impl fmt::Display for SubscriptionError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::NoActive => write!(f, "No active subscriptions"),
Self::UnexpectedSubscriptionCount(c) => {
write!(f, "Unexpected subscription count: {}", c)
}
}
}
}
/// Top-level error type for the columns app.
#[derive(Debug)]
pub enum Error {
    /// A timeline lookup failed.
    TimelineNotFound,
    /// Something failed to load.
    LoadFailed,
    /// Subscription bookkeeping error.
    SubscriptionError(SubscriptionError),
    /// Filter construction error.
    Filter(FilterError),
    Io(io::Error),
    Nostr(enostr::Error),
    Ndb(nostrdb::Error),
    Image(image::error::ImageError),
    /// Catch-all error carrying a plain message.
    Generic(String),
}
impl Error {
    /// Shorthand for a filter error caused by an empty contact list.
    pub fn empty_contact_list() -> Self {
        Self::Filter(FilterError::EmptyContactList)
    }
}
impl fmt::Display for FilterError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::EmptyContactList => {
write!(f, "empty contact list")
}
}
}
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::TimelineNotFound => write!(f, "Timeline not found"),
            Self::LoadFailed => write!(f, "load failed"),
            // wrapped errors defer to their own Display impls
            Self::SubscriptionError(e) => write!(f, "{e}"),
            Self::Filter(e) => write!(f, "{e}"),
            Self::Io(e) => write!(f, "{e}"),
            Self::Nostr(e) => write!(f, "{e}"),
            Self::Ndb(e) => write!(f, "{e}"),
            Self::Image(e) => write!(f, "{e}"),
            Self::Generic(e) => write!(f, "{e}"),
        }
    }
}
impl From<String> for Error {
fn from(s: String) -> Self {
Error::Generic(s)
}
}
impl From<nostrdb::Error> for Error {
fn from(e: nostrdb::Error) -> Self {
Error::Ndb(e)
}
}
impl From<image::error::ImageError> for Error {
fn from(err: image::error::ImageError) -> Self {
Error::Image(err)
}
}
impl From<enostr::Error> for Error {
fn from(err: enostr::Error) -> Self {
Error::Nostr(err)
}
}
impl From<io::Error> for Error {
fn from(err: io::Error) -> Self {
Error::Io(err)
}
}
impl From<FilterError> for Error {
fn from(err: FilterError) -> Self {
Error::Filter(err)
}
}

View File

@@ -0,0 +1,266 @@
use crate::error::{Error, FilterError};
use crate::note::NoteRef;
use crate::Result;
use nostrdb::{Filter, FilterBuilder, Note, Subscription};
use std::collections::HashMap;
use tracing::{debug, warn};
/// A unified subscription has a local and remote component: `remote` is
/// the subscription id sent to relays for data received remotely, and
/// `local` is the corresponding nostrdb subscription.
#[derive(Debug, Clone)]
pub struct UnifiedSubscription {
    pub local: Subscription,
    pub remote: String,
}
/// Each relay can have a different filter state. For example, some
/// relays may have the contact list, some may not. Let's capture all of
/// these states so that some relays don't stall the states of other
/// relays.
#[derive(Debug)]
pub struct FilterStates {
    /// State a relay starts in before it reports anything.
    pub initial_state: FilterState,
    /// Per-relay state, keyed by relay url; populated lazily in `get`.
    pub states: HashMap<String, FilterState>,
}
impl FilterStates {
    /// Get the filter state for `relay`, initializing it from
    /// `initial_state` on first access. If the initial state is already
    /// `Ready`, it is used directly for every relay.
    pub fn get(&mut self, relay: &str) -> &FilterState {
        // if our initial state is ready, then just use that
        if let FilterState::Ready(_) = self.initial_state {
            return &self.initial_state;
        }

        // otherwise track per-relay state, lazily seeded from the initial
        // state (entry API: one lookup instead of contains_key + insert + get)
        self.states
            .entry(relay.to_string())
            .or_insert_with(|| self.initial_state.clone())
    }

    /// Find any relay in the `GotRemote` state, returning the relay url
    /// and its local subscription.
    pub fn get_any_gotremote(&self) -> Option<(&str, Subscription)> {
        self.states.iter().find_map(|(relay, state)| match state {
            FilterState::GotRemote(sub) => Some((relay.as_str(), *sub)),
            _ => None,
        })
    }

    /// Find any ready filter, preferring the initial state.
    pub fn get_any_ready(&self) -> Option<&Vec<Filter>> {
        if let FilterState::Ready(fs) = &self.initial_state {
            Some(fs)
        } else {
            self.states.values().find_map(|state| match state {
                FilterState::Ready(fs) => Some(fs),
                _ => None,
            })
        }
    }

    pub fn new(initial_state: FilterState) -> Self {
        Self {
            initial_state,
            states: HashMap::new(),
        }
    }

    /// Record a state transition for `relay`, logging old -> new when a
    /// previous state existed.
    pub fn set_relay_state(&mut self, relay: String, state: FilterState) {
        if let Some(current_state) = self.states.get(&relay) {
            debug!(
                "set_relay_state: {:?} -> {:?} on {}",
                current_state, state, &relay,
            );
        }
        self.states.insert(relay, state);
    }
}
/// We may need to fetch some data from relays before our filter is ready.
/// [`FilterState`] tracks this.
#[derive(Debug, Clone)]
pub enum FilterState {
    /// Remote data is required before the filter can be built.
    NeedsRemote(Vec<Filter>),
    /// A remote subscription for the needed data is in flight.
    FetchingRemote(UnifiedSubscription),
    /// Remote data arrived; the local subscription holds it.
    GotRemote(Subscription),
    /// The filter is ready to use.
    Ready(Vec<Filter>),
    /// The filter could not be built; carries the reason.
    Broken(FilterError),
}
impl FilterState {
/// We tried to fetch a filter but we wither got no data or the data
/// was corrupted, preventing us from getting to the Ready state.
/// Just mark the timeline as broken so that we can signal to the
/// user that something went wrong
pub fn broken(reason: FilterError) -> Self {
Self::Broken(reason)
}
/// The filter is ready
pub fn ready(filter: Vec<Filter>) -> Self {
Self::Ready(filter)
}
/// We need some data from relays before we can continue. Example:
/// for home timelines where we don't have a contact list yet. We
/// need to fetch the contact list before we have the right timeline
/// filter.
pub fn needs_remote(filter: Vec<Filter>) -> Self {
Self::NeedsRemote(filter)
}
/// We got the remote data. Local data should be available to build
/// the filter for the [`FilterState::Ready`] state
pub fn got_remote(local_sub: Subscription) -> Self {
Self::GotRemote(local_sub)
}
/// We have sent off a remote subscription to get data needed for the
/// filter. The string is the subscription id
pub fn fetching_remote(sub_id: String, local_sub: Subscription) -> Self {
let unified_sub = UnifiedSubscription {
local: local_sub,
remote: sub_id,
};
Self::FetchingRemote(unified_sub)
}
}
/// Rough heuristic: only apply the `since` optimization once we have at
/// least `limit` notes locally; otherwise bail so we can backfill.
pub fn should_since_optimize(limit: u64, num_notes: usize) -> bool {
    num_notes >= limit as usize
}
/// Tighten `filter`'s `since` to just before the newest note in `notes`
/// (assumed to be at index 0), leaving a `since_gap`-second overlap.
/// Returns the filter unchanged when there are no notes.
pub fn since_optimize_filter_with(filter: Filter, notes: &[NoteRef], since_gap: u64) -> Filter {
    // Get the latest entry in the events
    if notes.is_empty() {
        return filter;
    }

    // get the latest note
    let latest = notes[0];
    // saturate instead of underflowing (u64 subtraction panics in debug
    // builds) when the note is younger than the gap
    let since = latest.created_at.saturating_sub(since_gap);

    filter.since_mut(since)
}
/// [`since_optimize_filter_with`] using the default 60-second gap.
pub fn since_optimize_filter(filter: Filter, notes: &[NoteRef]) -> Filter {
    since_optimize_filter_with(filter, notes, 60)
}
/// Default note limit used when building filters.
pub fn default_limit() -> u64 {
    500
}
/// Default note limit for remote (relay) subscriptions.
pub fn default_remote_limit() -> u64 {
    250
}
/// Partially-built filters extracted from a note's tags: one for author
/// ("p") tags, one for hashtag ("t") tags. Either may be absent when the
/// note had no tags of that kind.
pub struct FilteredTags {
    pub authors: Option<FilterBuilder>,
    pub hashtags: Option<FilterBuilder>,
}
impl FilteredTags {
    /// Standard "following" feed: kind-1 notes with the default limit.
    pub fn into_follow_filter(self) -> Vec<Filter> {
        self.into_filter([1], default_limit())
    }

    // TODO: make this more general
    /// Finish the author/hashtag builders into concrete filters with the
    /// given kinds and limit (authors first, then hashtags).
    pub fn into_filter<I>(self, kinds: I, limit: u64) -> Vec<Filter>
    where
        I: IntoIterator<Item = u64> + Copy,
    {
        let mut filters = Vec::with_capacity(2);
        for builder in [self.authors, self.hashtags].into_iter().flatten() {
            filters.push(builder.kinds(kinds).limit(limit).build());
        }
        filters
    }
}
/// Create a filter from tags. This can be used to create a filter
/// from a contact list: "p" tags become an authors filter, "t" tags a
/// hashtag filter. Errors with [`FilterError::EmptyContactList`] when
/// neither kind of tag is present.
pub fn filter_from_tags(note: &Note) -> Result<FilteredTags> {
    let mut author_filter = Filter::new();
    let mut hashtag_filter = Filter::new();
    let mut author_count = 0i32;
    let mut hashtag_count = 0i32;

    let tags = note.tags();

    // field writers must be opened before elements are added and closed
    // before the builders can be used
    author_filter.start_authors_field()?;
    hashtag_filter.start_tags_field('t')?;

    for tag in tags {
        // need at least a tag name and one value
        if tag.count() < 2 {
            continue;
        }

        let t = if let Some(t) = tag.get_unchecked(0).variant().str() {
            t
        } else {
            continue;
        };

        if t == "p" {
            let author = if let Some(author) = tag.get_unchecked(1).variant().id() {
                author
            } else {
                continue;
            };

            author_filter.add_id_element(author)?;
            author_count += 1;
        } else if t == "t" {
            let hashtag = if let Some(hashtag) = tag.get_unchecked(1).variant().str() {
                hashtag
            } else {
                continue;
            };

            hashtag_filter.add_str_element(hashtag)?;
            hashtag_count += 1;
        }
    }

    author_filter.end_field();
    hashtag_filter.end_field();

    if author_count == 0 && hashtag_count == 0 {
        warn!("no authors or hashtags found in contact list");
        return Err(Error::empty_contact_list());
    }

    debug!(
        "adding {} authors and {} hashtags to contact filter",
        author_count, hashtag_count
    );

    // NOTE: if the element adds above ever hit out-of-memory errors, the
    // filter buffer size needs to be expanded
    Ok(FilteredTags {
        authors: if author_count > 0 {
            Some(author_filter)
        } else {
            None
        },
        hashtags: if hashtag_count > 0 {
            Some(hashtag_filter)
        } else {
            None
        },
    })
}

View File

@@ -0,0 +1,156 @@
use egui::{FontData, FontDefinitions, FontTweak};
use std::collections::BTreeMap;
use tracing::debug;
/// Custom font families registered with egui in `setup_fonts`.
pub enum NamedFontFamily {
    Medium,
    Bold,
    Emoji,
}
impl NamedFontFamily {
    /// Stable name under which the family is registered with egui.
    /// Takes `&self` — these are pure reads, no mutation is needed.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Bold => "bold",
            Self::Medium => "medium",
            Self::Emoji => "emoji",
        }
    }

    /// The egui font-family handle for this named family.
    pub fn as_family(&self) -> egui::FontFamily {
        egui::FontFamily::Name(self.as_str().into())
    }
}
// Use gossip's approach to font loading. This includes japanese fonts
// for rendering text from japanese users.
/// Register all bundled fonts with egui and define the proportional,
/// monospace, and named (medium/bold/emoji) font families.
pub fn setup_fonts(ctx: &egui::Context) {
    let mut font_data: BTreeMap<String, FontData> = BTreeMap::new();
    let mut families = BTreeMap::new();

    font_data.insert(
        "Onest".to_owned(),
        FontData::from_static(include_bytes!(
            "../assets/fonts/onest/OnestRegular1602-hint.ttf"
        )),
    );

    font_data.insert(
        "OnestMedium".to_owned(),
        FontData::from_static(include_bytes!(
            "../assets/fonts/onest/OnestMedium1602-hint.ttf"
        )),
    );

    font_data.insert(
        "DejaVuSans".to_owned(),
        FontData::from_static(include_bytes!("../assets/fonts/DejaVuSansSansEmoji.ttf")),
    );

    font_data.insert(
        "OnestBold".to_owned(),
        FontData::from_static(include_bytes!(
            "../assets/fonts/onest/OnestBold1602-hint.ttf"
        )),
    );

    /*
    font_data.insert(
        "DejaVuSansBold".to_owned(),
        FontData::from_static(include_bytes!(
            "../assets/fonts/DejaVuSans-Bold-SansEmoji.ttf"
        )),
    );

    font_data.insert(
        "DejaVuSans".to_owned(),
        FontData::from_static(include_bytes!("../assets/fonts/DejaVuSansSansEmoji.ttf")),
    );
    font_data.insert(
        "DejaVuSansBold".to_owned(),
        FontData::from_static(include_bytes!(
            "../assets/fonts/DejaVuSans-Bold-SansEmoji.ttf"
        )),
    );
    */

    font_data.insert(
        "Inconsolata".to_owned(),
        FontData::from_static(include_bytes!("../assets/fonts/Inconsolata-Regular.ttf")).tweak(
            FontTweak {
                scale: 1.22,            // This font is smaller than DejaVuSans
                y_offset_factor: -0.18, // and too low
                y_offset: 0.0,
                baseline_offset_factor: 0.0,
            },
        ),
    );

    // CJK and Thai coverage for the base fallback chain
    font_data.insert(
        "NotoSansCJK".to_owned(),
        FontData::from_static(include_bytes!("../assets/fonts/NotoSansCJK-Regular.ttc")),
    );

    font_data.insert(
        "NotoSansThai".to_owned(),
        FontData::from_static(include_bytes!("../assets/fonts/NotoSansThai-Regular.ttf")),
    );

    // Some good looking emojis. Use as first priority:
    font_data.insert(
        "NotoEmoji".to_owned(),
        FontData::from_static(include_bytes!("../assets/fonts/NotoEmoji-Regular.ttf")).tweak(
            FontTweak {
                scale: 1.1, // make them a touch larger
                y_offset_factor: 0.0,
                y_offset: 0.0,
                baseline_offset_factor: 0.0,
            },
        ),
    );

    // shared fallback tail appended to every family, in priority order
    let base_fonts = vec![
        "DejaVuSans".to_owned(),
        "NotoEmoji".to_owned(),
        "NotoSansCJK".to_owned(),
        "NotoSansThai".to_owned(),
    ];

    let mut proportional = vec!["Onest".to_owned()];
    proportional.extend(base_fonts.clone());

    let mut medium = vec!["OnestMedium".to_owned()];
    medium.extend(base_fonts.clone());

    let mut mono = vec!["Inconsolata".to_owned()];
    mono.extend(base_fonts.clone());

    let mut bold = vec!["OnestBold".to_owned()];
    bold.extend(base_fonts.clone());

    let emoji = vec!["NotoEmoji".to_owned()];

    families.insert(egui::FontFamily::Proportional, proportional);
    families.insert(egui::FontFamily::Monospace, mono);
    families.insert(
        egui::FontFamily::Name(NamedFontFamily::Medium.as_str().into()),
        medium,
    );
    families.insert(
        egui::FontFamily::Name(NamedFontFamily::Bold.as_str().into()),
        bold,
    );
    families.insert(
        egui::FontFamily::Name(NamedFontFamily::Emoji.as_str().into()),
        emoji,
    );

    debug!("fonts: {:?}", families);

    let defs = FontDefinitions {
        font_data,
        families,
    };

    ctx.set_fonts(defs);
}

View File

@@ -0,0 +1,48 @@
use egui::util::History;
/// Rolling history of per-frame CPU times, used for FPS/CPU-usage stats.
pub struct FrameHistory {
    // (wall-clock time, frame duration in seconds) samples
    frame_times: History<f32>,
}
impl Default for FrameHistory {
fn default() -> Self {
let max_age: f32 = 1.0;
let max_len = (max_age * 300.0).round() as usize;
Self {
frame_times: History::new(0..max_len, max_age),
}
}
}
impl FrameHistory {
    /// Record a new frame. Must be called first each frame: it rewrites
    /// the previous frame's projected time with the real one, then adds a
    /// projected entry for the current frame.
    pub fn on_new_frame(&mut self, now: f64, previous_frame_time: Option<f32>) {
        let frame_time = previous_frame_time.unwrap_or_default();
        // rewrite history now that we know the previous frame's real cost
        if let Some(latest) = self.frame_times.latest_mut() {
            *latest = frame_time;
        }
        self.frame_times.add(now, frame_time); // projected
    }

    /// Mean frame time in seconds over the retained window.
    #[allow(unused)]
    pub fn mean_frame_time(&self) -> f32 {
        self.frame_times.average().unwrap_or_default()
    }

    /// Frames per second over the retained window.
    #[allow(unused)]
    pub fn fps(&self) -> f32 {
        1.0 / self.frame_times.mean_time_interval().unwrap_or_default()
    }

    /// Debug UI showing mean CPU usage per frame.
    pub fn _ui(&mut self, ui: &mut egui::Ui) {
        let label = format!(
            "Mean CPU usage: {:.2} ms / frame",
            1e3 * self.mean_frame_time()
        );
        ui.label(label).on_hover_text(
            "Includes egui layout and tessellation time.\n\
            Does not include GPU usage, nor overhead for sending data to GPU.",
        );
        egui::warn_if_debug_build(ui);
    }
}

View File

@@ -0,0 +1,258 @@
use crate::error::Error;
use crate::imgcache::ImageCache;
use crate::result::Result;
use egui::{pos2, Color32, ColorImage, Rect, Sense, SizeHint, TextureHandle};
use image::imageops::FilterType;
use poll_promise::Promise;
use std::path;
use tokio::fs;
//pub type ImageCacheKey = String;
//pub type ImageCacheValue = Promise<Result<TextureHandle>>;
//pub type ImageCache = HashMap<String, ImageCacheValue>;
// NOTE(jb55): chatgpt wrote this because I was too dumb to
/// Paint `texture_id` into the available space, preserving
/// `aspect_ratio` by scaling up until the frame is fully covered and
/// centering the overflow. NOTE(review): the clip-rect handling is
/// commented out, so overflow is drawn outside the frame rather than
/// clipped — confirm callers rely on that.
pub fn aspect_fill(
    ui: &mut egui::Ui,
    sense: Sense,
    texture_id: egui::TextureId,
    aspect_ratio: f32,
) -> egui::Response {
    let frame = ui.available_rect_before_wrap(); // Get the available frame space in the current layout
    let frame_ratio = frame.width() / frame.height();

    // scale so the content covers the frame in both dimensions
    let (width, height) = if frame_ratio > aspect_ratio {
        // Frame is wider than the content
        (frame.width(), frame.width() / aspect_ratio)
    } else {
        // Frame is taller than the content
        (frame.height() * aspect_ratio, frame.height())
    };

    // center the (possibly larger) content rect on the frame
    let content_rect = Rect::from_min_size(
        frame.min
            + egui::vec2(
                (frame.width() - width) / 2.0,
                (frame.height() - height) / 2.0,
            ),
        egui::vec2(width, height),
    );

    // Set the clipping rectangle to the frame
    //let clip_rect = ui.clip_rect(); // Preserve the original clipping rectangle
    //ui.set_clip_rect(frame);

    let uv = Rect::from_min_max(pos2(0.0, 0.0), pos2(1.0, 1.0));

    let (response, painter) = ui.allocate_painter(ui.available_size(), sense);

    // Draw the texture within the calculated rect, potentially clipping it
    painter.rect_filled(content_rect, 0.0, ui.ctx().style().visuals.window_fill());
    painter.image(texture_id, content_rect, uv, Color32::WHITE);

    // Restore the original clipping rectangle
    //ui.set_clip_rect(clip_rect);
    response
}
/// Mask `image` to a circle in place: pixels outside the circle become
/// transparent, and the outermost ~1px ring is faded for antialiasing.
/// NOTE(review): the radius is derived from the width only
/// (`image.size[0]`), so this presumably expects a square image — confirm
/// callers always pass squares.
pub fn round_image(image: &mut ColorImage) {
    #[cfg(feature = "profiling")]
    puffin::profile_function!();

    // The radius to the edge of of the avatar circle
    let edge_radius = image.size[0] as f32 / 2.0;
    let edge_radius_squared = edge_radius * edge_radius;

    for (pixnum, pixel) in image.pixels.iter_mut().enumerate() {
        // y coordinate
        let uy = pixnum / image.size[0];
        let y = uy as f32;
        let y_offset = edge_radius - y;

        // x coordinate
        let ux = pixnum % image.size[0];
        let x = ux as f32;
        let x_offset = edge_radius - x;

        // The radius to this pixel (may be inside or outside the circle)
        let pixel_radius_squared: f32 = x_offset * x_offset + y_offset * y_offset;

        // If inside of the avatar circle
        if pixel_radius_squared <= edge_radius_squared {
            // squareroot to find how many pixels we are from the edge
            let pixel_radius: f32 = pixel_radius_squared.sqrt();
            let distance = edge_radius - pixel_radius;

            // If we are within 1 pixel of the edge, we should fade, to
            // antialias the edge of the circle. 1 pixel from the edge should
            // be 100% of the original color, and right on the edge should be
            // 0% of the original color.
            if distance <= 1.0 {
                // scale all premultiplied channels by the fade factor
                *pixel = Color32::from_rgba_premultiplied(
                    (pixel.r() as f32 * distance) as u8,
                    (pixel.g() as f32 * distance) as u8,
                    (pixel.b() as f32 * distance) as u8,
                    (pixel.a() as f32 * distance) as u8,
                );
            }
        } else {
            // Outside of the avatar circle
            *pixel = Color32::TRANSPARENT;
        }
    }
}
/// Convert a decoded bitmap into an egui [`ColorImage`] according to
/// `imgtyp`: content images are just resized; profile images are
/// center-cropped square, resized, and masked to a circle.
fn process_pfp_bitmap(imgtyp: ImageType, image: &mut image::DynamicImage) -> ColorImage {
    #[cfg(feature = "profiling")]
    puffin::profile_function!();

    match imgtyp {
        ImageType::Content(w, h) => {
            let image = image.resize(w, h, FilterType::CatmullRom); // DynamicImage
            let image_buffer = image.into_rgba8(); // RgbaImage (ImageBuffer)
            // return the converted image directly (no let-and-return)
            ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            )
        }
        ImageType::Profile(size) => {
            // Crop square, keeping the center of the longer dimension
            let smaller = image.width().min(image.height());

            if image.width() > smaller {
                let excess = image.width() - smaller;
                *image = image.crop_imm(excess / 2, 0, image.width() - excess, image.height());
            } else if image.height() > smaller {
                let excess = image.height() - smaller;
                *image = image.crop_imm(0, excess / 2, image.width(), image.height() - excess);
            }
            let image = image.resize(size, size, FilterType::CatmullRom); // DynamicImage
            let image_buffer = image.into_rgba8(); // RgbaImage (ImageBuffer)

            let mut color_image = ColorImage::from_rgba_unmultiplied(
                [
                    image_buffer.width() as usize,
                    image_buffer.height() as usize,
                ],
                image_buffer.as_flat_samples().as_slice(),
            );
            // avatars are circular
            round_image(&mut color_image);
            color_image
        }
    }
}
/// Decode an HTTP image response into a [`ColorImage`], dispatching on
/// content-type: SVGs are rasterized (and rounded), raster formats go
/// through [`process_pfp_bitmap`]. Non-image content types are an error.
fn parse_img_response(response: ehttp::Response, imgtyp: ImageType) -> Result<ColorImage> {
    #[cfg(feature = "profiling")]
    puffin::profile_function!();

    let content_type = response.content_type().unwrap_or_default();
    // target size hint for the SVG rasterizer
    let size_hint = match imgtyp {
        ImageType::Profile(size) => SizeHint::Size(size, size),
        ImageType::Content(w, h) => SizeHint::Size(w, h),
    };

    if content_type.starts_with("image/svg") {
        #[cfg(feature = "profiling")]
        puffin::profile_scope!("load_svg");

        let mut color_image =
            egui_extras::image::load_svg_bytes_with_size(&response.bytes, Some(size_hint))?;
        // NOTE(review): SVGs are always rounded here, even for
        // ImageType::Content — confirm this is intended.
        round_image(&mut color_image);
        Ok(color_image)
    } else if content_type.starts_with("image/") {
        #[cfg(feature = "profiling")]
        puffin::profile_scope!("load_from_memory");
        let mut dyn_image = image::load_from_memory(&response.bytes)?;
        Ok(process_pfp_bitmap(imgtyp, &mut dyn_image))
    } else {
        Err(format!("Expected image, found content-type {:?}", content_type).into())
    }
}
fn fetch_img_from_disk(
ctx: &egui::Context,
url: &str,
path: &path::Path,
) -> Promise<Result<TextureHandle>> {
let ctx = ctx.clone();
let url = url.to_owned();
let path = path.to_owned();
Promise::spawn_async(async move {
let data = fs::read(path).await?;
let image_buffer = image::load_from_memory(&data)?;
// TODO: remove unwrap here
let flat_samples = image_buffer.as_flat_samples_u8().unwrap();
let img = ColorImage::from_rgba_unmultiplied(
[
image_buffer.width() as usize,
image_buffer.height() as usize,
],
flat_samples.as_slice(),
);
Ok(ctx.load_texture(&url, img, Default::default()))
})
}
/// Controls type-specific handling
#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    /// Profile Image (size) — cropped square, resized, and rounded
    Profile(u32),
    /// Content Image (width, height) — resized only
    Content(u32, u32),
}
/// Fetch an image texture for `url`: served from the on-disk cache when
/// present, otherwise downloaded (and written back to the cache by
/// [`fetch_img_from_net`]).
pub fn fetch_img(
    img_cache: &ImageCache,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
) -> Promise<Result<TextureHandle>> {
    let key = ImageCache::key(url);
    let path = img_cache.cache_dir.join(key);

    if path.exists() {
        // cache hit: load from disk
        fetch_img_from_disk(ctx, url, &path)
    } else {
        // cache miss: fetch from the network
        fetch_img_from_net(&img_cache.cache_dir, ctx, url, imgtyp)
    }
}
/// Download `url`, decode it per `imgtyp`, upload it as an egui texture,
/// and write the decoded image to the disk cache on a background thread.
fn fetch_img_from_net(
    cache_path: &path::Path,
    ctx: &egui::Context,
    url: &str,
    imgtyp: ImageType,
) -> Promise<Result<TextureHandle>> {
    let (sender, promise) = Promise::new();
    let request = ehttp::Request::get(url);
    let ctx = ctx.clone();
    let cloned_url = url.to_owned();
    let cache_path = cache_path.to_owned();
    ehttp::fetch(request, move |response| {
        let handle = response
            .map_err(Error::Generic)
            .and_then(|resp| parse_img_response(resp, imgtyp))
            .map(|img| {
                let texture_handle = ctx.load_texture(&cloned_url, img.clone(), Default::default());

                // write to disk; spawned so the fetch callback isn't
                // blocked on file I/O (the write result is discarded)
                std::thread::spawn(move || ImageCache::write(&cache_path, &cloned_url, img));

                texture_handle
            });

        sender.send(handle); // send the results back to the UI thread.
        ctx.request_repaint();
    });

    promise
}

View File

@@ -0,0 +1,62 @@
use crate::Result;
use egui::TextureHandle;
use poll_promise::Promise;
use egui::ColorImage;
use std::collections::HashMap;
use std::fs::File;
use std::path;
/// A pending or completed image load, keyed by URL in [`ImageCacheMap`].
pub type ImageCacheValue = Promise<Result<TextureHandle>>;
/// Maps an image URL to its in-flight or finished texture promise.
pub type ImageCacheMap = HashMap<String, ImageCacheValue>;
/// Two-level image cache: decoded images persisted under `cache_dir` plus
/// an in-memory map of URL -> texture promise.
pub struct ImageCache {
    /// directory where decoded images are written (see `ImageCache::write`)
    pub cache_dir: path::PathBuf,
    /// in-memory cache of texture promises by URL
    url_imgs: ImageCacheMap,
}
impl ImageCache {
    /// Create a cache rooted at `cache_dir` with an empty in-memory map.
    pub fn new(cache_dir: path::PathBuf) -> Self {
        Self {
            cache_dir,
            url_imgs: HashMap::new(),
        }
    }

    /// Name of the image cache directory, relative to the app data dir.
    pub fn rel_dir() -> &'static str {
        "img"
    }

    /// Persist a decoded image under `cache_dir` as a lossless WebP file
    /// named by [`Self::key`], so it can be reloaded without re-downloading.
    pub fn write(cache_dir: &path::Path, url: &str, data: ColorImage) -> Result<()> {
        let file_path = cache_dir.join(Self::key(url));
        // File::create is exactly options().write(true).create(true).truncate(true)
        let file = File::create(file_path)?;
        let encoder = image::codecs::webp::WebPEncoder::new_lossless(file);
        encoder.encode(
            data.as_raw(),
            data.size[0] as u32,
            data.size[1] as u32,
            image::ColorType::Rgba8.into(),
        )?;
        Ok(())
    }

    /// Filesystem-safe cache key for a URL (Crockford base32 of its bytes).
    pub fn key(url: &str) -> String {
        base32::encode(base32::Alphabet::Crockford, url.as_bytes())
    }

    /// Borrow the URL -> texture-promise map.
    pub fn map(&self) -> &ImageCacheMap {
        &self.url_imgs
    }

    /// Mutably borrow the URL -> texture-promise map.
    pub fn map_mut(&mut self) -> &mut ImageCacheMap {
        &mut self.url_imgs
    }
}

View File

@@ -0,0 +1,220 @@
use std::collections::HashMap;
use std::str::FromStr;
use crate::Error;
use enostr::{Keypair, Pubkey, SecretKey};
use poll_promise::Promise;
use reqwest::{Request, Response};
use serde::{Deserialize, Serialize};
/// Ways that turning user input into a keypair can fail.
#[derive(Debug, PartialEq)]
pub enum AcquireKeyError {
    InvalidKey,
    Nip05Failed(String),
}

impl std::fmt::Display for AcquireKeyError {
    /// Render a user-facing description of the failure.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        match self {
            Self::InvalidKey => f.write_str("The inputted key is invalid."),
            Self::Nip05Failed(e) => {
                write!(f, "Failed to get pubkey from Nip05 address: {e}")
            }
        }
    }
}

impl std::error::Error for AcquireKeyError {}
/// Deserialized `.well-known/nostr.json` document (NIP-05 lookup response).
#[derive(Deserialize, Serialize)]
pub struct Nip05Result {
    /// local-part -> hex pubkey mappings (looked up in `get_pubkey_from_result`)
    pub names: HashMap<String, String>,
    // NOTE(review): per NIP-05 this presumably maps pubkeys to relay URLs;
    // it is parsed but not read anywhere in this module — confirm before use
    pub relays: Option<HashMap<String, Vec<String>>>,
}
async fn parse_nip05_response(response: Response) -> Result<Nip05Result, Error> {
match response.bytes().await {
Ok(bytes) => {
serde_json::from_slice::<Nip05Result>(&bytes).map_err(|e| Error::Generic(e.to_string()))
}
Err(e) => Err(Error::Generic(e.to_string())),
}
}
/// Extract `user`'s pubkey from a parsed NIP-05 document.
///
/// Looks `user` up in the `names` table and parses the associated hex
/// string; errors if the user is missing or the hex doesn't parse.
fn get_pubkey_from_result(result: Nip05Result, user: String) -> Result<Pubkey, Error> {
    // (the previous `.to_owned()` on the Option<&String> was a no-op clone)
    match result.names.get(&user) {
        Some(pubkey_str) => Pubkey::from_hex(pubkey_str).map_err(|e| {
            Error::Generic("Could not parse pubkey: ".to_string() + e.to_string().as_str())
        }),
        None => Err(Error::Generic("Could not find user in json.".to_string())),
    }
}
async fn get_nip05_pubkey(id: &str) -> Result<Pubkey, Error> {
let mut parts = id.split('@');
let user = match parts.next() {
Some(user) => user,
None => {
return Err(Error::Generic(
"Address does not contain username.".to_string(),
));
}
};
let host = match parts.next() {
Some(host) => host,
None => {
return Err(Error::Generic(
"Nip05 address does not contain host.".to_string(),
));
}
};
if parts.next().is_some() {
return Err(Error::Generic(
"Nip05 address contains extraneous parts.".to_string(),
));
}
let url = format!("https://{host}/.well-known/nostr.json?name={user}");
let request = Request::new(reqwest::Method::GET, url.parse().unwrap());
let cloned_user = user.to_string();
let client = reqwest::Client::new();
match client.execute(request).await {
Ok(resp) => match parse_nip05_response(resp).await {
Ok(result) => match get_pubkey_from_result(result, cloned_user) {
Ok(pubkey) => Ok(pubkey),
Err(e) => Err(Error::Generic(e.to_string())),
},
Err(e) => Err(Error::Generic(e.to_string())),
},
Err(e) => Err(Error::Generic(e.to_string())),
}
}
/// Heuristic: any input containing '@' is treated as a NIP-05 address.
fn retrieving_nip05_pubkey(key: &str) -> bool {
    key.chars().any(|c| c == '@')
}
/// Kick off asynchronous key acquisition for `key` (see [`get_key`]).
pub fn perform_key_retrieval(key: &str) -> Promise<Result<Keypair, AcquireKeyError>> {
    let owned_key = key.to_owned();
    Promise::spawn_async(async move { get_key(&owned_key).await })
}
/// Attempts to turn a string slice key from the user into a Nostr-Sdk Keypair object.
/// The `key` can be in any of the following formats:
/// - Public Bech32 key (prefix "npub"): "npub1xyz..."
/// - Private Bech32 key (prefix "nsec"): "nsec1xyz..."
/// - Public hex key: "02a1..."
/// - Private hex key: "5dab..."
/// - NIP-05 address: "example@nostr.com"
///
pub async fn get_key(key: &str) -> Result<Keypair, AcquireKeyError> {
    // A single leading '@' (e.g. "@user@host") is tolerated and dropped.
    let tmp_key = key.strip_prefix('@').unwrap_or(key);
    if retrieving_nip05_pubkey(tmp_key) {
        return match get_nip05_pubkey(tmp_key).await {
            Ok(pubkey) => Ok(Keypair::only_pubkey(pubkey)),
            Err(e) => Err(AcquireKeyError::Nip05Failed(e.to_string())),
        };
    }
    // Try the remaining formats in turn: bech32 pubkey, hex pubkey, secret key.
    if let Ok(pubkey) = Pubkey::try_from_bech32_string(tmp_key, true) {
        Ok(Keypair::only_pubkey(pubkey))
    } else if let Ok(pubkey) = Pubkey::try_from_hex_str_with_verify(tmp_key) {
        Ok(Keypair::only_pubkey(pubkey))
    } else if let Ok(secret_key) = SecretKey::from_str(tmp_key) {
        Ok(Keypair::from_secret(secret_key))
    } else {
        Err(AcquireKeyError::InvalidKey)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::promise_assert;
    // npub input resolved directly (no promise involved)
    #[tokio::test]
    async fn test_pubkey_async() {
        let pubkey_str = "npub1xtscya34g58tk0z605fvr788k263gsu6cy9x0mhnm87echrgufzsevkk5s";
        let expected_pubkey =
            Pubkey::try_from_bech32_string(pubkey_str, false).expect("Should not have errored.");
        let login_key_result = get_key(pubkey_str).await;
        assert_eq!(Ok(Keypair::only_pubkey(expected_pubkey)), login_key_result);
    }
    // npub input resolved via perform_key_retrieval's promise
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn test_pubkey() {
        let pubkey_str = "npub1xtscya34g58tk0z605fvr788k263gsu6cy9x0mhnm87echrgufzsevkk5s";
        let expected_pubkey =
            Pubkey::try_from_bech32_string(pubkey_str, false).expect("Should not have errored.");
        let login_key_result = perform_key_retrieval(pubkey_str);
        promise_assert!(
            assert_eq,
            Ok(Keypair::only_pubkey(expected_pubkey)),
            &login_key_result
        );
    }
    // hex pubkey input
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn test_hex_pubkey() {
        let pubkey_str = "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245";
        let expected_pubkey = Pubkey::from_hex(pubkey_str).expect("Should not have errored.");
        let login_key_result = perform_key_retrieval(pubkey_str);
        promise_assert!(
            assert_eq,
            Ok(Keypair::only_pubkey(expected_pubkey)),
            &login_key_result
        );
    }
    // nsec (bech32 secret key) input yields a full keypair
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn test_privkey() {
        let privkey_str = "nsec1g8wt3hlwjpa4827xylr3r0lccufxltyekhraexes8lqmpp2hensq5aujhs";
        let expected_privkey = SecretKey::from_str(privkey_str).expect("Should not have errored.");
        let login_key_result = perform_key_retrieval(privkey_str);
        promise_assert!(
            assert_eq,
            Ok(Keypair::from_secret(expected_privkey)),
            &login_key_result
        );
    }
    // hex secret key input yields a full keypair
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn test_hex_privkey() {
        let privkey_str = "41dcb8dfee907b53abc627c711bff8c7126fac99b5c7dc9b303fc1b08557cce0";
        let expected_privkey = SecretKey::from_str(privkey_str).expect("Should not have errored.");
        let login_key_result = perform_key_retrieval(privkey_str);
        promise_assert!(
            assert_eq,
            Ok(Keypair::from_secret(expected_privkey)),
            &login_key_result
        );
    }
    // NOTE(review): this test performs a live HTTPS fetch to damus.io and
    // will fail without network access — consider gating or mocking it.
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn test_nip05() {
        let nip05_str = "damus@damus.io";
        let expected_pubkey = Pubkey::try_from_bech32_string(
            "npub18m76awca3y37hkvuneavuw6pjj4525fw90necxmadrvjg0sdy6qsngq955",
            false,
        )
        .expect("Should not have errored.");
        let login_key_result = perform_key_retrieval(nip05_str);
        promise_assert!(
            assert_eq,
            Ok(Keypair::only_pubkey(expected_pubkey)),
            &login_key_result
        );
    }
}

View File

@@ -0,0 +1,168 @@
mod app;
//mod camera;
mod error;
//mod note;
//mod block;
mod abbrev;
pub mod accounts;
mod actionbar;
pub mod app_creation;
mod app_size_handler;
mod app_style;
mod args;
mod colors;
mod column;
mod deck_state;
mod decks;
mod draft;
mod filter;
mod fonts;
mod frame_history;
mod images;
mod imgcache;
mod key_parsing;
pub mod login_manager;
mod multi_subscriber;
mod muted;
mod nav;
mod note;
mod notecache;
mod notes_holder;
mod post;
mod profile;
pub mod relay_pool_manager;
mod result;
mod route;
mod subscriptions;
mod support;
mod test_data;
mod thread;
mod time;
mod timecache;
mod timeline;
pub mod ui;
mod unknowns;
mod user_account;
mod view_state;
#[cfg(test)]
#[macro_use]
mod test_utils;
pub mod storage;
pub use app::Damus;
pub use error::Error;
pub use profile::DisplayName;
#[cfg(target_os = "android")]
use winit::platform::android::EventLoopBuilderExtAndroid;
pub type Result<T> = std::result::Result<T, error::Error>;
//#[cfg(target_os = "android")]
//use egui_android::run_android;
#[cfg(target_os = "android")]
use winit::platform::android::activity::AndroidApp;
/// Android entry point (exported unmangled for the NativeActivity glue).
///
/// Sets up logging and backtraces, wires the winit event loop to the
/// [`AndroidApp`] handle, reads CLI-style args from android-config.json
/// (see `get_app_args`), and launches eframe with the wgpu renderer.
#[cfg(target_os = "android")]
#[no_mangle]
#[tokio::main]
pub async fn android_main(app: AndroidApp) {
    std::env::set_var("RUST_BACKTRACE", "full");
    android_logger::init_once(android_logger::Config::default().with_min_level(log::Level::Info));
    let path = app.internal_data_path().expect("data path");
    let mut options = eframe::NativeOptions::default();
    options.renderer = eframe::Renderer::Wgpu;
    // Clone `app` to use it both in the closure and later in the function
    let app_clone_for_event_loop = app.clone();
    options.event_loop_builder = Some(Box::new(move |builder| {
        builder.with_android_app(app_clone_for_event_loop);
    }));
    let app_args = get_app_args(app);
    // the run result is deliberately discarded; there is no caller to report to
    let _res = eframe::run_native(
        "Damus Notedeck",
        options,
        Box::new(move |cc| Ok(Box::new(Damus::new(&cc.egui_ctx, path, app_args)))),
    );
}
#[cfg(target_os = "android")]
use serde_json::Value;
#[cfg(target_os = "android")]
use std::fs;
#[cfg(target_os = "android")]
use std::path::PathBuf;
/*
Read args from a config file:
- allows use of more interesting args w/o risk of checking them in by mistake
- allows use of different args w/o rebuilding the app
- uses compiled in defaults if config file missing or broken
Example android-config.json:
```
{
"args": [
"--npub",
"npub1h50pnxqw9jg7dhr906fvy4mze2yzawf895jhnc3p7qmljdugm6gsrurqev",
"-c",
"contacts",
"-c",
"notifications"
]
}
```
Install/update android-config.json with:
```
adb push android-config.json /sdcard/Android/data/com.damus.app/files/android-config.json
```
Using internal storage would be better but it seems hard to get the config file onto
the device ...
*/
/// Return the effective CLI-style args on Android: the `args` array from
/// android-config.json in external storage if present and valid, otherwise
/// the compiled-in defaults (see the comment block above for the format).
#[cfg(target_os = "android")]
fn get_app_args(app: AndroidApp) -> Vec<String> {
    let external_data_path: PathBuf = app
        .external_data_path()
        .expect("external data path")
        .to_path_buf();
    let config_file = external_data_path.join("android-config.json");
    // NOTE(review): defaults use "--pub" (hex key) while the example config
    // above uses "--npub" (bech32) — confirm both flags are accepted by the
    // args parser
    let default_args = vec![
        "--pub",
        "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245",
        "-c",
        "contacts",
        "-c",
        "notifications",
        "-c",
        "notifications:3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681",
    ]
    .into_iter()
    .map(|s| s.to_string())
    .collect();
    // any failure along the way (missing file, bad JSON, no "args" array)
    // silently falls through to the defaults
    if config_file.exists() {
        if let Ok(config_contents) = fs::read_to_string(config_file) {
            if let Ok(json) = serde_json::from_str::<Value>(&config_contents) {
                if let Some(args_array) = json.get("args").and_then(|v| v.as_array()) {
                    let config_args = args_array
                        .iter()
                        .filter_map(|v| v.as_str().map(String::from))
                        .collect();
                    return config_args;
                }
            }
        }
    }
    default_args // Return the default args if config is missing or invalid
}

View File

@@ -0,0 +1,146 @@
use crate::key_parsing::perform_key_retrieval;
use crate::key_parsing::AcquireKeyError;
use egui::{TextBuffer, TextEdit};
use enostr::Keypair;
use poll_promise::Promise;
/// The state data for acquiring a nostr key
#[derive(Default)]
pub struct AcquireKeyState {
    /// the key text the user has typed so far
    desired_key: String,
    /// in-flight lookup: the key it was issued for plus its pending result
    promise_query: Option<(String, Promise<Result<Keypair, AcquireKeyError>>)>,
    /// last failure, surfaced until the user edits the offending key
    error: Option<AcquireKeyError>,
    /// the key text that produced `error`, used to clear stale errors
    key_on_error: Option<String>,
    /// flag set by `should_create_new()`, read by `check_for_create_new()`
    should_create_new: bool,
}
// The lifetime now lives only on the one method that needs it; the previous
// `impl<'a>` with `&'a mut self` receivers tied every call to a single
// borrow region, over-restricting callers.
impl AcquireKeyState {
    /// Fresh state with no pending lookup, error, or input.
    pub fn new() -> Self {
        AcquireKeyState::default()
    }

    /// Get the textedit for the UI without exposing the key variable
    pub fn get_acquire_textedit<'a>(
        &'a mut self,
        textedit_closure: fn(&'a mut dyn TextBuffer) -> TextEdit<'a>,
    ) -> TextEdit<'a> {
        textedit_closure(&mut self.desired_key)
    }

    /// User pressed the 'acquire' button
    pub fn apply_acquire(&mut self) {
        // Only issue a new lookup when the key changed since the last one;
        // otherwise keep the in-flight promise untouched.
        let new_promise = match &self.promise_query {
            Some((query, _)) => {
                if query != &self.desired_key {
                    Some(perform_key_retrieval(&self.desired_key))
                } else {
                    None
                }
            }
            None => Some(perform_key_retrieval(&self.desired_key)),
        };
        if let Some(new_promise) = new_promise {
            self.promise_query = Some((self.desired_key.clone(), new_promise));
        }
    }

    /// Whether to indicate to the user that there is a network operation occurring
    pub fn is_awaiting_network(&self) -> bool {
        self.promise_query.is_some()
    }

    /// Whether to indicate to the user that a login error occurred
    pub fn check_for_error(&mut self) -> Option<&AcquireKeyError> {
        // Drop the error as soon as the user edits the key that caused it.
        if let Some(error_key) = &self.key_on_error {
            if self.desired_key != *error_key {
                self.error = None;
                self.key_on_error = None;
            }
        }
        self.error.as_ref()
    }

    /// Whether to indicate to the user that a successful login occurred
    pub fn check_for_successful_login(&mut self) -> Option<Keypair> {
        if let Some((_, promise)) = &mut self.promise_query {
            if promise.ready().is_some() {
                // Resolved: consume the promise and surface its result.
                if let Some((_, promise)) = self.promise_query.take() {
                    match promise.block_and_take() {
                        Ok(key) => {
                            return Some(key);
                        }
                        Err(e) => {
                            self.error = Some(e);
                            self.key_on_error = Some(self.desired_key.clone());
                        }
                    };
                }
            }
        }
        None
    }

    /// Record that the user asked to create a brand-new key instead.
    pub fn should_create_new(&mut self) {
        self.should_create_new = true;
    }

    /// Whether the user asked to create a brand-new key.
    pub fn check_for_create_new(&self) -> bool {
        self.should_create_new
    }
}
#[cfg(test)]
mod tests {
    use enostr::Pubkey;
    use super::*;
    use std::time::{Duration, Instant};
    // Drives the state machine like the UI would: type several candidate
    // keys over ~50ms, pressing 'acquire' after each edit, and expect the
    // final (valid hex) key to win.
    // NOTE(review): this is wall-clock based and could flake on a heavily
    // loaded machine if the final poll window is missed.
    #[tokio::test(flavor = "multi_thread", worker_threads = 2)]
    async fn test_retrieve_key() {
        let mut manager = AcquireKeyState::new();
        let expected_str = "3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681";
        let expected_key = Keypair::only_pubkey(Pubkey::from_hex(expected_str).unwrap());
        let start_time = Instant::now();
        while start_time.elapsed() < Duration::from_millis(50u64) {
            let cur_time = start_time.elapsed();
            if cur_time < Duration::from_millis(10u64) {
                // first window: an invalid key
                let _ = manager.get_acquire_textedit(|text| {
                    text.clear();
                    text.insert_text("test", 0);
                    egui::TextEdit::singleline(text)
                });
                manager.apply_acquire();
            } else if cur_time < Duration::from_millis(30u64) {
                // second window: a different invalid key (supersedes the first)
                let _ = manager.get_acquire_textedit(|text| {
                    text.clear();
                    text.insert_text("test2", 0);
                    egui::TextEdit::singleline(text)
                });
                manager.apply_acquire();
            } else {
                // final window: the valid hex pubkey
                let _ = manager.get_acquire_textedit(|text| {
                    text.clear();
                    text.insert_text(
                        "3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681",
                        0,
                    );
                    egui::TextEdit::singleline(text)
                });
                manager.apply_acquire();
            }
            if let Some(key) = manager.check_for_successful_login() {
                assert_eq!(expected_key, key);
                return;
            }
        }
        panic!("Test failed to get expected key.");
    }
}

View File

@@ -0,0 +1,142 @@
use enostr::{Filter, RelayPool};
use nostrdb::{Ndb, Note, Transaction};
use tracing::{debug, error, info};
use uuid::Uuid;
use crate::{filter::UnifiedSubscription, muted::MuteFun, note::NoteRef, Error};
/// Reference-counts a combined local (nostrdb) + remote (relay pool)
/// subscription so multiple consumers can share a single underlying pair.
pub struct MultiSubscriber {
    /// filters used for both the local and remote subscriptions
    filters: Vec<Filter>,
    /// the active subscription pair, present while subscribers > 0
    sub: Option<UnifiedSubscription>,
    /// live subscriber count; the real subscription is torn down at zero
    subscribers: u32,
}
impl MultiSubscriber {
    /// Create a subscriber for `filters`; nothing is subscribed until the
    /// first call to [`Self::subscribe`].
    pub fn new(filters: Vec<Filter>) -> Self {
        Self {
            filters,
            sub: None,
            subscribers: 0,
        }
    }

    /// Create the underlying local (ndb) + remote (relay pool) subscription
    /// pair, returning `None` if the local subscription fails.
    fn real_subscribe(
        ndb: &Ndb,
        pool: &mut RelayPool,
        filters: Vec<Filter>,
    ) -> Option<UnifiedSubscription> {
        let subid = Uuid::new_v4().to_string();
        let sub = ndb.subscribe(&filters).ok()?;
        pool.subscribe(subid.clone(), filters);
        Some(UnifiedSubscription {
            local: sub,
            remote: subid,
        })
    }

    /// Drop one subscriber; when the last one goes away the local and
    /// remote subscriptions are torn down.
    pub fn unsubscribe(&mut self, ndb: &Ndb, pool: &mut RelayPool) {
        if self.subscribers == 0 {
            error!("No subscribers to unsubscribe from");
            return;
        }
        self.subscribers -= 1;
        if self.subscribers == 0 {
            let sub = match self.sub {
                Some(ref sub) => sub,
                None => {
                    error!("No remote subscription to unsubscribe from");
                    return;
                }
            };
            let local_sub = &sub.local;
            if let Err(e) = ndb.unsubscribe(*local_sub) {
                error!(
                    "failed to unsubscribe from object: {e}, subid:{}, {} active subscriptions",
                    local_sub.id(),
                    ndb.subscription_count()
                );
            } else {
                info!(
                    "Unsubscribed from object subid:{}. {} active subscriptions",
                    local_sub.id(),
                    ndb.subscription_count()
                );
            }
            // unsub from remote
            pool.unsubscribe(sub.remote.clone());
            self.sub = None;
        } else {
            info!(
                "Locally unsubscribing. {} active ndb subscriptions. {} active subscriptions for this object",
                ndb.subscription_count(),
                self.subscribers,
            );
        }
    }

    /// Add one subscriber; the first subscriber triggers the real local +
    /// remote subscription.
    pub fn subscribe(&mut self, ndb: &Ndb, pool: &mut RelayPool) {
        self.subscribers += 1;
        if self.subscribers == 1 {
            if self.sub.is_some() {
                error!("Object is first subscriber, but it already had remote subscription");
                return;
            }
            self.sub = Self::real_subscribe(ndb, pool, self.filters.clone());
            info!(
                "Remotely subscribing to object. {} total active subscriptions, {} on this object",
                ndb.subscription_count(),
                self.subscribers,
            );
            if self.sub.is_none() {
                error!("Error subscribing remotely to object");
            }
        } else {
            info!(
                "Locally subscribing. {} total active subscriptions, {} for this object",
                ndb.subscription_count(),
                self.subscribers,
            )
        }
    }

    /// Drain up to 500 newly-arrived notes from the local subscription,
    /// skipping notes the mute filter rejects and notes that fail lookup.
    ///
    /// # Errors
    /// Returns an error when there is no active subscription.
    pub fn poll_for_notes(
        &mut self,
        ndb: &Ndb,
        txn: &Transaction,
        is_muted: &MuteFun,
    ) -> Result<Vec<NoteRef>, Error> {
        let sub = self.sub.as_ref().ok_or(Error::no_active_sub())?;
        let new_note_keys = ndb.poll_for_notes(sub.local, 500);
        if new_note_keys.is_empty() {
            return Ok(vec![]);
        }
        debug!("{} new notes! {:?}", new_note_keys.len(), new_note_keys);
        // single pass: look up each key, drop failures and muted notes
        let note_refs: Vec<NoteRef> = new_note_keys
            .into_iter()
            .filter_map(|key| {
                let note = ndb.get_note_by_key(txn, key).ok()?;
                if is_muted(&note) {
                    return None;
                }
                Some(NoteRef::from_note(&note))
            })
            .collect();
        Ok(note_refs)
    }
}

View File

@@ -0,0 +1,61 @@
use nostrdb::Note;
use std::collections::BTreeSet;
use tracing::debug;
/// Predicate deciding whether a note should be hidden.
pub type MuteFun = dyn Fn(&Note) -> bool;
/// The user's mute lists. Only pubkey muting is enforced today; the other
/// sets are parsed but not yet checked (see `is_muted`).
#[derive(Default)]
pub struct Muted {
    // TODO - implement private mutes
    /// muted author pubkeys (32 raw bytes each)
    pub pubkeys: BTreeSet<[u8; 32]>,
    /// muted hashtags — not yet enforced in `is_muted`
    pub hashtags: BTreeSet<String>,
    /// muted words — not yet enforced in `is_muted`
    pub words: BTreeSet<String>,
    /// muted thread root ids — not yet enforced in `is_muted`
    pub threads: BTreeSet<[u8; 32]>,
}
// Manual Debug so the 32-byte ids render as hex strings instead of
// arrays of decimal bytes.
impl std::fmt::Debug for Muted {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("Muted")
            .field(
                "pubkeys",
                &self.pubkeys.iter().map(hex::encode).collect::<Vec<_>>(),
            )
            .field("hashtags", &self.hashtags)
            .field("words", &self.words)
            .field(
                "threads",
                &self.threads.iter().map(hex::encode).collect::<Vec<_>>(),
            )
            .finish()
    }
}
impl Muted {
    /// True if `note` should be hidden. Currently only author-pubkey mutes
    /// are applied; hashtag/word/thread muting is deliberately deferred
    /// (see the FIXME/TODO notes below).
    pub fn is_muted(&self, note: &Note) -> bool {
        if self.pubkeys.contains(note.pubkey()) {
            debug!(
                "{}: MUTED pubkey: {}",
                hex::encode(note.id()),
                hex::encode(note.pubkey())
            );
            return true;
        }
        // FIXME - Implement hashtag muting here
        // TODO - let's not add this for now, we will likely need to
        // have an optimized data structure in nostrdb to properly
        // mute words. this mutes substrings which is not ideal.
        //
        // let content = note.content().to_lowercase();
        // for word in &self.words {
        //     if content.contains(&word.to_lowercase()) {
        //         debug!("{}: MUTED word: {}", hex::encode(note.id()), word);
        //         return true;
        //     }
        // }
        // FIXME - Implement thread muting here
        false
    }
}

View File

@@ -0,0 +1,389 @@
use crate::{
accounts::{render_accounts_route, AccountsAction},
actionbar::NoteAction,
app::{get_active_columns, get_active_columns_mut, get_decks_mut},
column::ColumnsAction,
deck_state::DeckState,
decks::{Deck, DecksAction},
notes_holder::NotesHolder,
profile::Profile,
relay_pool_manager::RelayPoolManager,
route::Route,
thread::Thread,
timeline::{
route::{render_timeline_route, TimelineRoute},
Timeline,
},
ui::{
self,
add_column::render_add_column_routes,
column::NavTitle,
configure_deck::ConfigureDeckView,
edit_deck::{EditDeckResponse, EditDeckView},
note::{PostAction, PostType},
support::SupportView,
RelayView, View,
},
Damus,
};
use egui_nav::{Nav, NavAction, NavResponse, NavUiType};
use nostrdb::{Ndb, Transaction};
use tracing::{error, info};
/// Actions produced while rendering a column's navigation UI; processed in
/// `RenderNavResponse::process_render_nav_response`.
#[allow(clippy::enum_variant_names)]
pub enum RenderNavAction {
    /// navigate back in the current column's router
    Back,
    /// remove the current column (and unsubscribe its timeline)
    RemoveColumn,
    /// a composed post to execute, then navigate back
    PostAction(PostAction),
    /// an interaction with a rendered note
    NoteAction(NoteAction),
    /// account/column/deck change; caller must save columns when processed
    SwitchingAction(SwitchingAction),
}
pub enum SwitchingAction {
Accounts(AccountsAction),
Columns(ColumnsAction),
Decks(crate::decks::DecksAction),
}
impl SwitchingAction {
    /// process the action, and return whether switching occurred
    pub fn process(&self, app: &mut Damus) -> bool {
        // `self` is already a reference; the previous `match &self` added
        // a redundant level of indirection
        match self {
            SwitchingAction::Accounts(account_action) => match *account_action {
                AccountsAction::Switch(index) => app.accounts.select_account(index),
                AccountsAction::Remove(index) => app.accounts.remove_account(index),
            },
            SwitchingAction::Columns(columns_action) => match *columns_action {
                ColumnsAction::Remove(index) => {
                    get_active_columns_mut(&app.accounts, &mut app.decks_cache).delete_column(index)
                }
            },
            SwitchingAction::Decks(decks_action) => match *decks_action {
                DecksAction::Switch(index) => {
                    get_decks_mut(&app.accounts, &mut app.decks_cache).set_active(index)
                }
                DecksAction::Removing(index) => {
                    get_decks_mut(&app.accounts, &mut app.decks_cache).remove_deck(index)
                }
            },
        }
        // every variant currently results in a switch
        true
    }
}
impl From<PostAction> for RenderNavAction {
fn from(post_action: PostAction) -> Self {
Self::PostAction(post_action)
}
}
impl From<NoteAction> for RenderNavAction {
    /// Wrap a note action so it can flow through the nav response.
    // return type spelled `Self` for consistency with the sibling
    // From<PostAction> impl above
    fn from(note_action: NoteAction) -> Self {
        Self::NoteAction(note_action)
    }
}
/// Nav response whose per-frame payload may carry a [`RenderNavAction`].
pub type NotedeckNavResponse = NavResponse<Option<RenderNavAction>>;
/// The result of rendering one column's navigation UI for a frame.
pub struct RenderNavResponse {
    /// index of the column this response belongs to
    column: usize,
    response: NotedeckNavResponse,
}
impl RenderNavResponse {
    #[allow(private_interfaces)]
    pub fn new(column: usize, response: NotedeckNavResponse) -> Self {
        RenderNavResponse { column, response }
    }
    /// Apply any action carried by this frame's nav response to `app`.
    /// Returns true when the active account/column/deck arrangement changed
    /// (the caller must then persist columns — see the must_use message).
    #[must_use = "Make sure to save columns if result is true"]
    pub fn process_render_nav_response(&self, app: &mut Damus) -> bool {
        let mut switching_occured: bool = false;
        let col = self.column;
        // prefer the body's action; fall back to the title bar's action
        if let Some(action) = self
            .response
            .response
            .as_ref()
            .or(self.response.title_response.as_ref())
        {
            // start returning when we're finished posting
            match action {
                RenderNavAction::Back => {
                    app.columns_mut().column_mut(col).router_mut().go_back();
                }
                RenderNavAction::RemoveColumn => {
                    // drop the column's timeline subscription before deleting it
                    let tl = app.columns().find_timeline_for_column_index(col);
                    if let Some(timeline) = tl {
                        unsubscribe_timeline(app.ndb(), timeline);
                    }
                    app.columns_mut().delete_column(col);
                    switching_occured = true;
                }
                RenderNavAction::PostAction(post_action) => {
                    let txn = Transaction::new(&app.ndb).expect("txn");
                    // post execution errors are deliberately ignored here
                    let _ = post_action.execute(&app.ndb, &txn, &mut app.pool, &mut app.drafts);
                    get_active_columns_mut(&app.accounts, &mut app.decks_cache)
                        .column_mut(col)
                        .router_mut()
                        .go_back();
                }
                RenderNavAction::NoteAction(note_action) => {
                    let txn = Transaction::new(&app.ndb).expect("txn");
                    note_action.execute_and_process_result(
                        &app.ndb,
                        get_active_columns_mut(&app.accounts, &mut app.decks_cache),
                        col,
                        &mut app.threads,
                        &mut app.profiles,
                        &mut app.note_cache,
                        &mut app.pool,
                        &txn,
                        &app.accounts.mutefun(),
                    );
                }
                RenderNavAction::SwitchingAction(switching_action) => {
                    switching_occured = switching_action.process(app);
                }
            }
        }
        // react to the nav transition itself (back-navigation / forward-navigation)
        if let Some(action) = self.response.action {
            match action {
                NavAction::Returned => {
                    let r = app.columns_mut().column_mut(col).router_mut().pop();
                    let txn = Transaction::new(&app.ndb).expect("txn");
                    // leaving a thread view: drop its local subscription
                    if let Some(Route::Timeline(TimelineRoute::Thread(id))) = r {
                        let root_id = {
                            crate::note::root_note_id_from_selected_id(
                                &app.ndb,
                                &mut app.note_cache,
                                &txn,
                                id.bytes(),
                            )
                        };
                        Thread::unsubscribe_locally(
                            &txn,
                            &app.ndb,
                            &mut app.note_cache,
                            &mut app.threads,
                            &mut app.pool,
                            root_id,
                            &app.accounts.mutefun(),
                        );
                    }
                    // leaving a profile view: drop its local subscription
                    if let Some(Route::Timeline(TimelineRoute::Profile(pubkey))) = r {
                        Profile::unsubscribe_locally(
                            &txn,
                            &app.ndb,
                            &mut app.note_cache,
                            &mut app.profiles,
                            &mut app.pool,
                            pubkey.bytes(),
                            &app.accounts.mutefun(),
                        );
                    }
                    switching_occured = true;
                }
                NavAction::Navigated => {
                    let cur_router = app.columns_mut().column_mut(col).router_mut();
                    cur_router.navigating = false;
                    if cur_router.is_replacing() {
                        cur_router.remove_previous_routes();
                    }
                    switching_occured = true;
                }
                // transitional states need no handling
                NavAction::Dragging => {}
                NavAction::Returning => {}
                NavAction::Resetting => {}
                NavAction::Navigating => {}
            }
        }
        switching_occured
    }
}
/// Render the body of the top route of column `col`, returning any action
/// the user took that must be processed this frame.
fn render_nav_body(
    ui: &mut egui::Ui,
    app: &mut Damus,
    top: &Route,
    col: usize,
) -> Option<RenderNavAction> {
    match top {
        Route::Timeline(tlr) => render_timeline_route(
            &app.ndb,
            get_active_columns_mut(&app.accounts, &mut app.decks_cache),
            &mut app.drafts,
            &mut app.img_cache,
            &mut app.unknown_ids,
            &mut app.note_cache,
            &mut app.threads,
            &mut app.profiles,
            &mut app.accounts,
            *tlr,
            col,
            app.textmode,
            ui,
        ),
        Route::Accounts(amr) => {
            let mut action = render_accounts_route(
                ui,
                &app.ndb,
                col,
                &mut app.img_cache,
                &mut app.accounts,
                &mut app.decks_cache,
                &mut app.view_state.login,
                *amr,
            );
            let txn = Transaction::new(&app.ndb).expect("txn");
            action.process_action(&mut app.unknown_ids, &app.ndb, &txn);
            action
                .accounts_action
                .map(|f| RenderNavAction::SwitchingAction(SwitchingAction::Accounts(f)))
        }
        Route::Relays => {
            let manager = RelayPoolManager::new(app.pool_mut());
            RelayView::new(manager).ui(ui);
            None
        }
        Route::ComposeNote => {
            // composing requires a full keypair; bail out otherwise
            let kp = app.accounts.get_selected_account()?.to_full()?;
            let draft = app.drafts.compose_mut();
            let txn = Transaction::new(&app.ndb).expect("txn");
            let post_response = ui::PostView::new(
                &app.ndb,
                draft,
                PostType::New,
                &mut app.img_cache,
                &mut app.note_cache,
                kp,
            )
            .ui(&txn, ui);
            post_response.action.map(Into::into)
        }
        Route::AddColumn(route) => {
            render_add_column_routes(ui, app, col, route);
            None
        }
        Route::Support => {
            SupportView::new(&mut app.support).show(ui);
            None
        }
        Route::NewDeck => {
            let id = ui.id().with("new-deck");
            let new_deck_state = app.view_state.id_to_deck_state.entry(id).or_default();
            let mut resp = None;
            if let Some(config_resp) = ConfigureDeckView::new(new_deck_state).ui(ui) {
                if let Some(cur_acc) = app.accounts.get_selected_account() {
                    app.decks_cache.add_deck(
                        cur_acc.pubkey,
                        Deck::new(config_resp.icon, config_resp.name),
                    );
                    // set new deck as active
                    let cur_index = get_decks_mut(&app.accounts, &mut app.decks_cache)
                        .decks()
                        .len()
                        - 1;
                    resp = Some(RenderNavAction::SwitchingAction(SwitchingAction::Decks(
                        DecksAction::Switch(cur_index),
                    )));
                }
                // reset the form and leave the NewDeck route
                new_deck_state.clear();
                get_active_columns_mut(&app.accounts, &mut app.decks_cache)
                    .get_first_router()
                    .go_back();
            }
            resp
        }
        Route::EditDeck(index) => {
            let mut action = None;
            let cur_deck = get_decks_mut(&app.accounts, &mut app.decks_cache)
                .decks_mut()
                .get_mut(*index)
                .expect("index wasn't valid");
            // form state is keyed by account + deck index so edits don't leak
            // across decks/accounts
            let id = ui.id().with((
                "edit-deck",
                app.accounts.get_selected_account().map(|k| k.pubkey),
                index,
            ));
            let deck_state = app
                .view_state
                .id_to_deck_state
                .entry(id)
                .or_insert_with(|| DeckState::from_deck(cur_deck));
            if let Some(resp) = EditDeckView::new(deck_state).ui(ui) {
                match resp {
                    EditDeckResponse::Edit(configure_deck_response) => {
                        cur_deck.edit(configure_deck_response);
                    }
                    EditDeckResponse::Delete => {
                        action = Some(RenderNavAction::SwitchingAction(SwitchingAction::Decks(
                            DecksAction::Removing(*index),
                        )));
                    }
                }
                get_active_columns_mut(&app.accounts, &mut app.decks_cache)
                    .get_first_router()
                    .go_back();
            }
            action
        }
    }
}
/// Render column `col`'s navigation stack (title bar + body) and return the
/// response, which the caller must feed to `process_render_nav_response`.
#[must_use = "RenderNavResponse must be handled by calling .process_render_nav_response(..)"]
pub fn render_nav(col: usize, app: &mut Damus, ui: &mut egui::Ui) -> RenderNavResponse {
    let col_id = get_active_columns(&app.accounts, &app.decks_cache).get_column_id_at_index(col);
    // TODO(jb55): clean up this router_mut mess by using Router<R> in egui-nav directly
    let nav_response = Nav::new(&app.columns().column(col).router().routes().clone())
        .navigating(app.columns_mut().column_mut(col).router_mut().navigating)
        .returning(app.columns_mut().column_mut(col).router_mut().returning)
        .id_source(egui::Id::new(col_id))
        .show_mut(ui, |ui, render_type, nav| match render_type {
            NavUiType::Title => NavTitle::new(
                &app.ndb,
                &mut app.img_cache,
                get_active_columns_mut(&app.accounts, &mut app.decks_cache),
                app.accounts.get_selected_account().map(|a| &a.pubkey),
                nav.routes(),
            )
            .show(ui),
            NavUiType::Body => render_nav_body(ui, app, nav.routes().last().expect("top"), col),
        });
    RenderNavResponse::new(col, nav_response)
}
fn unsubscribe_timeline(ndb: &Ndb, timeline: &Timeline) {
if let Some(sub_id) = timeline.subscription {
if let Err(e) = ndb.unsubscribe(sub_id) {
error!("unsubscribe error: {}", e);
} else {
info!(
"successfully unsubscribed from timeline {} with sub id {}",
timeline.id,
sub_id.id()
);
}
}
}

View File

@@ -0,0 +1,73 @@
use crate::notecache::NoteCache;
use nostrdb::{Ndb, Note, NoteKey, QueryResult, Transaction};
use std::cmp::Ordering;
/// A lightweight handle to a note: its ndb key plus its creation time,
/// ordered newest-first (see the Ord impl below).
#[derive(Debug, Eq, PartialEq, Copy, Clone)]
pub struct NoteRef {
    /// nostrdb key of the note
    pub key: NoteKey,
    /// unix timestamp (seconds) the note was created
    pub created_at: u64,
}
impl NoteRef {
pub fn new(key: NoteKey, created_at: u64) -> Self {
NoteRef { key, created_at }
}
pub fn from_note(note: &Note<'_>) -> Self {
let created_at = note.created_at();
let key = note.key().expect("todo: implement NoteBuf");
NoteRef::new(key, created_at)
}
pub fn from_query_result(qr: QueryResult<'_>) -> Self {
NoteRef {
key: qr.note_key,
created_at: qr.note.created_at(),
}
}
}
// NoteRefs sort newest-first: a larger `created_at` compares as Less so
// sorted collections iterate in reverse-chronological order. Timestamp ties
// are broken by note key to keep the ordering total and consistent with Eq.
impl Ord for NoteRef {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.created_at.cmp(&other.created_at) {
            Ordering::Equal => self.key.cmp(&other.key),
            Ordering::Less => Ordering::Greater,
            Ordering::Greater => Ordering::Less,
        }
    }
}
impl PartialOrd for NoteRef {
    // delegate to the total ordering defined by Ord
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Resolve the thread-root id for a selected note.
///
/// Falls back to `selected_note_id` itself whenever the note can't be
/// looked up or it has no reply-root tag (i.e. it already is a root).
pub fn root_note_id_from_selected_id<'a>(
    ndb: &Ndb,
    note_cache: &mut NoteCache,
    txn: &'a Transaction,
    selected_note_id: &'a [u8; 32],
) -> &'a [u8; 32] {
    let selected_note_key = match ndb.get_notekey_by_id(txn, selected_note_id) {
        Ok(raw_key) => NoteKey::new(raw_key),
        Err(_) => return selected_note_id,
    };
    let note = match ndb.get_note_by_key(txn, selected_note_key) {
        Ok(note) => note,
        Err(_) => return selected_note_id,
    };
    note_cache
        .cached_note_or_insert(selected_note_key, &note)
        .reply
        .borrow(note.tags())
        .root()
        .map_or(selected_note_id, |nr| nr.id)
}

View File

@@ -0,0 +1,58 @@
use crate::time::time_ago_since;
use crate::timecache::TimeCached;
use nostrdb::{Note, NoteKey, NoteReply, NoteReplyBuf};
use std::collections::HashMap;
use std::time::Duration;
/// Per-note derived data, cached by note key (see [`CachedNote`]).
#[derive(Default)]
pub struct NoteCache {
    /// note key -> cached derived data
    pub cache: HashMap<NoteKey, CachedNote>,
}
impl NoteCache {
    /// Get (or lazily create) the cached entry for `note_key`, mutably.
    pub fn cached_note_or_insert_mut(&mut self, note_key: NoteKey, note: &Note) -> &mut CachedNote {
        self.cache
            .entry(note_key)
            .or_insert_with(|| CachedNote::new(note))
    }

    /// Look up a cached entry without inserting.
    pub fn cached_note(&self, note_key: NoteKey) -> Option<&CachedNote> {
        self.cache.get(&note_key)
    }

    /// Mutable access to the underlying map.
    pub fn cache_mut(&mut self) -> &mut HashMap<NoteKey, CachedNote> {
        &mut self.cache
    }

    /// Get (or lazily create) the cached entry for `note_key`.
    ///
    /// Delegates to [`Self::cached_note_or_insert_mut`] so the insert logic
    /// lives in one place; the `&mut` result coerces to `&`.
    pub fn cached_note_or_insert(&mut self, note_key: NoteKey, note: &Note) -> &CachedNote {
        self.cached_note_or_insert_mut(note_key, note)
    }
}
/// Derived, per-note data worth caching between frames.
#[derive(Clone)]
pub struct CachedNote {
    /// relative-time string ("2m ago"), refreshed at most once per second
    reltime: TimeCached<String>,
    /// parsed reply tags of the note
    pub reply: NoteReplyBuf,
}
impl CachedNote {
    /// Build cache data for `note`: a lazily-refreshed relative-time string
    /// plus its parsed reply tags.
    pub fn new(note: &Note<'_>) -> Self {
        let created_at = note.created_at();
        let reltime = TimeCached::new(
            Duration::from_secs(1),
            Box::new(move || time_ago_since(created_at)),
        );
        let reply = NoteReply::new(note.tags()).to_owned();
        Self { reltime, reply }
    }

    /// Relative-time string, recomputing it if the cached value expired.
    pub fn reltime_str_mut(&mut self) -> &str {
        self.reltime.get_mut()
    }

    /// Relative-time string if still fresh, without recomputing.
    pub fn reltime_str(&self) -> Option<&str> {
        self.reltime.get().map(String::as_str)
    }
}

View File

@@ -0,0 +1,219 @@
use std::collections::HashMap;
use enostr::{Filter, RelayPool};
use nostrdb::{Ndb, Transaction};
use tracing::{debug, info, warn};
use crate::{
actionbar::NotesHolderResult, multi_subscriber::MultiSubscriber, muted::MuteFun, note::NoteRef,
notecache::NoteCache, timeline::TimelineTab, unknowns::NoteRefsUnkIdAction, Error, Result,
};
/// Maps a 32-byte id (note id or pubkey) to its `NotesHolder` instance.
pub struct NotesHolderStorage<M: NotesHolder> {
    pub id_to_object: HashMap<[u8; 32], M>,
}

impl<M: NotesHolder> Default for NotesHolderStorage<M> {
    fn default() -> Self {
        Self {
            id_to_object: HashMap::default(),
        }
    }
}
/// Result of a storage lookup: `Fresh` holders were just built from a
/// database query, `Stale` ones already existed and may need refreshing.
pub enum Vitality<'a, M> {
    Fresh(&'a mut M),
    Stale(&'a mut M),
}

impl<'a, M> Vitality<'a, M> {
    /// Unwrap to the underlying holder regardless of freshness.
    pub fn get_ptr(self) -> &'a mut M {
        match self {
            Self::Fresh(holder) | Self::Stale(holder) => holder,
        }
    }

    /// Did this holder already exist before the lookup?
    pub fn is_stale(&self) -> bool {
        matches!(self, Self::Stale(_))
    }
}
impl<M: NotesHolder> NotesHolderStorage<M> {
    /// Fetch a holder that is known to exist.
    ///
    /// # Panics
    /// Panics if `id` has no entry; only call after a successful lookup
    /// or insert.
    pub fn notes_holder_expected_mut(&mut self, id: &[u8; 32]) -> &mut M {
        self.id_to_object
            .get_mut(id)
            .expect("notes_holder_expected_mut used but there was no NotesHolder")
    }

    /// Get the holder for `id`, creating it from an ndb query (capped at
    /// 1000 results) on a miss. Returns `Vitality::Stale` for a
    /// pre-existing holder and `Vitality::Fresh` for one just built.
    pub fn notes_holder_mutated<'a>(
        &'a mut self,
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        txn: &Transaction,
        id: &[u8; 32],
        is_muted: &MuteFun,
    ) -> Vitality<'a, M> {
        // we can't use the naive hashmap entry API here because lookups
        // require a copy, wait until we have a raw entry api. We could
        // also use hashbrown?
        if self.id_to_object.contains_key(id) {
            return Vitality::Stale(self.notes_holder_expected_mut(id));
        }

        // we don't have the note holder, query for it!
        let filters = M::filters(id);

        let notes = if let Ok(results) = ndb.query(txn, &filters, 1000) {
            results
                .into_iter()
                .map(NoteRef::from_query_result)
                .collect()
        } else {
            debug!(
                "got no results from NotesHolder lookup for {}",
                hex::encode(id)
            );
            vec![]
        };

        if notes.is_empty() {
            warn!("NotesHolder query returned 0 notes? ")
        } else {
            info!("found NotesHolder with {} notes", notes.len());
        }

        self.id_to_object.insert(
            id.to_owned(),
            M::new_notes_holder(txn, ndb, note_cache, id, M::filters(id), notes, is_muted),
        );
        // just inserted above, so the unwrap cannot fail
        Vitality::Fresh(self.id_to_object.get_mut(id).unwrap())
    }
}
/// Shared behavior for timeline-like containers (threads, profiles) that
/// hold notes for a single 32-byte id and keep them updated through a
/// relay subscription.
pub trait NotesHolder {
    fn get_multi_subscriber(&mut self) -> Option<&mut MultiSubscriber>;
    fn set_multi_subscriber(&mut self, subscriber: MultiSubscriber);
    fn get_view(&mut self) -> &mut TimelineTab;
    /// Filters selecting all notes belonging to `for_id`.
    fn filters(for_id: &[u8; 32]) -> Vec<Filter>;
    /// Like [`NotesHolder::filters`], restricted to notes at/after `since`.
    fn filters_since(for_id: &[u8; 32], since: u64) -> Vec<Filter>;
    fn new_notes_holder(
        txn: &Transaction,
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        id: &[u8; 32],
        filters: Vec<Filter>,
        notes: Vec<NoteRef>,
        is_muted: &MuteFun,
    ) -> Self;

    /// Drain newly-arrived notes from our subscription into the view.
    ///
    /// # Errors
    /// Fails if the holder has no [`MultiSubscriber`] to poll, or if
    /// polling itself fails.
    #[must_use = "process_action must be handled in the Ok(action) case"]
    fn poll_notes_into_view(
        &mut self,
        txn: &Transaction,
        ndb: &Ndb,
        is_muted: &MuteFun,
    ) -> Result<NoteRefsUnkIdAction> {
        if let Some(multi_subscriber) = self.get_multi_subscriber() {
            let reversed = true;
            let note_refs: Vec<NoteRef> = multi_subscriber.poll_for_notes(ndb, txn, is_muted)?;
            self.get_view().insert(&note_refs, reversed);
            Ok(NoteRefsUnkIdAction::new(note_refs))
        } else {
            Err(Error::Generic(
                "NotesHolder unexpectedly has no MultiSubscriber".to_owned(),
            ))
        }
    }

    /// Look for new thread notes since our last fetch
    /// (assumes `notes[0]` is the most recent note we hold).
    fn new_notes(notes: &[NoteRef], id: &[u8; 32], txn: &Transaction, ndb: &Ndb) -> Vec<NoteRef> {
        if notes.is_empty() {
            return vec![];
        }

        let last_note = notes[0];
        // +1 so we don't re-fetch the newest note we already have
        let filters = Self::filters_since(id, last_note.created_at + 1);

        if let Ok(results) = ndb.query(txn, &filters, 1000) {
            debug!("got {} results from NotesHolder update", results.len());
            results
                .into_iter()
                .map(NoteRef::from_query_result)
                .collect()
        } else {
            debug!("got no results from NotesHolder update",);
            vec![]
        }
    }

    /// Local NotesHolder unsubscribe
    fn unsubscribe_locally<M: NotesHolder>(
        txn: &Transaction,
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        notes_holder_storage: &mut NotesHolderStorage<M>,
        pool: &mut RelayPool,
        id: &[u8; 32],
        is_muted: &MuteFun,
    ) {
        let notes_holder = notes_holder_storage
            .notes_holder_mutated(ndb, note_cache, txn, id, is_muted)
            .get_ptr();

        if let Some(multi_subscriber) = notes_holder.get_multi_subscriber() {
            multi_subscriber.unsubscribe(ndb, pool);
        }
    }

    /// Open (or refresh) the holder for `id` and make sure it is
    /// subscribed. Returns any new notes for the caller to merge into the
    /// view; they can't be inserted here because the holder is still
    /// mutably borrowed.
    fn open<M: NotesHolder>(
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        txn: &Transaction,
        pool: &mut RelayPool,
        storage: &mut NotesHolderStorage<M>,
        id: &[u8; 32],
        is_muted: &MuteFun,
    ) -> Option<NotesHolderResult> {
        let vitality = storage.notes_holder_mutated(ndb, note_cache, txn, id, is_muted);

        let (holder, result) = match vitality {
            Vitality::Stale(holder) => {
                // The NotesHolder is stale, let's update it
                let notes = M::new_notes(&holder.get_view().notes, id, txn, ndb);
                let holder_result = if notes.is_empty() {
                    None
                } else {
                    Some(NotesHolderResult::new_notes(notes, id.to_owned()))
                };

                //
                // we can't insert and update the VirtualList now, because we
                // are already borrowing it mutably. Let's pass it as a
                // result instead
                //
                // holder.get_view().insert(&notes); <-- no
                //
                (holder, holder_result)
            }

            Vitality::Fresh(thread) => (thread, None),
        };

        // reuse an existing subscriber, or create one on first open
        let multi_subscriber = if let Some(multi_subscriber) = holder.get_multi_subscriber() {
            multi_subscriber
        } else {
            let filters = M::filters(id);
            holder.set_multi_subscriber(MultiSubscriber::new(filters));
            holder.get_multi_subscriber().unwrap()
        };

        multi_subscriber.subscribe(ndb, pool);

        result
    }
}

View File

@@ -0,0 +1,122 @@
use enostr::FullKeypair;
use nostrdb::{Note, NoteBuilder, NoteReply};
use std::collections::HashSet;
/// A draft note: the text to publish and the keypair that will sign it.
pub struct NewPost {
    pub content: String,
    pub account: FullKeypair,
}
/// Append a `["client", "Damus Notedeck"]` tag so published notes
/// identify the client that created them.
fn add_client_tag(builder: NoteBuilder<'_>) -> NoteBuilder<'_> {
    builder
        .start_tag()
        .tag_str("client")
        .tag_str("Damus Notedeck")
}
impl NewPost {
    pub fn new(content: String, account: FullKeypair) -> Self {
        NewPost { content, account }
    }

    /// Build a signed kind-1 (text) note from this draft.
    pub fn to_note(&self, seckey: &[u8; 32]) -> Note {
        add_client_tag(NoteBuilder::new())
            .kind(1)
            .content(&self.content)
            .sign(seckey)
            .build()
            .expect("note should be ok")
    }

    /// Build a signed kind-1 reply to `replying_to`, tagging the thread
    /// root, the parent, and all participants per NIP-10.
    pub fn to_reply(&self, seckey: &[u8; 32], replying_to: &Note) -> Note {
        let builder = add_client_tag(NoteBuilder::new())
            .kind(1)
            .content(&self.content);

        let nip10 = NoteReply::new(replying_to.tags());

        // NOTE(review): `.sign` is called here and again before the final
        // `build`; presumably it only records the key — confirm in
        // NoteBuilder.
        let mut builder = if let Some(root) = nip10.root() {
            // parent is already in a thread: tag its root and mark the
            // parent as the direct "reply"
            builder
                .start_tag()
                .tag_str("e")
                .tag_str(&hex::encode(root.id))
                .tag_str("")
                .tag_str("root")
                .start_tag()
                .tag_str("e")
                .tag_str(&hex::encode(replying_to.id()))
                .tag_str("")
                .tag_str("reply")
                .sign(seckey)
        } else {
            // we're replying to a post that isn't in a thread,
            // just add a single reply-to-root tag
            builder
                .start_tag()
                .tag_str("e")
                .tag_str(&hex::encode(replying_to.id()))
                .tag_str("")
                .tag_str("root")
                .sign(seckey)
        };

        // p-tag the parent's author plus every pubkey already p-tagged on
        // the parent, deduplicated
        let mut seen_p: HashSet<&[u8; 32]> = HashSet::new();

        builder = builder
            .start_tag()
            .tag_str("p")
            .tag_str(&hex::encode(replying_to.pubkey()));

        seen_p.insert(replying_to.pubkey());

        for tag in replying_to.tags() {
            if tag.count() < 2 {
                continue;
            }

            if tag.get_unchecked(0).variant().str() != Some("p") {
                continue;
            }

            let id = if let Some(id) = tag.get_unchecked(1).variant().id() {
                id
            } else {
                continue;
            };

            if seen_p.contains(id) {
                continue;
            }

            seen_p.insert(id);

            builder = builder.start_tag().tag_str("p").tag_str(&hex::encode(id));
        }

        builder
            .sign(seckey)
            .build()
            .expect("expected build to work")
    }

    /// Build a signed kind-1 quote-repost of `quoting`: the content gets a
    /// trailing `nostr:` bech32 reference, plus "q" and "p" tags.
    pub fn to_quote(&self, seckey: &[u8; 32], quoting: &Note) -> Note {
        let new_content = format!(
            "{}\nnostr:{}",
            self.content,
            enostr::NoteId::new(*quoting.id()).to_bech().unwrap()
        );

        NoteBuilder::new()
            .kind(1)
            .content(&new_content)
            .start_tag()
            .tag_str("q")
            .tag_str(&hex::encode(quoting.id()))
            .start_tag()
            .tag_str("p")
            .tag_str(&hex::encode(quoting.pubkey()))
            .sign(seckey)
            .build()
            .expect("expected build to work")
    }
}

View File

@@ -0,0 +1,132 @@
use enostr::{Filter, Pubkey};
use nostrdb::{FilterBuilder, Ndb, ProfileRecord, Transaction};
use crate::{
filter::{self, FilterState},
multi_subscriber::MultiSubscriber,
muted::MuteFun,
note::NoteRef,
notecache::NoteCache,
notes_holder::NotesHolder,
timeline::{copy_notes_into_timeline, PubkeySource, Timeline, TimelineKind},
};
/// A profile's displayable identity: a single usable name, or both a
/// username and a separate display name.
pub enum DisplayName<'a> {
    One(&'a str),

    Both {
        username: &'a str,
        display_name: &'a str,
    },
}

impl<'a> DisplayName<'a> {
    /// The username, falling back to the only name we have.
    pub fn username(&self) -> &'a str {
        match self {
            Self::One(n) => n,
            Self::Both { username, .. } => username,
        }
    }
}
/// True when `s` is empty or contains nothing but whitespace.
fn is_empty(s: &str) -> bool {
    s.trim().is_empty()
}
/// Extract a displayable name from a profile record, treating
/// whitespace-only fields as missing. Returns `None` when neither
/// `display_name` nor `name` is usable.
pub fn get_profile_name<'a>(record: &ProfileRecord<'a>) -> Option<DisplayName<'a>> {
    let profile = record.record().profile()?;
    let display_name = profile.display_name().filter(|n| !is_empty(n));
    let name = profile.name().filter(|n| !is_empty(n));

    match (display_name, name) {
        (None, None) => None,
        (Some(disp), None) => Some(DisplayName::One(disp)),
        (None, Some(username)) => Some(DisplayName::One(username)),
        (Some(display_name), Some(username)) => Some(DisplayName::Both {
            display_name,
            username,
        }),
    }
}
/// A profile timeline (one author's kind-1 notes) plus its optional relay
/// subscription.
pub struct Profile {
    pub timeline: Timeline,
    pub multi_subscriber: Option<MultiSubscriber>,
}

impl Profile {
    /// Build a profile timeline pre-populated with `notes` (muted notes
    /// are filtered by `copy_notes_into_timeline`).
    pub fn new(
        txn: &Transaction,
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        source: PubkeySource,
        filters: Vec<Filter>,
        notes: Vec<NoteRef>,
        is_muted: &MuteFun,
    ) -> Self {
        let mut timeline =
            Timeline::new(TimelineKind::profile(source), FilterState::ready(filters));

        copy_notes_into_timeline(&mut timeline, txn, ndb, note_cache, notes, is_muted);

        Profile {
            timeline,
            multi_subscriber: None,
        }
    }

    /// Base filter: kind-1 notes authored by `pk`, with the default limit.
    fn filters_raw(pk: &[u8; 32]) -> Vec<FilterBuilder> {
        vec![Filter::new()
            .authors([pk])
            .kinds([1])
            .limit(filter::default_limit())]
    }
}
/// Lets profiles participate in the generic NotesHolder open/poll/
/// unsubscribe machinery.
impl NotesHolder for Profile {
    fn get_multi_subscriber(&mut self) -> Option<&mut MultiSubscriber> {
        self.multi_subscriber.as_mut()
    }

    fn get_view(&mut self) -> &mut crate::timeline::TimelineTab {
        self.timeline.current_view_mut()
    }

    fn filters(for_id: &[u8; 32]) -> Vec<enostr::Filter> {
        Profile::filters_raw(for_id)
            .into_iter()
            .map(|mut f| f.build())
            .collect()
    }

    fn filters_since(for_id: &[u8; 32], since: u64) -> Vec<enostr::Filter> {
        Profile::filters_raw(for_id)
            .into_iter()
            .map(|f| f.since(since).build())
            .collect()
    }

    fn new_notes_holder(
        txn: &Transaction,
        ndb: &Ndb,
        note_cache: &mut NoteCache,
        id: &[u8; 32],
        filters: Vec<Filter>,
        notes: Vec<NoteRef>,
        is_muted: &MuteFun,
    ) -> Self {
        // the id here is the profile's pubkey
        Profile::new(
            txn,
            ndb,
            note_cache,
            PubkeySource::Explicit(Pubkey::new(*id)),
            filters,
            notes,
            is_muted,
        )
    }

    fn set_multi_subscriber(&mut self, subscriber: MultiSubscriber) {
        self.multi_subscriber = Some(subscriber);
    }
}

View File

@@ -0,0 +1,54 @@
use enostr::RelayPool;
pub use enostr::RelayStatus;
/// The interface to a RelayPool for UI components.
/// Represents all user-facing operations that can be performed for a user's relays
pub struct RelayPoolManager<'a> {
    pub pool: &'a mut RelayPool,
}

/// A UI-facing snapshot of one relay: its url and connection status,
/// borrowed from the pool.
pub struct RelayInfo<'a> {
    pub relay_url: &'a str,
    pub status: &'a RelayStatus,
}
impl<'a> RelayPoolManager<'a> {
    pub fn new(pool: &'a mut RelayPool) -> Self {
        RelayPoolManager { pool }
    }

    /// Snapshot of url + status for every relay, in pool order.
    pub fn get_relay_infos(&self) -> Vec<RelayInfo> {
        self.pool
            .relays
            .iter()
            .map(|relay| RelayInfo {
                relay_url: &relay.relay.url,
                status: &relay.relay.status,
            })
            .collect()
    }

    /// index of the Vec<RelayInfo> from get_relay_infos; out-of-range
    /// indices are ignored.
    pub fn remove_relay(&mut self, index: usize) {
        if index < self.pool.relays.len() {
            self.pool.relays.remove(index);
        }
    }

    /// removes all specified relay indicies shown in get_relay_infos
    ///
    /// Indices are processed highest-first so earlier removals don't shift
    /// later ones; duplicates are dropped so a repeated index cannot
    /// delete an unintended neighboring relay (bug fix).
    pub fn remove_relays(&mut self, mut indices: Vec<usize>) {
        indices.sort_unstable_by(|a, b| b.cmp(a));
        indices.dedup();
        indices.iter().for_each(|index| self.remove_relay(*index));
    }

    /// Add a relay by url, wiring in a wakeup callback that repaints the
    /// UI when the relay delivers messages. Failures are ignored.
    pub fn add_relay(&mut self, ctx: &egui::Context, relay_url: String) {
        let _ = self.pool.add_url(relay_url, create_wakeup(ctx));
    }
}
/// Build a clonable, thread-safe callback that asks egui to repaint;
/// handed to relays so incoming messages wake the UI thread.
pub fn create_wakeup(ctx: &egui::Context) -> impl Fn() + Send + Sync + Clone + 'static {
    // clone so the closure owns its own handle ('static requirement)
    let ctx = ctx.clone();
    move || {
        ctx.request_repaint();
    }
}

View File

@@ -0,0 +1,3 @@
use crate::error::Error;

/// Crate-wide result alias defaulting the error type to [`crate::error::Error`].
pub type Result<T> = std::result::Result<T, Error>;

View File

@@ -0,0 +1,214 @@
use enostr::{NoteId, Pubkey};
use std::{
borrow::Cow,
fmt::{self},
};
use crate::{
accounts::AccountsRoute,
column::Columns,
timeline::{TimelineId, TimelineRoute},
ui::add_column::AddColumnRoute,
};
/// App routing. These describe different places you can go inside Notedeck.
#[derive(Clone, Copy, Eq, PartialEq, Debug)]
pub enum Route {
    Timeline(TimelineRoute),
    Accounts(AccountsRoute),
    Relays,
    ComposeNote,
    AddColumn(AddColumnRoute),
    Support,
    NewDeck,
    // payload is the index of the deck being edited
    EditDeck(usize),
}
impl Route {
    // --- convenience constructors ---

    pub fn timeline(timeline_id: TimelineId) -> Self {
        Route::Timeline(TimelineRoute::Timeline(timeline_id))
    }

    /// The timeline id if this is a plain timeline route, else `None`.
    pub fn timeline_id(&self) -> Option<&TimelineId> {
        if let Route::Timeline(TimelineRoute::Timeline(tid)) = self {
            Some(tid)
        } else {
            None
        }
    }

    pub fn relays() -> Self {
        Route::Relays
    }

    pub fn thread(thread_root: NoteId) -> Self {
        Route::Timeline(TimelineRoute::Thread(thread_root))
    }

    pub fn profile(pubkey: Pubkey) -> Self {
        Route::Timeline(TimelineRoute::Profile(pubkey))
    }

    pub fn reply(replying_to: NoteId) -> Self {
        Route::Timeline(TimelineRoute::Reply(replying_to))
    }

    pub fn quote(quoting: NoteId) -> Self {
        Route::Timeline(TimelineRoute::Quote(quoting))
    }

    pub fn accounts() -> Self {
        Route::Accounts(AccountsRoute::Accounts)
    }

    pub fn add_account() -> Self {
        Route::Accounts(AccountsRoute::AddAccount)
    }

    /// Human-readable title for the nav bar.
    ///
    /// # Panics
    /// Panics if a `TimelineRoute::Timeline` id is not present in `columns`.
    pub fn title(&self, columns: &Columns) -> Cow<'static, str> {
        match self {
            Route::Timeline(tlr) => match tlr {
                TimelineRoute::Timeline(id) => {
                    let timeline = columns
                        .find_timeline(*id)
                        .expect("expected to find timeline");
                    timeline.kind.to_title()
                }
                TimelineRoute::Thread(_id) => Cow::Borrowed("Thread"),
                TimelineRoute::Reply(_id) => Cow::Borrowed("Reply"),
                TimelineRoute::Quote(_id) => Cow::Borrowed("Quote"),
                TimelineRoute::Profile(_pubkey) => Cow::Borrowed("Profile"),
            },

            Route::Relays => Cow::Borrowed("Relays"),

            Route::Accounts(amr) => match amr {
                AccountsRoute::Accounts => Cow::Borrowed("Accounts"),
                AccountsRoute::AddAccount => Cow::Borrowed("Add Account"),
            },
            Route::ComposeNote => Cow::Borrowed("Compose Note"),
            Route::AddColumn(c) => match c {
                AddColumnRoute::Base => Cow::Borrowed("Add Column"),
                AddColumnRoute::UndecidedNotification => Cow::Borrowed("Add Notifications Column"),
                AddColumnRoute::ExternalNotification => {
                    Cow::Borrowed("Add External Notifications Column")
                }
                AddColumnRoute::Hashtag => Cow::Borrowed("Add Hashtag Column"),
            },
            Route::Support => Cow::Borrowed("Damus Support"),
            Route::NewDeck => Cow::Borrowed("Add Deck"),
            Route::EditDeck(_) => Cow::Borrowed("Edit Deck"),
        }
    }
}
// TODO: add this to egui-nav so we don't have to deal with returning
// and navigating headaches
/// A navigation stack of routes. The last element is the currently
/// visible route; `returning`/`navigating` flag in-flight transition
/// animations for the UI layer.
#[derive(Clone)]
pub struct Router<R: Clone> {
    routes: Vec<R>,
    pub returning: bool,
    pub navigating: bool,
    replacing: bool,
}

impl<R: Clone> Router<R> {
    /// Build a router from an initial route stack.
    ///
    /// # Panics
    /// Panics if `routes` is empty; the router relies on always having a
    /// top route.
    pub fn new(routes: Vec<R>) -> Self {
        if routes.is_empty() {
            panic!("routes can't be empty")
        }
        Router {
            routes,
            returning: false,
            navigating: false,
            replacing: false,
        }
    }

    /// Push a new route and start the navigation animation.
    pub fn route_to(&mut self, route: R) {
        self.navigating = true;
        self.routes.push(route);
    }

    // Route to R. Then when it is successfully placed, should call `remove_previous_routes` to remove all previous routes
    pub fn route_to_replaced(&mut self, route: R) {
        self.navigating = true;
        self.replacing = true;
        self.routes.push(route);
    }

    /// Go back, start the returning process. Returns the route we are
    /// returning to, or `None` when already returning or at the root.
    pub fn go_back(&mut self) -> Option<R> {
        if self.returning || self.routes.len() == 1 {
            return None;
        }
        self.returning = true;
        self.prev().cloned()
    }

    /// Pop a route, should only be called on a NavResponse::Returned
    /// response. The root route is never popped.
    pub fn pop(&mut self) -> Option<R> {
        if self.routes.len() == 1 {
            return None;
        }
        self.returning = false;
        self.routes.pop()
    }

    /// Drop every route below the top one; completes a
    /// `route_to_replaced` transition.
    pub fn remove_previous_routes(&mut self) {
        let num_routes = self.routes.len();
        if num_routes <= 1 {
            return;
        }

        self.returning = false;
        self.replacing = false;
        self.routes.drain(..num_routes - 1);
    }

    pub fn is_replacing(&self) -> bool {
        self.replacing
    }

    /// The currently-visible route.
    pub fn top(&self) -> &R {
        self.routes.last().expect("routes can't be empty")
    }

    /// The route directly beneath the top, if any.
    ///
    /// Fix: the previous implementation computed `routes.len() - 2`,
    /// which underflows (panicking in debug builds) when only the root
    /// route is on the stack; use a checked subtraction instead.
    pub fn prev(&self) -> Option<&R> {
        self.routes
            .len()
            .checked_sub(2)
            .and_then(|i| self.routes.get(i))
    }

    pub fn routes(&self) -> &Vec<R> {
        &self.routes
    }
}
/// Compact textual form used for logging/serialization; unlike
/// [`Route::title`] it needs no access to `Columns`.
impl fmt::Display for Route {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Route::Timeline(tlr) => match tlr {
                TimelineRoute::Timeline(name) => write!(f, "{}", name),
                TimelineRoute::Thread(_id) => write!(f, "Thread"),
                TimelineRoute::Profile(_id) => write!(f, "Profile"),
                TimelineRoute::Reply(_id) => write!(f, "Reply"),
                TimelineRoute::Quote(_id) => write!(f, "Quote"),
            },

            Route::Relays => write!(f, "Relays"),

            Route::Accounts(amr) => match amr {
                AccountsRoute::Accounts => write!(f, "Accounts"),
                AccountsRoute::AddAccount => write!(f, "Add Account"),
            },
            Route::ComposeNote => write!(f, "Compose Note"),

            Route::AddColumn(_) => write!(f, "Add Column"),
            Route::Support => write!(f, "Support"),
            Route::NewDeck => write!(f, "Add Deck"),
            Route::EditDeck(_) => write!(f, "Edit Deck"),
        }
    }
}

View File

@@ -0,0 +1,803 @@
use std::{collections::HashMap, fmt, str::FromStr};
use enostr::{NoteId, Pubkey};
use nostrdb::Ndb;
use serde::{Deserialize, Serialize};
use tracing::{error, info};
use crate::{
accounts::AccountsRoute,
column::{Columns, IntermediaryRoute},
decks::{Deck, Decks, DecksCache},
route::Route,
timeline::{kind::ListKind, PubkeySource, TimelineKind, TimelineRoute},
ui::add_column::AddColumnRoute,
Error,
};
use super::{write_file, DataPath, DataPathType, Directory};
/// Filename (within the settings directory) of the persisted decks cache.
pub static DECKS_CACHE_FILE: &str = "decks_cache.json";
/// Load the decks cache from the settings directory. Returns `None` when
/// the file is missing/unreadable or fails to (de)serialize.
pub fn load_decks_cache(path: &DataPath, ndb: &Ndb) -> Option<DecksCache> {
    let data_path = path.path(DataPathType::Setting);

    let decks_cache_str = match Directory::new(data_path).get_file(DECKS_CACHE_FILE.to_owned()) {
        Ok(s) => s,
        Err(e) => {
            error!(
                "Could not read decks cache from file {}: {}",
                DECKS_CACHE_FILE, e
            );
            return None;
        }
    };

    let serializable_decks_cache =
        serde_json::from_str::<SerializableDecksCache>(&decks_cache_str).ok()?;
    serializable_decks_cache.decks_cache(ndb).ok()
}
/// Persist the decks cache as JSON in the settings directory. Failures
/// are logged, not propagated (best-effort save).
pub fn save_decks_cache(path: &DataPath, decks_cache: &DecksCache) {
    let serialized_decks_cache =
        match serde_json::to_string(&SerializableDecksCache::to_serializable(decks_cache)) {
            Ok(s) => s,
            Err(e) => {
                error!("Could not serialize decks cache: {}", e);
                return;
            }
        };

    let data_path = path.path(DataPathType::Setting);

    if let Err(e) = write_file(
        &data_path,
        DECKS_CACHE_FILE.to_string(),
        &serialized_decks_cache,
    ) {
        error!(
            "Could not write decks cache to file {}: {}",
            DECKS_CACHE_FILE, e
        );
    } else {
        info!("Successfully wrote decks cache to {}", DECKS_CACHE_FILE);
    }
}
/// JSON-friendly mirror of `DecksCache`; pubkey keys are serialized as
/// hex strings via the custom map (de)serializers below.
#[derive(Serialize, Deserialize)]
struct SerializableDecksCache {
    #[serde(serialize_with = "serialize_map", deserialize_with = "deserialize_map")]
    decks_cache: HashMap<Pubkey, SerializableDecks>,
}

impl SerializableDecksCache {
    fn to_serializable(decks_cache: &DecksCache) -> Self {
        SerializableDecksCache {
            decks_cache: decks_cache
                .get_mapping()
                .iter()
                .map(|(k, v)| (*k, SerializableDecks::from_decks(v)))
                .collect(),
        }
    }

    /// Rehydrate into a `DecksCache`, resolving timelines through `ndb`.
    ///
    /// # Errors
    /// Propagates the first account whose decks fail to deserialize.
    pub fn decks_cache(self, ndb: &Ndb) -> Result<DecksCache, Error> {
        let account_to_decks = self
            .decks_cache
            .into_iter()
            .map(|(pubkey, serializable_decks)| {
                let deck_key = pubkey.bytes();
                serializable_decks
                    .decks(ndb, deck_key)
                    .map(|decks| (pubkey, decks))
            })
            .collect::<Result<HashMap<Pubkey, Decks>, Error>>()?;

        Ok(DecksCache::new(account_to_decks))
    }
}
/// Serde helper: serialize the pubkey-keyed map with hex-string keys
/// (JSON object keys must be strings).
fn serialize_map<S>(
    map: &HashMap<Pubkey, SerializableDecks>,
    serializer: S,
) -> Result<S::Ok, S::Error>
where
    S: serde::Serializer,
{
    let stringified_map: HashMap<String, &SerializableDecks> =
        map.iter().map(|(k, v)| (k.hex(), v)).collect();
    stringified_map.serialize(serializer)
}
/// Serde helper: inverse of `serialize_map`; a key that is not valid
/// hex fails the whole deserialization.
fn deserialize_map<'de, D>(deserializer: D) -> Result<HashMap<Pubkey, SerializableDecks>, D::Error>
where
    D: serde::Deserializer<'de>,
{
    let stringified_map: HashMap<String, SerializableDecks> = HashMap::deserialize(deserializer)?;

    stringified_map
        .into_iter()
        .map(|(k, v)| {
            let key = Pubkey::from_hex(&k).map_err(serde::de::Error::custom)?;
            Ok((key, v))
        })
        .collect()
}
/// One account's decks in serializable form: which deck is active plus
/// every deck's serialized representation.
#[derive(Serialize, Deserialize)]
struct SerializableDecks {
    active_deck: usize,
    decks: Vec<SerializableDeck>,
}

impl SerializableDecks {
    pub fn from_decks(decks: &Decks) -> Self {
        Self {
            active_deck: decks.active_index(),
            decks: decks
                .decks()
                .iter()
                .map(SerializableDeck::from_deck)
                .collect(),
        }
    }

    /// Rehydrate all decks for the account identified by `deck_key`.
    ///
    /// # Errors
    /// Propagates the first deck that fails to deserialize.
    fn decks(self, ndb: &Ndb, deck_key: &[u8; 32]) -> Result<Decks, Error> {
        Ok(Decks::from_decks(
            self.active_deck,
            self.decks
                .into_iter()
                .map(|d| d.deck(ndb, deck_key))
                .collect::<Result<_, _>>()?,
        ))
    }
}
/// One deck in serializable form: `"key:value"` metadata strings (icon,
/// name) and, per column, its serialized route stack.
#[derive(Serialize, Deserialize)]
struct SerializableDeck {
    metadata: Vec<String>,
    columns: Vec<Vec<String>>,
}
/// Keys understood in serialized deck metadata (`"key:value"` strings).
#[derive(PartialEq, Clone)]
enum MetadataKeyword {
    Icon,
    Name,
}

impl MetadataKeyword {
    /// Canonical wire-name <-> variant table shared by both conversion
    /// directions below.
    const MAPPING: &'static [(&'static str, MetadataKeyword)] = &[
        ("icon", MetadataKeyword::Icon),
        ("name", MetadataKeyword::Name),
    ];
}

impl fmt::Display for MetadataKeyword {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = MetadataKeyword::MAPPING
            .iter()
            .find(|(_, keyword)| keyword == self)
            .map(|(name, _)| *name)
            .unwrap_or("UnknownMetadataKeyword");
        write!(f, "{}", name)
    }
}

impl FromStr for MetadataKeyword {
    type Err = Error;

    fn from_str(serialized: &str) -> Result<Self, Self::Err> {
        MetadataKeyword::MAPPING
            .iter()
            .find(|(name, _)| *name == serialized)
            .map(|(_, keyword)| keyword.clone())
            .ok_or(Error::Generic(
                "Could not convert string to Keyword enum".to_owned(),
            ))
    }
}
/// A parsed `"key:value"` metadata entry.
struct MetadataPayload {
    keyword: MetadataKeyword,
    value: String,
}

impl MetadataPayload {
    fn new(keyword: MetadataKeyword, value: String) -> Self {
        Self { keyword, value }
    }
}
/// Render metadata entries back into their `"key:value"` wire strings.
fn serialize_metadata(payloads: Vec<MetadataPayload>) -> Vec<String> {
    let mut serialized = Vec::with_capacity(payloads.len());
    for payload in payloads {
        serialized.push(format!("{}:{}", payload.keyword, payload.value));
    }
    serialized
}
/// Parse `"key:value"` metadata strings. Entries that are not exactly two
/// `:`-separated parts, or whose key is unknown, are silently skipped.
/// Returns `None` when nothing parsed.
fn deserialize_metadata(serialized_metadatas: Vec<String>) -> Option<Vec<MetadataPayload>> {
    let mut payloads = Vec::new();
    for serialized_metadata in serialized_metadatas {
        let parts: Vec<&str> = serialized_metadata.split(':').collect();
        // require exactly "keyword:value"
        if let [keyword_str, value] = parts.as_slice() {
            if let Ok(keyword) = MetadataKeyword::from_str(keyword_str) {
                payloads.push(MetadataPayload {
                    keyword,
                    value: value.to_string(),
                });
            }
        }
    }

    if payloads.is_empty() {
        None
    } else {
        Some(payloads)
    }
}
impl SerializableDeck {
    pub fn from_deck(deck: &Deck) -> Self {
        let columns = serialize_columns(deck.columns());

        let metadata = serialize_metadata(vec![
            MetadataPayload::new(MetadataKeyword::Icon, deck.icon.to_string()),
            MetadataPayload::new(MetadataKeyword::Name, deck.name.clone()),
        ]);

        SerializableDeck { metadata, columns }
    }

    /// Rehydrate a `Deck`, falling back to the "🇩" icon and "Deck" name
    /// when those metadata entries are missing.
    ///
    /// # Errors
    /// Fails when no metadata parses at all, or the icon string is not a
    /// single char.
    pub fn deck(self, ndb: &Ndb, deck_user: &[u8; 32]) -> Result<Deck, Error> {
        let columns = deserialize_columns(ndb, deck_user, self.columns);
        let deserialized_metadata = deserialize_metadata(self.metadata)
            .ok_or(Error::Generic("Could not deserialize metadata".to_owned()))?;

        let icon = deserialized_metadata
            .iter()
            .find(|p| p.keyword == MetadataKeyword::Icon)
            .map_or_else(|| "🇩", |f| &f.value);
        let name = deserialized_metadata
            .iter()
            .find(|p| p.keyword == MetadataKeyword::Name)
            .map_or_else(|| "Deck", |f| &f.value)
            .to_string();

        Ok(Deck::new_with_columns(
            icon.parse::<char>()
                .map_err(|_| Error::Generic("could not convert String -> char".to_owned()))?,
            name,
            columns,
        ))
    }
}
/// Serialize every column as the list of its routes' wire strings;
/// routes that cannot be serialized are dropped.
fn serialize_columns(columns: &Columns) -> Vec<Vec<String>> {
    columns
        .columns()
        .iter()
        .map(|column| {
            column
                .router()
                .routes()
                .iter()
                .filter_map(|route| serialize_route(route, columns))
                .collect()
        })
        .collect()
}
/// Rebuild columns from serialized route stacks. Unparseable routes are
/// logged and skipped; thread/profile routes are intentionally dropped
/// (not yet supported); columns that end up empty are omitted.
fn deserialize_columns(ndb: &Ndb, deck_user: &[u8; 32], serialized: Vec<Vec<String>>) -> Columns {
    let mut cols = Columns::new();
    for serialized_routes in serialized {
        let mut cur_routes = Vec::new();
        for serialized_route in serialized_routes {
            let selections = Selection::from_serialized(&serialized_route);
            if let Some(route_intermediary) = selections_to_route(selections.clone()) {
                if let Some(ir) = route_intermediary.intermediary_route(ndb, Some(deck_user)) {
                    match &ir {
                        IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Thread(_)))
                        | IntermediaryRoute::Route(Route::Timeline(TimelineRoute::Profile(_))) => {
                            // Do nothing. Threads & Profiles not yet supported for deserialization
                        }
                        IntermediaryRoute::Timeline(tl)
                            if matches!(tl.kind, TimelineKind::Profile(_)) =>
                        {
                            // Do nothing. Profiles aren't yet supported for deserialization
                        }
                        _ => cur_routes.push(ir),
                    }
                }
            } else {
                error!(
                    "could not turn selections to RouteIntermediary: {:?}",
                    selections
                );
            }
        }

        if !cur_routes.is_empty() {
            cols.insert_intermediary_routes(cur_routes);
        }
    }

    cols
}
/// One token of a serialized route string: either a recognized keyword
/// or the free-form payload segment that follows one.
#[derive(Clone, Debug)]
enum Selection {
    Keyword(Keyword),
    Payload(String),
}
/// Every keyword that can appear in a serialized route string; see
/// `Keyword::MAPPING` for the wire names and which ones carry a payload.
#[derive(Clone, PartialEq, Debug)]
enum Keyword {
    Notifs,
    Universe,
    Contact,
    Explicit,
    DeckAuthor,
    Profile,
    Hashtag,
    Generic,
    Thread,
    Reply,
    Quote,
    Account,
    Show,
    New,
    Relay,
    Compose,
    Column,
    NotificationSelection,
    ExternalNotifSelection,
    HashtagSelection,
    Support,
    Deck,
    Edit,
}
impl Keyword {
    /// (wire name, variant, whether the keyword is followed by a payload
    /// segment in the serialized route string)
    const MAPPING: &'static [(&'static str, Keyword, bool)] = &[
        ("notifs", Keyword::Notifs, false),
        ("universe", Keyword::Universe, false),
        ("contact", Keyword::Contact, false),
        ("explicit", Keyword::Explicit, true),
        ("deck_author", Keyword::DeckAuthor, false),
        ("profile", Keyword::Profile, true),
        ("hashtag", Keyword::Hashtag, true),
        ("generic", Keyword::Generic, false),
        ("thread", Keyword::Thread, true),
        ("reply", Keyword::Reply, true),
        ("quote", Keyword::Quote, true),
        ("account", Keyword::Account, false),
        ("show", Keyword::Show, false),
        ("new", Keyword::New, false),
        ("relay", Keyword::Relay, false),
        ("compose", Keyword::Compose, false),
        ("column", Keyword::Column, false),
        (
            "notification_selection",
            Keyword::NotificationSelection,
            false,
        ),
        (
            "external_notif_selection",
            Keyword::ExternalNotifSelection,
            false,
        ),
        ("hashtag_selection", Keyword::HashtagSelection, false),
        ("support", Keyword::Support, false),
        ("deck", Keyword::Deck, false),
        ("edit", Keyword::Edit, true),
    ];

    /// True when this keyword expects a payload segment after it.
    fn has_payload(&self) -> bool {
        Keyword::MAPPING
            .iter()
            .find(|(_, keyword, _)| keyword == self)
            .map(|(_, _, has_payload)| *has_payload)
            .unwrap_or(false)
    }
}
/// Render a keyword back to its wire name (see `Keyword::MAPPING`).
impl fmt::Display for Keyword {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = Keyword::MAPPING
            .iter()
            .find(|(_, keyword, _)| keyword == self)
            .map(|(name, _, _)| *name)
            .unwrap_or("UnknownKeyword");
        write!(f, "{}", name)
    }
}
/// Parse a wire name into a keyword (see `Keyword::MAPPING`).
impl FromStr for Keyword {
    type Err = Error;

    fn from_str(serialized: &str) -> Result<Self, Self::Err> {
        Keyword::MAPPING
            .iter()
            .find(|(name, _, _)| *name == serialized)
            .map(|(_, keyword, _)| keyword.clone())
            .ok_or(Error::Generic(
                "Could not convert string to Keyword enum".to_owned(),
            ))
    }
}
/// Parsed destination of a serialized route before it is bound to a
/// deck user / database: either a timeline kind or a plain route.
enum CleanIntermediaryRoute {
    ToTimeline(TimelineKind),
    ToRoute(Route),
}

impl CleanIntermediaryRoute {
    /// Bind to an `IntermediaryRoute`, resolving timeline kinds through
    /// `ndb`/`user`; `None` when the timeline cannot be constructed.
    fn intermediary_route(self, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<IntermediaryRoute> {
        match self {
            CleanIntermediaryRoute::ToTimeline(timeline_kind) => Some(IntermediaryRoute::Timeline(
                timeline_kind.into_timeline(ndb, user)?,
            )),
            CleanIntermediaryRoute::ToRoute(route) => Some(IntermediaryRoute::Route(route)),
        }
    }
}
// TODO: The public-accessible version will be a subset of this
/// Serialize a route into its `":"`-joined keyword/payload wire string.
/// Returns `None` for routes that produce no selections (e.g. a timeline
/// id not present in `columns`).
fn serialize_route(route: &Route, columns: &Columns) -> Option<String> {
    let mut selections: Vec<Selection> = Vec::new();
    match route {
        Route::Timeline(timeline_route) => match timeline_route {
            TimelineRoute::Timeline(timeline_id) => {
                // resolve the id to its timeline kind; unknown ids are dropped
                if let Some(timeline) = columns.find_timeline(*timeline_id) {
                    match &timeline.kind {
                        TimelineKind::List(list_kind) => match list_kind {
                            ListKind::Contact(pubkey_source) => {
                                selections.push(Selection::Keyword(Keyword::Contact));
                                selections.extend(generate_pubkey_selections(pubkey_source));
                            }
                        },
                        TimelineKind::Notifications(pubkey_source) => {
                            selections.push(Selection::Keyword(Keyword::Notifs));
                            selections.extend(generate_pubkey_selections(pubkey_source));
                        }
                        TimelineKind::Profile(pubkey_source) => {
                            selections.push(Selection::Keyword(Keyword::Profile));
                            selections.extend(generate_pubkey_selections(pubkey_source));
                        }
                        TimelineKind::Universe => {
                            selections.push(Selection::Keyword(Keyword::Universe))
                        }
                        TimelineKind::Generic => {
                            selections.push(Selection::Keyword(Keyword::Generic))
                        }
                        TimelineKind::Hashtag(hashtag) => {
                            selections.push(Selection::Keyword(Keyword::Hashtag));
                            selections.push(Selection::Payload(hashtag.to_string()));
                        }
                    }
                }
            }
            TimelineRoute::Thread(note_id) => {
                selections.push(Selection::Keyword(Keyword::Thread));
                selections.push(Selection::Payload(note_id.hex()));
            }
            TimelineRoute::Profile(pubkey) => {
                selections.push(Selection::Keyword(Keyword::Profile));
                selections.push(Selection::Keyword(Keyword::Explicit));
                selections.push(Selection::Payload(pubkey.hex()));
            }
            TimelineRoute::Reply(note_id) => {
                selections.push(Selection::Keyword(Keyword::Reply));
                selections.push(Selection::Payload(note_id.hex()));
            }
            TimelineRoute::Quote(note_id) => {
                selections.push(Selection::Keyword(Keyword::Quote));
                selections.push(Selection::Payload(note_id.hex()));
            }
        },
        Route::Accounts(accounts_route) => {
            selections.push(Selection::Keyword(Keyword::Account));
            match accounts_route {
                AccountsRoute::Accounts => selections.push(Selection::Keyword(Keyword::Show)),
                AccountsRoute::AddAccount => selections.push(Selection::Keyword(Keyword::New)),
            }
        }
        Route::Relays => selections.push(Selection::Keyword(Keyword::Relay)),
        Route::ComposeNote => selections.push(Selection::Keyword(Keyword::Compose)),
        Route::AddColumn(add_column_route) => {
            selections.push(Selection::Keyword(Keyword::Column));
            match add_column_route {
                AddColumnRoute::Base => (),
                AddColumnRoute::UndecidedNotification => {
                    selections.push(Selection::Keyword(Keyword::NotificationSelection))
                }
                AddColumnRoute::ExternalNotification => {
                    selections.push(Selection::Keyword(Keyword::ExternalNotifSelection))
                }
                AddColumnRoute::Hashtag => {
                    selections.push(Selection::Keyword(Keyword::HashtagSelection))
                }
            }
        }
        Route::Support => selections.push(Selection::Keyword(Keyword::Support)),
        Route::NewDeck => {
            selections.push(Selection::Keyword(Keyword::Deck));
            selections.push(Selection::Keyword(Keyword::New));
        }
        Route::EditDeck(index) => {
            selections.push(Selection::Keyword(Keyword::Deck));
            selections.push(Selection::Keyword(Keyword::Edit));
            selections.push(Selection::Payload(index.to_string()));
        }
    }

    if selections.is_empty() {
        None
    } else {
        Some(
            selections
                .iter()
                .map(|k| k.to_string())
                .collect::<Vec<String>>()
                .join(":"),
        )
    }
}
/// Selections for a pubkey source: `explicit:<hex>` or just `deck_author`.
fn generate_pubkey_selections(source: &PubkeySource) -> Vec<Selection> {
    let mut selections = Vec::new();
    match source {
        PubkeySource::Explicit(pubkey) => {
            selections.push(Selection::Keyword(Keyword::Explicit));
            selections.push(Selection::Payload(pubkey.hex()));
        }
        PubkeySource::DeckAuthor => {
            selections.push(Selection::Keyword(Keyword::DeckAuthor));
        }
    }
    selections
}
impl Selection {
    /// Parse a `":"`-separated route string into selections.
    ///
    /// Each segment that parses as a [`Keyword`] becomes a
    /// `Selection::Keyword`; once any payload-carrying keyword has been
    /// seen, the final segment is captured verbatim as
    /// `Selection::Payload` (payloads are always last and need not parse
    /// as keywords). Unrecognized intermediate segments are skipped.
    ///
    /// Fix: walk `&str` subslices of the input rather than allocating a
    /// mutable copy of the whole string and re-slicing it as `&mut str`,
    /// and move parsed keywords instead of cloning them. Output is
    /// unchanged.
    fn from_serialized(serialized: &str) -> Vec<Self> {
        const SEPARATOR: &str = ":";

        let mut selections = Vec::new();
        let mut buffer = serialized;
        let mut next_is_payload = false;

        while let Some(index) = buffer.find(SEPARATOR) {
            if let Ok(keyword) = Keyword::from_str(&buffer[..index]) {
                if keyword.has_payload() {
                    next_is_payload = true;
                }
                selections.push(Selection::Keyword(keyword));
            }
            buffer = &buffer[index + SEPARATOR.len()..];
        }

        if next_is_payload {
            selections.push(Selection::Payload(buffer.to_string()));
        } else if let Ok(keyword) = Keyword::from_str(buffer) {
            selections.push(Selection::Keyword(keyword));
        }

        selections
    }
}
fn selections_to_route(selections: Vec<Selection>) -> Option<CleanIntermediaryRoute> {
match selections.first()? {
Selection::Keyword(Keyword::Contact) => match selections.get(1)? {
Selection::Keyword(Keyword::Explicit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::contact_list(PubkeySource::Explicit(
Pubkey::from_hex(hex.as_str()).ok()?,
)),
))
} else {
None
}
}
Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::contact_list(PubkeySource::DeckAuthor),
)),
_ => None,
},
Selection::Keyword(Keyword::Notifs) => match selections.get(1)? {
Selection::Keyword(Keyword::Explicit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::notifications(PubkeySource::Explicit(
Pubkey::from_hex(hex.as_str()).ok()?,
)),
))
} else {
None
}
}
Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::notifications(PubkeySource::DeckAuthor),
)),
_ => None,
},
Selection::Keyword(Keyword::Profile) => match selections.get(1)? {
Selection::Keyword(Keyword::Explicit) => {
if let Selection::Payload(hex) = selections.get(2)? {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::profile(
PubkeySource::Explicit(Pubkey::from_hex(hex.as_str()).ok()?),
)))
} else {
None
}
}
Selection::Keyword(Keyword::DeckAuthor) => Some(CleanIntermediaryRoute::ToTimeline(
TimelineKind::profile(PubkeySource::DeckAuthor),
)),
_ => None,
},
Selection::Keyword(Keyword::Universe) => {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Universe))
}
Selection::Keyword(Keyword::Hashtag) => {
if let Selection::Payload(hashtag) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Hashtag(
hashtag.to_string(),
)))
} else {
None
}
}
Selection::Keyword(Keyword::Generic) => {
Some(CleanIntermediaryRoute::ToTimeline(TimelineKind::Generic))
}
Selection::Keyword(Keyword::Thread) => {
if let Selection::Payload(hex) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToRoute(Route::thread(
NoteId::from_hex(hex.as_str()).ok()?,
)))
} else {
None
}
}
Selection::Keyword(Keyword::Reply) => {
if let Selection::Payload(hex) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToRoute(Route::reply(
NoteId::from_hex(hex.as_str()).ok()?,
)))
} else {
None
}
}
Selection::Keyword(Keyword::Quote) => {
if let Selection::Payload(hex) = selections.get(1)? {
Some(CleanIntermediaryRoute::ToRoute(Route::quote(
NoteId::from_hex(hex.as_str()).ok()?,
)))
} else {
None
}
}
Selection::Keyword(Keyword::Account) => match selections.get(1)? {
Selection::Keyword(Keyword::Show) => Some(CleanIntermediaryRoute::ToRoute(
Route::Accounts(AccountsRoute::Accounts),
)),
Selection::Keyword(Keyword::New) => Some(CleanIntermediaryRoute::ToRoute(
Route::Accounts(AccountsRoute::AddAccount),
)),
_ => None,
},
Selection::Keyword(Keyword::Relay) => Some(CleanIntermediaryRoute::ToRoute(Route::Relays)),
Selection::Keyword(Keyword::Compose) => {
Some(CleanIntermediaryRoute::ToRoute(Route::ComposeNote))
}
Selection::Keyword(Keyword::Column) => match selections.get(1)? {
Selection::Keyword(Keyword::NotificationSelection) => {
Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
AddColumnRoute::UndecidedNotification,
)))
}
Selection::Keyword(Keyword::ExternalNotifSelection) => {
Some(CleanIntermediaryRoute::ToRoute(Route::AddColumn(
AddColumnRoute::ExternalNotification,
)))
}
Selection::Keyword(Keyword::HashtagSelection) => Some(CleanIntermediaryRoute::ToRoute(
Route::AddColumn(AddColumnRoute::Hashtag),
)),
_ => None,
},
Selection::Keyword(Keyword::Support) => {
Some(CleanIntermediaryRoute::ToRoute(Route::Support))
}
Selection::Keyword(Keyword::Deck) => match selections.get(1)? {
Selection::Keyword(Keyword::New) => {
Some(CleanIntermediaryRoute::ToRoute(Route::NewDeck))
}
Selection::Keyword(Keyword::Edit) => {
if let Selection::Payload(index_str) = selections.get(2)? {
let parsed_index = index_str.parse::<usize>().ok()?;
Some(CleanIntermediaryRoute::ToRoute(Route::EditDeck(
parsed_index,
)))
} else {
None
}
}
_ => None,
},
Selection::Payload(_)
| Selection::Keyword(Keyword::Explicit)
| Selection::Keyword(Keyword::New)
| Selection::Keyword(Keyword::DeckAuthor)
| Selection::Keyword(Keyword::Show)
| Selection::Keyword(Keyword::NotificationSelection)
| Selection::Keyword(Keyword::ExternalNotifSelection)
| Selection::Keyword(Keyword::HashtagSelection)
| Selection::Keyword(Keyword::Edit) => None,
}
}
impl fmt::Display for Selection {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Selection::Keyword(keyword) => write!(f, "{}", keyword),
Selection::Payload(payload) => write!(f, "{}", payload),
}
}
}
#[cfg(test)]
mod tests {
    use enostr::Pubkey;

    use crate::{route::Route, test_data::test_app, timeline::TimelineRoute};

    use super::deserialize_columns;

    /// Round-trip two serialized columns (a universe timeline and an
    /// explicit-pubkey notifications timeline) and check that each one
    /// deserializes into a single timeline route.
    #[test]
    fn test_deserialize_columns() {
        let hex = "aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe";
        let serialized = vec![
            vec!["universe".to_owned()],
            vec![format!("notifs:explicit:{}", hex)],
        ];

        let user = Pubkey::from_hex(hex).unwrap();
        let app = test_app();
        let cols = deserialize_columns(&app.ndb, user.bytes(), serialized);

        assert_eq!(cols.columns().len(), 2);

        for (index, ordinal) in [(0, "first"), (1, "second")] {
            let router = cols.column(index).router();
            assert_eq!(router.routes().len(), 1);
            assert!(
                matches!(
                    router.routes().first().unwrap(),
                    Route::Timeline(TimelineRoute::Timeline(_))
                ),
                "The {} router route is not a TimelineRoute::Timeline variant",
                ordinal
            );
        }
    }
}

View File

@@ -0,0 +1,176 @@
use eframe::Result;
use enostr::{Keypair, Pubkey, SerializableKeypair};
use crate::Error;
use super::{
file_storage::{delete_file, write_file, Directory},
key_storage_impl::{KeyStorageError, KeyStorageResponse},
};
// File name used to persist the currently selected public key.
static SELECTED_PUBKEY_FILE_NAME: &str = "selected_pubkey";

/// An OS agnostic file key storage implementation
#[derive(Debug, PartialEq)]
pub struct FileKeyStorage {
    // One JSON-serialized keypair file per account, named by hex pubkey.
    keys_directory: Directory,
    // Holds only the single `selected_pubkey` file.
    selected_key_directory: Directory,
}
impl FileKeyStorage {
    /// Build a storage backed by the two given directories.
    pub fn new(keys_directory: Directory, selected_key_directory: Directory) -> Self {
        Self {
            keys_directory,
            selected_key_directory,
        }
    }

    /// Serialize `key` to JSON and write it to the keys directory under its
    /// hex-encoded public key.
    fn add_key_internal(&self, key: &Keypair) -> Result<(), KeyStorageError> {
        write_file(
            &self.keys_directory.file_path,
            key.pubkey.hex(),
            // NOTE(review): serialized with an empty passphrase and the
            // magic constant `7` — presumably a KDF cost parameter; confirm
            // against `SerializableKeypair::from_keypair`.
            &serde_json::to_string(&SerializableKeypair::from_keypair(key, "", 7))
                .map_err(|e| KeyStorageError::Addition(Error::Generic(e.to_string())))?,
        )
        .map_err(KeyStorageError::Addition)
    }

    /// Read every keypair file in the keys directory.
    /// Files that fail JSON deserialization are silently skipped.
    fn get_keys_internal(&self) -> Result<Vec<Keypair>, KeyStorageError> {
        let keys = self
            .keys_directory
            .get_files()
            .map_err(KeyStorageError::Retrieval)?
            .values()
            .filter_map(|str_key| serde_json::from_str::<SerializableKeypair>(str_key).ok())
            // decrypt with the same empty passphrase used in `add_key_internal`
            .map(|serializable_keypair| serializable_keypair.to_keypair(""))
            .collect();
        Ok(keys)
    }

    /// Delete the on-disk file for `key`, keyed by its hex pubkey.
    fn remove_key_internal(&self, key: &Keypair) -> Result<(), KeyStorageError> {
        delete_file(&self.keys_directory.file_path, key.pubkey.hex())
            .map_err(KeyStorageError::Removal)
    }

    /// Read the selected pubkey from disk.
    ///
    /// NOTE(review): a missing selection file surfaces as
    /// `KeyStorageError::Selection` rather than `Ok(None)` — verify callers
    /// expect that.
    fn get_selected_pubkey(&self) -> Result<Option<Pubkey>, KeyStorageError> {
        let pubkey_str = self
            .selected_key_directory
            .get_file(SELECTED_PUBKEY_FILE_NAME.to_owned())
            .map_err(KeyStorageError::Selection)?;
        serde_json::from_str(&pubkey_str)
            .map_err(|e| KeyStorageError::Selection(Error::Generic(e.to_string())))
    }

    /// Persist `pubkey` as the selected account, or clear any existing
    /// selection when `None` is given.
    fn select_pubkey(&self, pubkey: Option<Pubkey>) -> Result<(), KeyStorageError> {
        if let Some(pubkey) = pubkey {
            write_file(
                &self.selected_key_directory.file_path,
                SELECTED_PUBKEY_FILE_NAME.to_owned(),
                &serde_json::to_string(&pubkey.hex())
                    .map_err(|e| KeyStorageError::Selection(Error::Generic(e.to_string())))?,
            )
            .map_err(KeyStorageError::Selection)
        } else if self
            .selected_key_directory
            .get_file(SELECTED_PUBKEY_FILE_NAME.to_owned())
            .is_ok()
        {
            // Case where user chose to have no selected pubkey, but one already exists
            delete_file(
                &self.selected_key_directory.file_path,
                SELECTED_PUBKEY_FILE_NAME.to_owned(),
            )
            .map_err(KeyStorageError::Selection)
        } else {
            // Nothing selected and nothing to clear.
            Ok(())
        }
    }
}
impl FileKeyStorage {
pub fn get_keys(&self) -> KeyStorageResponse<Vec<enostr::Keypair>> {
KeyStorageResponse::ReceivedResult(self.get_keys_internal())
}
pub fn add_key(&self, key: &enostr::Keypair) -> KeyStorageResponse<()> {
KeyStorageResponse::ReceivedResult(self.add_key_internal(key))
}
pub fn remove_key(&self, key: &enostr::Keypair) -> KeyStorageResponse<()> {
KeyStorageResponse::ReceivedResult(self.remove_key_internal(key))
}
pub fn get_selected_key(&self) -> KeyStorageResponse<Option<Pubkey>> {
KeyStorageResponse::ReceivedResult(self.get_selected_pubkey())
}
pub fn select_key(&self, key: Option<Pubkey>) -> KeyStorageResponse<()> {
KeyStorageResponse::ReceivedResult(self.select_pubkey(key))
}
}
#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use super::*;
    use enostr::Keypair;

    // Create a temp directory that outlives its `TempDir` guard.
    //
    // The previous `TempDir::new()?.path().to_path_buf()` dropped the guard
    // at the end of the expression, deleting the directory immediately; the
    // tests only worked because `write_file` recreated it. `into_path`
    // persists the directory instead of returning a dangling path.
    static CREATE_TMP_DIR: fn() -> Result<PathBuf, Error> =
        || Ok(tempfile::TempDir::new()?.into_path());

    impl FileKeyStorage {
        /// Storage backed by throwaway temp directories.
        fn mock() -> Result<Self, Error> {
            Ok(Self {
                keys_directory: Directory::new(CREATE_TMP_DIR()?),
                selected_key_directory: Directory::new(CREATE_TMP_DIR()?),
            })
        }
    }

    /// Add a key, confirm it is listed, remove it, confirm it is gone.
    #[test]
    fn test_basic() {
        let kp = enostr::FullKeypair::generate().to_keypair();
        let storage = FileKeyStorage::mock().unwrap();
        let resp = storage.add_key(&kp);

        assert_eq!(resp, KeyStorageResponse::ReceivedResult(Ok(())));
        assert_num_storage(&storage.get_keys(), 1);

        assert_eq!(
            storage.remove_key(&kp),
            KeyStorageResponse::ReceivedResult(Ok(()))
        );
        assert_num_storage(&storage.get_keys(), 0);
    }

    /// Assert that a `get_keys` response holds exactly `n` keys.
    fn assert_num_storage(keys_response: &KeyStorageResponse<Vec<Keypair>>, n: usize) {
        match keys_response {
            KeyStorageResponse::ReceivedResult(Ok(keys)) => {
                assert_eq!(keys.len(), n);
            }
            KeyStorageResponse::ReceivedResult(Err(_e)) => {
                panic!("could not get keys");
            }
            KeyStorageResponse::Waiting => {
                panic!("did not receive result");
            }
        }
    }

    /// Selecting a stored key should succeed and then be readable back.
    #[test]
    fn test_select_key() {
        let kp = enostr::FullKeypair::generate().to_keypair();
        let storage = FileKeyStorage::mock().unwrap();

        let _ = storage.add_key(&kp);
        assert_num_storage(&storage.get_keys(), 1);

        let resp = storage.select_pubkey(Some(kp.pubkey));
        assert!(resp.is_ok());

        let resp = storage.get_selected_pubkey();
        assert!(resp.is_ok());
    }
}

View File

@@ -0,0 +1,271 @@
use std::{
collections::{HashMap, VecDeque},
fs::{self, File},
io::{self, BufRead},
path::{Path, PathBuf},
time::SystemTime,
};
use crate::Error;
/// Root of notedeck's on-disk data layout; subdirectories for each kind of
/// data (logs, settings, keys, db, cache) hang off of it.
#[derive(Debug, Clone)]
pub struct DataPath {
    // Base directory under which all app data lives.
    base: PathBuf,
}
impl DataPath {
    /// Build a `DataPath` rooted at `base`.
    pub fn new(base: impl AsRef<Path>) -> Self {
        Self {
            base: base.as_ref().to_path_buf(),
        }
    }

    /// Platform-default base directory: `<local data dir>/notedeck`.
    pub fn default_base() -> Option<PathBuf> {
        dirs::data_local_dir().map(|dir| dir.join("notedeck"))
    }
}
/// The kinds of data notedeck stores on disk, each mapped to its own
/// subdirectory by `DataPath::rel_path`.
pub enum DataPathType {
    /// Log output files.
    Log,
    /// App settings (e.g. the serialized columns layout).
    Setting,
    /// Per-account keypair files.
    Keys,
    /// The selected-account marker file.
    SelectedKey,
    /// The nostr database.
    Db,
    /// Cached data (e.g. downloaded images).
    Cache,
}
impl DataPath {
pub fn rel_path(&self, typ: DataPathType) -> PathBuf {
match typ {
DataPathType::Log => PathBuf::from("logs"),
DataPathType::Setting => PathBuf::from("settings"),
DataPathType::Keys => PathBuf::from("storage").join("accounts"),
DataPathType::SelectedKey => PathBuf::from("storage").join("selected_account"),
DataPathType::Db => PathBuf::from("db"),
DataPathType::Cache => PathBuf::from("cache"),
}
}
pub fn path(&self, typ: DataPathType) -> PathBuf {
self.base.join(self.rel_path(typ))
}
}
/// A wrapper around a single directory on disk, with helpers for reading,
/// listing and inspecting the files inside it.
#[derive(Debug, PartialEq)]
pub struct Directory {
    // Path of the directory itself (despite the `file_` prefix).
    pub file_path: PathBuf,
}
impl Directory {
    /// Wrap the directory at `file_path`. The directory need not exist yet.
    pub fn new(file_path: PathBuf) -> Self {
        Self { file_path }
    }

    /// Get the files in the current directory where the key is the file name and the value is the file contents
    pub fn get_files(&self) -> Result<HashMap<String, String>, Error> {
        // `read_dir` borrows the path; no need to clone the PathBuf.
        let dir = fs::read_dir(&self.file_path)?;
        let map = dir
            .filter_map(|f| f.ok())
            .filter(|f| f.path().is_file())
            .filter_map(|f| {
                // Skip entries with non-UTF-8 names or unreadable contents.
                let file_name = f.file_name().into_string().ok()?;
                let contents = fs::read_to_string(f.path()).ok()?;
                Some((file_name, contents))
            })
            .collect();

        Ok(map)
    }

    /// List the names of all regular files in the directory.
    pub fn get_file_names(&self) -> Result<Vec<String>, Error> {
        let dir = fs::read_dir(&self.file_path)?;
        let names = dir
            .filter_map(|f| f.ok())
            .filter(|f| f.path().is_file())
            .filter_map(|f| f.file_name().into_string().ok())
            .collect();

        Ok(names)
    }

    /// Read the whole contents of `file_name` inside this directory.
    ///
    /// Errors with `Error::Generic` when the file does not exist or is not
    /// a regular file.
    pub fn get_file(&self, file_name: String) -> Result<String, Error> {
        let filepath = self.file_path.join(&file_name);

        // `is_file()` already implies existence; the previous
        // `to_str` round-trip would also spuriously fail on valid
        // non-UTF-8 paths, since `read_to_string` accepts any `Path`.
        if filepath.is_file() {
            Ok(fs::read_to_string(&filepath)?)
        } else {
            Err(Error::Generic(format!(
                "Requested file was not found: {}",
                file_name
            )))
        }
    }

    /// Read the last `n` lines of `file_name` without loading the whole
    /// file into memory, also counting the file's total line count.
    pub fn get_file_last_n_lines(&self, file_name: String, n: usize) -> Result<FileResult, Error> {
        let filepath = self.file_path.join(&file_name);

        if filepath.is_file() {
            let file = File::open(&filepath)?;
            let reader = io::BufReader::new(file);

            // Sliding window over the stream keeps at most `n` lines alive.
            let mut queue: VecDeque<String> = VecDeque::with_capacity(n);
            let mut total_lines_in_file = 0;

            for line in reader.lines() {
                queue.push_back(line?);
                if queue.len() > n {
                    queue.pop_front();
                }
                total_lines_in_file += 1;
            }

            let output_num_lines = queue.len();
            let output = queue.into_iter().collect::<Vec<String>>().join("\n");

            Ok(FileResult {
                output,
                output_num_lines,
                total_lines_in_file,
            })
        } else {
            Err(Error::Generic(format!(
                "Requested file was not found: {}",
                file_name
            )))
        }
    }

    /// Get the file name which is most recently modified in the directory
    pub fn get_most_recent(&self) -> Result<Option<String>, Error> {
        let mut most_recent: Option<(SystemTime, String)> = None;

        for entry in fs::read_dir(&self.file_path)? {
            let entry = entry?;
            let metadata = entry.metadata()?;
            if metadata.is_file() {
                let modified = metadata.modified()?;
                let file_name = entry.file_name().to_string_lossy().to_string();

                // Keep whichever file has the newest mtime.
                match most_recent {
                    Some((last_modified, _)) if modified > last_modified => {
                        most_recent = Some((modified, file_name));
                    }
                    None => {
                        most_recent = Some((modified, file_name));
                    }
                    _ => {}
                }
            }
        }

        Ok(most_recent.map(|(_, file_name)| file_name))
    }
}
/// Result of a tail-read (see `Directory::get_file_last_n_lines`).
pub struct FileResult {
    /// The last lines read, joined with `\n`.
    pub output: String,
    /// Number of lines contained in `output` (at most the requested `n`).
    pub output_num_lines: usize,
    /// Total number of lines in the whole file.
    pub total_lines_in_file: usize,
}
/// Write `data` to `file_name` inside `directory`, creating the directory
/// tree first if needed.
pub fn write_file(directory: &Path, file_name: String, data: &str) -> Result<(), Error> {
    // `create_dir_all` is already a no-op when the directory exists, so the
    // previous `exists()` pre-check was redundant (and racy under TOCTOU).
    fs::create_dir_all(directory)?;
    fs::write(directory.join(file_name), data)?;
    Ok(())
}
/// Delete `file_name` inside `directory`.
///
/// Errors with `Error::Generic` when the file does not exist or is not a
/// regular file, and `Error::Io` when removal itself fails.
pub fn delete_file(directory: &Path, file_name: String) -> Result<(), Error> {
    // Borrow the name for `join` so it is still available for the error
    // message below — no clone needed.
    let file_to_delete = directory.join(&file_name);
    if file_to_delete.is_file() {
        fs::remove_file(file_to_delete).map_err(Error::Io)
    } else {
        Err(Error::Generic(format!(
            "Requested file to delete was not found: {}",
            file_name
        )))
    }
}
#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    use crate::{
        storage::file_storage::{delete_file, write_file},
        Error,
    };

    use super::Directory;

    // Create a temp directory that outlives its `TempDir` guard.
    //
    // The previous `TempDir::new()?.path().to_path_buf()` dropped the guard
    // at the end of the expression, deleting the directory immediately and
    // returning a dangling path; `into_path` persists the directory.
    static CREATE_TMP_DIR: fn() -> Result<PathBuf, Error> =
        || Ok(tempfile::TempDir::new()?.into_path());

    /// Write a file, read it back, then delete it.
    #[test]
    fn test_add_get_delete() {
        if let Ok(path) = CREATE_TMP_DIR() {
            let directory = Directory::new(path);
            let file_name = "file_test_name.txt".to_string();
            let file_contents = "test";

            let write_res = write_file(&directory.file_path, file_name.clone(), file_contents);
            assert!(write_res.is_ok());

            if let Ok(asserted_file_contents) = directory.get_file(file_name.clone()) {
                assert_eq!(asserted_file_contents, file_contents);
            } else {
                panic!("File not found");
            }

            let delete_res = delete_file(&directory.file_path, file_name);
            assert!(delete_res.is_ok());
        } else {
            panic!("could not get interactor")
        }
    }

    /// Write ten files and verify both contents and name listings.
    #[test]
    fn test_get_multiple() {
        if let Ok(path) = CREATE_TMP_DIR() {
            let directory = Directory::new(path);

            for i in 0..10 {
                let file_name = format!("file{}.txt", i);
                let write_res = write_file(&directory.file_path, file_name, "test");
                assert!(write_res.is_ok());
            }

            if let Ok(files) = directory.get_files() {
                for i in 0..10 {
                    let file_name = format!("file{}.txt", i);
                    assert!(files.contains_key(&file_name));
                    assert_eq!(files.get(&file_name).unwrap(), "test");
                }
            } else {
                panic!("Files not found");
            }

            if let Ok(file_names) = directory.get_file_names() {
                for i in 0..10 {
                    let file_name = format!("file{}.txt", i);
                    assert!(file_names.contains(&file_name));
                }
            } else {
                panic!("File names not found");
            }

            for i in 0..10 {
                let file_name = format!("file{}.txt", i);
                assert!(delete_file(&directory.file_path, file_name).is_ok());
            }
        } else {
            panic!("could not get interactor")
        }
    }
}

View File

@@ -0,0 +1,112 @@
use enostr::{Keypair, Pubkey};
use super::file_key_storage::FileKeyStorage;
use crate::Error;
#[cfg(target_os = "macos")]
use super::security_framework_key_storage::SecurityFrameworkKeyStorage;
/// The key-storage backend in use.
#[derive(Debug, PartialEq)]
pub enum KeyStorageType {
    /// No persistence; all operations succeed but store nothing.
    None,
    /// Plain-file storage; works on every OS.
    FileSystem(FileKeyStorage),
    /// macOS keychain storage via the Security framework.
    #[cfg(target_os = "macos")]
    SecurityFramework(SecurityFrameworkKeyStorage),
}
/// Outcome of a (potentially asynchronous) key-storage operation.
#[allow(dead_code)]
#[derive(Debug)]
pub enum KeyStorageResponse<R> {
    /// The operation has not completed yet.
    Waiting,
    /// The operation finished with the given result.
    ReceivedResult(Result<R, KeyStorageError>),
}
impl<R: PartialEq> PartialEq for KeyStorageResponse<R> {
fn eq(&self, other: &Self) -> bool {
match (self, other) {
(KeyStorageResponse::Waiting, KeyStorageResponse::Waiting) => true,
(
KeyStorageResponse::ReceivedResult(Ok(r1)),
KeyStorageResponse::ReceivedResult(Ok(r2)),
) => r1 == r2,
(
KeyStorageResponse::ReceivedResult(Err(_)),
KeyStorageResponse::ReceivedResult(Err(_)),
) => true,
_ => false,
}
}
}
impl KeyStorageType {
    /// Fetch every stored keypair from the active backend.
    pub fn get_keys(&self) -> KeyStorageResponse<Vec<Keypair>> {
        match self {
            Self::None => KeyStorageResponse::ReceivedResult(Ok(Vec::new())),
            Self::FileSystem(f) => f.get_keys(),
            #[cfg(target_os = "macos")]
            Self::SecurityFramework(f) => f.get_keys(),
        }
    }

    /// Persist `key` with the active backend (no-op for `None`).
    pub fn add_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
        // The former `let _ = key;` was a leftover no-op: `key` is used by
        // every backend arm already.
        match self {
            Self::None => KeyStorageResponse::ReceivedResult(Ok(())),
            Self::FileSystem(f) => f.add_key(key),
            #[cfg(target_os = "macos")]
            Self::SecurityFramework(f) => f.add_key(key),
        }
    }

    /// Remove `key` from the active backend (no-op for `None`).
    pub fn remove_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
        match self {
            Self::None => KeyStorageResponse::ReceivedResult(Ok(())),
            Self::FileSystem(f) => f.remove_key(key),
            #[cfg(target_os = "macos")]
            Self::SecurityFramework(f) => f.remove_key(key),
        }
    }

    /// Read the selected pubkey; not yet implemented for the macOS keychain.
    pub fn get_selected_key(&self) -> KeyStorageResponse<Option<Pubkey>> {
        match self {
            Self::None => KeyStorageResponse::ReceivedResult(Ok(None)),
            Self::FileSystem(f) => f.get_selected_key(),
            #[cfg(target_os = "macos")]
            Self::SecurityFramework(_) => unimplemented!(),
        }
    }

    /// Select (or clear) the active pubkey; not yet implemented for the
    /// macOS keychain.
    pub fn select_key(&self, key: Option<Pubkey>) -> KeyStorageResponse<()> {
        match self {
            Self::None => KeyStorageResponse::ReceivedResult(Ok(())),
            Self::FileSystem(f) => f.select_key(key),
            #[cfg(target_os = "macos")]
            Self::SecurityFramework(_) => unimplemented!(),
        }
    }
}
/// A key-storage failure, tagged by the operation that failed; every
/// variant wraps the underlying `Error`.
#[allow(dead_code)]
#[derive(Debug)]
pub enum KeyStorageError {
    /// Listing or reading stored keys failed.
    Retrieval(Error),
    /// Persisting a key failed.
    Addition(Error),
    /// Reading or writing the selected-key marker failed.
    Selection(Error),
    /// Deleting a stored key failed.
    Removal(Error),
    /// A platform/OS-level failure.
    OSError(Error),
}
impl std::fmt::Display for KeyStorageError {
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        // Every variant wraps the underlying `Error`, so bind it uniformly
        // as `e` (the old `key`/`pubkey` binding names were misleading).
        match self {
            Self::Retrieval(e) => write!(f, "Failed to retrieve keys: {:?}", e),
            Self::Addition(e) => write!(f, "Failed to add key: {:?}", e),
            Self::Selection(e) => write!(f, "Failed to select key: {:?}", e),
            Self::Removal(e) => write!(f, "Failed to remove key: {:?}", e),
            Self::OSError(e) => write!(f, "OS had an error: {:?}", e),
        }
    }
}

impl std::error::Error for KeyStorageError {}

View File

@@ -0,0 +1,695 @@
use enostr::{NoteId, Pubkey};
use nostrdb::Ndb;
use serde::{Deserialize, Deserializer};
use tracing::error;
use crate::{
accounts::AccountsRoute,
column::{Columns, IntermediaryRoute},
route::Route,
timeline::{kind::ListKind, PubkeySource, Timeline, TimelineId, TimelineKind, TimelineRoute},
ui::add_column::AddColumnRoute,
Error,
};
use super::{DataPath, DataPathType, Directory};
// Settings file holding the legacy serialized columns layout.
pub static COLUMNS_FILE: &str = "columns.json";

/// Read the raw contents of the columns settings file, if it exists.
fn columns_json(path: &DataPath) -> Option<String> {
    Directory::new(path.path(DataPathType::Setting))
        .get_file(COLUMNS_FILE.to_string())
        .ok()
}
/// Legacy on-disk representation of a timeline route. Variant names are the
/// JSON wire format and must not change.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationTimelineRoute {
    Timeline(u32),
    Thread(String),
    Profile(String),
    Reply(String),
    Quote(String),
}

impl MigrationTimelineRoute {
    /// Convert into the current `TimelineRoute`; `None` on malformed hex ids.
    fn timeline_route(self) -> Option<TimelineRoute> {
        let route = match self {
            Self::Timeline(id) => TimelineRoute::Timeline(TimelineId::new(id)),
            Self::Thread(note_id_hex) => {
                TimelineRoute::Thread(NoteId::from_hex(&note_id_hex).ok()?)
            }
            Self::Profile(pubkey_hex) => {
                TimelineRoute::Profile(Pubkey::from_hex(&pubkey_hex).ok()?)
            }
            Self::Reply(note_id_hex) => TimelineRoute::Reply(NoteId::from_hex(&note_id_hex).ok()?),
            Self::Quote(note_id_hex) => TimelineRoute::Quote(NoteId::from_hex(&note_id_hex).ok()?),
        };
        Some(route)
    }
}
/// Legacy on-disk representation of a column route. Variant names are the
/// JSON wire format and must not change.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationRoute {
    Timeline(MigrationTimelineRoute),
    Accounts(MigrationAccountsRoute),
    Relays,
    ComposeNote,
    AddColumn(MigrationAddColumnRoute),
    Support,
}

impl MigrationRoute {
    /// Convert into the current `Route`; `None` when the nested timeline
    /// route fails to convert.
    fn route(self) -> Option<Route> {
        let route = match self {
            Self::Timeline(timeline) => Route::Timeline(timeline.timeline_route()?),
            Self::Accounts(accounts) => Route::Accounts(accounts.accounts_route()),
            Self::Relays => Route::Relays,
            Self::ComposeNote => Route::ComposeNote,
            Self::AddColumn(add_column) => Route::AddColumn(add_column.add_column_route()),
            Self::Support => Route::Support,
        };
        Some(route)
    }
}
/// Legacy on-disk representation of an accounts route. Variant names are
/// the JSON wire format and must not change.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationAccountsRoute {
    Accounts,
    AddAccount,
}

impl MigrationAccountsRoute {
    /// Convert into the current `AccountsRoute` (infallible).
    fn accounts_route(self) -> AccountsRoute {
        match self {
            Self::Accounts => AccountsRoute::Accounts,
            Self::AddAccount => AccountsRoute::AddAccount,
        }
    }
}
/// Legacy on-disk representation of an add-column route. Variant names are
/// the JSON wire format and must not change.
#[derive(Deserialize, Debug, PartialEq)]
enum MigrationAddColumnRoute {
    Base,
    UndecidedNotification,
    ExternalNotification,
    Hashtag,
}

impl MigrationAddColumnRoute {
    /// Convert into the current `AddColumnRoute` (infallible).
    fn add_column_route(self) -> AddColumnRoute {
        match self {
            Self::Base => AddColumnRoute::Base,
            Self::UndecidedNotification => AddColumnRoute::UndecidedNotification,
            Self::ExternalNotification => AddColumnRoute::ExternalNotification,
            Self::Hashtag => AddColumnRoute::Hashtag,
        }
    }
}
/// A legacy column: just the stack of routes it displayed.
#[derive(Debug, PartialEq)]
struct MigrationColumn {
    routes: Vec<MigrationRoute>,
}

impl<'de> Deserialize<'de> for MigrationColumn {
    /// Columns were serialized as a bare route array, so deserialize the
    /// `Vec` and wrap it.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        Vec::<MigrationRoute>::deserialize(deserializer).map(|routes| Self { routes })
    }
}
/// Top-level shape of the legacy `columns.json`: per-column route stacks
/// plus the timelines they reference by id.
#[derive(Deserialize, Debug)]
struct MigrationColumns {
    columns: Vec<MigrationColumn>,
    timelines: Vec<MigrationTimeline>,
}
/// A legacy timeline entry, referenced from columns by `id`.
#[derive(Deserialize, Debug, Clone, PartialEq)]
struct MigrationTimeline {
    id: u32,
    kind: MigrationTimelineKind,
}

impl MigrationTimeline {
    /// Convert into a live `Timeline`; `None` when the kind fails to
    /// convert or the timeline cannot be constructed.
    fn into_timeline(self, ndb: &Ndb, deck_user_pubkey: Option<&[u8; 32]>) -> Option<Timeline> {
        self.kind
            .into_timeline_kind()?
            .into_timeline(ndb, deck_user_pubkey)
    }
}
/// Legacy on-disk representation of a list kind. Variant names are the
/// JSON wire format and must not change.
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationListKind {
    Contact(MigrationPubkeySource),
}

impl MigrationListKind {
    /// Convert into the current `ListKind`; `None` on a malformed pubkey.
    fn list_kind(self) -> Option<ListKind> {
        match self {
            Self::Contact(source) => Some(ListKind::Contact(source.pubkey_source()?)),
        }
    }
}
/// Legacy on-disk representation of a pubkey source. Variant names are the
/// JSON wire format and must not change.
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationPubkeySource {
    Explicit(String),
    DeckAuthor,
}

impl MigrationPubkeySource {
    /// Convert into the current `PubkeySource`; `None` when the explicit
    /// hex pubkey fails to parse.
    fn pubkey_source(self) -> Option<PubkeySource> {
        let source = match self {
            Self::Explicit(hex) => PubkeySource::Explicit(Pubkey::from_hex(&hex).ok()?),
            Self::DeckAuthor => PubkeySource::DeckAuthor,
        };
        Some(source)
    }
}
/// Legacy on-disk representation of a timeline kind. Variant names are the
/// JSON wire format and must not change.
#[derive(Deserialize, Clone, Debug, PartialEq)]
enum MigrationTimelineKind {
    List(MigrationListKind),
    Notifications(MigrationPubkeySource),
    Profile(MigrationPubkeySource),
    Universe,
    Generic,
    Hashtag(String),
}

impl MigrationTimelineKind {
    /// Convert into the current `TimelineKind`; `None` when a nested
    /// pubkey source fails to convert.
    fn into_timeline_kind(self) -> Option<TimelineKind> {
        let kind = match self {
            Self::List(list_kind) => TimelineKind::List(list_kind.list_kind()?),
            Self::Notifications(source) => TimelineKind::Notifications(source.pubkey_source()?),
            Self::Profile(source) => TimelineKind::Profile(source.pubkey_source()?),
            Self::Universe => TimelineKind::Universe,
            Self::Generic => TimelineKind::Generic,
            Self::Hashtag(hashtag) => TimelineKind::Hashtag(hashtag),
        };
        Some(kind)
    }
}
impl MigrationColumns {
    /// Convert the legacy column layout into live `Columns`, resolving each
    /// timeline-route id against `self.timelines`.
    ///
    /// Routes that fail to convert are dropped; columns that end up with no
    /// routes are dropped entirely. Legacy `Thread` timeline routes are
    /// intentionally discarded (see the empty match arm below).
    fn into_columns(self, ndb: &Ndb, deck_pubkey: Option<&[u8; 32]>) -> Columns {
        let mut columns = Columns::default();

        for column in self.columns {
            let mut cur_routes = Vec::new();
            for route in column.routes {
                match route {
                    MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(timeline_id)) => {
                        // Resolve the id to its timeline definition; ids
                        // with no matching entry are silently skipped.
                        if let Some(migration_tl) =
                            self.timelines.iter().find(|tl| tl.id == timeline_id)
                        {
                            let tl = migration_tl.clone().into_timeline(ndb, deck_pubkey);
                            if let Some(tl) = tl {
                                cur_routes.push(IntermediaryRoute::Timeline(tl));
                            } else {
                                error!("Problem deserializing timeline {:?}", migration_tl);
                            }
                        }
                    }
                    // Thread routes are deliberately not migrated.
                    MigrationRoute::Timeline(MigrationTimelineRoute::Thread(_thread)) => {}
                    _ => {
                        if let Some(route) = route.route() {
                            cur_routes.push(IntermediaryRoute::Route(route));
                        }
                    }
                }
            }
            if !cur_routes.is_empty() {
                columns.insert_intermediary_routes(cur_routes);
            }
        }
        columns
    }
}
/// Deserialize the legacy JSON layout and convert it into live `Columns`.
fn string_to_columns(
    serialized_columns: String,
    ndb: &Ndb,
    user: Option<&[u8; 32]>,
) -> Option<Columns> {
    let migration = deserialize_columns_string(serialized_columns).ok()?;
    Some(migration.into_columns(ndb, user))
}
/// Load the legacy `columns.json` from `path` and migrate it into live
/// `Columns`; `None` when the file is missing or fails to parse.
pub fn deserialize_columns(path: &DataPath, ndb: &Ndb, user: Option<&[u8; 32]>) -> Option<Columns> {
    string_to_columns(columns_json(path)?, ndb, user)
}
fn deserialize_columns_string(serialized_columns: String) -> Result<MigrationColumns, Error> {
serde_json::from_str::<MigrationColumns>(&serialized_columns)
.map_err(|e| Error::Generic(e.to_string()))
}
#[cfg(test)]
mod tests {
use crate::storage::migration::{
MigrationColumn, MigrationListKind, MigrationPubkeySource, MigrationRoute,
MigrationTimeline, MigrationTimelineKind, MigrationTimelineRoute,
};
impl MigrationColumn {
fn from_route(route: MigrationRoute) -> Self {
Self {
routes: vec![route],
}
}
fn from_routes(routes: Vec<MigrationRoute>) -> Self {
Self { routes }
}
}
impl MigrationTimeline {
fn new(id: u32, kind: MigrationTimelineKind) -> Self {
Self { id, kind }
}
}
use super::*;
#[test]
fn multi_column() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}],[{"Timeline":{"Timeline":0}}],[{"Timeline":{"Timeline":1}}]],"timelines":[{"id":0,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}},{"id":1,"kind":{"Hashtag":"introductions"}},{"id":2,"kind":"Universe"}]}"#; // Multi-column
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 3);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(2)
))
);
assert_eq!(
*migration_cols.columns.get(1).unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(0)
))
);
assert_eq!(
*migration_cols.columns.get(2).unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(1)
))
);
assert_eq!(migration_cols.timelines.len(), 3);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
0,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
.to_owned()
)
))
)
);
assert_eq!(
*migration_cols.timelines.get(1).unwrap(),
MigrationTimeline::new(
1,
MigrationTimelineKind::Hashtag("introductions".to_owned())
)
);
assert_eq!(
*migration_cols.timelines.get(2).unwrap(),
MigrationTimeline::new(2, MigrationTimelineKind::Universe)
)
}
#[test]
fn base() {
let route = r#"{"columns":[[{"AddColumn":"Base"}]],"timelines":[]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::AddColumn(MigrationAddColumnRoute::Base))
);
assert!(migration_cols.timelines.is_empty());
}
#[test]
fn universe() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":0}}]],"timelines":[{"id":0,"kind":"Universe"}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(0)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(0, MigrationTimelineKind::Universe)
)
}
#[test]
fn home() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":2}}]],"timelines":[{"id":2,"kind":{"List":{"Contact":{"Explicit":"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"}}}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_route(MigrationRoute::Timeline(
MigrationTimelineRoute::Timeline(2)
))
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
2,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"aa733081e4f0f79dd43023d8983265593f2b41a988671cfcef3f489b91ad93fe"
.to_owned()
)
))
)
)
}
#[test]
fn thread() {
let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Thread":"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;
let deserialized_columns = deserialize_columns_string(route.to_string());
assert!(deserialized_columns.is_ok());
let migration_cols = deserialized_columns.unwrap();
assert_eq!(migration_cols.columns.len(), 1);
assert_eq!(
*migration_cols.columns.first().unwrap(),
MigrationColumn::from_routes(vec![
MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7),),
MigrationRoute::Timeline(MigrationTimelineRoute::Thread(
"fb9b0c62bc91bbe28ca428fc85e310ae38795b94fb910e0f4e12962ced971f25".to_owned()
)),
])
);
assert_eq!(migration_cols.timelines.len(), 1);
assert_eq!(
*migration_cols.timelines.first().unwrap(),
MigrationTimeline::new(
7,
MigrationTimelineKind::List(MigrationListKind::Contact(
MigrationPubkeySource::Explicit(
"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
.to_owned()
)
))
)
)
}
#[test]
fn profile() {
    // A column with a contact-list timeline and a profile route pushed on top
    // should deserialize into the matching migration structures.
    let route = r#"{"columns":[[{"Timeline":{"Timeline":7}},{"Timeline":{"Profile":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}]],"timelines":[{"id":7,"kind":{"List":{"Contact":{"Explicit":"4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"}}}}]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::Timeline(MigrationTimelineRoute::Timeline(7)),
            MigrationRoute::Timeline(MigrationTimelineRoute::Profile(
                "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
            )),
        ])
    );

    assert_eq!(migration_cols.timelines.len(), 1);
    assert_eq!(
        *migration_cols.timelines.first().unwrap(),
        MigrationTimeline::new(
            7,
            MigrationTimelineKind::List(MigrationListKind::Contact(
                MigrationPubkeySource::Explicit(
                    "4a0510f26880d40e432f4865cb5714d9d3c200ca6ebb16b418ae6c555f574967"
                        .to_owned()
                )
            ))
        )
    );
}
#[test]
fn your_notifs() {
    // Notifications keyed to the deck author should round-trip through the
    // migration deserializer.
    let route = r#"{"columns":[[{"Timeline":{"Timeline":5}}]],"timelines":[{"id":5,"kind":{"Notifications":"DeckAuthor"}}]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_route(MigrationRoute::Timeline(
            MigrationTimelineRoute::Timeline(5)
        ))
    );

    assert_eq!(migration_cols.timelines.len(), 1);
    assert_eq!(
        *migration_cols.timelines.first().unwrap(),
        MigrationTimeline::new(
            5,
            MigrationTimelineKind::Notifications(MigrationPubkeySource::DeckAuthor)
        )
    );
}
#[test]
fn undecided_notifs() {
    // An add-column flow ending at the undecided-notification step carries no
    // timelines at all.
    let route = r#"{"columns":[[{"AddColumn":"Base"},{"AddColumn":"UndecidedNotification"}]],"timelines":[]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
            MigrationRoute::AddColumn(MigrationAddColumnRoute::UndecidedNotification),
        ])
    );
    assert!(migration_cols.timelines.is_empty());
}
#[test]
fn extern_notifs() {
    // Notifications keyed to an explicit pubkey should round-trip through the
    // migration deserializer.
    let route = r#"{"columns":[[{"Timeline":{"Timeline":4}}]],"timelines":[{"id":4,"kind":{"Notifications":{"Explicit":"32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245"}}}]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_route(MigrationRoute::Timeline(
            MigrationTimelineRoute::Timeline(4)
        ))
    );

    assert_eq!(migration_cols.timelines.len(), 1);
    assert_eq!(
        *migration_cols.timelines.first().unwrap(),
        MigrationTimeline::new(
            4,
            MigrationTimelineKind::Notifications(MigrationPubkeySource::Explicit(
                "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245".to_owned()
            ))
        )
    );
}
#[test]
fn hashtag() {
    // A hashtag timeline should round-trip through the migration deserializer.
    let route = r#"{"columns":[[{"Timeline":{"Timeline":6}}]],"timelines":[{"id":6,"kind":{"Hashtag":"notedeck"}}]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_route(MigrationRoute::Timeline(
            MigrationTimelineRoute::Timeline(6)
        ))
    );

    assert_eq!(migration_cols.timelines.len(), 1);
    assert_eq!(
        *migration_cols.timelines.first().unwrap(),
        MigrationTimeline::new(6, MigrationTimelineKind::Hashtag("notedeck".to_owned()))
    );
}
#[test]
fn support() {
    // The support route is a bare enum variant in the serialized form and
    // carries no timelines.
    let route = r#"{"columns":[[{"AddColumn":"Base"},"Support"]],"timelines":[]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
            MigrationRoute::Support
        ])
    );
    assert!(migration_cols.timelines.is_empty());
}
#[test]
fn post() {
    // The compose-note route is a bare enum variant in the serialized form and
    // carries no timelines.
    let route = r#"{"columns":[[{"AddColumn":"Base"},"ComposeNote"]],"timelines":[]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
            MigrationRoute::ComposeNote
        ])
    );
    assert!(migration_cols.timelines.is_empty());
}
#[test]
fn relay() {
    // The relays route is a bare enum variant in the serialized form and
    // carries no timelines.
    let route = r#"{"columns":[[{"AddColumn":"Base"},"Relays"]],"timelines":[]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
            MigrationRoute::Relays
        ])
    );
    assert!(migration_cols.timelines.is_empty());
}
#[test]
fn accounts() {
    // The accounts management route should round-trip; no timelines involved.
    let route =
        r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"}]],"timelines":[]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
            MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
        ])
    );
    assert!(migration_cols.timelines.is_empty());
}
#[test]
fn login() {
    // Accounts view with the add-account (login) screen stacked on top; no
    // timelines involved.
    let route = r#"{"columns":[[{"AddColumn":"Base"},{"Accounts":"Accounts"},{"Accounts":"AddAccount"}]],"timelines":[]}"#;

    // `expect` surfaces the deserialization error on failure, unlike the
    // previous `assert!(is_ok())` + `unwrap()` pair which discarded it.
    let migration_cols =
        deserialize_columns_string(route.to_string()).expect("columns should deserialize");

    assert_eq!(migration_cols.columns.len(), 1);
    assert_eq!(
        *migration_cols.columns.first().unwrap(),
        MigrationColumn::from_routes(vec![
            MigrationRoute::AddColumn(MigrationAddColumnRoute::Base),
            MigrationRoute::Accounts(MigrationAccountsRoute::Accounts),
            MigrationRoute::Accounts(MigrationAccountsRoute::AddAccount),
        ])
    );
    assert!(migration_cols.timelines.is_empty());
}
}

View File

@@ -0,0 +1,15 @@
// Storage layer for the columns app: deck cache persistence, key storage
// backends, generic file storage helpers, and legacy-layout migration.
mod decks;
mod file_key_storage;
mod file_storage;
mod migration;

// Public storage API re-exported at the module root.
pub use decks::{load_decks_cache, save_decks_cache, DECKS_CACHE_FILE};
pub use file_key_storage::FileKeyStorage;
pub use file_storage::{delete_file, write_file, DataPath, DataPathType, Directory};
pub use migration::{deserialize_columns, COLUMNS_FILE};

// Keychain-backed key storage is only available on macOS.
#[cfg(target_os = "macos")]
mod security_framework_key_storage;

pub mod key_storage_impl;
pub use key_storage_impl::{KeyStorageResponse, KeyStorageType};

View File

@@ -0,0 +1,198 @@
use std::borrow::Cow;
use enostr::{Keypair, Pubkey, SecretKey};
use security_framework::{
item::{ItemClass, ItemSearchOptions, Limit, SearchResult},
passwords::{delete_generic_password, set_generic_password},
};
use tracing::error;
use crate::Error;
use super::{key_storage_impl::KeyStorageError, KeyStorageResponse};
/// Key storage backed by the macOS Security framework (keychain).
///
/// Keys are stored as generic passwords: the keychain "account" is the
/// pubkey hex and the password payload is the secret-key bytes.
#[derive(Debug, PartialEq)]
pub struct SecurityFrameworkKeyStorage {
    // Keychain service name all notedeck entries are filed under.
    pub service_name: Cow<'static, str>,
}
impl SecurityFrameworkKeyStorage {
    /// Create a storage handle filing entries under `service_name`.
    pub fn new(service_name: String) -> Self {
        SecurityFrameworkKeyStorage {
            service_name: Cow::Owned(service_name),
        }
    }

    /// Persist `key` as a generic password: account = pubkey hex, password =
    /// secret-key bytes (empty slice for pubkey-only accounts).
    fn add_key_internal(&self, key: &Keypair) -> Result<(), KeyStorageError> {
        match set_generic_password(
            &self.service_name,
            key.pubkey.hex().as_str(),
            key.secret_key
                .as_ref()
                .map_or_else(|| &[] as &[u8], |sc| sc.as_secret_bytes()),
        ) {
            Ok(_) => Ok(()),
            Err(e) => Err(KeyStorageError::Addition(Error::Generic(e.to_string()))),
        }
    }

    /// All keychain "acct" attribute strings (pubkey hexes) stored under our
    /// service name. Returns an empty vec if the search fails.
    fn get_pubkey_strings(&self) -> Vec<String> {
        let search_results = ItemSearchOptions::new()
            .class(ItemClass::generic_password())
            .service(&self.service_name)
            .load_attributes(true)
            .limit(Limit::All)
            .search();

        let mut accounts = Vec::new();
        if let Ok(search_results) = search_results {
            for result in search_results {
                if let Some(map) = result.simplify_dict() {
                    if let Some(val) = map.get("acct") {
                        accounts.push(val.clone());
                    }
                }
            }
        }
        accounts
    }

    /// Stored pubkeys; entries whose hex fails to parse are silently skipped.
    fn get_pubkeys(&self) -> Vec<Pubkey> {
        // `iter()` suffices here — the previous `iter_mut()` requested
        // mutable access it never used.
        self.get_pubkey_strings()
            .iter()
            .filter_map(|pubkey_str| Pubkey::from_hex(pubkey_str.as_str()).ok())
            .collect()
    }

    /// Raw secret-key bytes for `account` (a pubkey hex), or None if the
    /// search fails or returns no data item.
    fn get_privkey_bytes_for(&self, account: &str) -> Option<Vec<u8>> {
        let search_result = ItemSearchOptions::new()
            .class(ItemClass::generic_password())
            .service(&self.service_name)
            .load_data(true)
            .account(account)
            .search();

        if let Ok(results) = search_result {
            if let Some(SearchResult::Data(vec)) = results.first() {
                return Some(vec.clone());
            }
        }
        None
    }

    /// Secret key for `pubkey`, or None when absent or unparseable.
    fn get_secret_key_for_pubkey(&self, pubkey: &Pubkey) -> Option<SecretKey> {
        // Combinator form of the previous `if let`/`else None` ladder.
        self.get_privkey_bytes_for(pubkey.hex().as_str())
            .and_then(|bytes| SecretKey::from_slice(bytes.as_slice()).ok())
    }

    /// Every stored keypair; the secret half is None for pubkey-only entries.
    fn get_all_keypairs(&self) -> Vec<Keypair> {
        self.get_pubkeys()
            .iter()
            .map(|pubkey| {
                let maybe_secret = self.get_secret_key_for_pubkey(pubkey);
                Keypair::new(*pubkey, maybe_secret)
            })
            .collect()
    }

    /// Remove the keychain entry for `pubkey`, logging on failure.
    fn delete_key(&self, pubkey: &Pubkey) -> Result<(), KeyStorageError> {
        match delete_generic_password(&self.service_name, pubkey.hex().as_str()) {
            Ok(_) => Ok(()),
            Err(e) => {
                error!("delete key error {}", e);
                Err(KeyStorageError::Removal(Error::Generic(e.to_string())))
            }
        }
    }
}
// Public wrappers adapting the internal keychain operations to the
// KeyStorageResponse interface shared by all key-storage backends.
impl SecurityFrameworkKeyStorage {
    /// Store `key` in the keychain.
    pub fn add_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
        KeyStorageResponse::ReceivedResult(self.add_key_internal(key))
    }

    /// Fetch every stored keypair. Keychain lookup errors surface as an
    /// empty/partial list rather than an Err (see get_all_keypairs).
    pub fn get_keys(&self) -> KeyStorageResponse<Vec<Keypair>> {
        KeyStorageResponse::ReceivedResult(Ok(self.get_all_keypairs()))
    }

    /// Remove the keychain entry matching `key`'s pubkey.
    pub fn remove_key(&self, key: &Keypair) -> KeyStorageResponse<()> {
        KeyStorageResponse::ReceivedResult(self.delete_key(&key.pubkey))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use enostr::FullKeypair;

    // Dedicated keychain service name so test entries never collide with
    // real notedeck entries.
    static TEST_SERVICE_NAME: &str = "NOTEDECKTEST";
    static STORAGE: SecurityFrameworkKeyStorage = SecurityFrameworkKeyStorage {
        service_name: Cow::Borrowed(TEST_SERVICE_NAME),
    };

    // individual tests are ignored so test runner doesn't run them all concurrently
    // (they share the one real keychain service and would race)
    // TODO: a way to run them all serially should be devised
    #[test]
    #[ignore]
    fn add_and_remove_test_pubkey_only() {
        // Counts are taken relative to whatever is already in the keychain so
        // leftover entries don't fail the test.
        let num_keys_before_test = STORAGE.get_pubkeys().len();

        let keypair = FullKeypair::generate().to_keypair();
        let add_result = STORAGE.add_key_internal(&keypair);
        assert!(add_result.is_ok());

        let get_pubkeys_result = STORAGE.get_pubkeys();
        assert_eq!(get_pubkeys_result.len() - num_keys_before_test, 1);

        let remove_result = STORAGE.delete_key(&keypair.pubkey);
        assert!(remove_result.is_ok());

        let keys = STORAGE.get_pubkeys();
        assert_eq!(keys.len() - num_keys_before_test, 0);
    }

    // Round-trips `n` generated keypairs (secret included) through the
    // keychain: add all, read all back, delete all.
    fn add_and_remove_full_n(n: usize) {
        let num_keys_before_test = STORAGE.get_all_keypairs().len();
        // there must be zero keys in storage for the test to work as intended
        // (get_all_keypairs gives no stable ordering to diff against)
        assert_eq!(num_keys_before_test, 0);

        let expected_keypairs: Vec<Keypair> = (0..n)
            .map(|_| FullKeypair::generate().to_keypair())
            .collect();

        expected_keypairs.iter().for_each(|keypair| {
            let add_result = STORAGE.add_key_internal(keypair);
            assert!(add_result.is_ok());
        });

        let asserted_keypairs = STORAGE.get_all_keypairs();
        assert_eq!(expected_keypairs, asserted_keypairs);

        expected_keypairs.iter().for_each(|keypair| {
            let remove_result = STORAGE.delete_key(&keypair.pubkey);
            assert!(remove_result.is_ok());
        });

        let num_keys_after_test = STORAGE.get_all_keypairs().len();
        assert_eq!(num_keys_after_test, 0);
    }

    #[test]
    #[ignore]
    fn add_and_remove_full() {
        add_and_remove_full_n(1);
    }

    #[test]
    #[ignore]
    fn add_and_remove_full_10() {
        add_and_remove_full_n(10);
    }
}

View File

@@ -0,0 +1,32 @@
use crate::timeline::{TimelineId, TimelineKind};
use std::collections::HashMap;
use uuid::Uuid;
/// Why a remote relay subscription was opened; used to decide how to handle
/// incoming events and EOSE for that subscription id.
#[derive(Debug, Clone)]
pub enum SubKind {
    /// Initial subscription. This is the first time we do a remote subscription
    /// for a timeline
    Initial,
    /// One shot requests, we can just close after we receive EOSE
    OneShot,
    /// Subscription associated with a timeline of this kind
    /// (NOTE(review): lifecycle semantics not visible here — confirm at
    /// handling sites).
    Timeline(TimelineKind),
    /// We are fetching a contact list so that we can use it for our follows
    /// Filter.
    // TODO: generalize this to any list?
    FetchingContactList(TimelineId),
}
/// Subscriptions that need to be tracked at various stages. Sometimes we
/// need to do A, then B, then C. Tracking requests at various stages by
/// mapping uuid subids to explicit states happens here.
#[derive(Default)]
pub struct Subscriptions {
    // Maps a subscription id string (see `new_sub_id`) to the reason the
    // subscription exists.
    pub subs: HashMap<String, SubKind>,
}
/// Mint a fresh, globally-unique subscription id string.
pub fn new_sub_id() -> String {
    let id = Uuid::new_v4();
    id.to_string()
}

View File

@@ -0,0 +1,148 @@
use tracing::error;
use crate::storage::{DataPath, DataPathType, Directory};
/// State backing the support screen: a prefilled support email link and the
/// tail of the most recent log file.
pub struct Support {
    // Handle to the log directory; used to locate and read log files.
    directory: Directory,
    // Prefilled `mailto:` URL targeting the support address.
    mailto_url: String,
    // Tail of the most recent log file; None until `refresh` is called.
    most_recent_log: Option<String>,
}
/// Directory handle for the app's log folder.
fn new_log_dir(paths: &DataPath) -> Directory {
    let log_path = paths.path(DataPathType::Log);
    Directory::new(log_path)
}
impl Support {
    /// Build support state rooted at `path`, with a prefilled mailto link.
    /// The most recent log is not read yet; call `refresh` for that.
    pub fn new(path: &DataPath) -> Self {
        let directory = new_log_dir(path);
        let mailto_url = MailtoBuilder::new(SUPPORT_EMAIL.to_string())
            .with_subject("Help Needed".to_owned())
            .with_content(EMAIL_TEMPLATE.to_owned())
            .build();

        Self {
            directory,
            mailto_url,
            most_recent_log: None,
        }
    }
}
/// Maximum number of trailing log lines included in a support request.
// `const` is the idiomatic choice for these value-only private items;
// `static` is for items needing a single memory address.
const MAX_LOG_LINES: usize = 500;

/// Destination address for support emails.
const SUPPORT_EMAIL: &str = "support@damus.io";

/// Prefilled email body: version/commit header, a prompt for the user's
/// description, and a marker where the log tail should be pasted.
const EMAIL_TEMPLATE: &str = concat!("version ", env!("CARGO_PKG_VERSION"), "\nCommit hash: ", env!("GIT_COMMIT_HASH"), "\n\nDescribe the bug you have encountered:\n<-- your statement here -->\n\n===== Paste your log below =====\n\n");
impl Support {
    /// Re-read the tail of the most recent log file into `most_recent_log`.
    pub fn refresh(&mut self) {
        self.most_recent_log = get_log_str(&self.directory);
    }

    /// Prefilled `mailto:` URL for contacting support.
    pub fn get_mailto_url(&self) -> &str {
        &self.mailto_url
    }

    /// Log directory path, or None if it is not valid UTF-8.
    pub fn get_log_dir(&self) -> Option<&str> {
        self.directory.file_path.to_str()
    }

    /// Log tail captured by the last `refresh`, if any.
    pub fn get_most_recent_log(&self) -> Option<&String> {
        self.most_recent_log.as_ref()
    }
}
/// Read the tail (up to MAX_LOG_LINES) of the most recent log file in
/// `interactor`, prefixed with a summary banner. Returns None — after
/// logging the reason — when no file exists or any read step fails.
fn get_log_str(interactor: &Directory) -> Option<String> {
    // Resolve the newest log file name, bailing out early on miss/error.
    let most_recent_name = match interactor.get_most_recent() {
        Ok(Some(name)) => name,
        Ok(None) => {
            error!("No files were found.");
            return None;
        }
        Err(e) => {
            error!("Error fetching the most recent file: {:?}", e);
            return None;
        }
    };

    match interactor.get_file_last_n_lines(most_recent_name.clone(), MAX_LOG_LINES) {
        Ok(file_output) => {
            let banner = get_prefix(
                &most_recent_name,
                file_output.output_num_lines,
                file_output.total_lines_in_file,
            );
            Some(banner + &file_output.output)
        }
        Err(e) => {
            error!(
                "Error retrieving the last lines from file {}: {:?}",
                most_recent_name, e
            );
            None
        }
    }
}
/// Banner placed above a log excerpt, stating how many of the file's lines
/// are shown.
fn get_prefix(file_name: &str, lines_displayed: usize, num_total_lines: usize) -> String {
    format!(
        "===\nDisplaying the last {lines_displayed} of {num_total_lines} lines in file {file_name}\n===\n\n"
    )
}
/// Builder for a `mailto:` URL with optional URL-encoded subject and body.
struct MailtoBuilder {
    // Email body; encoded into the `body=` query parameter.
    content: Option<String>,
    // Destination email address (not encoded).
    address: String,
    // Subject line; encoded into the `subject=` query parameter.
    subject: Option<String>,
}
impl MailtoBuilder {
    /// Start a builder targeting `address`, with no subject or body.
    fn new(address: String) -> Self {
        Self {
            content: None,
            address,
            subject: None,
        }
    }

    /// Set the email body.
    // NOTE(review): a previous comment claimed the URL would be truncated to
    // at most 2000 characters, but no truncation is implemented anywhere in
    // this builder — confirm whether truncation is still intended.
    pub fn with_content(mut self, content: String) -> Self {
        self.content = Some(content);
        self
    }

    /// Set the email subject line.
    pub fn with_subject(mut self, subject: String) -> Self {
        self.subject = Some(subject);
        self
    }

    /// Assemble the final `mailto:` URL, percent-encoding subject and body.
    pub fn build(self) -> String {
        let mut url = String::new();
        url.push_str("mailto:");
        url.push_str(&self.address);

        // '?' starts the query string only if at least one parameter follows.
        let has_subject = self.subject.is_some();
        if has_subject || self.content.is_some() {
            url.push('?');
        }

        if let Some(subject) = self.subject {
            url.push_str("subject=");
            url.push_str(&urlencoding::encode(&subject));
        }

        if let Some(content) = self.content {
            // '&' separates body from a preceding subject parameter.
            if has_subject {
                url.push('&');
            }
            url.push_str("body=");
            let body = urlencoding::encode(&content);
            url.push_str(&body);
        }

        url
    }
}

View File

@@ -0,0 +1,111 @@
use std::path::Path;
use enostr::{FullKeypair, Pubkey, RelayPool};
use nostrdb::{ProfileRecord, Transaction};
use crate::{user_account::UserAccount, Damus};
/// Test fixture: a relay pool preloaded with a few well-known relays, one
/// pathologically long URL, and twenty junk entries.
#[allow(unused_must_use)]
pub fn sample_pool() -> RelayPool {
    let mut pool = RelayPool::new();
    let wakeup = move || {};

    let urls = [
        "wss://relay.damus.io",
        "wss://eden.nostr.land",
        "wss://nostr.wine",
        "wss://nos.lol",
        // exercises overly-long URL handling in UI code
        "wss://test_relay_url_long_00000000000000000000000000000000000000000000000000000000000000000000000000000000000",
    ];
    for url in urls {
        pool.add_url(url.to_string(), wakeup);
    }

    // duplicate junk entries to bulk the pool out
    for _ in 0..20 {
        pool.add_url("tmp".to_string(), wakeup);
    }

    pool
}
// my (jb55) profile
// Raw serialized ProfileRecord bytes (448 bytes) used as a test fixture;
// decoded by `test_profile_record` below. Recognizable ASCII runs inside
// include the about text, picture/banner URLs, and the name "jb55".
const TEST_PROFILE_DATA: [u8; 448] = [
    0x04, 0x00, 0x00, 0x00, 0x54, 0xfe, 0xff, 0xff, 0x34, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0xd6, 0xd9, 0xc6, 0x65, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
    0x0a, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x00, 0x00, 0x66, 0x69, 0x78, 0x6d,
    0x65, 0x00, 0x00, 0x00, 0x78, 0x01, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0xda, 0xff, 0xff, 0xff,
    0x64, 0x01, 0x00, 0x00, 0x50, 0x01, 0x00, 0x00, 0x34, 0x01, 0x00, 0x00, 0x08, 0x01, 0x00, 0x00,
    0xec, 0x00, 0x00, 0x00, 0xdc, 0x00, 0x00, 0x00, 0x78, 0x00, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x00,
    0x00, 0x00, 0x16, 0x00, 0x24, 0x00, 0x18, 0x00, 0x14, 0x00, 0x20, 0x00, 0x0c, 0x00, 0x1c, 0x00,
    0x04, 0x00, 0x00, 0x00, 0x10, 0x00, 0x08, 0x00, 0x52, 0x00, 0x00, 0x00, 0x49, 0x20, 0x6d, 0x61,
    0x64, 0x65, 0x20, 0x64, 0x61, 0x6d, 0x75, 0x73, 0x2c, 0x20, 0x6e, 0x70, 0x75, 0x62, 0x73, 0x20,
    0x61, 0x6e, 0x64, 0x20, 0x7a, 0x61, 0x70, 0x73, 0x2e, 0x20, 0x62, 0x61, 0x6e, 0x6e, 0x65, 0x64,
    0x20, 0x62, 0x79, 0x20, 0x61, 0x70, 0x70, 0x6c, 0x65, 0x20, 0x26, 0x20, 0x74, 0x68, 0x65, 0x20,
    0x63, 0x63, 0x70, 0x2e, 0x20, 0x6d, 0x79, 0x20, 0x6e, 0x6f, 0x74, 0x65, 0x73, 0x20, 0x61, 0x72,
    0x65, 0x20, 0x6e, 0x6f, 0x74, 0x20, 0x66, 0x6f, 0x72, 0x20, 0x73, 0x61, 0x6c, 0x65, 0x00, 0x00,
    0x5a, 0x00, 0x00, 0x00, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x6e, 0x6f, 0x73, 0x74,
    0x72, 0x2e, 0x62, 0x75, 0x69, 0x6c, 0x64, 0x2f, 0x69, 0x2f, 0x33, 0x64, 0x36, 0x66, 0x32, 0x32,
    0x64, 0x34, 0x35, 0x64, 0x39, 0x35, 0x65, 0x63, 0x63, 0x32, 0x63, 0x31, 0x39, 0x62, 0x31, 0x61,
    0x63, 0x64, 0x65, 0x63, 0x35, 0x37, 0x61, 0x61, 0x31, 0x35, 0x66, 0x32, 0x64, 0x62, 0x61, 0x39,
    0x63, 0x34, 0x32, 0x33, 0x62, 0x35, 0x33, 0x36, 0x65, 0x32, 0x36, 0x66, 0x63, 0x36, 0x32, 0x37,
    0x30, 0x37, 0x63, 0x31, 0x32, 0x35, 0x66, 0x35, 0x35, 0x37, 0x2e, 0x6a, 0x70, 0x67, 0x00, 0x00,
    0x04, 0x00, 0x00, 0x00, 0x6a, 0x62, 0x35, 0x35, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x00,
    0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f, 0x64, 0x61, 0x6d, 0x75, 0x73, 0x2e, 0x69, 0x6f,
    0x00, 0x00, 0x00, 0x00, 0x23, 0x00, 0x00, 0x00, 0x68, 0x74, 0x74, 0x70, 0x73, 0x3a, 0x2f, 0x2f,
    0x63, 0x64, 0x6e, 0x2e, 0x6a, 0x62, 0x35, 0x35, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x69, 0x6d, 0x67,
    0x2f, 0x72, 0x65, 0x64, 0x2d, 0x6d, 0x65, 0x2e, 0x6a, 0x70, 0x67, 0x00, 0x11, 0x00, 0x00, 0x00,
    0x6a, 0x62, 0x35, 0x35, 0x40, 0x73, 0x65, 0x6e, 0x64, 0x73, 0x61, 0x74, 0x73, 0x2e, 0x6c, 0x6f,
    0x6c, 0x00, 0x00, 0x00, 0x0a, 0x00, 0x00, 0x00, 0x5f, 0x40, 0x6a, 0x62, 0x35, 0x35, 0x2e, 0x63,
    0x6f, 0x6d, 0x00, 0x00, 0x04, 0x00, 0x00, 0x00, 0x57, 0x69, 0x6c, 0x6c, 0x00, 0x00, 0x00, 0x00,
    0x0c, 0x00, 0x24, 0x00, 0x04, 0x00, 0x0c, 0x00, 0x1c, 0x00, 0x14, 0x00, 0x00, 0x00, 0x00, 0x00,
];
/*
const TEST_PUBKEY: [u8; 32] = [
0x32, 0xe1, 0x82, 0x76, 0x35, 0x45, 0x0e, 0xbb, 0x3c, 0x5a, 0x7d, 0x12, 0xc1, 0xf8, 0xe7, 0xb2,
0xb5, 0x14, 0x43, 0x9a, 0xc1, 0x0a, 0x67, 0xee, 0xf3, 0xd9, 0xfd, 0x9c, 0x5c, 0x68, 0xe2, 0x45,
];
pub fn test_pubkey() -> &'static [u8; 32] {
&TEST_PUBKEY
}
*/
/// Profile record fixture decoded from `TEST_PROFILE_DATA`.
/// Panics if the embedded bytes ever fail to parse (a test-setup bug).
pub fn test_profile_record() -> ProfileRecord<'static> {
    ProfileRecord::new_owned(&TEST_PROFILE_DATA).unwrap()
}
// Ten pubkey hexes used to build test accounts. Several entries repeat
// (indices 0/6 and 1/7/8/9) — presumably to exercise duplicate-account
// handling; confirm before deduplicating.
const TEN_ACCOUNT_HEXES: [&str; 10] = [
    "3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681",
    "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245",
    "bd1e19980e2c91e6dc657e92c25762ca882eb9272d2579e221f037f93788de91",
    "5c10ed0678805156d39ef1ef6d46110fe1e7e590ae04986ccf48ba1299cb53e2",
    "4c96d763eb2fe01910f7e7220b7c7ecdbe1a70057f344b9f79c28af080c3ee30",
    "edf16b1dd61eab353a83af470cc13557029bff6827b4cb9b7fc9bdb632a2b8e6",
    "3efdaebb1d8923ebd99c9e7ace3b4194ab45512e2be79c1b7d68d9243e0d2681",
    "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245",
    "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245",
    "32e1827635450ebb3c5a7d12c1f8e7b2b514439ac10a67eef3d9fd9c5c68e245",
];
/// Build one test account per entry in `TEN_ACCOUNT_HEXES`: each gets a
/// freshly generated keypair whose pubkey is then overwritten with the
/// fixed hex, so pubkeys are deterministic across runs.
pub fn get_test_accounts() -> Vec<UserAccount> {
    let mut accounts: Vec<UserAccount> = Vec::with_capacity(TEN_ACCOUNT_HEXES.len());
    for account_hex in TEN_ACCOUNT_HEXES {
        let mut keypair = FullKeypair::generate().to_keypair();
        keypair.pubkey = Pubkey::from_hex(account_hex).unwrap();
        accounts.push(keypair);
    }
    accounts
}
/// Mock Damus app seeded with the ten test accounts, with a database rooted
/// in the current directory.
pub fn test_app() -> Damus {
    let db_dir = Path::new(".");
    let path = db_dir.to_str().unwrap();
    let mut app = Damus::mock(path);

    let accounts = get_test_accounts();

    // Add each account and immediately process the resulting action so
    // unknown ids are resolved against the db within one transaction.
    let txn = Transaction::new(&app.ndb).expect("txn");
    for account in accounts {
        app.accounts_mut()
            .add_account(account)
            .process_action(&mut app.unknown_ids, &app.ndb, &txn)
    }

    app
}

Some files were not shown because too many files have changed in this diff Show More