[ENG-1096] Image Conversions (#1364)

* derive `Type` on `*Extension`

* update `image`, `webp` and add `rspc::Error` support to the `sd-images` crate

* rename function for verbosity

* cleanup clippy allows and use scaling fn for svg conversions

* remove dead comment

* specify `generic` image extensions and add fn to enumerate all compatible extensions

* re-exports and inline hot provided method

* another inline on a provided method

* `formatter` -> `handler` and return an explicit error if the extension isn't supported, and add `convert_image()`

* `get_ext()` inlined helper fn

* add mutation for conversion and query for getting all supported extensions

* fix `svg` scaling and generation

* update bindings

* rm old const

* temporary test for all exts

* verbosity and no manual drop

* remove dbg test

* big enum and remove `cur` extension as `image` doesn't support it

* add huge extension enum

* hopefully get path from file path id and location id

* main merge fix

* chore: update toml and lockfile

* security: update webp

* comment entire crypto crate for build times

* add bincode, huge enum and custom ser/de

* Revert "comment entire crypto crate for build times"

This reverts commit 458addaad6.

* lockfile

* theoretically working ser/de

* add svg scaling

* fix pdf matching bug and use options

* move image scaling fn

* major consts overhaul with good ser/de

* disable heif checking for now due to new trait api

* make the generic handler truly generic

* fix pdf types

* fix pdf scaling so that it's 120dpi A4 paper size by default - height/aspect res are maintained

* heavy cleanup

* note to myself

* bindings & update core to use the scaling fn from the images crate (reducing code dupes)

* move vulnerable `aovec` to be a dev dep so it stays out of release builds (it's only used in tests)

* remove exif crate from the core as it's handled in a separate crate

* silence a clippy warning and update lockfile

* lower the maximum file size a slight amount

* bindings and minor dep bumps

* Some improvements

---------

Co-authored-by: Ericson Fogo Soares <ericson.ds999@gmail.com>
This commit is contained in:
jake 2023-10-10 03:57:37 +01:00 committed by GitHub
parent 2796907ba5
commit fb8af0832b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
25 changed files with 1173 additions and 619 deletions

834
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -28,7 +28,7 @@ sd-crypto = { path = "../crates/crypto", features = [
"keymanager",
] }
sd-images = { path = "../crates/images" }
sd-images = { path = "../crates/images", features = ["rspc", "serde", "specta"] }
sd-file-ext = { path = "../crates/file-ext" }
sd-sync = { path = "../crates/sync" }
sd-p2p = { path = "../crates/p2p", features = ["specta", "serde"] }
@ -52,7 +52,6 @@ tokio = { workspace = true, features = [
"time",
"process",
] }
kamadak-exif = "0.5.5"
base64 = "0.21.4"
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4.30", features = ["serde"] }
@ -99,7 +98,6 @@ futures-concurrency = "7.4.3"
async-channel = "1.9"
tokio-util = { version = "0.7.8", features = ["io"] }
slotmap = "1.0.6"
aovec = "1.1.0"
flate2 = "1.0.27"
tar = "0.4.40"
tempfile = "^3.8.0"
@ -117,3 +115,4 @@ version = "0.1.5"
[dev-dependencies]
tracing-test = "^0.2.4"
aovec = "1.1.0"

View file

@ -7,7 +7,7 @@ use crate::{
file_path_helper::{
file_path_to_isolate, file_path_to_isolate_with_id, FilePathError, IsolatedFilePathData,
},
find_location, LocationError,
get_location_path_from_location_id, LocationError,
},
object::{
fs::{
@ -26,11 +26,14 @@ use crate::{
};
use sd_file_ext::{extensions::ImageExtension, kind::ObjectKind};
use sd_images::ConvertableExtension;
use sd_media_metadata::MediaMetadata;
use std::{
ffi::OsString,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use chrono::Utc;
@ -39,7 +42,7 @@ use regex::Regex;
use rspc::{alpha::AlphaRouter, ErrorCode};
use serde::Deserialize;
use specta::Type;
use tokio::{fs, io};
use tokio::{fs, io, task::spawn_blocking};
use tracing::{error, warn};
use super::{Ctx, R};
@ -132,13 +135,8 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
.map_err(LocationError::MissingField)?;
let location_id = isolated_path.location_id();
let location_path = find_location(&library, location_id)
.select(location::select!({ path }))
.exec()
.await?
.ok_or(LocationError::IdNotFound(location_id))?
.path
.ok_or(LocationError::MissingPath(location_id))?;
let location_path =
get_location_path_from_location_id(&library.db, location_id).await?;
Ok(Path::new(&location_path)
.join(&isolated_path)
@ -264,13 +262,10 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
))
.await?;
let location_path = maybe_missing(
maybe_location
.ok_or(LocationError::IdNotFound(args.location_id))?
.path,
"location.path",
)
.map_err(LocationError::from)?;
let location_path = maybe_location
.ok_or(LocationError::IdNotFound(args.location_id))?
.path
.ok_or(LocationError::MissingPath(args.location_id))?;
let file_path = maybe_file_path.ok_or(LocationError::FilePath(
FilePathError::IdNotFound(args.file_path_ids[0]),
@ -317,6 +312,140 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
}
})
})
.procedure("convertImage", {
	#[derive(Type, Deserialize)]
	struct ConvertImageArgs {
		location_id: location::id::Type,
		file_path_id: file_path::id::Type,
		delete_src: bool, // if set, we delete the src image after
		desired_extension: ConvertableExtension,
		quality_percentage: Option<i32>, // 1% - 125%
	}

	R.with2(library())
		.mutation(|(_, library), args: ConvertImageArgs| async move {
			// TODO:(fogodev) I think this will have to be a Job due to possibly being too much CPU Bound for rspc

			// Resolve the on-disk path of the source image from its location + file path ids.
			let location_path =
				get_location_path_from_location_id(&library.db, args.location_id).await?;

			let isolated_path = IsolatedFilePathData::try_from(
				library
					.db
					.file_path()
					.find_unique(file_path::id::equals(args.file_path_id))
					.select(file_path_to_isolate::select())
					.exec()
					.await?
					.ok_or(LocationError::FilePath(FilePathError::IdNotFound(
						args.file_path_id,
					)))?,
			)?;

			let path = Path::new(&location_path).join(&isolated_path);

			// Make sure the source image actually exists before doing any work.
			if let Err(e) = fs::metadata(&path).await {
				if e.kind() == io::ErrorKind::NotFound {
					return Err(LocationError::FilePath(FilePathError::NotFound(
						path.into_boxed_path(),
					))
					.into());
				} else {
					return Err(FileIOError::from((
						path,
						e,
						"Got an error trying to read metadata from image to convert",
					))
					.into());
				}
			}

			// Clamp to the documented 1%-125% range and KEEP the result.
			// (The previous code called `.map(|x| x.clamp(1, 125))` and discarded
			// the return value, so the clamp had no effect.)
			let quality_percentage = args.quality_percentage.map(|x| x.clamp(1, 125));

			let path = Arc::new(path);
			let output_extension =
				Arc::new(OsString::from(args.desired_extension.to_string()));

			// TODO(fogodev): Refactor this if Rust get async scoped spawns someday
			let inner_path = Arc::clone(&path);
			let inner_output_extension = Arc::clone(&output_extension);
			let image = spawn_blocking(move || {
				sd_images::convert_image(inner_path.as_ref(), &inner_output_extension).map(
					|mut image| {
						if let Some(quality_percentage) = quality_percentage {
							// Scale in floating point before truncating to pixels.
							// `as` binds tighter than `*`, so the old
							// `width * (q as f32 / 100_f32) as u32` truncated the
							// ratio itself to 0 or 1 before multiplying.
							let scale = quality_percentage as f32 / 100_f32;
							image = image.resize(
								(image.width() as f32 * scale) as u32,
								(image.height() as f32 * scale) as u32,
								image::imageops::FilterType::Triangle,
							);
						}
						image
					},
				)
			})
			.await
			.map_err(|e| {
				error!("{e:#?}");
				rspc::Error::new(
					ErrorCode::InternalServerError,
					"Had an internal problem converting image".to_string(),
				)
			})??;

			let output_path = path.with_extension(output_extension.as_ref());

			// Refuse to overwrite an existing file. Only a `NotFound` error means
			// we're clear to write; the old code converted *any* metadata error
			// (including `NotFound`) into a failure, making the write unreachable.
			match fs::metadata(&output_path).await {
				Ok(_) => {
					return Err(rspc::Error::new(
						ErrorCode::Conflict,
						"There is already a file with same name and extension in this directory"
							.to_string(),
					));
				}
				Err(e) if e.kind() == io::ErrorKind::NotFound => {
					fs::write(&output_path, image.as_bytes())
						.await
						.map_err(|e| {
							FileIOError::from((
								output_path,
								e,
								"There was an error while writing the image to the output path",
							))
						})?;
				}
				Err(e) => {
					return Err(FileIOError::from((
						&output_path,
						e,
						"Got an error trying to check if the desired converted file already exists",
					))
					.into());
				}
			}

			if args.delete_src {
				fs::remove_file(path.as_ref()).await.map_err(|e| {
					// Let's also invalidate the query here, because we succeeded in converting the file
					invalidate_query!(library, "search.paths");

					FileIOError::from((
						path.as_ref(),
						e,
						"There was an error while deleting the source image",
					))
				})?;
			}

			invalidate_query!(library, "search.paths");

			Ok(())
		})
})
.procedure("getConvertableImageExtensions", {
R.query(|_, _: ()| async move { Ok(sd_images::all_compatible_extensions()) })
})
.procedure("eraseFiles", {
R.with2(library())
.mutation(|(node, library), args: FileEraserJobInit| async move {
@ -541,17 +670,12 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
}
}
R.with2(library())
.mutation(|(_, library), args: RenameFileArgs| async move {
let location_path = find_location(&library, args.location_id)
.select(location::select!({ path }))
.exec()
.await?
.ok_or(LocationError::IdNotFound(args.location_id))?
.path
.ok_or(LocationError::MissingPath(args.location_id))?;
R.with2(library()).mutation(
|(_, library), RenameFileArgs { location_id, kind }: RenameFileArgs| async move {
let location_path =
get_location_path_from_location_id(&library.db, location_id).await?;
let res = match args.kind {
let res = match kind {
RenameKind::One(one) => {
RenameFileArgs::rename_one(one, location_path, &library).await
}
@ -564,6 +688,7 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
invalidate_query!(library, "search.objects");
res
})
},
)
})
}

View file

@ -77,31 +77,34 @@ pub enum LocationError {
impl From<LocationError> for rspc::Error {
fn from(err: LocationError) -> Self {
use LocationError::*;
match err {
// Not found errors
LocationError::PathNotFound(_)
| LocationError::UuidNotFound(_)
| LocationError::IdNotFound(_) => {
rspc::Error::with_cause(ErrorCode::NotFound, err.to_string(), err)
PathNotFound(_)
| UuidNotFound(_)
| IdNotFound(_)
| FilePath(FilePathError::IdNotFound(_) | FilePathError::NotFound(_)) => {
Self::with_cause(ErrorCode::NotFound, err.to_string(), err)
}
// User's fault errors
LocationError::NotDirectory(_)
| LocationError::NestedLocation(_)
| LocationError::LocationAlreadyExists(_) => {
rspc::Error::with_cause(ErrorCode::BadRequest, err.to_string(), err)
NotDirectory(_) | NestedLocation(_) | LocationAlreadyExists(_) => {
Self::with_cause(ErrorCode::BadRequest, err.to_string(), err)
}
// Custom error message is used to differentiate these errors in the frontend
// TODO: A better solution would be for rspc to support sending custom data alongside errors
LocationError::NeedRelink { .. } => {
rspc::Error::with_cause(ErrorCode::Conflict, "NEED_RELINK".to_owned(), err)
NeedRelink { .. } => {
Self::with_cause(ErrorCode::Conflict, "NEED_RELINK".to_owned(), err)
}
LocationError::AddLibraryToMetadata(_) => {
rspc::Error::with_cause(ErrorCode::Conflict, "ADD_LIBRARY".to_owned(), err)
AddLibraryToMetadata(_) => {
Self::with_cause(ErrorCode::Conflict, "ADD_LIBRARY".to_owned(), err)
}
_ => rspc::Error::with_cause(ErrorCode::InternalServerError, err.to_string(), err),
// Internal errors
MissingField(missing_error) => missing_error.into(),
_ => Self::with_cause(ErrorCode::InternalServerError, err.to_string(), err),
}
}
}

View file

@ -1005,3 +1005,24 @@ pub async fn update_location_size(
Ok(())
}
/// Looks a location up by id and returns its filesystem path as a [`PathBuf`].
///
/// # Errors
/// - `LocationError::IdNotFound` when no location row exists for `location_id`.
/// - `LocationError::MissingPath` when the row has no `path` value.
pub async fn get_location_path_from_location_id(
	db: &PrismaClient,
	location_id: file_path::id::Type,
) -> Result<PathBuf, LocationError> {
	let maybe_location = db
		.location()
		.find_unique(location::id::equals(location_id))
		.exec()
		.await
		.map_err(Into::into)?;

	let location = maybe_location.ok_or(LocationError::IdNotFound(location_id))?;

	location
		.path
		.map(PathBuf::from)
		.ok_or(LocationError::MissingPath(location_id))
}

View file

@ -4,6 +4,7 @@ use crate::{
CurrentStep, JobError, JobInitOutput, JobResult, JobStepOutput, StatefulJob, WorkerContext,
},
library::Library,
location::get_location_path_from_location_id,
prisma::{file_path, location},
util::{db::maybe_missing, error::FileIOError},
};
@ -16,7 +17,7 @@ use specta::Type;
use tokio::{fs, io};
use tracing::warn;
use super::{get_location_path_from_location_id, get_many_files_datas, FileData};
use super::{error::FileSystemJobsError, get_many_files_datas, FileData};
#[derive(Serialize, Deserialize, Hash, Type, Debug)]
pub struct FileDeleterJobInit {
@ -45,7 +46,8 @@ impl StatefulJob for FileDeleterJobInit {
get_location_path_from_location_id(db, init.location_id).await?,
&init.file_path_ids,
)
.await?;
.await
.map_err(FileSystemJobsError::from)?;
// Must fill in the data, otherwise the job will not run
*data = Some(());

View file

@ -5,7 +5,7 @@ use crate::{
StatefulJob, WorkerContext,
},
library::Library,
location::file_path_helper::IsolatedFilePathData,
location::{file_path_helper::IsolatedFilePathData, get_location_path_from_location_id},
prisma::{file_path, location},
util::{db::maybe_missing, error::FileIOError},
};
@ -23,8 +23,8 @@ use tokio::{
use tracing::trace;
use super::{
error::FileSystemJobsError, get_file_data_from_isolated_file_path,
get_location_path_from_location_id, get_many_files_datas, FileData,
error::FileSystemJobsError, get_file_data_from_isolated_file_path, get_many_files_datas,
FileData,
};
#[serde_as]
@ -70,7 +70,9 @@ impl StatefulJob for FileEraserJobInit {
let init = self;
let Library { db, .. } = &*ctx.library;
let location_path = get_location_path_from_location_id(db, init.location_id).await?;
let location_path = get_location_path_from_location_id(db, init.location_id)
.await
.map_err(FileSystemJobsError::from)?;
let steps = get_many_files_datas(db, &location_path, &init.file_path_ids).await?;
@ -132,29 +134,34 @@ impl StatefulJob for FileEraserJobInit {
Ok((more_steps, new_metadata).into())
} else {
let mut file = OpenOptions::new()
.read(true)
.write(true)
.open(&step.full_path)
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?;
let file_len = file
.metadata()
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?
.len();
{
let mut file = OpenOptions::new()
.read(true)
.write(true)
.open(&step.full_path)
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?;
let file_len = file
.metadata()
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?
.len();
sd_crypto::fs::erase::erase(&mut file, file_len as usize, init.passes).await?;
trace!(
"Overwriting file: {} with {} passes",
step.full_path.display(),
init.passes
);
file.set_len(0)
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?;
file.flush()
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?;
drop(file);
sd_crypto::fs::erase::erase(&mut file, file_len as usize, init.passes).await?;
trace!("Erasing file: {}", step.full_path.display());
file.set_len(0)
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?;
file.flush()
.await
.map_err(|e| FileIOError::from((&step.full_path, e)))?;
}
fs::remove_file(&step.full_path)
.await

View file

@ -44,22 +44,6 @@ pub struct FileData {
pub full_path: PathBuf,
}
/// Fetches the filesystem path of a location record by its id.
///
/// Returns `FileSystemJobsError::Location(IdNotFound)` when no location row
/// exists for `location_id`; a missing `path` column is surfaced through
/// `maybe_missing` as a missing-field error.
pub async fn get_location_path_from_location_id(
db: &PrismaClient,
location_id: file_path::id::Type,
) -> Result<PathBuf, FileSystemJobsError> {
let location = db
.location()
.find_unique(location::id::equals(location_id))
.exec()
.await?
.ok_or(FileSystemJobsError::Location(LocationError::IdNotFound(
location_id,
)))?;
Ok(maybe_missing(location.path, "location.path")?.into())
}
pub async fn get_many_files_datas(
db: &PrismaClient,
location_path: impl AsRef<Path>,

View file

@ -11,7 +11,7 @@ use crate::{
use sd_file_ext::extensions::{
DocumentExtension, Extension, ImageExtension, ALL_DOCUMENT_EXTENSIONS, ALL_IMAGE_EXTENSIONS,
};
use sd_images::format_image;
use sd_images::{format_image, scale_dimensions};
use sd_media_metadata::image::Orientation;
#[cfg(feature = "ffmpeg")]
@ -109,14 +109,6 @@ const TAGRET_PX: f32 = 262144_f32;
/// and is treated as a percentage (so 30% in this case, or it's the same as multiplying by `0.3`).
const TARGET_QUALITY: f32 = 30_f32;
/// This takes in a width and a height, and returns a scaled width and height
/// It is scaled proportionally to the [`TARGET_PX`], so smaller images will be upscaled,
/// and larger images will be downscaled. This approach also maintains the aspect ratio of the image.
fn calculate_factor(w: f32, h: f32) -> (u32, u32) {
let sf = (TAGRET_PX / (w * h)).sqrt();
((w * sf).round() as u32, (h * sf).round() as u32)
}
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
pub enum ThumbnailerEntryKind {
Image,
@ -140,13 +132,13 @@ pub async fn generate_image_thumbnail<P: AsRef<Path>>(
let img = format_image(&file_path).map_err(|_| ThumbnailerError::Encoding)?;
let (w, h) = img.dimensions();
let (w_scale, h_scale) = calculate_factor(w as f32, h as f32);
let (w_scaled, h_scaled) = scale_dimensions(w as f32, h as f32, TAGRET_PX);
// Optionally, resize the existing photo and convert back into DynamicImage
let mut img = DynamicImage::ImageRgba8(imageops::resize(
&img,
w_scale,
h_scale,
w_scaled as u32,
h_scaled as u32,
imageops::FilterType::Triangle,
));

View file

@ -69,6 +69,16 @@ pub fn inode_to_db(inode: u64) -> Vec<u8> {
#[error("Missing field {0}")]
pub struct MissingFieldError(&'static str);
/// Surfaces a missing-database-field error to rspc as an internal server
/// error, hiding the raw field name behind a generic message while keeping
/// the original error attached as the cause.
impl From<MissingFieldError> for rspc::Error {
	fn from(value: MissingFieldError) -> Self {
		let message = "Missing crucial data in the database".to_string();
		Self::with_cause(rspc::ErrorCode::InternalServerError, message, value)
	}
}
pub trait OptionalField: Sized {
type Out;

View file

@ -8,6 +8,7 @@ pub struct FileIOError {
pub path: Box<Path>,
#[source]
pub source: io::Error,
pub maybe_context: Option<&'static str>,
}
impl<P: AsRef<Path>> From<(P, io::Error)> for FileIOError {
@ -15,10 +16,34 @@ impl<P: AsRef<Path>> From<(P, io::Error)> for FileIOError {
Self {
path: path.as_ref().into(),
source,
maybe_context: None,
}
}
}
/// Builds a `FileIOError` from a path, an I/O error, and a static context
/// string describing what was being attempted when the error occurred.
impl<P: AsRef<Path>> From<(P, io::Error, &'static str)> for FileIOError {
	fn from((path, source, context): (P, io::Error, &'static str)) -> Self {
		let boxed_path = path.as_ref().into();
		Self {
			path: boxed_path,
			source,
			maybe_context: Some(context),
		}
	}
}
/// Surfaces a `FileIOError` to rspc as an internal server error, preferring
/// the attached context message when one was provided.
impl From<FileIOError> for rspc::Error {
	fn from(value: FileIOError) -> Self {
		let message = match value.maybe_context {
			Some(context) => context,
			None => "Error accessing file system",
		};
		Self::with_cause(
			rspc::ErrorCode::InternalServerError,
			message.to_string(),
			value,
		)
	}
}
#[derive(Debug, Error)]
#[error("received a non UTF-8 path: <lossy_path='{}'>", .0.to_string_lossy())]
pub struct NonUtf8PathError(pub Box<Path>);

View file

@ -107,7 +107,7 @@ macro_rules! extension_category_enum {
$($(#[$variant_attr:meta])* $variant:ident $(= $( [$($magic_bytes:tt),*] $(+ $offset:literal)? )|+ )? ,)*
}
) => {
#[derive(Debug, ::serde::Serialize, ::serde::Deserialize, ::strum::Display, Clone, Copy, PartialEq, Eq)]
#[derive(Debug, ::serde::Serialize, ::serde::Deserialize, ::strum::Display, ::specta::Type, Clone, Copy, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
$(#[$enum_attr])*

View file

@ -2,8 +2,8 @@
name = "sd-images"
version = "0.0.0"
authors = [
"Jake Robinson <jake@spacedrive.com>",
"Vítor Vasconcellos <vitor@spacedrive.com>",
"Jake Robinson <jake@spacedrive.com>",
"Vítor Vasconcellos <vitor@spacedrive.com>",
]
license = { workspace = true }
repository = { workspace = true }
@ -14,12 +14,25 @@ heif = ["dep:libheif-rs", "dep:libheif-sys"]
[dependencies]
image = "0.24.7"
thiserror = "1.0.48"
webp = { version = "0.2.6", optional = true }
thiserror = "1.0.49"
resvg = "0.36.0"
rspc = { workspace = true, optional = true } # error conversion
specta = { workspace = true, optional = true }
serde = { workspace = true, optional = true, features = ["derive"] }
bincode = { version = "2.0.0-rc.3", features = [
"derive",
"alloc",
], optional = true }
once_cell = "1.18.0"
tracing = { workspace = true }
resvg = "0.35.0"
# both of these added *default* bindgen features in 0.22.0 and 2.0.0+1.16.2 respectively
# both of these added *default* bindgen features in 0.22.0 and 2.0.0 respectively
this broke builds as we build our own libheif, so I disabled their default features
libheif-rs = { version = "0.22.0", default-features = false, optional = true }
libheif-sys = { version = "2.0.0", default-features = false, optional = true }
pdfium-render = { version ="0.8.8", features = ["sync", "image", "thread_safe"] }
pdfium-render = { version = "0.8.8", features = [
"sync",
"image",
"thread_safe",
] }

View file

@ -1,31 +1,175 @@
use std::fmt::Display;
/// The size of 1MiB in bytes
const MIB: u64 = 1_048_576;
/// The maximum file size that an image can be in order to have a thumbnail generated.
///
/// This value is in MiB.
pub const MAXIMUM_FILE_SIZE: u64 = MIB * 192;
/// These are roughly all extensions supported by the `image` crate, as of `v0.24.7`.
///
/// We only support images that have both good encoding and decoding support, without external C-based dependencies (e.g. `avif`)
pub const GENERIC_EXTENSIONS: [&str; 16] = [
"bmp", "dib", "ff", "gif", "ico", "jpg", "jpeg", "png", "pnm", "qoi", "tga", "icb", "vda",
"vst", "tiff", "tif",
];
pub const SVG_EXTENSIONS: [&str; 2] = ["svg", "svgz"];
pub const PDF_EXTENSIONS: [&str; 1] = ["pdf"];
#[cfg(feature = "heif")]
pub const HEIF_EXTENSIONS: [&str; 7] = ["heif", "heifs", "heic", "heics", "avif", "avci", "avcs"];
/// The maximum file size that an image can be in order to have a thumbnail generated.
///
/// This value is in MiB.
#[cfg(feature = "heif")]
pub const HEIF_MAXIMUM_FILE_SIZE: u64 = MIB * 32;
pub const SVG_EXTENSIONS: [&str; 2] = ["svg", "svgz"];
// Will be needed for validating HEIF images
// #[cfg(feature = "heif")]
// pub const HEIF_BPS: u8 = 8;
/// The maximum file size that an image can be in order to have a thumbnail generated.
/// This is the target pixel count for all SVG images to be rendered at.
///
/// This value is in MiB.
pub const SVG_MAXIMUM_FILE_SIZE: u64 = MIB * 24;
/// The size that SVG images are rendered at.
pub const SVG_RENDER_SIZE: u32 = 512;
pub const PDF_EXTENSION: &str = "pdf";
/// It is 512x512, but if the SVG has a non-1:1 aspect ratio we need to account for that.
pub const SVG_TARGET_PX: f32 = 262_144_f32;
/// The size that PDF pages are rendered at.
pub const PDF_RENDER_SIZE: i32 = 1024;
/// The maximum file size that an image can be in order to have a thumbnail generated.
///
/// This value is in MiB.
pub const GENERIC_MAXIMUM_FILE_SIZE: u64 = MIB * 64;
/// This is 120 DPI at standard A4 printer paper size - the target aspect
/// ratio and height are maintained.
pub const PDF_RENDER_WIDTH: pdfium_render::prelude::Pixels = 992;
/// Every file extension the image conversion pipeline can target, mirroring
/// the `GENERIC_EXTENSIONS`, `HEIF_EXTENSIONS`, `SVG_EXTENSIONS` and
/// `PDF_EXTENSIONS` constant arrays.
// NOTE(review): `#[serde(rename_all = ...)]` is normally only recognized when
// the item has a serde derive; with the hand-written `Serialize`/`Deserialize`
// impls below, this cfg_attr looks like it would fail to compile (or be inert)
// under the `serde` feature — confirm.
#[cfg_attr(feature = "specta", derive(specta::Type))]
#[cfg_attr(feature = "bincode", derive(bincode::Encode, bincode::Decode))]
#[cfg_attr(feature = "serde", serde(rename_all = "lowercase"))]
#[derive(Debug, Clone, Copy)]
pub enum ConvertableExtension {
Bmp,
Dib,
Ff,
Gif,
Ico,
Jpg,
Jpeg,
Png,
Pnm,
Qoi,
Tga,
Icb,
Vda,
Vst,
Tiff,
Tif,
Heif,
Heifs,
Heic,
Heics,
Avif,
Avci,
Avcs,
Svg,
Svgz,
Pdf,
}
impl Display for ConvertableExtension {
	/// Renders the variant exactly as its `Debug` representation (e.g. `Jpeg`).
	fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		std::fmt::Debug::fmt(self, f)
	}
}
impl TryFrom<String> for ConvertableExtension {
type Error = crate::Error;
fn try_from(value: String) -> Result<Self, Self::Error> {
let v = value.to_lowercase();
match v.as_str() {
"bmp" => Ok(Self::Bmp),
"dib" => Ok(Self::Dib),
"ff" => Ok(Self::Ff),
"gif" => Ok(Self::Gif),
"ico" => Ok(Self::Ico),
"jpg" => Ok(Self::Jpg),
"jpeg" => Ok(Self::Jpeg),
"png" => Ok(Self::Png),
"pnm" => Ok(Self::Pnm),
"qoi" => Ok(Self::Qoi),
"tga" => Ok(Self::Tga),
"icb" => Ok(Self::Icb),
"vda" => Ok(Self::Vda),
"vst" => Ok(Self::Vst),
"tiff" => Ok(Self::Tiff),
"tif" => Ok(Self::Tif),
"heif" => Ok(Self::Heif),
"heifs" => Ok(Self::Heifs),
"heic" => Ok(Self::Heic),
"heics" => Ok(Self::Heics),
"avif" => Ok(Self::Avif),
"avci" => Ok(Self::Avci),
"avcs" => Ok(Self::Avcs),
"svg" => Ok(Self::Svg),
"svgz" => Ok(Self::Svgz),
"pdf" => Ok(Self::Pdf),
_ => Err(crate::Error::Unsupported),
}
}
}
#[cfg(feature = "serde")]
impl serde::Serialize for ConvertableExtension {
// Serializes as the `Display` string (the `Debug`-style variant name, e.g.
// `Jpeg`); the matching deserializer lowercases on the way back in, so the
// round trip is case-insensitive.
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
serializer.serialize_str(&self.to_string())
}
}
/// serde visitor that parses extension strings into [`ConvertableExtension`].
#[cfg(feature = "serde")]
struct ExtensionVisitor;

#[cfg(feature = "serde")]
impl<'de> serde::de::Visitor<'de> for ExtensionVisitor {
	type Value = ConvertableExtension;

	fn expecting(&self, formatter: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
		// Fix: the message previously ended with a stray backtick
		// ("A valid extension string`"). serde convention is a lowercase
		// fragment that reads after "expecting ...".
		formatter.write_str("a valid extension string")
	}

	/// Delegates to `TryFrom<String>`, which accepts any casing of a
	/// supported extension.
	fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
	where
		E: serde::de::Error,
	{
		Self::Value::try_from(v.to_string()).map_err(|e| E::custom(format!("unknown variant: {e}")))
	}
}
#[cfg(feature = "serde")]
impl<'de> serde::Deserialize<'de> for ConvertableExtension {
// Hand-written so string parsing goes through `ExtensionVisitor` (and thus
// the case-insensitive `TryFrom<String>`) rather than derived matching.
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
deserializer.deserialize_str(ExtensionVisitor)
}
}
/// Enumerates every extension string this crate can convert, in the same
/// order as the extension constant arrays: generic, heif (when the feature
/// is enabled), svg, then pdf.
#[inline]
#[must_use]
pub fn all_compatible_extensions() -> Vec<String> {
	let mut extensions: Vec<String> =
		GENERIC_EXTENSIONS.into_iter().map(String::from).collect();

	#[cfg(feature = "heif")]
	extensions.extend(HEIF_EXTENSIONS.into_iter().map(String::from));

	extensions.extend(SVG_EXTENSIONS.into_iter().map(String::from));
	extensions.extend(PDF_EXTENSIONS.into_iter().map(String::from));

	extensions
}

View file

@ -1,26 +1,12 @@
use std::num::TryFromIntError;
use std::{num::TryFromIntError, path::Path};
pub type Result<T> = std::result::Result<T, Error>;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[error("error with pdfium: {0}")]
Pdfium(#[from] pdfium_render::prelude::PdfiumError),
#[error("failed to load pdfium library")]
PdfiumBinding,
#[cfg(feature = "heif")]
#[error("error with libheif: {0}")]
LibHeif(#[from] libheif_rs::HeifError),
#[error("error with usvg: {0}")]
USvg(#[from] resvg::usvg::Error),
#[error("failed to allocate `Pixbuf` while converting an SVG")]
Pixbuf,
#[error("error while loading the image (via the `image` crate): {0}")]
Image(#[from] image::ImageError),
#[error("there was an i/o error: {0}")]
Io(#[from] std::io::Error),
#[error("there was an error while converting the image to an `RgbImage`")]
RgbImageConversion,
#[error("there was an i/o at path '{}' error: {0}", .1.display())]
Io(std::io::Error, Box<Path>),
#[error("the image provided is unsupported")]
Unsupported,
#[error("the image provided is too large (over 20MiB)")]
@ -29,12 +15,35 @@ pub enum Error {
InvalidBitDepth,
#[error("invalid path provided (non UTF-8)")]
InvalidPath,
#[error("the image has an invalid length to be RGB")]
#[error("the length of an input stream was invalid")]
InvalidLength,
#[error("invalid path provided (it had no file extension)")]
NoExtension,
#[error("error while converting from raw")]
RawConversion,
// these errors are either: reliant on external (C dependencies), or are extremely niche
// this means they rely on a lot of specific functionality, and therefore have specific errors
#[cfg(feature = "heif")]
#[error("error with libheif: {0}")]
LibHeif(#[from] libheif_rs::HeifError),
#[error("there was an error while converting the image to an `RgbImage`")]
RgbImageConversion,
#[error("error with pdfium: {0}")]
Pdfium(#[from] pdfium_render::prelude::PdfiumError),
#[error("failed to load pdfium library")]
PdfiumBinding,
#[error("error with usvg: {0}")]
USvg(#[from] resvg::usvg::Error),
#[error("failed to allocate `Pixbuf` while converting an SVG")]
Pixbuf,
#[error("error while loading the image (via the `image` crate): {0}")]
Image(#[from] image::ImageError),
// #[error("error while converting from raw")] // not enough rust support for it to be feasible
// RawConversion,
#[error("error while parsing integers")]
TryFromInt(#[from] TryFromIntError),
}
#[cfg(feature = "rspc")]
impl From<Error> for rspc::Error {
fn from(value: Error) -> Self {
Self::new(rspc::ErrorCode::InternalServerError, value.to_string())
}
}

View file

@ -1,52 +0,0 @@
use crate::{
consts,
error::{Error, Result},
generic::GenericHandler,
pdf::PdfHandler,
svg::SvgHandler,
ImageHandler,
};
use image::DynamicImage;
use std::{
ffi::{OsStr, OsString},
path::Path,
};
#[cfg(feature = "heif")]
use crate::heif::HeifHandler;
/// Decodes the image at `path` into a `DynamicImage`, dispatching on the
/// lowercased file extension.
///
/// # Errors
/// Returns `Error::NoExtension` when the path has no extension at all.
pub fn format_image(path: impl AsRef<Path>) -> Result<DynamicImage> {
let ext = path
.as_ref()
.extension()
.map_or_else(|| Err(Error::NoExtension), |e| Ok(e.to_ascii_lowercase()))?;
match_to_handler(&ext).handle_image(path.as_ref())
}
// Picks the `ImageHandler` for an already-lowercased extension. Starts from
// `GenericHandler` and lets the later checks (heif, svg, pdf) override it,
// so unknown extensions silently fall back to the generic handler.
#[allow(clippy::useless_let_if_seq)]
fn match_to_handler(ext: &OsStr) -> Box<dyn ImageHandler> {
let mut handler: Box<dyn ImageHandler> = Box::new(GenericHandler {});
#[cfg(feature = "heif")]
if consts::HEIF_EXTENSIONS
.iter()
.map(OsString::from)
.any(|x| x == ext)
{
handler = Box::new(HeifHandler {});
}
if consts::SVG_EXTENSIONS
.iter()
.map(OsString::from)
.any(|x| x == ext)
{
handler = Box::new(SvgHandler {});
}
if ext == consts::PDF_EXTENSION {
handler = Box::new(PdfHandler {});
}
handler
}

View file

@ -1,4 +1,3 @@
use crate::consts::GENERIC_MAXIMUM_FILE_SIZE;
pub use crate::error::{Error, Result};
use crate::ImageHandler;
use image::DynamicImage;
@ -7,14 +6,6 @@ use std::path::Path;
pub struct GenericHandler {}
impl ImageHandler for GenericHandler {
fn maximum_size(&self) -> u64 {
GENERIC_MAXIMUM_FILE_SIZE
}
fn validate_image(&self, _bits_per_pixel: u8, _length: usize) -> Result<()> {
Ok(())
}
fn handle_image(&self, path: &Path) -> Result<DynamicImage> {
let data = self.get_data(path)?; // this also makes sure the file isn't above the maximum size
Ok(image::load_from_memory(&data)?)

View file

@ -0,0 +1,67 @@
use crate::{
consts,
error::{Error, Result},
generic::GenericHandler,
pdf::PdfHandler,
svg::SvgHandler,
ImageHandler,
};
use image::DynamicImage;
use std::{
ffi::{OsStr, OsString},
path::Path,
};
#[cfg(feature = "heif")]
use crate::heif::HeifHandler;
/// Decodes the image at `path` into a `DynamicImage` using the handler that
/// matches its file extension (`Error::Unsupported` when none matches).
pub fn format_image(path: impl AsRef<Path>) -> Result<DynamicImage> {
let path = path.as_ref();
match_to_handler(path.extension())?.handle_image(path)
}
/// Re-encodes the image at `path` into the format implied by `desired_ext`:
/// the handler matching the source extension decodes, and is handed the
/// handler matching `desired_ext` to perform the conversion.
pub fn convert_image(path: impl AsRef<Path>, desired_ext: &OsStr) -> Result<DynamicImage> {
let path = path.as_ref();
match_to_handler(path.extension())?.convert_image(match_to_handler(Some(desired_ext))?, path)
}
#[allow(clippy::useless_let_if_seq)]
fn match_to_handler(ext: Option<&OsStr>) -> Result<Box<dyn ImageHandler>> {
let ext = ext.map(OsStr::to_ascii_lowercase).unwrap_or_default();
let mut handler: Option<Box<dyn ImageHandler>> = None;
if consts::GENERIC_EXTENSIONS
.iter()
.map(OsString::from)
.any(|x| x == ext)
{
handler = Some(Box::new(GenericHandler {}));
}
#[cfg(feature = "heif")]
if consts::HEIF_EXTENSIONS
.iter()
.map(OsString::from)
.any(|x| x == ext)
{
handler = Some(Box::new(HeifHandler {}));
}
if consts::SVG_EXTENSIONS
.iter()
.map(OsString::from)
.any(|x| x == ext)
{
handler = Some(Box::new(SvgHandler {}));
}
if consts::PDF_EXTENSIONS
.iter()
.map(OsString::from)
.any(|x| x == ext)
{
handler = Some(Box::new(PdfHandler {}));
}
handler.ok_or(Error::Unsupported)
}

View file

@ -1,5 +1,4 @@
pub use crate::consts::HEIF_EXTENSIONS;
use crate::consts::HEIF_MAXIMUM_FILE_SIZE;
pub use crate::error::{Error, Result};
use crate::ImageHandler;
use image::DynamicImage;
@ -14,19 +13,15 @@ static HEIF: Lazy<LibHeif> = Lazy::new(LibHeif::new);
pub struct HeifHandler {}
impl ImageHandler for HeifHandler {
fn maximum_size(&self) -> u64 {
HEIF_MAXIMUM_FILE_SIZE
}
// fn validate_image(&self, bits_per_pixel: u8, length: usize) -> Result<()> {
// if bits_per_pixel != 8 {
// return Err(Error::InvalidBitDepth);
// } else if length % 3 != 0 || length % 4 != 0 {
// return Err(Error::InvalidLength);
// }
fn validate_image(&self, bits_per_pixel: u8, length: usize) -> Result<()> {
if bits_per_pixel != 8 {
return Err(Error::InvalidBitDepth);
} else if length % 3 != 0 || length % 4 != 0 {
return Err(Error::InvalidLength);
}
Ok(())
}
// Ok(())
// }
fn handle_image(&self, path: &Path) -> Result<DynamicImage> {
let img = {
@ -38,7 +33,7 @@ impl ImageHandler for HeifHandler {
let planes = img.planes();
if let Some(i) = planes.interleaved {
self.validate_image(i.bits_per_pixel, i.data.len())?;
// self.validate_image(i.bits_per_pixel, i.data.len())?;
let mut reader = Cursor::new(i.data);
let mut sequence = vec![];
@ -49,9 +44,15 @@ impl ImageHandler for HeifHandler {
(0..img.height()).try_for_each(|x| {
let x: usize = x.try_into()?;
let start: u64 = (i.stride * x).try_into()?;
reader.seek(SeekFrom::Start(start))?;
reader
.seek(SeekFrom::Start(start))
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
(0..img.width()).try_for_each(|_| {
reader.read_exact(&mut buffer)?;
reader
.read_exact(&mut buffer)
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
sequence.extend_from_slice(&buffer);
Ok::<(), Error>(())
})?;
@ -68,20 +69,18 @@ impl ImageHandler for HeifHandler {
// This was hand-crafted using my best judgement, and I think it should work.
// I'm sure we'll get a GH issue opened regarding it if not - brxken128
self.validate_image(r.bits_per_pixel, r.data.len())?;
self.validate_image(g.bits_per_pixel, g.data.len())?;
self.validate_image(b.bits_per_pixel, b.data.len())?;
// self.validate_image(r.bits_per_pixel, r.data.len())?;
// self.validate_image(g.bits_per_pixel, g.data.len())?;
// self.validate_image(b.bits_per_pixel, b.data.len())?;
let mut red = Cursor::new(r.data);
let mut green = Cursor::new(g.data);
let mut blue = Cursor::new(b.data);
let (mut alpha, has_alpha) = if let Some(a) = planes.a {
self.validate_image(a.bits_per_pixel, a.data.len())?;
(Cursor::new(a.data), true)
} else {
(Cursor::new([].as_ref()), false)
};
let (mut alpha, has_alpha) = planes.a.map_or_else(
|| (Cursor::new([].as_ref()), false),
|a| (Cursor::new(a.data), true),
);
let mut sequence = vec![];
let mut buffer: [u8; 4] = [0u8; 4];
@ -91,15 +90,28 @@ impl ImageHandler for HeifHandler {
(0..img.height()).try_for_each(|x| {
let x: usize = x.try_into()?;
let start: u64 = (r.stride * x).try_into()?;
red.seek(SeekFrom::Start(start))?;
red.seek(SeekFrom::Start(start))
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
(0..img.width()).try_for_each(|_| {
red.read_exact(&mut buffer[0..1])?;
green.read_exact(&mut buffer[1..2])?;
blue.read_exact(&mut buffer[2..3])?;
red.read_exact(&mut buffer[0..1])
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
green
.read_exact(&mut buffer[1..2])
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
blue.read_exact(&mut buffer[2..3])
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
sequence.extend_from_slice(&buffer[..3]);
if has_alpha {
alpha.read_exact(&mut buffer[3..4])?;
alpha
.read_exact(&mut buffer[3..4])
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?;
sequence.extend_from_slice(&buffer[3..4]);
}
Ok::<(), Error>(())

View file

@ -20,42 +20,73 @@
#![forbid(unsafe_code)]
#![allow(clippy::missing_errors_doc, clippy::module_name_repetitions)]
use std::{fs, path::Path};
mod consts;
mod error;
mod formatter;
mod generic;
mod handler;
#[cfg(feature = "heif")]
mod heif;
mod pdf;
mod svg;
use consts::MAXIMUM_FILE_SIZE;
// Re-exports
pub use consts::{all_compatible_extensions, ConvertableExtension};
pub use error::{Error, Result};
pub use formatter::format_image;
pub use handler::{convert_image, format_image};
pub use image::DynamicImage;
use std::{fs, io::Read, path::Path};
pub trait ImageHandler {
fn maximum_size(&self) -> u64
where
Self: Sized; // thanks vtables
#[inline]
fn get_data(&self, path: &Path) -> Result<Vec<u8>>
where
Self: Sized,
{
let mut file = fs::File::open(path)?;
if file.metadata()?.len() > self.maximum_size() {
Err(Error::TooLarge)
self.validate_image(path)?;
fs::read(path).map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))
}
fn validate_image(&self, path: &Path) -> Result<()>
where
Self: Sized,
{
if fs::metadata(path)
.map_err(|e| Error::Io(e, path.to_path_buf().into_boxed_path()))?
.len() <= MAXIMUM_FILE_SIZE
{
Ok(())
} else {
let mut data = vec![];
file.read_to_end(&mut data)?;
Ok(data)
Err(Error::TooLarge)
}
}
fn validate_image(&self, bits_per_pixel: u8, length: usize) -> Result<()>
where
Self: Sized;
fn handle_image(&self, path: &Path) -> Result<DynamicImage>;
#[inline]
fn convert_image(
&self,
opposing_handler: Box<dyn ImageHandler>,
path: &Path,
) -> Result<DynamicImage> {
opposing_handler.handle_image(path)
}
}
/// This takes in a width and a height, and returns a scaled width and height
/// It is scaled proportionally to the [`TARGET_PX`], so smaller images will be upscaled,
/// and larger images will be downscaled. This approach also maintains the aspect ratio of the image.
#[allow(
clippy::as_conversions,
clippy::cast_precision_loss,
clippy::cast_possible_truncation,
clippy::cast_sign_loss
)]
#[must_use]
pub fn scale_dimensions(w: f32, h: f32, target_px: f32) -> (f32, f32) {
let sf = (target_px / (w * h)).sqrt();
((w * sf).round(), (h * sf).round())
}

View file

@ -4,7 +4,7 @@ use std::{
path::{Path, PathBuf},
};
use crate::{consts::PDF_RENDER_SIZE, Error::PdfiumBinding, ImageHandler, Result};
use crate::{consts::PDF_RENDER_WIDTH, Error::PdfiumBinding, ImageHandler, Result};
use image::DynamicImage;
use once_cell::sync::Lazy;
use pdfium_render::prelude::{PdfPageRenderRotation, PdfRenderConfig, Pdfium};
@ -61,21 +61,11 @@ static PDFIUM: Lazy<Option<Pdfium>> = Lazy::new(|| {
pub struct PdfHandler {}
impl ImageHandler for PdfHandler {
fn maximum_size(&self) -> u64 {
// Pdfium will only load the portions of the document it actually needs into memory.
u64::MAX
}
fn validate_image(&self, _bits_per_pixel: u8, _length: usize) -> Result<()> {
Ok(())
}
fn handle_image(&self, path: &Path) -> Result<DynamicImage> {
let pdfium = PDFIUM.as_ref().ok_or(PdfiumBinding)?;
let render_config = PdfRenderConfig::new()
.set_target_width(PDF_RENDER_SIZE)
.set_maximum_height(PDF_RENDER_SIZE)
.set_target_width(PDF_RENDER_WIDTH)
.rotate_if_landscape(PdfPageRenderRotation::Degrees90, true);
Ok(pdfium

View file

@ -1,9 +1,6 @@
use std::path::Path;
use crate::{
consts::{SVG_MAXIMUM_FILE_SIZE, SVG_RENDER_SIZE},
Error, ImageHandler, Result,
};
use crate::{consts::SVG_TARGET_PX, scale_dimensions, Error, ImageHandler, Result};
use image::DynamicImage;
use resvg::{
tiny_skia::{self},
@ -11,17 +8,16 @@ use resvg::{
};
use usvg::{fontdb, TreeParsing, TreeTextToPath};
#[derive(PartialEq, Eq)]
pub struct SvgHandler {}
impl ImageHandler for SvgHandler {
fn maximum_size(&self) -> u64 {
SVG_MAXIMUM_FILE_SIZE
}
fn validate_image(&self, _bits_per_pixel: u8, _length: usize) -> Result<()> {
Ok(())
}
#[allow(
clippy::cast_possible_truncation,
clippy::cast_sign_loss,
clippy::as_conversions,
clippy::cast_precision_loss
)]
fn handle_image(&self, path: &Path) -> Result<DynamicImage> {
let data = self.get_data(path)?;
let rtree = usvg::Tree::from_data(&data, &usvg::Options::default()).map(|mut tree| {
@ -31,23 +27,21 @@ impl ImageHandler for SvgHandler {
resvg::Tree::from_usvg(&tree)
})?;
let (scaled_w, scaled_h) =
scale_dimensions(rtree.size.width(), rtree.size.height(), SVG_TARGET_PX);
let size = if rtree.size.width() > rtree.size.height() {
rtree.size.to_int_size().scale_to_width(SVG_RENDER_SIZE) // make this a const
rtree.size.to_int_size().scale_to_width(scaled_w as u32)
} else {
rtree.size.to_int_size().scale_to_height(SVG_RENDER_SIZE)
rtree.size.to_int_size().scale_to_height(scaled_h as u32)
}
.ok_or(Error::InvalidLength)?;
#[allow(clippy::cast_precision_loss)]
#[allow(clippy::as_conversions)]
let transform = tiny_skia::Transform::from_scale(
size.width() as f32 / rtree.size.width(),
size.height() as f32 / rtree.size.height(),
);
#[allow(clippy::cast_possible_truncation)]
#[allow(clippy::cast_sign_loss)]
#[allow(clippy::as_conversions)]
let Some(mut pixmap) = tiny_skia::Pixmap::new(size.width(), size.height()) else {
return Err(Error::Pixbuf);
};

View file

@ -6,12 +6,12 @@ edition = "2021"
[dependencies]
kamadak-exif = "0.5.5"
thiserror = "1.0.48"
thiserror = "1.0.49"
image-rs = { package = "image", version = "0.24.7" }
serde = { version = "1.0.188", features = ["derive"] }
serde_json = { workspace = true }
specta = { workspace = true, features = ["chrono"] }
chrono = { version = "0.4.30", features = ["serde"] }
chrono = { version = "0.4.31", features = ["serde"] }
rand = "0.8.5"
rand_chacha = "0.3.1"

View file

@ -122,3 +122,14 @@ impl ImageMetadata {
Ok(data)
}
}
// TODO(brxken128): more exif spec reading so we can source colour spaces correctly too
// pub enum ImageColorSpace {
// Rgb,
// RgbP,
// SRgb,
// Cmyk,
// DciP3,
// Wiz,
// Biz,
// }

View file

@ -8,6 +8,7 @@ export type Procedures = {
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "categories.list", input: LibraryArgs<null>, result: { [key in Category]: number } } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null; file_paths: FilePath[] } | null } |
{ key: "files.getConvertableImageExtensions", input: never, result: string[] } |
{ key: "files.getEphemeralMediaData", input: string, result: MediaMetadata | null } |
{ key: "files.getMediaData", input: LibraryArgs<number>, result: MediaMetadata } |
{ key: "files.getPath", input: LibraryArgs<number>, result: string | null } |
@ -45,6 +46,7 @@ export type Procedures = {
{ key: "backups.backup", input: LibraryArgs<null>, result: string } |
{ key: "backups.delete", input: string, result: null } |
{ key: "backups.restore", input: string, result: null } |
{ key: "files.convertImage", input: LibraryArgs<ConvertImageArgs>, result: null } |
{ key: "files.copyFiles", input: LibraryArgs<FileCopierJobInit>, result: null } |
{ key: "files.cutFiles", input: LibraryArgs<FileCutterJobInit>, result: null } |
{ key: "files.deleteFiles", input: LibraryArgs<FileDeleterJobInit>, result: null } |
@ -130,6 +132,10 @@ export type ColorProfile = "Normal" | "Custom" | "HDRNoOriginal" | "HDRWithOrigi
export type Composite = "Unknown" | "False" | "General" | "Live"
export type ConvertImageArgs = { location_id: number; file_path_id: number; delete_src: boolean; desired_extension: ConvertableExtension; quality_percentage: number | null }
export type ConvertableExtension = "bmp" | "dib" | "ff" | "gif" | "ico" | "jpg" | "jpeg" | "png" | "pnm" | "qoi" | "tga" | "icb" | "vda" | "vst" | "tiff" | "tif" | "heif" | "heifs" | "heic" | "heics" | "avif" | "avci" | "avcs" | "svg" | "svgz" | "pdf"
export type CreateLibraryArgs = { name: LibraryName }
export type CursorOrderItem<T> = { order: SortOrder; data: T }