Optional filepath + object fields (#947)

* rust changes

* fix ts

* formatting

* linter's a little baby

* merge

* address review comments
Brendan Allan 2023-06-16 02:22:11 +02:00 committed by GitHub
parent 6ab6670102
commit 9a25373a94
41 changed files with 520 additions and 468 deletions


@ -23,16 +23,16 @@ prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust
"sqlite-create-many",
"migrations",
"sqlite",
] }
], default-features = false }
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2", features = [
"rspc",
"sqlite-create-many",
"migrations",
"sqlite",
] }
], default-features = false }
prisma-client-rust-sdk = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2", features = [
"sqlite",
] }
], default-features = false }
rspc = { version = "0.1.4" }
specta = { version = "1.0.4" }


@ -31,7 +31,7 @@ const Explorer = ({ items }: ExplorerProps) => {
const { modalRef, setData } = useActionsModalStore();
function handlePress(data: ExplorerItem) {
if (isPath(data) && data.item.is_dir) {
if (isPath(data) && data.item.is_dir && data.item.location_id !== null) {
navigation.push('Location', {
id: data.item.location_id,
path: `${data.item.materialized_path}${data.item.name}/`


@ -123,28 +123,28 @@ model FilePath {
id Int @id @default(autoincrement())
pub_id Bytes @unique
is_dir Boolean @default(false)
is_dir Boolean?
// content addressable storage id - blake3 sampled checksum
cas_id String?
// full byte contents digested into blake3 checksum
integrity_checksum String? @unique
integrity_checksum String?
// location that owns this path
location_id Int
location Location @relation(fields: [location_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
location_id Int?
location Location? @relation(fields: [location_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
// the path of the file relative to its location
materialized_path String
materialized_path String?
// the name and extension
name String
extension String // Extension MUST have 'COLLATE NOCASE' in migration
name String?
extension String? // Extension MUST have 'COLLATE NOCASE' in migration
size_in_bytes String @default("0")
size_in_bytes String?
inode Bytes // This is actually an unsigned 64 bit integer, but we don't have this type in SQLite
device Bytes // This is actually an unsigned 64 bit integer, but we don't have this type in SQLite
inode Bytes? // This is actually an unsigned 64 bit integer, but we don't have this type in SQLite
device Bytes? // This is actually an unsigned 64 bit integer, but we don't have this type in SQLite
// the unique Object for this file path
object_id Int?
@ -153,9 +153,9 @@ model FilePath {
key_id Int? // replacement for encryption
// permissions String?
date_created DateTime @default(now())
date_modified DateTime @default(now())
date_indexed DateTime @default(now())
date_created DateTime?
date_modified DateTime?
date_indexed DateTime?
// key Key? @relation(fields: [key_id], references: [id])
@ -171,28 +171,29 @@ model Object {
id Int @id @default(autoincrement())
pub_id Bytes @unique
// Enum: sd_file_ext::kind::ObjectKind
kind Int @default(0)
kind Int?
key_id Int?
key_id Int?
// handy ways to mark an object
hidden Boolean @default(false)
favorite Boolean @default(false)
important Boolean @default(false)
hidden Boolean?
favorite Boolean?
important Boolean?
// if we have generated preview media for this object on at least one Node
has_thumbnail Boolean @default(false)
has_thumbstrip Boolean @default(false)
has_video_preview Boolean @default(false)
// commented out for now by @brendonovich since they're irrelevant to the sync system
// has_thumbnail Boolean?
// has_thumbstrip Boolean?
// has_video_preview Boolean?
// TODO: change above to:
// has_generated_thumbnail Boolean @default(false)
// has_generated_thumbstrip Boolean @default(false)
// has_generated_video_preview Boolean @default(false)
// integration with ipfs
ipfs_id String?
// ipfs_id String?
// plain text note
note String?
note String?
// the original known creation date of this object
date_created DateTime @default(now())
date_accessed DateTime?
date_created DateTime?
date_accessed DateTime?
tags TagOnObject[]
labels LabelOnObject[]
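
Every scalar column the sync system cares about becomes nullable in the two models above. That is the mechanical change the rest of this diff propagates: the generated Prisma Client Rust setters and equality filters for these columns now take Option<T> (hence the Some(...) wrapping at the Rust call sites below), and reads come back as Option<T>, handled either through the maybe_missing helper added at the end of this diff or with an explicit fallback. A minimal, self-contained sketch of the read side, where Object is only a stand-in for the generated prisma::object::Data:

    // Stand-in for the generated row type; illustration only.
    struct Object {
        kind: Option<i32>,      // was: Int @default(0)
        favorite: Option<bool>, // was: Boolean @default(false)
    }

    // Mirrors `object.kind.map(|k| k != int_kind).unwrap_or_default()` used later in
    // this diff: a missing kind is treated as "unchanged", so no update is issued.
    fn kind_changed(object: &Object, int_kind: i32) -> bool {
        object.kind.map(|k| k != int_kind).unwrap_or_default()
    }

    // Assumption for illustration: a missing flag falls back to the old schema default.
    fn is_favorite(object: &Object) -> bool {
        object.favorite.unwrap_or(false)
    }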


@ -85,7 +85,7 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
.object()
.update(
object::id::equals(args.id),
vec![object::favorite::set(args.favorite)],
vec![object::favorite::set(Some(args.favorite))],
)
.exec()
.await?;
@ -214,7 +214,7 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
library: &Library,
) -> Result<(), rspc::Error> {
let location_path = location_path.as_ref();
let iso_file_path = IsolatedFilePathData::from(
let iso_file_path = IsolatedFilePathData::try_from(
library
.db
.file_path()
@ -225,7 +225,8 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
.ok_or(LocationError::FilePath(FilePathError::IdNotFound(
from_file_path_id,
)))?,
);
)
.map_err(LocationError::MissingField)?;
if iso_file_path.full_name() == to {
return Ok(());
@ -275,8 +276,8 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
.update(
file_path::id::equals(from_file_path_id),
vec![
file_path::name::set(new_file_name.to_string()),
file_path::extension::set(new_extension.to_string()),
file_path::name::set(Some(new_file_name.to_string())),
file_path::extension::set(Some(new_extension.to_string())),
],
)
.exec()
@ -312,7 +313,11 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
.exec()
.await?
.into_iter()
.map(|file_path| (file_path.id, IsolatedFilePathData::from(file_path)))
.flat_map(|file_path| {
let id = file_path.id;
IsolatedFilePathData::try_from(file_path).map(|d| (id, d))
})
.map(|(file_path_id, iso_file_path)| {
let from = location_path.join(&iso_file_path);
let mut to = location_path.join(iso_file_path.parent());
@ -377,8 +382,8 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
library.db.file_path().update(
file_path::id::equals(file_path_id),
vec![
file_path::name::set(new_name),
file_path::extension::set(new_extension),
file_path::name::set(Some(new_name)),
file_path::extension::set(Some(new_extension)),
],
)
})


@ -166,10 +166,10 @@ enum ObjectHiddenFilter {
Include,
}
impl From<ObjectHiddenFilter> for Option<object::WhereParam> {
fn from(value: ObjectHiddenFilter) -> Self {
match value {
ObjectHiddenFilter::Exclude => Some(object::hidden::not(true)),
impl ObjectHiddenFilter {
fn to_param(self) -> Option<object::WhereParam> {
match self {
ObjectHiddenFilter::Exclude => Some(object::hidden::not(Some(true))),
ObjectHiddenFilter::Include => None,
}
}
@ -192,20 +192,21 @@ struct ObjectFilterArgs {
impl ObjectFilterArgs {
fn into_params(self) -> Vec<object::WhereParam> {
use object::*;
chain_optional_iter(
[],
[
self.hidden.into(),
self.favorite.map(object::favorite::equals),
self.hidden.to_param(),
self.favorite.map(Some).map(favorite::equals),
self.date_accessed
.map(|date| date.into_prisma(object::date_accessed::equals)),
(!self.kind.is_empty())
.then(|| object::kind::in_vec(self.kind.into_iter().collect())),
.map(|date| date.into_prisma(date_accessed::equals)),
(!self.kind.is_empty()).then(|| kind::in_vec(self.kind.into_iter().collect())),
(!self.tags.is_empty()).then(|| {
let tags = self.tags.into_iter().map(tag::id::equals).collect();
let tags_on_object = tag_on_object::tag::is(vec![or(tags)]);
object::tags::some(vec![tags_on_object])
tags::some(vec![tags_on_object])
}),
],
)
@ -268,29 +269,26 @@ pub fn mount() -> AlphaRouter<Ctx> {
_ => None,
};
use file_path::*;
let params = chain_optional_iter(
filter
.search
.split(' ')
.map(str::to_string)
.map(file_path::name::contains),
.map(name::contains),
[
filter.location_id.map(file_path::location_id::equals),
filter.extension.map(file_path::extension::equals),
filter
.created_at
.from
.map(|v| file_path::date_created::gte(v.into())),
filter
.created_at
.to
.map(|v| file_path::date_created::lte(v.into())),
filter.location_id.map(Some).map(location_id::equals),
filter.extension.map(Some).map(extension::equals),
filter.created_at.from.map(|v| date_created::gte(v.into())),
filter.created_at.to.map(|v| date_created::lte(v.into())),
directory_materialized_path_str
.map(file_path::materialized_path::equals),
.map(Some)
.map(materialized_path::equals),
filter.object.and_then(|obj| {
let params = obj.into_params();
(!params.is_empty()).then(|| file_path::object::is(params))
(!params.is_empty()).then(|| object::is(params))
}),
],
);
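
Both filter builders in this file feed required and optional clauses through chain_optional_iter from util::db. Only its signature appears in the last hunk of this diff, so the helper below is a sketch inferred from the call sites (required items are kept as-is, optional items are Option<T> and the Nones are dropped), not necessarily the project's exact implementation:

    // Sketch inferred from the call sites; util::db::chain_optional_iter may differ in detail.
    pub fn chain_optional_iter<T>(
        required: impl IntoIterator<Item = T>,
        optional: impl IntoIterator<Item = Option<T>>,
    ) -> Vec<T> {
        required.into_iter().map(Some).chain(optional).flatten().collect()
    }

    // e.g. chain_optional_iter([1, 2], [None, Some(3)]) == vec![1, 2, 3]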


@ -1,7 +1,7 @@
use crate::{
location::file_path_helper::{file_path_to_handle_custom_uri, IsolatedFilePathData},
prisma::{file_path, location},
util::error::FileIOError,
util::{db::*, error::FileIOError},
Node,
};
@ -193,14 +193,14 @@ async fn handle_file(
.await?
.ok_or_else(|| HandleCustomUriError::NotFound("object"))?;
let Some(path) = &file_path.location.path else {
return Err(HandleCustomUriError::NoPath(file_path.location.id))
};
let location = maybe_missing(&file_path.location, "file_path.location")?;
let path = maybe_missing(&location.path, "file_path.location.path")?;
let lru_entry = (
Path::new(path).join(IsolatedFilePathData::from((location_id, &file_path))),
file_path.extension,
Path::new(path).join(IsolatedFilePathData::try_from((location_id, &file_path))?),
maybe_missing(file_path.extension, "extension")?,
);
FILE_METADATA_CACHE.insert(lru_cache_key, lru_entry.clone());
lru_entry
@ -400,8 +400,8 @@ pub enum HandleCustomUriError {
RangeNotSatisfiable(&'static str),
#[error("HandleCustomUriError::NotFound - resource '{0}'")]
NotFound(&'static str),
#[error("no-path")]
NoPath(i32),
#[error("HandleCustomUriError::MissingField - '{0}'")]
MissingField(#[from] MissingFieldError),
}
impl From<HandleCustomUriError> for Response<Vec<u8>> {
@ -444,7 +444,7 @@ impl From<HandleCustomUriError> for Response<Vec<u8>> {
.as_bytes()
.to_vec(),
),
HandleCustomUriError::NoPath(id) => {
HandleCustomUriError::MissingField(id) => {
error!("Location <id = {id}> has no path");
builder
.status(StatusCode::INTERNAL_SERVER_ERROR)


@ -5,7 +5,7 @@ use crate::{
file_identifier::FileIdentifierJobError, fs::error::FileSystemJobsError,
preview::ThumbnailerError,
},
util::error::FileIOError,
util::{db::MissingFieldError, error::FileIOError},
};
use sd_crypto::Error as CryptoError;
@ -15,6 +15,7 @@ use std::{
fmt::Debug,
hash::{Hash, Hasher},
mem,
path::PathBuf,
sync::Arc,
};
@ -54,6 +55,7 @@ pub enum JobError {
MissingReport { id: Uuid, name: String },
#[error("missing some job data: '{value}'")]
MissingData { value: String },
#[error("error converting/handling paths")]
Path,
#[error("invalid job status integer: {0}")]
@ -74,12 +76,10 @@ pub enum JobError {
FileSystemJobsError(#[from] FileSystemJobsError),
#[error(transparent)]
CryptoError(#[from] CryptoError),
#[error("missing-field: {0}")]
MissingField(#[from] MissingFieldError),
#[error("item of type '{0}' with id '{1}' is missing from the db")]
MissingFromDb(&'static str, String),
#[error("the cas id is not set on the path data")]
MissingCasId,
#[error("missing-location-path")]
MissingPath,
// Not errors
#[error("step completed with errors: {0:?}")]


@ -58,12 +58,12 @@ impl Category {
pub async fn get_category_count(db: &Arc<PrismaClient>, category: Category) -> i32 {
let param = match category {
Category::Recents => not![object::date_accessed::equals(None)],
Category::Favorites => object::favorite::equals(true),
Category::Favorites => object::favorite::equals(Some(true)),
Category::Photos
| Category::Videos
| Category::Music
| Category::Encrypted
| Category::Books => object::kind::equals(category.to_object_kind() as i32),
| Category::Books => object::kind::equals(Some(category.to_object_kind() as i32)),
_ => return 0,
};


@ -9,7 +9,7 @@ use crate::{
object::{orphan_remover::OrphanRemoverActor, preview::get_thumbnail_path},
prisma::{file_path, location, PrismaClient},
sync::SyncManager,
util::error::FileIOError,
util::{db::maybe_missing, error::FileIOError},
NodeContext,
};
@ -124,16 +124,20 @@ impl Library {
.exec()
.await?
.into_iter()
.map(|file_path| {
(
.flat_map(|file_path| {
let location = maybe_missing(&file_path.location, "file_path.location")?;
Ok::<_, LibraryManagerError>((
file_path.id,
file_path.location.path.as_ref().map(|location_path| {
Path::new(&location_path).join(IsolatedFilePathData::from((
file_path.location.id,
&file_path,
)))
}),
)
location
.path
.as_ref()
.map(|location_path| {
IsolatedFilePathData::try_from((location.id, &file_path))
.map(|data| Path::new(&location_path).join(data))
})
.transpose()?,
))
}),
);


@ -6,7 +6,7 @@ use crate::{
prisma::{location, node},
sync::{SyncManager, SyncMessage},
util::{
db,
db::{self, MissingFieldError},
error::{FileIOError, NonUtf8PathError},
migrator::{Migrate, MigratorError},
},
@ -65,8 +65,8 @@ pub enum LibraryManagerError {
NonUtf8Path(#[from] NonUtf8PathError),
#[error("failed to watch locations: {0}")]
LocationWatcher(#[from] LocationManagerError),
#[error("no-path")]
NoPath(i32),
#[error("missing-field: {0}")]
MissingField(#[from] MissingFieldError),
}
impl From<LibraryManagerError> for rspc::Error {


@ -1,4 +1,7 @@
use crate::{prisma::location, util::error::FileIOError};
use crate::{
prisma::location,
util::{db::MissingFieldError, error::FileIOError},
};
use std::path::PathBuf;
@ -68,6 +71,8 @@ pub enum LocationError {
FileIO(#[from] FileIOError),
#[error("location missing path <id='{0}'>")]
MissingPath(location::id::Type),
#[error("missing-field: {0}")]
MissingField(#[from] MissingFieldError),
}
impl From<LocationError> for rspc::Error {


@ -29,7 +29,7 @@ pub struct IsolatedFilePathData<'a> {
pub(in crate::location) is_dir: bool,
pub(in crate::location) name: Cow<'a, str>,
pub(in crate::location) extension: Cow<'a, str>,
pub(in crate::location) relative_path: Cow<'a, str>,
relative_path: Cow<'a, str>,
}
impl IsolatedFilePathData<'static> {
@ -90,6 +90,10 @@ impl<'a> IsolatedFilePathData<'a> {
&self.extension
}
pub fn materialized_path(&'a self) -> &'a str {
&self.materialized_path
}
pub fn is_root(&self) -> bool {
self.is_dir
&& self.materialized_path == "/"
@ -261,25 +265,25 @@ impl<'a> IsolatedFilePathData<'a> {
.unwrap_or_default()
}
fn from_db_data(
pub fn from_db_data(
location_id: location::id::Type,
db_materialized_path: &'a str,
db_is_dir: bool,
db_name: &'a str,
db_extension: &'a str,
is_dir: bool,
materialized_path: Cow<'a, str>,
name: Cow<'a, str>,
extension: Cow<'a, str>,
) -> Self {
Self {
location_id,
materialized_path: Cow::Borrowed(db_materialized_path),
is_dir: db_is_dir,
name: Cow::Borrowed(db_name),
extension: Cow::Borrowed(db_extension),
relative_path: Cow::Owned(assemble_relative_path(
db_materialized_path,
db_name,
db_extension,
db_is_dir,
&materialized_path,
&name,
&extension,
is_dir,
)),
location_id,
materialized_path,
is_dir,
name,
extension,
}
}
}
@ -304,10 +308,10 @@ impl From<IsolatedFilePathData<'static>> for file_path::UniqueWhereParam {
impl From<IsolatedFilePathData<'static>> for file_path::WhereParam {
fn from(path: IsolatedFilePathData<'static>) -> Self {
Self::And(vec![
file_path::location_id::equals(path.location_id),
file_path::materialized_path::equals(path.materialized_path.into_owned()),
file_path::name::equals(path.name.into_owned()),
file_path::extension::equals(path.extension.into_owned()),
file_path::location_id::equals(Some(path.location_id)),
file_path::materialized_path::equals(Some(path.materialized_path.into_owned())),
file_path::name::equals(Some(path.name.into_owned())),
file_path::extension::equals(Some(path.extension.into_owned())),
])
}
}
@ -326,10 +330,10 @@ impl From<&IsolatedFilePathData<'_>> for file_path::UniqueWhereParam {
impl From<&IsolatedFilePathData<'_>> for file_path::WhereParam {
fn from(path: &IsolatedFilePathData<'_>) -> Self {
Self::And(vec![
file_path::location_id::equals(path.location_id),
file_path::materialized_path::equals(path.materialized_path.to_string()),
file_path::name::equals(path.name.to_string()),
file_path::extension::equals(path.extension.to_string()),
file_path::location_id::equals(Some(path.location_id)),
file_path::materialized_path::equals(Some(path.materialized_path.to_string())),
file_path::name::equals(Some(path.name.to_string())),
file_path::extension::equals(Some(path.extension.to_string())),
])
}
}
@ -345,49 +349,47 @@ mod macros {
macro_rules! impl_from_db {
($($file_path_kind:ident),+ $(,)?) => {
$(
impl ::std::convert::From<$file_path_kind::Data> for $crate::
impl ::std::convert::TryFrom<$file_path_kind::Data> for $crate::
location::
file_path_helper::
isolated_file_path_data::
IsolatedFilePathData<'static>
{
fn from(path: $file_path_kind::Data) -> Self {
Self {
location_id: path.location_id,
relative_path: ::std::borrow::Cow::Owned(
$crate::
location::
file_path_helper::
isolated_file_path_data::
assemble_relative_path(
&path.materialized_path,
&path.name,
&path.extension,
path.is_dir,
)
),
materialized_path: ::std::borrow::Cow::Owned(path.materialized_path),
is_dir: path.is_dir,
name: ::std::borrow::Cow::Owned(path.name),
extension: ::std::borrow::Cow::Owned(path.extension),
}
type Error = $crate::util::db::MissingFieldError;
fn try_from(path: $file_path_kind::Data) -> Result<Self, Self::Error> {
use $crate::util::db::maybe_missing;
use ::std::borrow::Cow;
Ok(Self::from_db_data(
maybe_missing(path.location_id, "file_path.location_id")?,
maybe_missing(path.is_dir, "file_path.is_dir")?,
Cow::Owned(maybe_missing(path.materialized_path, "file_path.materialized_path")?),
Cow::Owned(maybe_missing(path.name, "file_path.name")?),
Cow::Owned(maybe_missing(path.extension, "file_path.extension")?)
))
}
}
impl<'a> ::std::convert::From<&'a $file_path_kind::Data> for $crate::
impl<'a> ::std::convert::TryFrom<&'a $file_path_kind::Data> for $crate::
location::
file_path_helper::
isolated_file_path_data::
IsolatedFilePathData<'a>
{
fn from(path: &'a $file_path_kind::Data) -> Self {
Self::from_db_data(
path.location_id,
&path.materialized_path,
path.is_dir,
&path.name,
&path.extension
)
type Error = $crate::util::db::MissingFieldError;
fn try_from(path: &'a $file_path_kind::Data) -> Result<Self, Self::Error> {
use $crate::util::db::maybe_missing;
use ::std::borrow::Cow;
Ok(Self::from_db_data(
maybe_missing(path.location_id, "file_path.location_id")?,
maybe_missing(path.is_dir, "file_path.is_dir")?,
Cow::Borrowed(maybe_missing(&path.materialized_path, "file_path.materialized_path")?),
Cow::Borrowed(maybe_missing(&path.name, "file_path.name")?),
Cow::Borrowed(maybe_missing(&path.extension, "file_path.extension")?)
))
}
}
)+
@ -397,49 +399,47 @@ mod macros {
macro_rules! impl_from_db_without_location_id {
($($file_path_kind:ident),+ $(,)?) => {
$(
impl ::std::convert::From<($crate::prisma::location::id::Type, $file_path_kind::Data)> for $crate::
impl ::std::convert::TryFrom<($crate::prisma::location::id::Type, $file_path_kind::Data)> for $crate::
location::
file_path_helper::
isolated_file_path_data::
IsolatedFilePathData<'static>
{
fn from((location_id, path): ($crate::prisma::location::id::Type, $file_path_kind::Data)) -> Self {
Self {
location_id,
relative_path: Cow::Owned(
$crate::
location::
file_path_helper::
isolated_file_path_data::
assemble_relative_path(
&path.materialized_path,
&path.name,
&path.extension,
path.is_dir,
)
),
materialized_path: Cow::Owned(path.materialized_path),
is_dir: path.is_dir,
name: Cow::Owned(path.name),
extension: Cow::Owned(path.extension),
}
type Error = $crate::util::db::MissingFieldError;
fn try_from((location_id, path): ($crate::prisma::location::id::Type, $file_path_kind::Data)) -> Result<Self, Self::Error> {
use $crate::util::db::maybe_missing;
use ::std::borrow::Cow;
Ok(Self::from_db_data(
location_id,
maybe_missing(path.is_dir, "file_path.is_dir")?,
Cow::Owned(maybe_missing(path.materialized_path, "file_path.materialized_path")?),
Cow::Owned(maybe_missing(path.name, "file_path.name")?),
Cow::Owned(maybe_missing(path.extension, "file_path.extension")?)
))
}
}
impl<'a> ::std::convert::From<($crate::prisma::location::id::Type, &'a $file_path_kind::Data)> for $crate::
impl<'a> ::std::convert::TryFrom<($crate::prisma::location::id::Type, &'a $file_path_kind::Data)> for $crate::
location::
file_path_helper::
isolated_file_path_data::
IsolatedFilePathData<'a>
{
fn from((location_id, path): ($crate::prisma::location::id::Type, &'a $file_path_kind::Data)) -> Self {
Self::from_db_data(
type Error = $crate::util::db::MissingFieldError;
fn try_from((location_id, path): ($crate::prisma::location::id::Type, &'a $file_path_kind::Data)) -> Result<Self, Self::Error> {
use $crate::util::db::maybe_missing;
use ::std::borrow::Cow;
Ok(Self::from_db_data(
location_id,
&path.materialized_path,
path.is_dir,
&path.name,
&path.extension
)
maybe_missing(path.is_dir, "file_path.is_dir")?,
Cow::Borrowed(maybe_missing(&path.materialized_path, "file_path.materialized_path")?),
Cow::Borrowed(maybe_missing(&path.name, "file_path.name")?),
Cow::Borrowed(maybe_missing(&path.extension, "file_path.extension")?)
))
}
}
)+
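
Expanded by hand for a single generated selection, the TryFrom impls produced by the two macros above take roughly the shape of the self-contained analogue below. Data, maybe_missing and IsolatedFilePathData are simplified stand-ins here (the real row types are Prisma-generated, and the real conversion forwards to Self::from_db_data, which also assembles relative_path):

    use std::borrow::Cow;

    // Simplified stand-ins; illustration only.
    struct Data {
        location_id: Option<i32>,
        is_dir: Option<bool>,
        materialized_path: Option<String>,
        name: Option<String>,
        extension: Option<String>,
    }

    #[derive(Debug)]
    struct MissingFieldError(&'static str);

    fn maybe_missing<T>(data: Option<T>, field: &'static str) -> Result<T, MissingFieldError> {
        data.ok_or(MissingFieldError(field))
    }

    struct IsolatedFilePathData<'a> {
        location_id: i32,
        is_dir: bool,
        materialized_path: Cow<'a, str>,
        name: Cow<'a, str>,
        extension: Cow<'a, str>,
    }

    impl TryFrom<Data> for IsolatedFilePathData<'static> {
        type Error = MissingFieldError;

        fn try_from(path: Data) -> Result<Self, Self::Error> {
            // Each formerly-required column is now Option in the DB, so every read is a
            // checked extraction that names the missing field in the error it returns.
            Ok(Self {
                location_id: maybe_missing(path.location_id, "file_path.location_id")?,
                is_dir: maybe_missing(path.is_dir, "file_path.is_dir")?,
                materialized_path: Cow::Owned(maybe_missing(path.materialized_path, "file_path.materialized_path")?),
                name: Cow::Owned(maybe_missing(path.name, "file_path.name")?),
                extension: Cow::Owned(maybe_missing(path.extension, "file_path.extension")?),
            })
        }
    }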


@ -159,6 +159,7 @@ pub async fn create_file_path(
) -> Result<file_path::Data, FilePathError> {
use crate::{sync, util::db::uuid_to_bytes};
use sd_prisma::prisma;
use serde_json::json;
use uuid::Uuid;
@ -207,25 +208,22 @@ pub async fn create_file_path(
},
params,
),
db.file_path().create(
pub_id,
location::id::equals(location.id),
materialized_path.into_owned(),
name.into_owned(),
extension.into_owned(),
metadata.inode.to_le_bytes().into(),
metadata.device.to_le_bytes().into(),
{
use file_path::*;
vec![
cas_id::set(cas_id),
is_dir::set(is_dir),
size_in_bytes::set(metadata.size_in_bytes.to_string()),
date_created::set(metadata.created_at.into()),
date_modified::set(metadata.modified_at.into()),
]
},
),
db.file_path().create(pub_id, {
use file_path::*;
vec![
location::connect(prisma::location::id::equals(location.id)),
materialized_path::set(Some(materialized_path.into_owned())),
name::set(Some(name.into_owned())),
extension::set(Some(extension.into_owned())),
inode::set(Some(metadata.inode.to_le_bytes().into())),
device::set(Some(metadata.device.to_le_bytes().into())),
cas_id::set(cas_id),
is_dir::set(Some(is_dir)),
size_in_bytes::set(Some(metadata.size_in_bytes.to_string())),
date_created::set(Some(metadata.created_at.into())),
date_modified::set(Some(metadata.modified_at.into())),
]
}),
)
.await?;
@ -255,11 +253,11 @@ pub fn filter_existing_file_path_params(
}: &IsolatedFilePathData,
) -> Vec<file_path::WhereParam> {
vec![
file_path::location_id::equals(*location_id),
file_path::materialized_path::equals(materialized_path.to_string()),
file_path::is_dir::equals(*is_dir),
file_path::name::equals(name.to_string()),
file_path::extension::equals(extension.to_string()),
file_path::location_id::equals(Some(*location_id)),
file_path::materialized_path::equals(Some(materialized_path.to_string())),
file_path::is_dir::equals(Some(*is_dir)),
file_path::name::equals(Some(name.to_string())),
file_path::extension::equals(Some(extension.to_string())),
]
}
@ -277,10 +275,10 @@ pub fn loose_find_existing_file_path_params(
}: &IsolatedFilePathData,
) -> Vec<file_path::WhereParam> {
vec![
file_path::location_id::equals(*location_id),
file_path::materialized_path::equals(materialized_path.to_string()),
file_path::name::equals(name.to_string()),
file_path::extension::equals(extension.to_string()),
file_path::location_id::equals(Some(*location_id)),
file_path::materialized_path::equals(Some(materialized_path.to_string())),
file_path::name::equals(Some(name.to_string())),
file_path::extension::equals(Some(extension.to_string())),
]
}


@ -6,6 +6,7 @@ use crate::{
IsolatedFilePathData,
},
to_remove_db_fetcher_fn,
util::db::maybe_missing,
};
use std::{path::Path, sync::Arc};
@ -62,10 +63,8 @@ impl StatefulJob for IndexerJob {
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let location_id = state.init.location.id;
let location_path = state.init.location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(JobError::MissingPath)
};
let location_path =
maybe_missing(&state.init.location.path, "location.path").map(Path::new)?;
let db = Arc::clone(&ctx.library.db);
@ -206,10 +205,8 @@ impl StatefulJob for IndexerJob {
}
IndexerJobStepInput::Walk(to_walk_entry) => {
let location_id = state.init.location.id;
let location_path = state.init.location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(JobError::MissingPath)
};
let location_path =
maybe_missing(&state.init.location.path, "location.path").map(Path::new)?;
let db = Arc::clone(&ctx.library.db);
@ -280,10 +277,8 @@ impl StatefulJob for IndexerJob {
}
async fn finalize(&mut self, ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
let location_path = state.init.location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(JobError::MissingPath)
};
let location_path =
maybe_missing(&state.init.location.path, "location.path").map(Path::new)?;
finalize_indexer(location_path, state, ctx)
}


@ -14,6 +14,7 @@ use std::{
};
use rspc::ErrorCode;
use sd_prisma::prisma_sync;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::json;
use thiserror::Error;
@ -151,41 +152,63 @@ async fn execute_indexer_save_step(
use file_path::*;
let pub_id = uuid_to_bytes(entry.pub_id);
let (sync_params, db_params): (Vec<_>, Vec<_>) = [
(
(
location::NAME,
json!(prisma_sync::location::SyncId {
pub_id: pub_id.clone()
}),
),
location_id::set(Some(location.id)),
),
(
(materialized_path::NAME, json!(materialized_path)),
materialized_path::set(Some(materialized_path.to_string())),
),
((name::NAME, json!(name)), name::set(Some(name.to_string()))),
((is_dir::NAME, json!(*is_dir)), is_dir::set(Some(*is_dir))),
(
(extension::NAME, json!(extension)),
extension::set(Some(extension.to_string())),
),
(
(
size_in_bytes::NAME,
json!(entry.metadata.size_in_bytes.to_string()),
),
size_in_bytes::set(Some(entry.metadata.size_in_bytes.to_string())),
),
(
(inode::NAME, json!(entry.metadata.inode.to_le_bytes())),
inode::set(Some(entry.metadata.inode.to_le_bytes().into())),
),
(
(device::NAME, json!(entry.metadata.device.to_le_bytes())),
device::set(Some(entry.metadata.device.to_le_bytes().into())),
),
(
(date_created::NAME, json!(entry.metadata.created_at)),
date_created::set(Some(entry.metadata.created_at.into())),
),
(
(date_modified::NAME, json!(entry.metadata.modified_at)),
date_modified::set(Some(entry.metadata.modified_at.into())),
),
]
.into_iter()
.unzip();
(
sync.unique_shared_create(
sync::file_path::SyncId {
pub_id: uuid_to_bytes(entry.pub_id),
},
[
(materialized_path::NAME, json!(materialized_path)),
(name::NAME, json!(name)),
(is_dir::NAME, json!(*is_dir)),
(extension::NAME, json!(extension)),
(
size_in_bytes::NAME,
json!(entry.metadata.size_in_bytes.to_string()),
),
(inode::NAME, json!(entry.metadata.inode.to_le_bytes())),
(device::NAME, json!(entry.metadata.device.to_le_bytes())),
(date_created::NAME, json!(entry.metadata.created_at)),
(date_modified::NAME, json!(entry.metadata.modified_at)),
],
),
file_path::create_unchecked(
uuid_to_bytes(entry.pub_id),
location.id,
materialized_path.to_string(),
name.to_string(),
extension.to_string(),
entry.metadata.inode.to_le_bytes().into(),
entry.metadata.device.to_le_bytes().into(),
vec![
is_dir::set(*is_dir),
size_in_bytes::set(entry.metadata.size_in_bytes.to_string()),
date_created::set(entry.metadata.created_at.into()),
date_modified::set(entry.metadata.modified_at.into()),
],
sync_params,
),
file_path::create_unchecked(pub_id, db_params),
)
})
.unzip();
@ -298,12 +321,12 @@ macro_rules! to_remove_db_fetcher_fn {
iso_file_path;
$db.file_path()
.find_many(vec![
$crate::prisma::file_path::location_id::equals($location_id),
$crate::prisma::file_path::materialized_path::equals(
$crate::prisma::file_path::location_id::equals(Some($location_id)),
$crate::prisma::file_path::materialized_path::equals(Some(
iso_file_path
.materialized_path_for_children()
.expect("the received isolated file path must be from a directory"),
),
)),
::prisma_client_rust::operator::not(
unique_location_id_materialized_path_name_extension_params,
),


@ -454,6 +454,18 @@ pub struct IndexerRule {
}
impl IndexerRule {
#[cfg(test)]
pub fn new(name: String, default: bool, rules: Vec<RulePerKind>) -> Self {
Self {
id: None,
name,
default,
rules,
date_created: Utc::now(),
date_modified: Utc::now(),
}
}
pub async fn apply(
&self,
source: impl AsRef<Path>,


@ -305,7 +305,7 @@ where
.map(move |file_paths| {
let isolated_paths_already_in_db = file_paths
.into_iter()
.map(IsolatedFilePathData::from)
.flat_map(IsolatedFilePathData::try_from)
.collect::<HashSet<_>>();
indexed_paths.into_iter().filter_map(move |entry| {


@ -1,4 +1,4 @@
use crate::{library::Library, prisma::location};
use crate::{library::Library, prisma::location, util::db::maybe_missing};
use std::{
collections::{HashMap, HashSet},
@ -23,10 +23,7 @@ pub(super) async fn check_online(
) -> Result<bool, LocationManagerError> {
let pub_id = Uuid::from_slice(&location.pub_id)?;
let location_path = location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(LocationManagerError::MissingPath(location.id))
};
let location_path = maybe_missing(&location.path, "location.path").map(Path::new)?;
if location.node_id == Some(library.node_local_id) {
match fs::metadata(&location_path).await {


@ -1,4 +1,9 @@
use crate::{job::JobManagerError, library::Library, prisma::location, util::error::FileIOError};
use crate::{
job::JobManagerError,
library::Library,
prisma::location,
util::{db::MissingFieldError, error::FileIOError},
};
use std::{
collections::BTreeSet,
@ -102,8 +107,8 @@ pub enum LocationManagerError {
CorruptedLocationPubId(#[from] uuid::Error),
#[error("Job Manager error: (error: {0})")]
JobManager(#[from] JobManagerError),
#[error("location missing location path: <id='{0}'>")]
MissingPath(location::id::Type),
#[error("missing-field")]
MissingField(#[from] MissingFieldError),
#[error("invalid inode")]
InvalidInode,


@ -1,4 +1,4 @@
use crate::{library::Library, prisma::location};
use crate::{library::Library, prisma::location, util::db::maybe_missing};
use std::{
collections::HashSet,
@ -106,13 +106,9 @@ impl LocationWatcher {
stop_rx,
));
let Some(path) = location.path else {
return Err(LocationManagerError::MissingPath(location.id))
};
Ok(Self {
id: location.id,
path,
path: maybe_missing(location.path, "location.path")?,
watcher,
ignore_path_tx,
handle: Some(handle),


@ -16,13 +16,12 @@ use crate::{
},
object::{
file_identifier::FileMetadata,
object_just_id_has_thumbnail,
preview::{can_generate_thumbnail_for_image, generate_image_thumbnail, get_thumbnail_path},
validation::hash::file_checksum,
},
prisma::{file_path, location, object},
sync,
util::error::FileIOError,
util::{db::maybe_missing, error::FileIOError},
};
#[cfg(target_family = "unix")]
@ -75,9 +74,7 @@ pub(super) async fn create_dir(
let path = path.as_ref();
let Some(location_path) = &location.path else {
return Err(LocationManagerError::MissingPath(location_id))
};
let location_path = maybe_missing(&location.path, "location.path")?;
trace!(
"Location: <root_path ='{}'> creating directory: {}",
@ -109,7 +106,9 @@ pub(super) async fn create_dir(
return Ok(());
};
let created_path = create_file_path(
let materialized_path = iso_file_path.materialized_path().to_string();
create_file_path(
library,
iso_file_path,
None,
@ -123,10 +122,10 @@ pub(super) async fn create_dir(
)
.await?;
info!("Created path: {}", created_path.materialized_path);
info!("Created path: {}", &materialized_path);
// scan the new directory
scan_location_sub_path(library, location, &created_path.materialized_path).await?;
scan_location_sub_path(library, location, &materialized_path).await?;
invalidate_query!(library, "search.paths");
@ -151,6 +150,8 @@ pub(super) async fn create_file(
let db = &library.db;
let iso_file_path = IsolatedFilePathData::new(location_id, &location_path, path, false)?;
let materialized_path = iso_file_path.materialized_path().to_string();
let extension = iso_file_path.extension().to_string();
let (inode, device) = {
#[cfg(target_family = "unix")]
@ -179,9 +180,7 @@ pub(super) async fn create_file(
cas_id,
kind,
fs_metadata,
} = FileMetadata::new(&location_path, &iso_file_path)
.await
.map_err(|e| FileIOError::from((location_path.join(&iso_file_path), e)))?;
} = FileMetadata::new(&location_path, &iso_file_path).await?;
let created_file = create_file_path(
library,
@ -197,7 +196,9 @@ pub(super) async fn create_file(
)
.await?;
info!("Created path: {}", created_file.materialized_path);
info!("Created path: {}", &materialized_path);
object::select!(object_just_id { id });
let existing_object = db
.object()
@ -205,7 +206,7 @@ pub(super) async fn create_file(
file_path::cas_id::equals(Some(cas_id.clone())),
file_path::pub_id::not(created_file.pub_id.clone()),
])])
.select(object_just_id_has_thumbnail::select())
.select(object_just_id::select())
.exec()
.await?;
@ -216,13 +217,13 @@ pub(super) async fn create_file(
.create(
Uuid::new_v4().as_bytes().to_vec(),
vec![
object::date_created::set(
object::date_created::set(Some(
DateTime::<Local>::from(fs_metadata.created_or_now()).into(),
),
object::kind::set(kind as i32),
)),
object::kind::set(Some(kind as i32)),
],
)
.select(object_just_id_has_thumbnail::select())
.select(object_just_id::select())
.exec()
.await?
};
@ -235,12 +236,13 @@ pub(super) async fn create_file(
.exec()
.await?;
if !object.has_thumbnail && !created_file.extension.is_empty() {
if !extension.is_empty() {
// Running in a detached task as thumbnail generation can take a while and we don't want to block the watcher
let path = path.to_path_buf();
let library = library.clone();
tokio::spawn(async move {
generate_thumbnail(&created_file.extension, &cas_id, path, &library).await;
generate_thumbnail(&extension, &cas_id, path, &library).await;
});
}
@ -344,9 +346,7 @@ async fn inner_update_file(
.await?
.ok_or_else(|| LocationManagerError::MissingLocation(location_id))?;
let Some(location_path) = location.path.map(PathBuf::from) else {
return Err(LocationManagerError::MissingPath(location_id))
};
let location_path = maybe_missing(location.path.map(PathBuf::from), "location.path")?;
trace!(
"Location: <root_path ='{}'> updating file: {}",
@ -354,15 +354,13 @@ async fn inner_update_file(
full_path.display()
);
let iso_file_path = IsolatedFilePathData::from(file_path);
let iso_file_path = IsolatedFilePathData::try_from(file_path)?;
let FileMetadata {
cas_id,
fs_metadata,
kind,
} = FileMetadata::new(&location_path, &iso_file_path)
.await
.map_err(|e| FileIOError::from((location_path.join(&iso_file_path), e)))?;
} = FileMetadata::new(&location_path, &iso_file_path).await?;
if let Some(old_cas_id) = &file_path.cas_id {
if old_cas_id != &cas_id {
@ -376,12 +374,15 @@ async fn inner_update_file(
),
(
(size_in_bytes::NAME, json!(fs_metadata.len().to_string())),
size_in_bytes::set(fs_metadata.len().to_string()),
size_in_bytes::set(Some(fs_metadata.len().to_string())),
),
{
let date = DateTime::<Local>::from(fs_metadata.modified_or_now()).into();
((date_modified::NAME, json!(date)), date_modified::set(date))
(
(date_modified::NAME, json!(date)),
date_modified::set(Some(date)),
)
},
{
// TODO: Should this be a skip rather than a null-set?
@ -432,19 +433,21 @@ async fn inner_update_file(
if let Some(ref object) = file_path.object {
// if this file had a thumbnail previously, we update it to match the new content
if library.thumbnail_exists(old_cas_id).await? && !file_path.extension.is_empty() {
generate_thumbnail(&file_path.extension, &cas_id, full_path, library).await;
if library.thumbnail_exists(old_cas_id).await? {
if let Some(ext) = &file_path.extension {
generate_thumbnail(ext, &cas_id, full_path, library).await;
// remove the old thumbnail as we're generating a new one
let thumb_path = get_thumbnail_path(library, old_cas_id);
fs::remove_file(&thumb_path)
.await
.map_err(|e| FileIOError::from((thumb_path, e)))?;
// remove the old thumbnail as we're generating a new one
let thumb_path = get_thumbnail_path(library, old_cas_id);
fs::remove_file(&thumb_path)
.await
.map_err(|e| FileIOError::from((thumb_path, e)))?;
}
}
let int_kind = kind as i32;
if object.kind != int_kind {
if object.kind.map(|k| k != int_kind).unwrap_or_default() {
sync.write_op(
db,
sync.shared_update(
@ -456,7 +459,7 @@ async fn inner_update_file(
),
db.object().update(
object::id::equals(object.id),
vec![object::kind::set(int_kind)],
vec![object::kind::set(Some(int_kind))],
),
)
.await?;
@ -509,13 +512,13 @@ pub(super) async fn rename(
.exec()
.await?
{
let new =
IsolatedFilePathData::new(location_id, &location_path, new_path, file_path.is_dir)?;
let is_dir = maybe_missing(file_path.is_dir, "file_path.is_dir")?;
let new = IsolatedFilePathData::new(location_id, &location_path, new_path, is_dir)?;
// If the renamed path is a directory, we have to update every successor
if file_path.is_dir {
let old =
IsolatedFilePathData::new(location_id, &location_path, old_path, file_path.is_dir)?;
if is_dir {
let old = IsolatedFilePathData::new(location_id, &location_path, old_path, is_dir)?;
// TODO: Fetch all file_paths that will be updated and dispatch sync events
let updated = library
@ -524,8 +527,8 @@ pub(super) async fn rename(
"UPDATE file_path \
SET materialized_path = REPLACE(materialized_path, {}, {}) \
WHERE location_id = {}",
PrismaValue::String(format!("{}/{}/", old.materialized_path, old.name)),
PrismaValue::String(format!("{}/{}/", new.materialized_path, new.name)),
PrismaValue::String(format!("{}/{}/", old.materialized_path(), old.name())),
PrismaValue::String(format!("{}/{}/", new.materialized_path(), new.name())),
PrismaValue::Int(location_id as i64)
))
.exec()
@ -539,9 +542,9 @@ pub(super) async fn rename(
.update(
file_path::pub_id::equals(file_path.pub_id),
vec![
file_path::materialized_path::set(new_path_materialized_str),
file_path::name::set(new.name.to_string()),
file_path::extension::set(new.extension.to_string()),
file_path::materialized_path::set(Some(new_path_materialized_str)),
file_path::name::set(Some(new.name().to_string())),
file_path::extension::set(Some(new.extension().to_string())),
],
)
.exec()
@ -589,14 +592,16 @@ pub(super) async fn remove_by_file_path(
Err(e) if e.kind() == ErrorKind::NotFound => {
let db = &library.db;
let is_dir = maybe_missing(file_path.is_dir, "file_path.is_dir")?;
// if it doesn't exist, we can remove it safely from our db
if file_path.is_dir {
delete_directory(
library,
location_id,
Some(file_path.materialized_path.clone()),
)
.await?;
if is_dir {
let materialized_path = maybe_missing(
file_path.materialized_path.clone(),
"file_path.materialized_path",
)?;
delete_directory(library, location_id, Some(materialized_path)).await?;
} else {
db.file_path()
.delete(file_path::pub_id::equals(file_path.pub_id.clone()))
@ -684,9 +689,7 @@ pub(super) async fn extract_inode_and_device_from_path(
.await?
.ok_or(LocationManagerError::MissingLocation(location_id))?;
let Some(location_path) = &location.path else {
return Err(LocationManagerError::MissingPath(location_id))
};
let location_path = maybe_missing(&location.path, "location.path")?;
library
.db
@ -702,12 +705,12 @@ pub(super) async fn extract_inode_and_device_from_path(
|file_path| {
Ok((
u64::from_le_bytes(
file_path.inode[0..8]
maybe_missing(file_path.inode, "file_path.inode")?[0..8]
.try_into()
.map_err(|_| LocationManagerError::InvalidInode)?,
),
u64::from_le_bytes(
file_path.device[0..8]
maybe_missing(file_path.device, "file_path.device")?[0..8]
.try_into()
.map_err(|_| LocationManagerError::InvalidDevice)?,
),
@ -727,11 +730,6 @@ pub(super) async fn extract_location_path(
.map_or(
Err(LocationManagerError::MissingLocation(location_id)),
// NOTE: The following usage of `PathBuf` doesn't incur a new allocation so it's fine
|location| {
location
.path
.map(PathBuf::from)
.ok_or(LocationManagerError::MissingPath(location_id))
},
|location| Ok(maybe_missing(location.path, "location.path")?.into()),
)
}


@ -8,7 +8,10 @@ use crate::{
},
prisma::{file_path, indexer_rules_in_location, location, node, object, PrismaClient},
sync,
util::{db::uuid_to_bytes, error::FileIOError},
util::{
db::{chain_optional_iter, uuid_to_bytes},
error::FileIOError,
},
};
use std::{
@ -672,14 +675,10 @@ pub async fn delete_directory(
) -> Result<(), QueryError> {
let Library { db, .. } = library;
let children_params = if let Some(parent_materialized_path) = parent_materialized_path {
vec![
file_path::location_id::equals(location_id),
file_path::materialized_path::starts_with(parent_materialized_path),
]
} else {
vec![file_path::location_id::equals(location_id)]
};
let children_params = chain_optional_iter(
[file_path::location_id::equals(Some(location_id))],
[parent_materialized_path.map(file_path::materialized_path::starts_with)],
);
// Fetching all object_ids from all children file_paths
let object_ids = db


@ -9,7 +9,7 @@ use crate::{
file_path_for_file_identifier, IsolatedFilePathData,
},
prisma::{file_path, location, PrismaClient, SortOrder},
util::db::chain_optional_iter,
util::db::{chain_optional_iter, maybe_missing},
};
use std::{
@ -76,10 +76,8 @@ impl StatefulJob for FileIdentifierJob {
let location_id = state.init.location.id;
let location_path = state.init.location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(JobError::MissingPath)
};
let location_path =
maybe_missing(&state.init.location.path, "location.path").map(Path::new)?;
let maybe_sub_iso_file_path = if let Some(ref sub_path) = state.init.sub_path {
let full_path = ensure_sub_path_is_in_location(location_path, sub_path)
@ -234,8 +232,8 @@ fn orphan_path_filters(
chain_optional_iter(
[
file_path::object_id::equals(None),
file_path::is_dir::equals(false),
file_path::location_id::equals(location_id),
file_path::is_dir::equals(Some(false)),
file_path::location_id::equals(Some(location_id)),
],
[
// this is a workaround for the cursor not working properly


@ -8,7 +8,10 @@ use crate::{
prisma::{file_path, location, object, PrismaClient},
sync,
sync::SyncManager,
util::db::uuid_to_bytes,
util::{
db::{maybe_missing, uuid_to_bytes},
error::FileIOError,
},
};
use sd_file_ext::{extensions::Extension, kind::ObjectKind};
@ -23,7 +26,7 @@ use futures::future::join_all;
use serde::{Deserialize, Serialize};
use serde_json::json;
use thiserror::Error;
use tokio::{fs, io};
use tokio::fs;
use tracing::{error, info};
use uuid::Uuid;
@ -59,10 +62,12 @@ impl FileMetadata {
pub async fn new(
location_path: impl AsRef<Path>,
iso_file_path: &IsolatedFilePathData<'_>, // TODO: use dedicated CreateUnchecked type
) -> Result<FileMetadata, io::Error> {
) -> Result<FileMetadata, FileIOError> {
let path = location_path.as_ref().join(iso_file_path);
let fs_metadata = fs::metadata(&path).await?;
let fs_metadata = fs::metadata(&path)
.await
.map_err(|e| FileIOError::from((&path, e)))?;
assert!(
!fs_metadata.is_dir(),
@ -75,7 +80,9 @@ impl FileMetadata {
.map(Into::into)
.unwrap_or(ObjectKind::Unknown);
let cas_id = generate_cas_id(&path, fs_metadata.len()).await?;
let cas_id = generate_cas_id(&path, fs_metadata.len())
.await
.map_err(|e| FileIOError::from((&path, e)))?;
info!("Analyzed file: {path:?} {cas_id:?} {kind:?}");
@ -101,25 +108,21 @@ async fn identifier_job_step(
location: &location::Data,
file_paths: &[file_path_for_file_identifier::Data],
) -> Result<(usize, usize), JobError> {
let location_path = location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(JobError::MissingPath)
};
let location_path = maybe_missing(&location.path, "location.path").map(Path::new)?;
let file_path_metas = join_all(file_paths.iter().map(|file_path| async move {
// NOTE: `file_path`'s `materialized_path` begins with a `/` character so we remove it to join it with `location.path`
FileMetadata::new(
let meta = FileMetadata::new(
&location_path,
&IsolatedFilePathData::from((location.id, file_path)),
&IsolatedFilePathData::try_from((location.id, file_path))?,
)
.await
.map(|params| {
(
// SAFETY: This should never happen
Uuid::from_slice(&file_path.pub_id).expect("file_path.pub_id is invalid!"),
(params, file_path),
)
})
.await?;
Ok((
// SAFETY: This should never happen
Uuid::from_slice(&file_path.pub_id).expect("file_path.pub_id is invalid!"),
(meta, file_path),
)) as Result<_, JobError>
}))
.await
.into_iter()
@ -130,7 +133,7 @@ async fn identifier_job_step(
data
})
.collect::<HashMap<_, _>>();
.collect::<HashMap<Uuid, (FileMetadata, &file_path_for_file_identifier::Data)>>();
let unique_cas_ids = file_path_metas
.values()
@ -261,7 +264,7 @@ async fn identifier_job_step(
uuid_to_bytes(object_pub_id),
vec![
object::date_created::set(fp.date_created),
object::kind::set(kind),
object::kind::set(Some(kind)),
],
),
);


@ -7,7 +7,7 @@ use crate::{
file_path_for_file_identifier, IsolatedFilePathData,
},
prisma::{file_path, location, PrismaClient, SortOrder},
util::db::chain_optional_iter,
util::db::{chain_optional_iter, maybe_missing},
};
use std::path::{Path, PathBuf};
@ -33,10 +33,7 @@ pub async fn shallow(
info!("Identifying orphan File Paths...");
let location_id = location.id;
let location_path = location.path.as_ref();
let Some(location_path) = location_path.map(Path::new) else {
return Err(JobError::MissingPath)
};
let location_path = maybe_missing(&location.path, "location.path").map(Path::new)?;
let sub_iso_file_path = if sub_path != Path::new("") {
let full_path = ensure_sub_path_is_in_location(location_path, &sub_path)
@ -125,13 +122,13 @@ fn orphan_path_filters(
chain_optional_iter(
[
file_path::object_id::equals(None),
file_path::is_dir::equals(false),
file_path::location_id::equals(location_id),
file_path::materialized_path::equals(
file_path::is_dir::equals(Some(false)),
file_path::location_id::equals(Some(location_id)),
file_path::materialized_path::equals(Some(
sub_iso_file_path
.materialized_path_for_children()
.expect("sub path for shallow identifier must be a directory"),
),
)),
],
[file_path_id.map(file_path::id::gte)],
)


@ -6,7 +6,10 @@ use crate::{
library::Library,
location::file_path_helper::IsolatedFilePathData,
prisma::{file_path, location},
util::error::FileIOError,
util::{
db::{maybe_missing, MissingFieldError},
error::FileIOError,
},
};
use std::{hash::Hash, path::PathBuf};
@ -77,7 +80,7 @@ impl StatefulJob for FileCopierJob {
)
.await?
.into_iter()
.map(|file_data| {
.flat_map(|file_data| {
// add the currently viewed subdirectory to the location root
let mut full_target_path =
targets_location_path.join(&state.init.target_location_relative_directory_path);
@ -85,11 +88,12 @@ impl StatefulJob for FileCopierJob {
full_target_path.push(construct_target_filename(
&file_data,
&state.init.target_file_name_suffix,
));
FileCopierJobStep {
)?);
Ok::<_, MissingFieldError>(FileCopierJobStep {
source_file_data: file_data,
target_full_path: full_target_path,
}
})
})
.collect();
@ -114,7 +118,7 @@ impl StatefulJob for FileCopierJob {
let data = extract_job_data!(state);
if source_file_data.file_path.is_dir {
if maybe_missing(source_file_data.file_path.is_dir, "file_path.is_dir")? {
fs::create_dir_all(target_full_path)
.await
.map_err(|e| FileIOError::from((target_full_path, e)))?;


@ -90,7 +90,7 @@ impl StatefulJob for FileCutterJob {
let full_output = data
.full_target_directory_path
.join(construct_target_filename(step, &None));
.join(construct_target_filename(step, &None)?);
if step.full_path.parent().ok_or(JobError::Path)?
== full_output.parent().ok_or(JobError::Path)?


@ -5,7 +5,7 @@ use crate::{
},
library::Library,
prisma::{file_path, location},
util::error::FileIOError,
util::{db::maybe_missing, error::FileIOError},
};
use std::hash::Hash;
@ -64,7 +64,10 @@ impl StatefulJob for FileDeleterJob {
) -> Result<(), JobError> {
let step = &state.steps[0];
if step.file_path.is_dir {
// need to handle stuff such as querying prisma for all paths of a file, and deleting all of those if requested (with a checkbox in the ui)
// maybe a files.countOccurances and/or files.getPath(location_id, path_id) to show how many of these files would be deleted (and where?)
if maybe_missing(step.file_path.is_dir, "file_path.is_dir")? {
fs::remove_dir_all(&step.full_path).await
} else {
fs::remove_file(&step.full_path).await


@ -6,7 +6,7 @@ use crate::{
library::Library,
location::file_path_helper::IsolatedFilePathData,
prisma::{file_path, location},
util::error::FileIOError,
util::{db::maybe_missing, error::FileIOError},
};
use std::{hash::Hash, path::PathBuf};
@ -90,7 +90,7 @@ impl StatefulJob for FileEraserJob {
let step = &state.steps[0];
// Had to use `state.steps[0]` all over the place to appease the borrow checker
if step.file_path.is_dir {
if maybe_missing(step.file_path.is_dir, "file_path.is_dir")? {
let data = extract_job_data_mut!(state);
let mut dir = tokio::fs::read_dir(&step.full_path)


@ -1,7 +1,7 @@
use crate::{
location::{file_path_helper::FilePathError, LocationError},
prisma::file_path,
util::error::FileIOError,
util::{db::MissingFieldError, error::FileIOError},
};
use std::path::Path;
@ -28,4 +28,6 @@ pub enum FileSystemJobsError {
MatchingSrcDest(Box<Path>),
#[error("action would overwrite another file: {}", .0.display())]
WouldOverwrite(Box<Path>),
#[error("missing-field: {0}")]
MissingField(#[from] MissingFieldError),
}


@ -4,6 +4,7 @@ use crate::{
LocationError,
},
prisma::{file_path, location, PrismaClient},
util::db::{maybe_missing, MissingFieldError},
};
use std::path::{Path, PathBuf};
@ -42,14 +43,16 @@ pub async fn get_location_path_from_location_id(
db: &PrismaClient,
location_id: file_path::id::Type,
) -> Result<PathBuf, FileSystemJobsError> {
db.location()
let location = db
.location()
.find_unique(location::id::equals(location_id))
.exec()
.await?
.and_then(|location| location.path.map(PathBuf::from))
.ok_or(FileSystemJobsError::Location(LocationError::IdNotFound(
location_id,
)))
)))?;
Ok(maybe_missing(location.path, "location.path")?.into())
}
pub async fn get_many_files_datas(
@ -77,9 +80,11 @@ pub async fn get_many_files_datas(
.map(|(maybe_file_path, file_path_id)| {
maybe_file_path
.ok_or(FileSystemJobsError::FilePathIdNotFound(*file_path_id))
.map(|path_data| FileData {
full_path: location_path.join(IsolatedFilePathData::from(&path_data)),
file_path: path_data,
.and_then(|path_data| {
Ok(FileData {
full_path: location_path.join(IsolatedFilePathData::try_from(&path_data)?),
file_path: path_data,
})
})
})
.collect::<Result<Vec<_>, _>>()
@ -102,11 +107,13 @@ pub async fn get_file_data_from_isolated_file_path(
.into_boxed_path(),
)
})
.map(|path_data| FileData {
full_path: location_path
.as_ref()
.join(IsolatedFilePathData::from(&path_data)),
file_path: path_data,
.and_then(|path_data| {
Ok(FileData {
full_path: location_path
.as_ref()
.join(IsolatedFilePathData::try_from(&path_data)?),
file_path: path_data,
})
})
}
@ -125,52 +132,43 @@ pub async fn fetch_source_and_target_location_paths(
.await?
{
(Some(source_location), Some(target_location)) => Ok((
source_location
.path
.map(PathBuf::from)
.ok_or(FileSystemJobsError::Location(LocationError::MissingPath(
source_location_id,
)))?,
target_location
.path
.map(PathBuf::from)
.ok_or(FileSystemJobsError::Location(LocationError::MissingPath(
target_location_id,
)))?,
maybe_missing(source_location.path.map(PathBuf::from), "location.path")?,
maybe_missing(target_location.path.map(PathBuf::from), "location.path")?,
)),
(None, _) => Err(FileSystemJobsError::Location(LocationError::IdNotFound(
source_location_id,
))),
(_, None) => Err(FileSystemJobsError::Location(LocationError::IdNotFound(
target_location_id,
))),
(None, _) => Err(LocationError::IdNotFound(source_location_id))?,
(_, None) => Err(LocationError::IdNotFound(target_location_id))?,
}
}
fn construct_target_filename(
source_file_data: &FileData,
target_file_name_suffix: &Option<String>,
) -> String {
) -> Result<String, MissingFieldError> {
// extension wizardry for cloning and such
// if no suffix has been selected, just use the file name
// if a suffix is provided and it's a directory, use the directory name + suffix
// if a suffix is provided and it's a file, use the (file name + suffix).extension
if let Some(ref suffix) = target_file_name_suffix {
if source_file_data.file_path.is_dir {
format!("{}{suffix}", source_file_data.file_path.name)
Ok(if let Some(ref suffix) = target_file_name_suffix {
if maybe_missing(source_file_data.file_path.is_dir, "file_path.is_dir")? {
format!(
"{}{suffix}",
maybe_missing(&source_file_data.file_path.name, "file_path.name")?
)
} else {
format!(
"{}{suffix}.{}",
source_file_data.file_path.name, source_file_data.file_path.extension,
maybe_missing(&source_file_data.file_path.name, "file_path.name")?,
maybe_missing(&source_file_data.file_path.extension, "file_path.extension")?,
)
}
} else if source_file_data.file_path.is_dir {
source_file_data.file_path.name.clone()
} else if *maybe_missing(&source_file_data.file_path.is_dir, "file_path.is_dir")? {
maybe_missing(&source_file_data.file_path.name, "file_path.name")?.clone()
} else {
format!(
"{}.{}",
source_file_data.file_path.name, source_file_data.file_path.extension
maybe_missing(&source_file_data.file_path.name, "file_path.name")?,
maybe_missing(&source_file_data.file_path.extension, "file_path.extension")?
)
}
})
}


@ -16,7 +16,6 @@ pub mod validation;
// Objects are what can be added to Spaces
// Object selectables!
object::select!(object_just_id_has_thumbnail { id has_thumbnail });
object::select!(object_for_file_identifier {
pub_id
file_paths: select { pub_id cas_id }


@ -5,7 +5,7 @@ use crate::{
library::Library,
location::file_path_helper::{file_path_for_thumbnailer, FilePathError, IsolatedFilePathData},
prisma::location,
util::{error::FileIOError, version_manager::VersionManagerError},
util::{db::maybe_missing, error::FileIOError, version_manager::VersionManagerError},
};
use std::{
@ -230,7 +230,10 @@ async fn process_step(
ctx.progress(vec![JobReportUpdate::Message(format!(
"Processing {}",
step.file_path.materialized_path
maybe_missing(
&step.file_path.materialized_path,
"file_path.materialized_path"
)?
))]);
let data = state
@ -268,14 +271,14 @@ pub async fn inner_process_step(
let thumbnail_dir = thumbnail_dir.as_ref();
// assemble the file path
let path = location_path.join(IsolatedFilePathData::from((location.id, file_path)));
let path = location_path.join(IsolatedFilePathData::try_from((location.id, file_path))?);
trace!("image_file {:?}", file_path);
// get cas_id, if none found skip
let Some(cas_id) = &file_path.cas_id else {
warn!(
"skipping thumbnail generation for {}",
file_path.materialized_path
maybe_missing(&file_path.materialized_path, "file_path.materialized_path")?
);
return Ok(());


@ -133,13 +133,13 @@ async fn get_files_by_extensions(
Ok(db
.file_path()
.find_many(vec![
file_path::location_id::equals(location_id),
file_path::location_id::equals(Some(location_id)),
file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
file_path::materialized_path::equals(
file_path::materialized_path::equals(Some(
parent_isolated_file_path_data
.materialized_path_for_children()
.expect("sub path iso_file_path must be a directory"),
),
)),
])
.select(file_path_for_thumbnailer::select())
.exec()


@ -175,7 +175,7 @@ async fn get_files_by_extensions(
Ok(db
.file_path()
.find_many(vec![
file_path::location_id::equals(iso_file_path.location_id()),
file_path::location_id::equals(Some(iso_file_path.location_id())),
file_path::extension::in_vec(extensions.iter().map(ToString::to_string).collect()),
file_path::materialized_path::starts_with(
iso_file_path


@ -7,7 +7,7 @@ use crate::{
location::file_path_helper::{file_path_for_object_validator, IsolatedFilePathData},
prisma::{file_path, location},
sync,
util::error::FileIOError,
util::{db::maybe_missing, error::FileIOError},
};
use std::path::PathBuf;
@ -60,8 +60,8 @@ impl StatefulJob for ObjectValidatorJob {
state.steps.extend(
db.file_path()
.find_many(vec![
file_path::location_id::equals(state.init.location_id),
file_path::is_dir::equals(false),
file_path::location_id::equals(Some(state.init.location_id)),
file_path::is_dir::equals(Some(false)),
file_path::integrity_checksum::equals(None),
])
.select(file_path_for_object_validator::select())
@ -94,10 +94,10 @@ impl StatefulJob for ObjectValidatorJob {
// we can also compare old and new checksums here
// This if is just to make sure, we already queried objects where integrity_checksum is null
if file_path.integrity_checksum.is_none() {
let path = data.root_path.join(IsolatedFilePathData::from((
file_path.location.id,
let path = data.root_path.join(IsolatedFilePathData::try_from((
maybe_missing(&file_path.location, "file_path.location")?.id,
file_path,
)));
))?);
let checksum = file_checksum(&path)
.await
.map_err(|e| FileIOError::from((path, e)))?;
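
The comment above notes that old and new checksums could also be compared here. A rough sketch of that idea; the Result<String, std::io::Error> return type of file_checksum is an assumption inferred from how it is used above, and is_unchanged is a hypothetical helper name:

// Hypothetical follow-up: revalidate a file path that already has a stored checksum.
async fn is_unchanged(
	path: &std::path::Path,
	stored_checksum: &Option<String>,
) -> Result<bool, std::io::Error> {
	let fresh = file_checksum(path).await?;
	Ok(stored_checksum.as_deref() == Some(fresh.as_str()))
}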

View file

@ -6,7 +6,7 @@ use std::{collections::HashMap, sync::Arc};
use sd_sync::*;
use serde_json::{from_value, json, to_vec, Value};
use serde_json::{json, to_vec, Value};
use tokio::sync::broadcast::{self, Receiver, Sender};
use uhlc::{HLCBuilder, HLC, NTP64};
use uuid::Uuid;
@ -200,33 +200,10 @@ impl SyncManager {
match ModelSyncData::from_op(op.typ.clone()).unwrap() {
ModelSyncData::FilePath(id, shared_op) => match shared_op {
SharedOperationData::Create(SharedOperationCreateData::Unique(mut data)) => {
SharedOperationData::Create(SharedOperationCreateData::Unique(data)) => {
db.file_path()
.create(
id.pub_id,
{
let val: std::collections::HashMap<String, Value> =
from_value(data.remove(file_path::location::NAME).unwrap())
.unwrap();
let val = val.into_iter().next().unwrap();
location::UniqueWhereParam::deserialize(&val.0, val.1).unwrap()
},
serde_json::from_value(
data.remove(file_path::materialized_path::NAME).unwrap(),
)
.unwrap(),
serde_json::from_value(data.remove(file_path::name::NAME).unwrap())
.unwrap(),
serde_json::from_value(
data.remove(file_path::extension::NAME)
.unwrap_or_else(|| serde_json::Value::String("".to_string())),
)
.unwrap(),
serde_json::from_value(data.remove(file_path::inode::NAME).unwrap())
.unwrap(),
serde_json::from_value(data.remove(file_path::device::NAME).unwrap())
.unwrap(),
data.into_iter()
.flat_map(|(k, v)| file_path::SetParam::deserialize(&k, v))
.collect(),
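
With every scalar column except pub_id now optional, the sync-side create no longer needs the hand-written per-field extraction removed above; everything flows through file_path::SetParam::deserialize. Put back together (the .exec().await? continuation is assumed, as it falls outside this hunk), the new call is roughly:

// Remaining (column name, JSON value) pairs become SetParams; pairs that fail
// to deserialize are skipped by flat_map.
db.file_path()
	.create(
		id.pub_id,
		data.into_iter()
			.flat_map(|(k, v)| file_path::SetParam::deserialize(&k, v))
			.collect(),
	)
	.exec()
	.await?;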

View file

@ -75,3 +75,35 @@ pub fn chain_optional_iter<T>(
pub fn uuid_to_bytes(uuid: Uuid) -> Vec<u8> {
uuid.as_bytes().to_vec()
}
#[derive(Error, Debug)]
#[error("Missing field {0}")]
pub struct MissingFieldError(&'static str);
pub trait OptionalField: Sized {
type Out;
fn transform(self) -> Option<Self::Out>;
}
impl<T> OptionalField for Option<T> {
type Out = T;
fn transform(self) -> Option<T> {
self
}
}
impl<'a, T> OptionalField for &'a Option<T> {
type Out = &'a T;
fn transform(self) -> Option<Self::Out> {
self.as_ref()
}
}
pub fn maybe_missing<'a, T: OptionalField>(
data: T,
field: &'static str,
) -> Result<T::Out, MissingFieldError> {
data.transform().ok_or(MissingFieldError(field))
}
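
A quick usage sketch for the new helper, assuming it lives at crate::util::db as the imports elsewhere in this commit suggest. The two OptionalField impls let callers either borrow or consume the Option:

use crate::util::db::{maybe_missing, MissingFieldError};

fn demo() -> Result<(), MissingFieldError> {
	let name: Option<String> = Some("file.txt".into());

	// &Option<T> impl: yields &T without consuming the Option.
	let borrowed: &String = maybe_missing(&name, "file_path.name")?;
	assert_eq!(borrowed, "file.txt");

	// Option<T> impl: consumes the Option and yields T.
	let owned: String = maybe_missing(name, "file_path.name")?;
	assert_eq!(owned, "file.txt");

	Ok(())
}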

View file

@ -55,7 +55,7 @@ export default ({
? filePathData.name
: filePathData.name + '.' + filePathData.extension;
if (newName !== oldName) {
if (oldName !== null && filePathData.location_id !== null && newName !== oldName) {
renameFile.mutate({
location_id: filePathData.location_id,
kind: {
@ -79,7 +79,7 @@ export default ({
if (!node) return;
range.setStart(node, 0);
range.setEnd(node, filePathData?.name.length || 0);
range.setEnd(node, filePathData?.name?.length || 0);
const sel = window.getSelection();
sel?.removeAllRanges();

View file

@ -172,7 +172,7 @@ function FileThumb({ size, cover, ...props }: ThumbProps) {
}
break;
default:
setSrc(getIcon(kind, isDark, extension, isDir));
if (isDir !== null) setSrc(getIcon(kind, isDark, extension, isDir));
break;
}
}, [

View file

@ -5,7 +5,7 @@ export type Procedures = {
queries:
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "categories.list", input: LibraryArgs<null>, result: { [key in Category]: number } } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number; pub_id: number[]; kind: number; key_id: number | null; hidden: boolean; favorite: boolean; important: boolean; has_thumbnail: boolean; has_thumbstrip: boolean; has_video_preview: boolean; ipfs_id: string | null; note: string | null; date_created: string; date_accessed: string | null; file_paths: FilePath[]; media_data: MediaData | null } | null } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null; file_paths: FilePath[]; media_data: MediaData | null } | null } |
{ key: "invalidation.test-invalidate", input: never, result: number } |
{ key: "jobs.getHistory", input: LibraryArgs<null>, result: JobReport[] } |
{ key: "jobs.getRunning", input: LibraryArgs<null>, result: JobReport[] } |
@ -98,7 +98,7 @@ export type FileDeleterJobInit = { location_id: number; file_path_ids: number[]
export type FileEraserJobInit = { location_id: number; file_path_ids: number[]; passes: string }
export type FilePath = { id: number; pub_id: number[]; is_dir: boolean; cas_id: string | null; integrity_checksum: string | null; location_id: number; materialized_path: string; name: string; extension: string; size_in_bytes: string; inode: number[]; device: number[]; object_id: number | null; key_id: number | null; date_created: string; date_modified: string; date_indexed: string }
export type FilePath = { id: number; pub_id: number[]; is_dir: boolean | null; cas_id: string | null; integrity_checksum: string | null; location_id: number | null; materialized_path: string | null; name: string | null; extension: string | null; size_in_bytes: string | null; inode: number[] | null; device: number[] | null; object_id: number | null; key_id: number | null; date_created: string | null; date_modified: string | null; date_indexed: string | null }
export type FilePathFilterArgs = { locationId?: number | null; search?: string; extension?: string | null; createdAt?: OptionalRange<string>; path?: string | null; object?: ObjectFilterArgs | null }
@ -106,7 +106,7 @@ export type FilePathSearchArgs = { take?: number | null; order?: FilePathSearchO
export type FilePathSearchOrdering = { name: SortOrder } | { sizeInBytes: SortOrder } | { dateCreated: SortOrder } | { dateModified: SortOrder } | { dateIndexed: SortOrder } | { object: ObjectSearchOrdering }
export type FilePathWithObject = { id: number; pub_id: number[]; is_dir: boolean; cas_id: string | null; integrity_checksum: string | null; location_id: number; materialized_path: string; name: string; extension: string; size_in_bytes: string; inode: number[]; device: number[]; object_id: number | null; key_id: number | null; date_created: string; date_modified: string; date_indexed: string; object: Object | null }
export type FilePathWithObject = { id: number; pub_id: number[]; is_dir: boolean | null; cas_id: string | null; integrity_checksum: string | null; location_id: number | null; materialized_path: string | null; name: string | null; extension: string | null; size_in_bytes: string | null; inode: number[] | null; device: number[] | null; object_id: number | null; key_id: number | null; date_created: string | null; date_modified: string | null; date_indexed: string | null; object: Object | null }
export type FromPattern = { pattern: string; replace_all: boolean }
@ -184,7 +184,7 @@ export type NodeConfig = { id: string; name: string; p2p_port: number | null; p2
export type NodeState = ({ id: string; name: string; p2p_port: number | null; p2p_email: string | null; p2p_img_url: string | null }) & { data_path: string }
export type Object = { id: number; pub_id: number[]; kind: number; key_id: number | null; hidden: boolean; favorite: boolean; important: boolean; has_thumbnail: boolean; has_thumbstrip: boolean; has_video_preview: boolean; ipfs_id: string | null; note: string | null; date_created: string; date_accessed: string | null }
export type Object = { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null }
export type ObjectFilterArgs = { favorite?: boolean | null; hidden?: ObjectHiddenFilter; dateAccessed?: MaybeNot<string | null> | null; kind?: number[]; tags?: number[] }
@ -196,7 +196,7 @@ export type ObjectSearchOrdering = { dateAccessed: SortOrder }
export type ObjectValidatorArgs = { id: number; path: string }
export type ObjectWithFilePaths = { id: number; pub_id: number[]; kind: number; key_id: number | null; hidden: boolean; favorite: boolean; important: boolean; has_thumbnail: boolean; has_thumbstrip: boolean; has_video_preview: boolean; ipfs_id: string | null; note: string | null; date_created: string; date_accessed: string | null; file_paths: FilePath[] }
export type ObjectWithFilePaths = { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null; file_paths: FilePath[] }
/**
* Represents the operating system which the remote peer is running.