[ENG-1305] "Add Locations" onboarding flow (#1611)

* Locations onboarding flow

* Make default locations optional on library creation

* Backend for default locations on library creation

* Rust fmt

* Enhance error handling and introduce more resilience

* Remove .spacedrive metadata on library deletion

* Rust fmt again

* Default to videos

---------

Co-authored-by: Ericson Fogo Soares <ericson.ds999@gmail.com>
Authored by nikec on 2023-10-19 05:18:36 +02:00, committed by GitHub
parent 1fe61700e0
commit ace3527bfc
17 changed files with 652 additions and 147 deletions
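For anyone reviewing the new API surface, here is a minimal TypeScript sketch of how a frontend could drive it once the bindings generated in this commit are in place. The component wrapper is illustrative, and the `library.create` mutation key is an assumption based on how the library router is mounted elsewhere in the codebase; only `locations.systemLocations` and the optional `default_locations` argument are taken directly from this diff.

import { useBridgeMutation, useBridgeQuery } from '@sd/client';

export function CreateLibraryWithDefaults() {
	// Null entries mean that directory does not exist on this machine.
	const systemLocations = useBridgeQuery(['locations.systemLocations']);

	// `default_locations` is optional (`#[specta(default)]` on the Rust side),
	// so omitting it creates a library without any default locations.
	const createLibrary = useBridgeMutation('library.create');

	return (
		<button
			disabled={!systemLocations.data}
			onClick={() =>
				createLibrary.mutate({
					name: 'My Library',
					default_locations: {
						desktop: false,
						documents: true,
						downloads: true,
						pictures: true,
						music: false,
						videos: true
					}
				})
			}
		>
			Create library
		</button>
	);
}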

Cargo.lock (generated)

@@ -1649,7 +1649,16 @@ version = "4.0.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f51c5d4ddabd36886dd3e1438cb358cdcb0d7c499cb99cb4ac2e38e18b5cb210"
 dependencies = [
- "dirs-sys",
+ "dirs-sys 0.3.7",
 ]

+[[package]]
+name = "directories"
+version = "5.0.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9a49173b84e034382284f27f1af4dcbbd231ffa358c0fe316541a7337f376a35"
+dependencies = [
+ "dirs-sys 0.4.1",
+]
+
 [[package]]

@@ -1673,6 +1682,18 @@ dependencies = [
 "winapi",
 ]

+[[package]]
+name = "dirs-sys"
+version = "0.4.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "520f05a5cbd335fae5a99ff7a6ab8627577660ee5cfd6a94a6a929b52ff0321c"
+dependencies = [
+ "libc",
+ "option-ext",
+ "redox_users",
+ "windows-sys 0.48.0",
+]
+
 [[package]]
 name = "dirs-sys-next"
 version = "0.1.2"

@@ -4892,6 +4913,12 @@ dependencies = [
 "tokio-stream",
 ]

+[[package]]
+name = "option-ext"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "04744f49eae99ab78e0d5c0b603ab218f515ea8cfe5a456d7629ad883a3b6e7d"
+
 [[package]]
 name = "ordered-float"
 version = "2.10.0"

@@ -5480,7 +5507,7 @@ name = "prisma-client-rust-cli"
 version = "0.6.8"
 source = "git+https://github.com/Brendonovich/prisma-client-rust?branch=spacedrive#b93c4597daa40ca87e7b63c9b6b5d1b517d9df0b"
 dependencies = [
- "directories",
+ "directories 4.0.1",
 "flate2",
 "http",
 "prisma-client-rust-generator",

@@ -5500,7 +5527,7 @@ name = "prisma-client-rust-generator"
 version = "0.6.8"
 source = "git+https://github.com/Brendonovich/prisma-client-rust?branch=spacedrive#b93c4597daa40ca87e7b63c9b6b5d1b517d9df0b"
 dependencies = [
- "directories",
+ "directories 4.0.1",
 "flate2",
 "http",
 "prisma-client-rust-sdk",

@@ -6677,6 +6704,7 @@ dependencies = [
 "chrono",
 "ctor 0.2.4",
 "dashmap",
+ "directories 5.0.1",
 "enumflags2 0.7.7",
 "flate2",
 "futures",


@@ -106,6 +106,7 @@ http-body = "0.4.5"
 pin-project-lite = "0.2.13"
 bytes = "1.5.0"
 reqwest = { version = "0.11.20", features = ["json"] }
+directories = "5.0.1"

 [target.'cfg(target_os = "macos")'.dependencies]
 plist = "1"


@@ -1,16 +1,24 @@
 use crate::{
-	library::{LibraryConfig, LibraryName},
+	library::{Library, LibraryConfig, LibraryName},
+	location::{scan_location, LocationCreateArgs},
 	util::MaybeUndefined,
 	volume::get_volumes,
+	Node,
 };

-use chrono::Utc;
-use rspc::alpha::AlphaRouter;
 use sd_p2p::spacetunnel::RemoteIdentity;
-use sd_prisma::prisma::statistics;
+use sd_prisma::prisma::{indexer_rule, statistics};
+
+use std::{convert::identity, sync::Arc};
+
+use chrono::Utc;
+use directories::UserDirs;
+use futures_concurrency::future::Join;
+use rspc::{alpha::AlphaRouter, ErrorCode};
 use serde::{Deserialize, Serialize};
 use specta::Type;
-use tracing::debug;
+use tokio::spawn;
+use tracing::{debug, error};
 use uuid::Uuid;

 use super::{

@@ -105,25 +113,164 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
 			})
 		})
 		.procedure("create", {
+			#[derive(Deserialize, Type, Default)]
+			pub struct DefaultLocations {
+				desktop: bool,
+				documents: bool,
+				downloads: bool,
+				pictures: bool,
+				music: bool,
+				videos: bool,
+			}
+
 			#[derive(Deserialize, Type)]
 			pub struct CreateLibraryArgs {
 				name: LibraryName,
+				#[specta(default)]
+				default_locations: DefaultLocations,
 			}

-			R.mutation(|node, args: CreateLibraryArgs| async move {
-				debug!("Creating library");
-
-				let library = node.libraries.create(args.name, None, &node).await?;
-
-				debug!("Created library {}", library.id);
-
-				Ok(LibraryConfigWrapped {
-					uuid: library.id,
-					instance_id: library.instance_uuid,
-					instance_public_key: library.identity.to_remote_identity(),
-					config: library.config(),
-				})
-			})
+			async fn create_default_locations_on_library_creation(
+				DefaultLocations {
+					desktop,
+					documents,
+					downloads,
+					pictures,
+					music,
+					videos,
+				}: DefaultLocations,
+				node: Arc<Node>,
+				library: Arc<Library>,
+			) -> Result<(), rspc::Error> {
+				// If all of them are false, we skip
+				if [!desktop, !documents, !downloads, !pictures, !music, !videos]
+					.into_iter()
+					.all(identity)
+				{
+					return Ok(());
+				}
+
+				let Some(default_locations_paths) = UserDirs::new() else {
+					return Err(rspc::Error::new(
+						ErrorCode::NotFound,
+						"Didn't find any system locations".to_string(),
+					));
+				};
+
+				let default_rules_ids = library
+					.db
+					.indexer_rule()
+					.find_many(vec![indexer_rule::default::equals(Some(true))])
+					.select(indexer_rule::select!({ id }))
+					.exec()
+					.await
+					.map_err(|e| {
+						rspc::Error::with_cause(
+							ErrorCode::InternalServerError,
+							"Failed to get default indexer rules for default locations".to_string(),
+							e,
+						)
+					})?
+					.into_iter()
+					.map(|rule| rule.id)
+					.collect::<Vec<_>>();
+
+				let mut maybe_error = None;
+
+				[
+					(desktop, default_locations_paths.desktop_dir()),
+					(documents, default_locations_paths.document_dir()),
+					(downloads, default_locations_paths.download_dir()),
+					(pictures, default_locations_paths.picture_dir()),
+					(music, default_locations_paths.audio_dir()),
+					(videos, default_locations_paths.video_dir()),
+				]
+				.into_iter()
+				.filter_map(|entry| {
+					if let (true, Some(path)) = entry {
+						let node = Arc::clone(&node);
+						let library = Arc::clone(&library);
+						let indexer_rules_ids = default_rules_ids.clone();
+						let path = path.to_path_buf();
+						Some(spawn(async move {
+							let Some(location) = LocationCreateArgs {
+								path,
+								dry_run: false,
+								indexer_rules_ids,
+							}
+							.create(&node, &library)
+							.await
+							.map_err(rspc::Error::from)?
+							else {
+								return Ok(());
+							};
+
+							scan_location(&node, &library, location)
+								.await
+								.map_err(rspc::Error::from)
+						}))
+					} else {
+						None
+					}
+				})
+				.collect::<Vec<_>>()
+				.join()
+				.await
+				.into_iter()
+				.map(|spawn_res| {
+					spawn_res
+						.map_err(|_| {
+							rspc::Error::new(
+								ErrorCode::InternalServerError,
+								"A task to create a default location failed".to_string(),
+							)
+						})
+						.and_then(identity)
+				})
+				.fold(&mut maybe_error, |maybe_error, res| {
+					if let Err(e) = res {
+						error!("Failed to create default location: {e:#?}");
+						*maybe_error = Some(e);
+					}
+					maybe_error
+				});
+
+				if let Some(e) = maybe_error {
+					return Err(e);
+				}
+
+				debug!("Created default locations");
+
+				Ok(())
+			}
+
+			R.mutation(
+				|node,
+				 CreateLibraryArgs {
+					 name,
+					 default_locations,
+				 }: CreateLibraryArgs| async move {
+					debug!("Creating library");
+
+					let library = node.libraries.create(name, None, &node).await?;
+
+					debug!("Created library {}", library.id);
+
+					create_default_locations_on_library_creation(
+						default_locations,
+						node,
+						Arc::clone(&library),
+					)
+					.await?;
+
+					Ok(LibraryConfigWrapped {
+						uuid: library.id,
+						instance_id: library.instance_uuid,
+						instance_public_key: library.identity.to_remote_identity(),
+						config: library.config(),
+					})
+				},
+			)
 		})
 		.procedure("edit", {
 			#[derive(Type, Deserialize)]


@@ -15,9 +15,10 @@ use crate::{
 	util::AbortOnDrop,
 };

-use std::path::PathBuf;
+use std::path::{Path, PathBuf};

 use chrono::{DateTime, Utc};
+use directories::UserDirs;
 use rspc::{self, alpha::AlphaRouter, ErrorCode};
 use serde::{Deserialize, Serialize};
 use specta::Type;

@@ -57,6 +58,28 @@ pub enum ExplorerItem {
 		item: PeerMetadata,
 	},
 }

+#[derive(Serialize, Type, Debug)]
+pub struct SystemLocations {
+	desktop: Option<PathBuf>,
+	documents: Option<PathBuf>,
+	downloads: Option<PathBuf>,
+	pictures: Option<PathBuf>,
+	music: Option<PathBuf>,
+	videos: Option<PathBuf>,
+}
+
+impl From<UserDirs> for SystemLocations {
+	fn from(value: UserDirs) -> Self {
+		Self {
+			desktop: value.desktop_dir().map(Path::to_path_buf),
+			documents: value.document_dir().map(Path::to_path_buf),
+			downloads: value.download_dir().map(Path::to_path_buf),
+			pictures: value.picture_dir().map(Path::to_path_buf),
+			music: value.audio_dir().map(Path::to_path_buf),
+			videos: value.video_dir().map(Path::to_path_buf),
+		}
+	}
+}
+
 impl ExplorerItem {
 	pub fn name(&self) -> &str {

@@ -366,6 +389,16 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> {
 				}
 			}),
 		)
+		.procedure("systemLocations", {
+			R.query(|_, _: ()| async move {
+				UserDirs::new().map(SystemLocations::from).ok_or_else(|| {
+					rspc::Error::new(
+						ErrorCode::NotFound,
+						"Didn't find any system locations".to_string(),
+					)
+				})
+			})
+		})
 		.merge("indexer_rules.", mount_indexer_rule_routes())
 }

@@ -467,23 +500,4 @@ fn mount_indexer_rule_routes() -> AlphaRouter<Ctx> {
 				.map_err(Into::into)
 			})
 		})
-	// .procedure("createDirectory", {
-	// 	#[derive(Type, Deserialize)]
-	// 	struct CreateDirectoryArgs {
-	// 		location_id: location::id::Type,
-	// 		subpath: String,
-	// 	}
-	// 	R.with2(library())
-	// 		.query(|(_, library), args: CreateDirectoryArgs| async move {
-	// 			let location = find_location(&library, args.location_id)
-	// 				.exec()
-	// 				.await?
-	// 				.ok_or(LocationError::IdNotFound(args.location_id))?;
-	// 			let mut path = Path::new(&location.path.unwrap_or_default());
-	// 			path.push(args.subpath);
-	// 			Ok(())
-	// 		})
-	// })
 }
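Because `UserDirs` can be missing any individual directory, every field of `SystemLocations` comes back to the client as `string | null`. A hedged sketch of how a caller might keep only the directories that actually exist before offering them as defaults; this hypothetical helper is not part of the commit (the onboarding screen later in this diff does the equivalent inline):

import { SystemLocations, useBridgeQuery } from '@sd/client';

// Hypothetical helper: collapse the nullable response into a map of
// directories that are actually present on this machine.
export function useAvailableSystemLocations() {
	const { data } = useBridgeQuery(['locations.systemLocations']);

	if (!data) return undefined;

	return Object.fromEntries(
		Object.entries(data).filter(([, path]) => path != null)
	) as Partial<Record<keyof SystemLocations, string>>;
}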


@@ -1,7 +1,10 @@
 use crate::{
 	api::{utils::InvalidateOperationEvent, CoreEvent},
 	invalidate_query,
-	location::indexer,
+	location::{
+		indexer,
+		metadata::{LocationMetadataError, SpacedriveLocationMetadataFile},
+	},
 	node::Platform,
 	object::tag,
 	p2p::{self, IdentityOrRemoteIdentity},

@@ -25,10 +28,11 @@ use std::{
 };

 use chrono::Utc;
+use futures_concurrency::future::{Join, TryJoin};
 use sd_core_sync::SyncMessage;
 use sd_p2p::spacetunnel::Identity;
 use sd_prisma::prisma::instance;
-use tokio::{fs, io, sync::RwLock, try_join};
+use tokio::{fs, io, sync::RwLock};
 use tracing::{debug, error, info, warn};
 use uuid::Uuid;

@@ -276,10 +280,41 @@ impl Libraries {
 			.emit(LibraryManagerEvent::Delete(library.clone()))
 			.await;

+		if let Ok(location_paths) = library
+			.db
+			.location()
+			.find_many(vec![])
+			.select(location::select!({ path }))
+			.exec()
+			.await
+			.map(|locations| locations.into_iter().filter_map(|location| location.path))
+			.map_err(|e| error!("Failed to fetch locations for library deletion: {e:#?}"))
+		{
+			location_paths
+				.map(|location_path| async move {
+					if let Some(mut sd_metadata) =
+						SpacedriveLocationMetadataFile::try_load(location_path).await?
+					{
+						sd_metadata.remove_library(*id).await?;
+					}
+
+					Ok::<_, LocationMetadataError>(())
+				})
+				.collect::<Vec<_>>()
+				.join()
+				.await
+				.into_iter()
+				.for_each(|res| {
+					if let Err(e) = res {
+						error!("Failed to remove library from location metadata: {e:#?}");
+					}
+				});
+		}
+
 		let db_path = self.libraries_dir.join(format!("{}.db", library.id));
 		let sd_lib_path = self.libraries_dir.join(format!("{}.sdlibrary", library.id));

-		try_join!(
+		(
 			async {
 				fs::remove_file(&db_path)
 					.await

@@ -290,7 +325,9 @@ impl Libraries {
 					.await
 					.map_err(|e| LibraryManagerError::FileIO(FileIOError::from((sd_lib_path, e))))
 			},
-		)?;
+		)
+		.try_join()
+		.await?;

 		// We only remove here after files deletion
 		let library = libraries_write_guard


@@ -1,9 +1,12 @@
 use crate::{
 	prisma::location,
-	util::{db::MissingFieldError, error::FileIOError},
+	util::{
+		db::MissingFieldError,
+		error::{FileIOError, NonUtf8PathError},
+	},
 };

-use std::path::PathBuf;
+use std::path::Path;

 use rspc::{self, ErrorCode};
 use thiserror::Error;

@@ -18,7 +21,7 @@ use super::{
 pub enum LocationError {
 	// Not Found errors
 	#[error("location not found <path='{}'>", .0.display())]
-	PathNotFound(PathBuf),
+	PathNotFound(Box<Path>),
 	#[error("location not found <uuid='{0}'>")]
 	UuidNotFound(Uuid),
 	#[error("location not found <id='{0}'>")]

@@ -26,29 +29,31 @@ pub enum LocationError {
 	// User errors
 	#[error("location not a directory <path='{}'>", .0.display())]
-	NotDirectory(PathBuf),
+	NotDirectory(Box<Path>),
 	#[error("could not find directory in location <path='{}'>", .0.display())]
-	DirectoryNotFound(PathBuf),
+	DirectoryNotFound(Box<Path>),
 	#[error(
 		"library exists in the location metadata file, must relink <old_path='{}', new_path='{}'>",
 		.old_path.display(),
 		.new_path.display(),
 	)]
 	NeedRelink {
-		old_path: PathBuf,
-		new_path: PathBuf,
+		old_path: Box<Path>,
+		new_path: Box<Path>,
 	},
 	#[error(
 		"this location belongs to another library, must update .spacedrive file <path='{}'>",
 		.0.display()
 	)]
-	AddLibraryToMetadata(PathBuf),
+	AddLibraryToMetadata(Box<Path>),
 	#[error("location metadata file not found <path='{}'>", .0.display())]
-	MetadataNotFound(PathBuf),
+	MetadataNotFound(Box<Path>),
 	#[error("location already exists in database <path='{}'>", .0.display())]
-	LocationAlreadyExists(PathBuf),
+	LocationAlreadyExists(Box<Path>),
 	#[error("nested location currently not supported <path='{}'>", .0.display())]
-	NestedLocation(PathBuf),
+	NestedLocation(Box<Path>),
+	#[error(transparent)]
+	NonUtf8Path(#[from] NonUtf8PathError),

 	// Internal Errors
 	#[error(transparent)]

@@ -56,7 +61,7 @@ pub enum LocationError {
 	#[error("failed to read location path metadata info: {0}")]
 	LocationPathFilesystemMetadataAccess(FileIOError),
 	#[error("missing metadata file for location <path='{}'>", .0.display())]
-	MissingMetadataFile(PathBuf),
+	MissingMetadataFile(Box<Path>),
 	#[error("failed to open file from local OS: {0}")]
 	FileRead(FileIOError),
 	#[error("failed to read mounted volumes from local OS: {0}")]


@@ -31,13 +31,13 @@ struct SpacedriveLocationMetadata {
 	updated_at: DateTime<Utc>,
 }

-pub(super) struct SpacedriveLocationMetadataFile {
+pub struct SpacedriveLocationMetadataFile {
 	path: PathBuf,
 	metadata: SpacedriveLocationMetadata,
 }

 impl SpacedriveLocationMetadataFile {
-	pub(super) async fn try_load(
+	pub async fn try_load(
 		location_path: impl AsRef<Path>,
 	) -> Result<Option<Self>, LocationMetadataError> {
 		let metadata_file_name = location_path

@@ -83,7 +83,7 @@ impl SpacedriveLocationMetadataFile {
 		}
 	}

-	pub(super) async fn create_and_save(
+	pub async fn create_and_save(
 		library_id: LibraryId,
 		location_pub_id: Uuid,
 		location_path: impl AsRef<Path>,

@@ -114,7 +114,7 @@ impl SpacedriveLocationMetadataFile {
 		.await
 	}

-	pub(super) async fn relink(
+	pub async fn relink(
 		&mut self,
 		library_id: LibraryId,
 		location_path: impl AsRef<Path>,

@@ -139,8 +139,7 @@ impl SpacedriveLocationMetadataFile {
 		self.write_metadata().await
 	}

-	#[allow(dead_code)]
-	pub(super) async fn update(
+	pub async fn update(
 		&mut self,
 		library_id: LibraryId,
 		location_name: String,

@@ -157,7 +156,7 @@ impl SpacedriveLocationMetadataFile {
 		self.write_metadata().await
 	}

-	pub(super) async fn add_library(
+	pub async fn add_library(
 		&mut self,
 		library_id: LibraryId,
 		location_pub_id: Uuid,

@@ -179,22 +178,22 @@ impl SpacedriveLocationMetadataFile {
 		self.write_metadata().await
 	}

-	pub(super) fn has_library(&self, library_id: LibraryId) -> bool {
+	pub fn has_library(&self, library_id: LibraryId) -> bool {
 		self.metadata.libraries.contains_key(&library_id)
 	}

-	pub(super) fn location_path(&self, library_id: LibraryId) -> Option<&Path> {
+	pub fn location_path(&self, library_id: LibraryId) -> Option<&Path> {
 		self.metadata
 			.libraries
 			.get(&library_id)
 			.map(|l| l.path.as_path())
 	}

-	pub(super) fn is_empty(&self) -> bool {
+	pub fn is_empty(&self) -> bool {
 		self.metadata.libraries.is_empty()
 	}

-	pub(super) async fn remove_library(
+	pub async fn remove_library(
 		&mut self,
 		library_id: LibraryId,
 	) -> Result<(), LocationMetadataError> {

@@ -214,7 +213,7 @@ impl SpacedriveLocationMetadataFile {
 		}
 	}

-	pub(super) async fn clean_stale_libraries(
+	pub async fn clean_stale_libraries(
 		&mut self,
 		existing_libraries_ids: &HashSet<LibraryId>,
 	) -> Result<(), LocationMetadataError> {

@@ -238,10 +237,7 @@ impl SpacedriveLocationMetadataFile {
 		}
 	}

-	pub(super) fn location_pub_id(
-		&self,
-		library_id: LibraryId,
-	) -> Result<Uuid, LocationMetadataError> {
+	pub fn location_pub_id(&self, library_id: LibraryId) -> Result<Uuid, LocationMetadataError> {
 		self.metadata
 			.libraries
 			.get(&library_id)


@@ -8,7 +8,10 @@ use crate::{
 		media::{media_processor, MediaProcessorJobInit},
 	},
 	prisma::{file_path, indexer_rules_in_location, location, PrismaClient},
-	util::{db::maybe_missing, error::FileIOError},
+	util::{
+		db::maybe_missing,
+		error::{FileIOError, NonUtf8PathError},
+	},
 	Node,
 };

@@ -28,7 +31,7 @@ use sd_utils::uuid_to_bytes;
 use serde::Deserialize;
 use serde_json::json;
 use specta::Type;
-use tokio::{fs, io};
+use tokio::{fs, io, time::Instant};
 use tracing::{debug, info, warn};
 use uuid::Uuid;

@@ -36,7 +39,7 @@ mod error;
 pub mod file_path_helper;
 pub mod indexer;
 mod manager;
-mod metadata;
+pub mod metadata;
 pub mod non_indexed;

 pub use error::LocationError;

@@ -67,10 +70,16 @@ impl LocationCreateArgs {
 		node: &Node,
 		library: &Arc<Library>,
 	) -> Result<Option<location_with_indexer_rules::Data>, LocationError> {
+		let Some(path_str) = self.path.to_str().map(str::to_string) else {
+			return Err(LocationError::NonUtf8Path(NonUtf8PathError(
+				self.path.into_boxed_path(),
+			)));
+		};
+
 		let path_metadata = match fs::metadata(&self.path).await {
 			Ok(metadata) => metadata,
 			Err(e) if e.kind() == io::ErrorKind::NotFound => {
-				return Err(LocationError::PathNotFound(self.path))
+				return Err(LocationError::PathNotFound(self.path.into_boxed_path()))
 			}
 			Err(e) => {
 				return Err(LocationError::LocationPathFilesystemMetadataAccess(

@@ -80,7 +89,7 @@ impl LocationCreateArgs {
 		};

 		if !path_metadata.is_dir() {
-			return Err(LocationError::NotDirectory(self.path));
+			return Err(LocationError::NotDirectory(self.path.into_boxed_path()));
 		}

 		if let Some(mut metadata) = SpacedriveLocationMetadataFile::try_load(&self.path).await? {

@@ -97,17 +106,30 @@ impl LocationCreateArgs {
 				.await?;

 			if !metadata.is_empty() {
-				return if let Some(old_path) = metadata.location_path(library.id) {
+				if let Some(old_path) = metadata.location_path(library.id) {
 					if old_path == self.path {
-						Err(LocationError::LocationAlreadyExists(self.path))
+						if library
+							.db
+							.location()
+							.count(vec![location::path::equals(Some(path_str))])
+							.exec()
+							.await? > 0
+						{
+							// Location already exists in this library
+							return Err(LocationError::LocationAlreadyExists(
+								self.path.into_boxed_path(),
+							));
+						}
 					} else {
-						Err(LocationError::NeedRelink {
-							old_path: old_path.to_path_buf(),
-							new_path: self.path,
-						})
+						return Err(LocationError::NeedRelink {
+							old_path: old_path.into(),
+							new_path: self.path.into_boxed_path(),
+						});
 					}
 				} else {
-					Err(LocationError::AddLibraryToMetadata(self.path))
+					return Err(LocationError::AddLibraryToMetadata(
+						self.path.into_boxed_path(),
+					));
 				};
 			}
 		}

@@ -143,10 +165,10 @@ impl LocationCreateArgs {
 			)
 			.err_into::<LocationError>()
 			.and_then(|()| async move {
-				Ok(node
-					.locations
-					.add(location.data.id, library.clone())
-					.await?)
+				node.locations
+					.add(location.data.id, library.clone())
+					.await
+					.map_err(Into::into)
 			})
 			.await
 			{

@@ -167,9 +189,9 @@ impl LocationCreateArgs {
 		node: &Node,
 		library: &Arc<Library>,
 	) -> Result<Option<location_with_indexer_rules::Data>, LocationError> {
-		let mut metadata = SpacedriveLocationMetadataFile::try_load(&self.path)
-			.await?
-			.ok_or_else(|| LocationError::MetadataNotFound(self.path.clone()))?;
+		let Some(mut metadata) = SpacedriveLocationMetadataFile::try_load(&self.path).await? else {
+			return Err(LocationError::MetadataNotFound(self.path.into_boxed_path()));
+		};

 		metadata
 			.clean_stale_libraries(

@@ -185,17 +207,16 @@ impl LocationCreateArgs {
 		if metadata.has_library(library.id) {
 			return Err(LocationError::NeedRelink {
-				// SAFETY: This unwrap is ok as we checked that we have this library_id
 				old_path: metadata
 					.location_path(library.id)
-					.expect("This unwrap is ok as we checked that we have this library_id")
-					.to_path_buf(),
-				new_path: self.path,
+					.expect("We checked that we have this library_id")
+					.into(),
+				new_path: self.path.into_boxed_path(),
 			});
 		}

 		debug!(
-			"{} a new library (library_id = {}) to an already existing location '{}'",
+			"{} a new Library <id='{}'> to an already existing location '{}'",
 			if self.dry_run {
 				"Dry run: Would add"
 			} else {

@@ -509,23 +530,21 @@ pub async fn light_scan_location(
 }

 pub async fn relink_location(
-	library: &Arc<Library>,
+	Library { db, id, sync, .. }: &Library,
 	location_path: impl AsRef<Path>,
 ) -> Result<(), LocationError> {
-	let Library { db, id, sync, .. } = &**library;
+	let location_path = location_path.as_ref();

 	let mut metadata = SpacedriveLocationMetadataFile::try_load(&location_path)
 		.await?
-		.ok_or_else(|| LocationError::MissingMetadataFile(location_path.as_ref().to_path_buf()))?;
+		.ok_or_else(|| LocationError::MissingMetadataFile(location_path.into()))?;

-	metadata.relink(*id, &location_path).await?;
+	metadata.relink(*id, location_path).await?;

-	let pub_id = metadata.location_pub_id(library.id)?.as_ref().to_vec();
+	let pub_id = metadata.location_pub_id(*id)?.as_ref().to_vec();
 	let path = location_path
-		.as_ref()
 		.to_str()
-		.expect("Found non-UTF-8 path")
-		.to_string();
+		.map(str::to_string)
+		.ok_or_else(|| NonUtf8PathError(location_path.into()))?;

 	sync.write_op(
 		db,

@@ -604,33 +623,27 @@ pub(crate) fn normalize_path(path: impl AsRef<Path>) -> io::Result<(String, Stri
 }

 async fn create_location(
-	library: &Arc<Library>,
+	library @ Library { db, sync, .. }: &Library,
 	location_pub_id: Uuid,
 	location_path: impl AsRef<Path>,
 	indexer_rules_ids: &[i32],
 	dry_run: bool,
 ) -> Result<Option<CreatedLocationResult>, LocationError> {
-	let Library { db, sync, .. } = &**library;
-
-	let (path, name) = normalize_path(&location_path)
-		.map_err(|_| LocationError::DirectoryNotFound(location_path.as_ref().to_path_buf()))?;
+	let location_path = location_path.as_ref();
+	let (path, name) = normalize_path(location_path)
+		.map_err(|_| LocationError::DirectoryNotFound(location_path.into()))?;

-	if library
-		.db
+	if db
 		.location()
 		.count(vec![location::path::equals(Some(path.clone()))])
 		.exec()
 		.await? > 0
 	{
-		return Err(LocationError::LocationAlreadyExists(
-			location_path.as_ref().to_path_buf(),
-		));
+		return Err(LocationError::LocationAlreadyExists(location_path.into()));
 	}

-	if check_nested_location(&location_path, &library.db).await? {
-		return Err(LocationError::NestedLocation(
-			location_path.as_ref().to_path_buf(),
-		));
+	if check_nested_location(&location_path, db).await? {
+		return Err(LocationError::NestedLocation(location_path.into()));
 	}

 	if dry_run {

@@ -654,7 +667,7 @@ async fn create_location(
 			(
 				location::instance::NAME,
 				json!(prisma_sync::instance::SyncId {
-					pub_id: uuid_to_bytes(library.sync.instance)
+					pub_id: uuid_to_bytes(sync.instance)
 				}),
 			),
 		],

@@ -703,26 +716,29 @@ pub async fn delete_location(
 	library: &Arc<Library>,
 	location_id: location::id::Type,
 ) -> Result<(), LocationError> {
+	let start = Instant::now();
 	node.locations.remove(location_id, library.clone()).await?;
+	debug!(
+		"Elapsed time to remove location from node: {:?}",
+		start.elapsed()
+	);

+	let start = Instant::now();
 	delete_directory(library, location_id, None).await?;
-
-	library
-		.db
-		.indexer_rules_in_location()
-		.delete_many(vec![indexer_rules_in_location::location_id::equals(
-			location_id,
-		)])
-		.exec()
-		.await?;
+	debug!(
+		"Elapsed time to delete location file paths: {:?}",
+		start.elapsed()
+	);

 	let location = library
 		.db
 		.location()
-		.delete(location::id::equals(location_id))
+		.find_unique(location::id::equals(location_id))
 		.exec()
-		.await?;
+		.await?
+		.ok_or(LocationError::IdNotFound(location_id))?;

+	let start = Instant::now();
 	// TODO: This should really be queued to the proper node so it will always run
 	// TODO: Deal with whether a location is online or not
 	// TODO(N): This isn't gonna work with removable media and this will likely permanently break if the DB is restored from a backup.

@@ -745,10 +761,43 @@ pub async fn delete_location(
 			}
 		}
 	}
+	debug!(
+		"Elapsed time to remove location metadata: {:?}",
+		start.elapsed()
+	);
+
+	let start = Instant::now();
+	library
+		.db
+		.indexer_rules_in_location()
+		.delete_many(vec![indexer_rules_in_location::location_id::equals(
+			location_id,
+		)])
+		.exec()
+		.await?;
+	debug!(
+		"Elapsed time to delete indexer rules in location: {:?}",
+		start.elapsed()
+	);
+
+	let start = Instant::now();
+	library
+		.db
+		.location()
+		.delete(location::id::equals(location_id))
+		.exec()
+		.await?;
+	debug!(
+		"Elapsed time to delete location from db: {:?}",
+		start.elapsed()
+	);

 	invalidate_query!(library, "locations.list");

-	info!("Location {} deleted", location_id);
+	info!("Location {location_id} deleted");

 	Ok(())
 }


@@ -100,9 +100,12 @@ pub enum ThumbnailerError {
 	#[error(transparent)]
 	VersionManager(#[from] VersionManagerError),
 	#[error("failed to encode webp")]
-	Encoding,
-	#[error("error while converting the image: {0}")]
-	SdImages(#[from] sd_images::Error),
+	WebPEncoding { path: Box<Path>, reason: String },
+	#[error("error while converting the image")]
+	SdImages {
+		path: Box<Path>,
+		error: sd_images::Error,
+	},
 	#[error("failed to execute converting task: {0}")]
 	Task(#[from] task::JoinError),
 	#[cfg(feature = "ffmpeg")]

@@ -140,7 +143,10 @@ pub async fn generate_image_thumbnail(
 	let file_path = file_path.as_ref().to_path_buf();

 	let webp = task::spawn_blocking(move || -> Result<_, ThumbnailerError> {
-		let img = format_image(&file_path).map_err(|_| ThumbnailerError::Encoding)?;
+		let img = format_image(&file_path).map_err(|e| ThumbnailerError::SdImages {
+			path: file_path.clone().into_boxed_path(),
+			error: e,
+		})?;

 		let (w, h) = img.dimensions();
 		let (w_scaled, h_scaled) = scale_dimensions(w as f32, h as f32, TARGET_PX);

@@ -155,14 +161,16 @@ pub async fn generate_image_thumbnail(
 		// this corrects the rotation/flip of the image based on the *available* exif data
 		// not all images have exif data, so we don't error
-		if let Some(orientation) = Orientation::from_path(file_path) {
+		if let Some(orientation) = Orientation::from_path(&file_path) {
 			img = orientation.correct_thumbnail(img);
 		}

 		// Create the WebP encoder for the above image
-		let Ok(encoder) = Encoder::from_image(&img) else {
-			return Err(ThumbnailerError::Encoding);
-		};
+		let encoder =
+			Encoder::from_image(&img).map_err(|reason| ThumbnailerError::WebPEncoding {
+				path: file_path.into_boxed_path(),
+				reason: reason.to_string(),
+			})?;

 		// Type WebPMemory is !Send, which makes the Future in this function !Send,
 		// this make us `deref` to have a `&[u8]` and then `to_owned` to make a Vec<u8>

@@ -178,7 +186,10 @@ pub async fn generate_image_thumbnail(
 			.await
 			.map_err(|e| FileIOError::from((shard_dir, e)))?;
 	} else {
-		return Err(ThumbnailerError::Encoding);
+		error!(
+			"Failed to get parent directory of '{}' for sharding parent directory",
+			output_path.display()
+		);
 	}

 	fs::write(output_path, &webp)


@@ -3,12 +3,10 @@ import { createContext, useContext } from 'react';
 import { useNavigate } from 'react-router';
 import {
 	currentLibraryCache,
-	DistanceFormat,
 	getOnboardingStore,
 	getUnitFormatStore,
 	resetOnboardingStore,
 	telemetryStore,
-	TemperatureFormat,
 	useBridgeMutation,
 	useCachedLibraries,
 	useMultiZodForm,

@@ -54,6 +52,16 @@ const schemas = {
 	'new-library': z.object({
 		name: z.string().min(1, 'Name is required').regex(/[\S]/g).trim()
 	}),
+	'locations': z.object({
+		locations: z.object({
+			desktop: z.coerce.boolean(),
+			documents: z.coerce.boolean(),
+			downloads: z.coerce.boolean(),
+			pictures: z.coerce.boolean(),
+			music: z.coerce.boolean(),
+			videos: z.coerce.boolean()
+		})
+	}),
 	'privacy': z.object({
 		shareTelemetry: shareTelemetry.schema
 	})

@@ -66,11 +74,12 @@ const useFormState = () => {
 		schemas,
 		defaultValues: {
 			'new-library': obStore.data?.['new-library'] ?? undefined,
+			'locations': obStore.data?.locations ?? { locations: {} },
 			'privacy': obStore.data?.privacy ?? {
 				shareTelemetry: 'share-telemetry'
 			}
 		},
-		onData: (data) => (getOnboardingStore().data = data)
+		onData: (data) => (getOnboardingStore().data = { ...obStore.data, ...data })
 	});

 	const navigate = useNavigate();

@@ -97,7 +106,8 @@ const useFormState = () => {
 		// show creation screen for a bit for smoothness
 		const [library] = await Promise.all([
 			createLibrary.mutateAsync({
-				name: data['new-library'].name
+				name: data['new-library'].name,
+				default_locations: data.locations.locations
 			}),
 			new Promise((res) => setTimeout(res, 500))
 		]);


@@ -4,7 +4,7 @@ import { getOnboardingStore } from '@sd/client';
 import Alpha from './alpha';
 import { useOnboardingContext } from './context';
 import CreatingLibrary from './creating-library';
-import Login from './login';
+import Locations from './locations';
 import NewLibrary from './new-library';
 import Privacy from './privacy';

@@ -32,6 +32,10 @@ export default [
 		element: <NewLibrary />,
 		path: 'new-library'
 	},
+	{
+		element: <Locations />,
+		path: 'locations'
+	},
 	{
 		element: <Privacy />,
 		path: 'privacy'


@@ -0,0 +1,172 @@
import {
Desktop,
DownloadSimple,
File,
Image,
MusicNote,
Icon as PhosportIcon,
Video
} from '@phosphor-icons/react';
import clsx from 'clsx';
import { useMemo } from 'react';
import { Controller, useWatch } from 'react-hook-form';
import { useNavigate } from 'react-router';
import { SystemLocations, useBridgeQuery } from '@sd/client';
import { Button, Form, RadixCheckbox } from '@sd/ui';
import { Icon, TruncatedText } from '~/components';
import { useIsDark, useOperatingSystem } from '~/hooks';
import { useOnboardingContext } from './context';
import { OnboardingContainer, OnboardingDescription, OnboardingTitle } from './Layout';
type SystemLocation = keyof SystemLocations;
const icons: Record<SystemLocation, PhosportIcon> = {
desktop: Desktop,
documents: File,
downloads: DownloadSimple,
pictures: Image,
music: MusicNote,
videos: Video
};
const LocationIcon = (props: { location: SystemLocation; active?: boolean }) => {
const isDark = useIsDark();
const LocationIcon = icons[props.location];
return (
<div className="absolute -bottom-9 -right-9 h-28 w-28">
<Icon name="Folder" />
<LocationIcon
weight="fill"
size={28}
className={clsx(
'absolute left-1/2 top-[42%] -translate-x-1/2 fill-black transition-opacity',
isDark
? 'opacity-30 group-focus-within:opacity-60 group-hover:opacity-60'
: 'opacity-25 group-focus-within:opacity-50 group-hover:opacity-50',
props.active && (isDark ? 'opacity-60' : 'opacity-50')
)}
/>
</div>
);
};
export default function OnboardingLocations() {
const navigate = useNavigate();
const os = useOperatingSystem(true);
const { data } = useBridgeQuery(['locations.systemLocations']);
const systemLocations = useMemo(() => {
const locations = (Object.keys(data ?? {}) as SystemLocation[]).reduce(
(locations, location) => ({
...locations,
...(data?.[location] ? { [location]: data[location] } : {})
}),
{} as Record<SystemLocation, string>
);
if (Object.keys(locations).length > 0) return locations;
}, [data]);
const form = useOnboardingContext().forms.useForm('locations');
const locations = useWatch({ control: form.control, name: 'locations' });
const toggled = useMemo(() => {
if (!systemLocations) return;
return (
Object.values(locations).filter(Boolean).length === Object.keys(systemLocations).length
);
}, [locations, systemLocations]);
return (
<Form
form={form}
onSubmit={form.handleSubmit(() => navigate('../privacy', { replace: true }))}
className="flex flex-col items-center"
>
<OnboardingContainer>
<OnboardingTitle>Add Locations</OnboardingTitle>
<OnboardingDescription>
Enhance your Spacedrive experience by adding your favorite locations to your
personal library, for seamless and efficient file management.
</OnboardingDescription>
{systemLocations && (
<div className="my-6">
<RadixCheckbox
name="toggle-all"
className="mb-1.5 justify-end"
labelClassName="!ml-1.5"
label="Toggle All"
checked={toggled}
onCheckedChange={(value) => {
if (typeof value !== 'boolean') return;
form.reset({
locations: Object.keys(systemLocations).reduce(
(locations, location) => ({
...locations,
[location]: value
}),
{} as Record<SystemLocation, boolean>
)
});
}}
/>
<div className="grid grid-cols-2 gap-2">
{(Object.keys(systemLocations) as SystemLocation[]).map((location) => (
<Controller
key={location}
control={form.control}
name={`locations.${location}`}
render={({ field }) => (
<label
htmlFor={field.name}
className={clsx(
'group relative flex w-72 overflow-hidden rounded-md border px-4 py-3',
field.value
? 'border-accent/25 bg-accent/10'
: 'border-app-line bg-app-box/50'
)}
>
<RadixCheckbox
name={field.name}
checked={field.value}
onCheckedChange={field.onChange}
className="mr-2 mt-1 self-start"
/>
<div className="max-w-[64%]">
<h1 className="font-bold capitalize">
{location === 'videos' && os === 'macOS'
? 'Movies'
: location}
</h1>
<TruncatedText className="text-sm text-ink-faint">
{systemLocations[location]}
</TruncatedText>
</div>
<LocationIcon
location={location}
active={field.value}
/>
</label>
)}
/>
))}
</div>
</div>
)}
<Button type="submit" className="text-center" variant="accent" size="sm">
Continue
</Button>
</OnboardingContainer>
</Form>
);
}


@@ -25,7 +25,7 @@ export default function OnboardingNewLibrary() {
 		<Form
 			form={form}
 			onSubmit={form.handleSubmit(() => {
-				navigate('../privacy', { replace: true });
+				navigate('../locations', { replace: true });
 			})}
 		>
 			<OnboardingContainer>


@@ -0,0 +1,21 @@
import clsx from 'clsx';
import { PropsWithChildren, useRef } from 'react';
import { Tooltip } from '@sd/ui';
import { useIsTextTruncated } from '~/hooks';
export const TruncatedText = ({
children,
className
}: PropsWithChildren<{ className?: string }>) => {
const ref = useRef<HTMLDivElement>(null);
const isTruncated = useIsTextTruncated(ref);
return (
<Tooltip label={isTruncated ? children : undefined} asChild>
<div ref={ref} className={clsx('truncate', className)}>
{children}
</div>
</Tooltip>
);
};


@@ -12,3 +12,4 @@ export * from './PasswordMeter';
 export * from './SubtleButton';
 export * from './TextViewer';
 export * from './TrafficLights';
+export * from './TruncatedText';


@@ -23,6 +23,7 @@ export type Procedures = {
 		{ key: "locations.indexer_rules.list", input: LibraryArgs<null>, result: IndexerRule[] } |
 		{ key: "locations.indexer_rules.listForLocation", input: LibraryArgs<number>, result: IndexerRule[] } |
 		{ key: "locations.list", input: LibraryArgs<null>, result: Location[] } |
+		{ key: "locations.systemLocations", input: never, result: SystemLocations } |
 		{ key: "nodeState", input: never, result: NodeState } |
 		{ key: "nodes.listLocations", input: LibraryArgs<string | null>, result: ExplorerItem[] } |
 		{ key: "notifications.dismiss", input: NotificationId, result: null } |

@@ -145,10 +146,12 @@ export type CreateEphemeralFolderArgs = { path: string; name: string | null }
 export type CreateFolderArgs = { location_id: number; sub_path: string | null; name: string | null }

-export type CreateLibraryArgs = { name: LibraryName }
+export type CreateLibraryArgs = { name: LibraryName; default_locations?: DefaultLocations }

 export type CursorOrderItem<T> = { order: SortOrder; data: T }

+export type DefaultLocations = { desktop: boolean; documents: boolean; downloads: boolean; pictures: boolean; music: boolean; videos: boolean }
+
 export type DiskType = "SSD" | "HDD" | "Removable"

 export type DoubleClickAction = "openFile" | "quickPreview"

@@ -403,6 +406,8 @@ export type SpacedropArgs = { peer_id: PeerId; file_path: string[] }
 export type Statistics = { id: number; date_captured: string; total_object_count: number; library_db_size: string; total_bytes_used: string; total_bytes_capacity: string; total_unique_bytes: string; total_bytes_free: string; preview_media_bytes: string }

+export type SystemLocations = { desktop: string | null; documents: string | null; downloads: string | null; pictures: string | null; music: string | null; videos: string | null }
+
 export type Tag = { id: number; pub_id: number[]; name: string | null; color: string | null; redundancy_goal: number | null; date_created: string | null; date_modified: string | null }

 export type TagAssignArgs = { object_ids: number[]; tag_id: number; unassign: boolean }


@@ -24,10 +24,11 @@ export const CheckBox = forwardRef<HTMLInputElement, CheckBoxProps>(
 export interface RadixCheckboxProps extends ComponentProps<typeof Checkbox.Root> {
 	label?: string;
+	labelClassName?: string;
 }

 // TODO: Replace above with this, requires refactor of usage
-export const RadixCheckbox = ({ className, ...props }: RadixCheckboxProps) => (
+export const RadixCheckbox = ({ className, labelClassName, ...props }: RadixCheckboxProps) => (
 	<div className={clsx('flex items-center', className)}>
 		<Checkbox.Root
 			className="flex h-[17px] w-[17px] shrink-0 items-center justify-center rounded-md border border-app-line bg-app-button radix-state-checked:bg-accent"

@@ -39,7 +40,10 @@ export const RadixCheckbox = ({ className, ...props }: RadixCheckboxProps) => (
 			</Checkbox.Indicator>
 		</Checkbox.Root>
 		{props.label && (
-			<label className="ml-2 text-sm font-medium" htmlFor={props.name}>
+			<label
+				className={clsx('ml-2 text-sm font-medium', labelClassName)}
+				htmlFor={props.name}
+			>
 				{props.label}
 			</label>
 		)}