Replace Location.local_path with path (#571)

* replace Location.local_path with path

* Use more references to avoid unneeded moves and remove unneeded error variants

* remove unnecessary stuff

* location id checks

---------

Co-authored-by: Ericson Soares <ericson.ds999@gmail.com>
This commit is contained in:
Brendan Allan 2023-02-20 13:14:47 +08:00 committed by GitHub
parent c7dbc784cd
commit fd39dc3a3d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
28 changed files with 290 additions and 356 deletions

View file

@ -48,6 +48,9 @@
},
"allowlist": {
"all": true,
"notification": {
"all": false
},
"protocol": {
"assetScope": ["*"]
},

View file

@ -84,7 +84,7 @@ function LocationItem({ location, index }: { location: Location & { node: Node }
</Text>
</View>
<Text numberOfLines={1} style={tw`text-ink-dull mt-0.5 text-[10px] font-semibold`}>
{location.local_path}
{location.path}
</Text>
</View>
<CaretRight color={tw.color('ink-dull')} size={18} />

View file

@ -95,8 +95,8 @@ model Location {
id Int @id @default(autoincrement())
pub_id Bytes @unique
node_id Int
name String?
local_path String?
name String
path String
total_capacity Int?
available_capacity Int?
is_archived Boolean @default(false)

View file

@ -1,6 +1,12 @@
use crate::{prisma::file_path, Node};
use std::{cmp::min, io, path::PathBuf, str::FromStr, sync::Arc};
use std::{
cmp::min,
io,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use http_range::HttpRange;
use httpz::{
@ -15,7 +21,7 @@ use tokio::{
fs::{self, File},
io::{AsyncReadExt, AsyncSeekExt, SeekFrom},
};
use tracing::{error, warn};
use tracing::error;
use uuid::Uuid;
// This LRU cache allows us to avoid doing a DB lookup on every request.
@ -118,14 +124,7 @@ async fn handle_file(
.ok_or_else(|| HandleCustomUriError::NotFound("object"))?;
let lru_entry = (
PathBuf::from(file_path.location.local_path.ok_or_else(|| {
warn!(
"Location '{}' doesn't have local path set",
file_path.location_id
);
HandleCustomUriError::BadRequest("Location doesn't have `local_path` set!")
})?)
.join(&file_path.materialized_path),
Path::new(&file_path.location.path).join(&file_path.materialized_path),
file_path.extension,
);
FILE_METADATA_CACHE.insert(lru_cache_key, lru_entry.clone());

View file

@ -78,6 +78,8 @@ impl JobManager {
}
});
debug!("JobManager initialized");
this
}

View file

@ -8,7 +8,7 @@ use util::secure_temp_keystore::SecureTempKeystore;
use std::{path::Path, sync::Arc};
use thiserror::Error;
use tokio::{fs, sync::broadcast};
use tracing::{error, info};
use tracing::{debug, error, info};
use tracing_subscriber::{prelude::*, EnvFilter};
pub mod api;
@ -150,6 +150,8 @@ impl Node {
}
}
debug!("Watching locations");
// Trying to resume possible paused jobs
let inner_library_manager = Arc::clone(&library_manager);
let inner_jobs = Arc::clone(&jobs);

View file

@ -22,6 +22,7 @@ use std::{
};
use thiserror::Error;
use tokio::sync::RwLock;
use tracing::debug;
use uuid::Uuid;
use super::{LibraryConfig, LibraryConfigWrapped, LibraryContext};
@ -171,6 +172,8 @@ impl LibraryManager {
node_context,
});
debug!("LibraryManager initialized");
Ok(this)
}

View file

@ -25,8 +25,6 @@ pub enum LocationError {
NotDirectory(PathBuf),
#[error("Could not find directory in Location (path: {0:?})")]
DirectoryNotFound(String),
#[error("Missing local_path (id: {0})")]
MissingLocalPath(i32),
#[error("Library exists in the location metadata file, must relink: (old_path: {old_path:?}, new_path: {new_path:?})")]
NeedRelink {
old_path: PathBuf,
@ -72,7 +70,7 @@ impl From<LocationError> for rspc::Error {
// User's fault errors
LocationError::NotDirectory(_)
| LocationError::MissingLocalPath(_)
// | LocationError::MissingLocalPath(_)
| LocationError::NeedRelink { .. }
| LocationError::AddLibraryToMetadata(_) => {
rspc::Error::with_cause(ErrorCode::BadRequest, err.to_string(), err)

View file

@ -45,7 +45,6 @@ pub struct IndexerJob;
location::include!(indexer_job_location {
indexer_rules: select { indexer_rule }
});
file_path::select!(file_path_id_only { id });
/// `IndexerJobInit` receives a `location::Data` object to be indexed
#[derive(Serialize, Deserialize)]
@ -63,7 +62,6 @@ impl Hash for IndexerJobInit {
/// contains some metadata for logging purposes.
#[derive(Serialize, Deserialize)]
pub struct IndexerJobData {
location_path: PathBuf,
db_write_start: DateTime<Utc>,
scan_read_time: Duration,
total_paths: usize,
@ -111,14 +109,6 @@ impl StatefulJob for IndexerJob {
/// Creates a vector of valid path buffers from a directory, chunked into batches of `BATCH_SIZE`.
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let location_path = state
.init
.location
.local_path
.as_ref()
.map(PathBuf::from)
.unwrap();
// grab the next id so we can increment in memory for batch inserting
let first_file_id = get_max_file_path_id(&ctx.library_ctx).await?;
@ -136,7 +126,7 @@ impl StatefulJob for IndexerJob {
let scan_start = Instant::now();
let inner_ctx = ctx.clone();
let paths = walk(
location_path.clone(),
&state.init.location.path,
&indexer_rules_by_kind,
move |path, total_entries| {
IndexerJobData::on_scan_progress(
@ -191,7 +181,6 @@ impl StatefulJob for IndexerJob {
let total_entries = paths_entries.len();
state.data = Some(IndexerJobData {
location_path,
db_write_start: Utc::now(),
scan_read_time: scan_start.elapsed(),
total_paths: total_entries,
@ -228,14 +217,9 @@ impl StatefulJob for IndexerJob {
ctx: WorkerContext,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let data = &state
.data
.as_ref()
.expect("critical error: missing data on job state");
let db = &ctx.library_ctx.db;
let location_path = &data.location_path;
let location_id = state.init.location.id;
let location = &state.init.location;
let (sync_stuff, paths): (Vec<_>, Vec<_>) = state.steps[0]
.iter()
@ -255,7 +239,7 @@ impl StatefulJob for IndexerJob {
}
let mut materialized_path = entry
.path
.strip_prefix(location_path)
.strip_prefix(&location.path)
.unwrap()
.to_str()
.expect("Found non-UTF-8 path")
@ -286,7 +270,7 @@ impl StatefulJob for IndexerJob {
),
file_path::create_unchecked(
entry.file_id,
location_id,
location.id,
materialized_path,
name,
vec![
@ -323,7 +307,7 @@ impl StatefulJob for IndexerJob {
.expect("critical error: missing data on job state");
info!(
"scan of {} completed in {:?}. {:?} files found. db write completed in {:?}",
state.init.location.local_path.as_ref().unwrap(),
state.init.location.path,
data.scan_read_time,
data.total_paths,
(Utc::now() - data.db_write_start)

View file

@ -52,10 +52,12 @@ impl Ord for WalkEntry {
/// a list of accepted entries. There are some useful comments in the implementation of this function
/// in case of doubts.
pub(super) async fn walk(
root: PathBuf,
root: impl AsRef<Path>,
rules_per_kind: &HashMap<RuleKind, Vec<IndexerRule>>,
update_notifier: impl Fn(&Path, usize),
) -> Result<Vec<WalkEntry>, IndexerError> {
let root = root.as_ref().to_path_buf();
let mut to_walk = VecDeque::with_capacity(1);
to_walk.push_back((root.clone(), None));
let mut indexed_paths = HashMap::new();

View file

@ -20,8 +20,8 @@ const LOCATION_CHECK_INTERVAL: Duration = Duration::from_secs(5);
pub(super) async fn check_online(location: &location::Data, library_ctx: &LibraryContext) -> bool {
let pub_id = &location.pub_id;
if let Some(ref local_path) = location.local_path {
match fs::metadata(local_path).await {
if location.node_id == library_ctx.node_local_id {
match fs::metadata(&location.path).await {
Ok(_) => {
library_ctx.location_manager().add_online(pub_id).await;
true
@ -53,13 +53,12 @@ pub(super) async fn location_check_sleep(
pub(super) fn watch_location(
location: location::Data,
library_id: LibraryId,
location_path: impl AsRef<Path>,
locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
) {
let location_id = location.id;
if let Some(mut watcher) = locations_unwatched.remove(&(location_id, library_id)) {
if watcher.check_path(location_path) {
if watcher.check_path(&location.path) {
watcher.watch();
} else {
watcher.update_data(location, true);
@ -72,13 +71,12 @@ pub(super) fn watch_location(
pub(super) fn unwatch_location(
location: location::Data,
library_id: LibraryId,
location_path: impl AsRef<Path>,
locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
) {
let location_id = location.id;
if let Some(mut watcher) = locations_watched.remove(&(location_id, library_id)) {
if watcher.check_path(location_path) {
if watcher.check_path(&location.path) {
watcher.unwatch();
} else {
watcher.update_data(location, false)
@ -149,11 +147,10 @@ pub(super) async fn handle_remove_location_request(
) {
let key = (location_id, library_ctx.id);
if let Some(location) = get_location(location_id, &library_ctx).await {
if let Some(ref local_path_str) = location.local_path.clone() {
if location.node_id == library_ctx.node_local_id {
unwatch_location(
location,
library_ctx.id,
local_path_str,
locations_watched,
locations_unwatched,
);
@ -207,21 +204,14 @@ pub(super) async fn handle_stop_watcher_request(
reason: String::from("failed to fetch location from db"),
})
.map(|location| {
location
.local_path
.clone()
.ok_or(LocationManagerError::LocationMissingLocalPath(location_id))
.map(|local_path_str| {
unwatch_location(
location,
library_ctx.id,
local_path_str,
locations_watched,
locations_unwatched,
);
forced_unwatch.insert(key);
})
})?
unwatch_location(
location,
library_ctx.id,
locations_watched,
locations_unwatched,
);
forced_unwatch.insert(key);
})
} else {
Ok(())
}
@ -262,21 +252,14 @@ pub(super) async fn handle_reinit_watcher_request(
reason: String::from("failed to fetch location from db"),
})
.map(|location| {
location
.local_path
.clone()
.ok_or(LocationManagerError::LocationMissingLocalPath(location_id))
.map(|local_path_str| {
watch_location(
location,
library_ctx.id,
local_path_str,
locations_watched,
locations_unwatched,
);
forced_unwatch.remove(&key);
})
})?
watch_location(
location,
library_ctx.id,
locations_watched,
locations_unwatched,
);
forced_unwatch.remove(&key);
})
} else {
Ok(())
}

View file

@ -15,14 +15,11 @@ use tokio::{
oneshot, RwLock,
},
};
use tracing::error;
use tracing::{debug, error};
#[cfg(feature = "location-watcher")]
use tokio::sync::mpsc;
#[cfg(feature = "location-watcher")]
use tracing::debug;
#[cfg(feature = "location-watcher")]
mod watcher;
@ -117,6 +114,8 @@ impl LocationManager {
pub fn new() -> Arc<Self> {
let online_tx = broadcast::channel(16).0;
debug!("LocationManager initialized");
#[cfg(feature = "location-watcher")]
{
let (location_management_tx, location_management_rx) = mpsc::channel(128);
@ -130,8 +129,6 @@ impl LocationManager {
stop_rx,
));
debug!("Location manager initialized");
Arc::new(Self {
online_locations: Default::default(),
online_tx,
@ -432,14 +429,13 @@ impl LocationManager {
// The time to check came for an already removed library, so we just ignore it
to_remove.remove(&key);
} else if let Some(location) = get_location(location_id, &library_ctx).await {
if let Some(ref local_path_str) = location.local_path.clone() {
if location.node_id == library_ctx.node_local_id {
if check_online(&location, &library_ctx).await
&& !forced_unwatch.contains(&key)
{
watch_location(
location,
library_ctx.id,
local_path_str,
&mut locations_watched,
&mut locations_unwatched,
);
@ -447,7 +443,6 @@ impl LocationManager {
unwatch_location(
location,
library_ctx.id,
local_path_str,
&mut locations_watched,
&mut locations_unwatched,
);

View file

@ -35,16 +35,16 @@ impl EventHandler for LinuxEventHandler {
match event.kind {
EventKind::Access(AccessKind::Close(AccessMode::Write)) => {
// If a file was closed with write mode, then it was updated or created
file_creation_or_update(location, event, library_ctx).await?;
file_creation_or_update(&location, event, library_ctx).await?;
}
EventKind::Create(CreateKind::Folder) => {
create_dir(location, event, library_ctx.clone()).await?;
create_dir(&location, event, library_ctx).await?;
}
EventKind::Modify(ModifyKind::Name(RenameMode::Both)) => {
rename_both_event(location, event, library_ctx).await?;
rename_both_event(&location, event, library_ctx).await?;
}
EventKind::Remove(remove_kind) => {
remove_event(location, event, remove_kind, library_ctx).await?;
remove_event(&location, event, remove_kind, library_ctx).await?;
}
other_event_kind => {
trace!("Other Linux event that we don't handle for now: {other_event_kind:#?}");

View file

@ -39,11 +39,11 @@ impl EventHandler for MacOsEventHandler {
match event.kind {
EventKind::Create(CreateKind::Folder) => {
create_dir(location, event, library_ctx.clone()).await?;
create_dir(&location, event, library_ctx).await?;
}
EventKind::Modify(ModifyKind::Data(DataChange::Content)) => {
// If a file had its content modified, then it was updated or created
file_creation_or_update(location, event, library_ctx).await?;
file_creation_or_update(&location, event, library_ctx).await?;
}
EventKind::Modify(ModifyKind::Name(RenameMode::Any)) => {
match self.rename_stack.take() {
@ -51,14 +51,19 @@ impl EventHandler for MacOsEventHandler {
self.rename_stack = Some(event);
}
Some(from_event) => {
rename(&event.paths[0], &from_event.paths[0], location, library_ctx)
.await?;
rename(
&event.paths[0],
&from_event.paths[0],
&location,
library_ctx,
)
.await?;
}
}
}
EventKind::Remove(remove_kind) => {
remove_event(location, event, remove_kind, library_ctx).await?;
remove_event(&location, event, remove_kind, library_ctx).await?;
}
other_event_kind => {
trace!("Other MacOS event that we don't handle for now: {other_event_kind:#?}");

View file

@ -61,7 +61,6 @@ trait EventHandler {
#[derive(Debug)]
pub(super) struct LocationWatcher {
location: location::Data,
path: PathBuf,
watcher: RecommendedWatcher,
ignore_path_tx: mpsc::UnboundedSender<IgnorePath>,
handle: Option<JoinHandle<()>>,
@ -96,13 +95,6 @@ impl LocationWatcher {
Config::default(),
)?;
let path = PathBuf::from(
location
.local_path
.as_ref()
.ok_or(LocationManagerError::LocationMissingLocalPath(location.id))?,
);
let handle = tokio::spawn(Self::handle_watch_events(
location.id,
library_ctx,
@ -113,7 +105,6 @@ impl LocationWatcher {
Ok(Self {
location,
path,
watcher,
ignore_path_tx,
handle: Some(handle),
@ -214,56 +205,49 @@ impl LocationWatcher {
}
pub(super) fn check_path(&self, path: impl AsRef<Path>) -> bool {
self.path == path.as_ref()
(self.location.path.as_ref() as &Path) == path.as_ref()
}
pub(super) fn watch(&mut self) {
if let Err(e) = self.watcher.watch(&self.path, RecursiveMode::Recursive) {
error!(
"Unable to watch location: (path: {}, error: {e:#?})",
self.path.display()
);
let path = &self.location.path;
if let Err(e) = self.watcher.watch(path.as_ref(), RecursiveMode::Recursive) {
error!("Unable to watch location: (path: {path}, error: {e:#?})");
} else {
debug!("Now watching location: (path: {})", self.path.display());
debug!("Now watching location: (path: {path})");
}
}
pub(super) fn unwatch(&mut self) {
if let Err(e) = self.watcher.unwatch(&self.path) {
let path = &self.location.path;
if let Err(e) = self.watcher.unwatch(path.as_ref()) {
/**************************************** TODO: ****************************************
* According to an unit test, this error may occur when a subdirectory is removed *
* and we try to unwatch the parent directory then we have to check the implications *
* of unwatch error for this case. *
**************************************************************************************/
error!(
"Unable to unwatch location: (path: {}, error: {e:#?})",
self.path.display()
);
error!("Unable to unwatch location: (path: {path}, error: {e:#?})",);
} else {
debug!("Stop watching location: (path: {})", self.path.display());
debug!("Stop watching location: (path: {path})");
}
}
pub(super) fn update_data(&mut self, location: location::Data, to_watch: bool) {
pub(super) fn update_data(&mut self, new_location: location::Data, to_watch: bool) {
assert_eq!(
self.location.id, location.id,
self.location.id, new_location.id,
"Updated location data must have the same id"
);
let path = PathBuf::from(location.local_path.as_ref().unwrap_or_else(|| {
panic!(
"Tried to watch a location without local_path: <id='{}'>",
location.id
)
}));
if self.path != path {
let new_path = self.location.path != new_location.path;
if new_path {
self.unwatch();
self.path = path;
if to_watch {
self.watch();
}
}
self.location = location;
self.location = new_location;
if new_path && to_watch {
self.watch();
}
}
}

View file

@ -19,6 +19,7 @@ use crate::{
use std::{
collections::HashSet,
ffi::OsStr,
path::{Path, PathBuf},
str::FromStr,
};
@ -49,79 +50,79 @@ pub(super) fn check_event(event: &Event, ignore_paths: &HashSet<PathBuf>) -> boo
}
pub(super) async fn create_dir(
location: indexer_job_location::Data,
location: &indexer_job_location::Data,
event: Event,
library_ctx: LibraryContext,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
if let Some(ref location_local_path) = location.local_path {
trace!(
"Location: <root_path ='{location_local_path}'> creating directory: {}",
event.paths[0].display()
);
if let Some(subpath) = subtract_location_path(location_local_path, &event.paths[0]) {
let parent_directory = get_parent_dir(location.id, &subpath, &library_ctx).await?;
trace!("parent_directory: {:?}", parent_directory);
if let Some(parent_directory) = parent_directory {
let created_path = create_file_path(
&library_ctx,
location.id,
subpath.to_str().expect("Found non-UTF-8 path").to_string(),
subpath
.file_stem()
.unwrap()
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
None,
Some(parent_directory.id),
true,
)
.await?;
info!("Created path: {}", created_path.materialized_path);
invalidate_query!(library_ctx, "locations.getExplorerData");
} else {
warn!("Watcher found a path without parent");
}
}
if location.node_id != library_ctx.node_local_id {
return Ok(());
}
trace!(
"Location: <root_path ='{}'> creating directory: {}",
location.path,
event.paths[0].display()
);
let Some(subpath) = subtract_location_path(&location.path, &event.paths[0]) else {
return Ok(());
};
let parent_directory = get_parent_dir(location.id, &subpath, library_ctx).await?;
trace!("parent_directory: {:?}", parent_directory);
let Some(parent_directory) = parent_directory else {
warn!("Watcher found a path without parent");
return Ok(())
};
let created_path = create_file_path(
library_ctx,
location.id,
subpath
.to_str()
.map(str::to_string)
.expect("Found non-UTF-8 path"),
subpath
.file_stem()
.and_then(OsStr::to_str)
.map(str::to_string)
.expect("Found non-UTF-8 path"),
None,
Some(parent_directory.id),
true,
)
.await?;
info!("Created path: {}", created_path.materialized_path);
invalidate_query!(library_ctx, "locations.getExplorerData");
Ok(())
}
pub(super) async fn create_file(
location: indexer_job_location::Data,
event: Event,
library_ctx: LibraryContext,
) -> Result<(), LocationManagerError> {
if let Some(ref location_local_path) = location.local_path {
inner_create_file(location.id, location_local_path, event, &library_ctx).await
} else {
Err(LocationManagerError::LocationMissingLocalPath(location.id))
}
}
async fn inner_create_file(
location_id: LocationId,
location_local_path: &str,
location: &indexer_job_location::Data,
event: Event,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
if location.node_id != library_ctx.node_local_id {
return Ok(());
}
trace!(
"Location: <root_path ='{location_local_path}'> creating file: {}",
"Location: <root_path ='{}'> creating file: {}",
&location.path,
event.paths[0].display()
);
let db = &library_ctx.db;
let Some(materialized_path) = subtract_location_path(location_local_path, &event.paths[0]) else { return Ok(()) };
let Some(materialized_path) = subtract_location_path(&location.path, &event.paths[0]) else { return Ok(()) };
let Some(parent_directory) =
get_parent_dir(location_id, &materialized_path, library_ctx).await?
get_parent_dir(location.id, &materialized_path, library_ctx).await?
else {
warn!("Watcher found a path without parent");
return Ok(())
@ -129,7 +130,7 @@ async fn inner_create_file(
let created_file = create_file_path(
library_ctx,
location_id,
location.id,
materialized_path
.to_str()
.expect("Found non-UTF-8 path")
@ -159,7 +160,7 @@ async fn inner_create_file(
cas_id,
kind,
fs_metadata,
} = FileMetadata::new(location_local_path, &created_file.materialized_path).await?;
} = FileMetadata::new(&location.path, &created_file.materialized_path).await?;
let existing_object = db
.object()
@ -206,7 +207,7 @@ async fn inner_create_file(
db.file_path()
.update(
file_path::location_id_id(location_id, created_file.id),
file_path::location_id_id(location.id, created_file.id),
vec![file_path::object_id::set(Some(object.id))],
)
.exec()
@ -225,48 +226,30 @@ async fn inner_create_file(
}
pub(super) async fn file_creation_or_update(
location: indexer_job_location::Data,
location: &indexer_job_location::Data,
event: Event,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
if let Some(ref location_local_path) = location.local_path {
if let Some(file_path) =
get_existing_file_path(&location, &event.paths[0], false, library_ctx).await?
{
inner_update_file(
&location,
location_local_path,
file_path,
event,
library_ctx,
)
.await
} else {
// We received None because it is a new file
inner_create_file(location.id, location_local_path, event, library_ctx).await
}
if let Some(ref file_path) =
get_existing_file_path(location, &event.paths[0], false, library_ctx).await?
{
inner_update_file(location, file_path, event, library_ctx).await
} else {
Err(LocationManagerError::LocationMissingLocalPath(location.id))
// We received None because it is a new file
create_file(location, event, library_ctx).await
}
}
pub(super) async fn update_file(
location: indexer_job_location::Data,
location: &indexer_job_location::Data,
event: Event,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
if let Some(ref location_local_path) = location.local_path {
if let Some(file_path) =
get_existing_file_path(&location, &event.paths[0], false, library_ctx).await?
if location.node_id == library_ctx.node_local_id {
if let Some(ref file_path) =
get_existing_file_path(location, &event.paths[0], false, library_ctx).await?
{
let ret = inner_update_file(
&location,
location_local_path,
file_path,
event,
library_ctx,
)
.await;
let ret = inner_update_file(location, file_path, event, library_ctx).await;
invalidate_query!(library_ctx, "locations.getExplorerData");
ret
} else {
@ -281,21 +264,21 @@ pub(super) async fn update_file(
async fn inner_update_file(
location: &indexer_job_location::Data,
location_local_path: &str,
file_path: file_path_with_object::Data,
file_path: &file_path_with_object::Data,
event: Event,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
trace!(
"Location: <root_path ='{location_local_path}'> updating file: {}",
"Location: <root_path ='{}'> updating file: {}",
&location.path,
event.paths[0].display()
);
let FileMetadata {
cas_id,
kind: _,
fs_metadata,
} = FileMetadata::new(location_local_path, &file_path.materialized_path).await?;
..
} = FileMetadata::new(&location.path, &file_path.materialized_path).await?;
if let Some(old_cas_id) = &file_path.cas_id {
if old_cas_id != &cas_id {
@ -327,6 +310,7 @@ async fn inner_update_file(
if file_path
.object
.as_ref()
.map(|o| o.has_thumbnail)
.unwrap_or_default()
{
@ -344,7 +328,7 @@ async fn inner_update_file(
}
pub(super) async fn rename_both_event(
location: indexer_job_location::Data,
location: &indexer_job_location::Data,
event: Event,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
@ -354,22 +338,21 @@ pub(super) async fn rename_both_event(
pub(super) async fn rename(
new_path: impl AsRef<Path>,
old_path: impl AsRef<Path>,
location: indexer_job_location::Data,
location: &indexer_job_location::Data,
library_ctx: &LibraryContext,
) -> Result<(), LocationManagerError> {
let mut old_path_materialized = extract_materialized_path(&location, old_path.as_ref())?
let mut old_path_materialized = extract_materialized_path(location, old_path.as_ref())?
.to_str()
.expect("Found non-UTF-8 path")
.to_string();
let new_path_materialized = extract_materialized_path(&location, new_path.as_ref())?;
let new_path_materialized = extract_materialized_path(location, new_path.as_ref())?;
let mut new_path_materialized_str = new_path_materialized
.to_str()
.expect("Found non-UTF-8 path")
.to_string();
if let Some(file_path) =
get_existing_file_or_directory(&location, old_path, library_ctx).await?
if let Some(file_path) = get_existing_file_or_directory(location, old_path, library_ctx).await?
{
// If the renamed path is a directory, we have to update every successor
if file_path.is_dir {
@ -426,7 +409,7 @@ pub(super) async fn rename(
}
pub(super) async fn remove_event(
location: indexer_job_location::Data,
location: &indexer_job_location::Data,
event: Event,
remove_kind: RemoveKind,
library_ctx: &LibraryContext,
@ -435,7 +418,7 @@ pub(super) async fn remove_event(
// if it doesn't either way, then we don't care
if let Some(file_path) =
get_existing_file_or_directory(&location, &event.paths[0], library_ctx).await?
get_existing_file_or_directory(location, &event.paths[0], library_ctx).await?
{
// check file still exists on disk
match fs::metadata(&event.paths[0]).await {
@ -482,14 +465,7 @@ fn extract_materialized_path(
location: &indexer_job_location::Data,
path: impl AsRef<Path>,
) -> Result<PathBuf, LocationManagerError> {
subtract_location_path(
location
.local_path
.as_ref()
.ok_or(LocationManagerError::LocationMissingLocalPath(location.id))?,
&path,
)
.ok_or_else(|| {
subtract_location_path(&location.path, &path).ok_or_else(|| {
LocationManagerError::UnableToExtractMaterializedPath(
location.id,
path.as_ref().to_path_buf(),

View file

@ -45,16 +45,16 @@ impl EventHandler for WindowsEventHandler {
if metadata.is_file() {
self.create_file_stack = Some(event);
} else {
create_dir(location, event, library_ctx.clone()).await?;
create_dir(&location, event, library_ctx).await?;
}
}
EventKind::Modify(ModifyKind::Any) => {
let metadata = fs::metadata(&event.paths[0]).await?;
if metadata.is_file() {
if let Some(create_file_event) = self.create_file_stack.take() {
create_file(location, create_file_event, library_ctx.clone()).await?;
create_file(&location, create_file_event, library_ctx).await?;
} else {
update_file(location, event, library_ctx).await?;
update_file(&location, event, library_ctx).await?;
}
} else {
warn!("Unexpected Windows modify event on a directory");
@ -68,10 +68,16 @@ impl EventHandler for WindowsEventHandler {
.rename_stack
.take()
.expect("Unexpectedly missing rename from windows event");
rename(&event.paths[0], &from_event.paths[0], location, library_ctx).await?;
rename(
&event.paths[0],
&from_event.paths[0],
&location,
library_ctx,
)
.await?;
}
EventKind::Remove(remove_kind) => {
remove_event(location, event, remove_kind, library_ctx).await?;
remove_event(&location, event, remove_kind, library_ctx).await?;
}
other_event_kind => {

View file

@ -15,6 +15,7 @@ use serde::Deserialize;
use serde_json::json;
use std::{
collections::HashSet,
ffi::OsStr,
path::{Path, PathBuf},
};
@ -93,7 +94,7 @@ impl LocationCreateArgs {
ctx.id,
uuid,
&self.path,
location.name.as_ref().unwrap().clone(),
location.name.clone(),
)
.await?;
@ -129,12 +130,7 @@ impl LocationCreateArgs {
let location = create_location(ctx, uuid, &self.path, &self.indexer_rules_ids).await?;
metadata
.add_library(
ctx.id,
uuid,
&self.path,
location.name.as_ref().unwrap().clone(),
)
.add_library(ctx.id, uuid, &self.path, location.name.clone())
.await?;
info!(
@ -173,8 +169,8 @@ impl LocationUpdateArgs {
let params = [
self.name
.clone()
.filter(|name| location.name.as_ref() != Some(name))
.map(|v| location::name::set(Some(v))),
.filter(|name| &location.name != name)
.map(location::name::set),
self.generate_preview_media
.map(location::generate_preview_media::set),
self.sync_preview_media
@ -192,9 +188,9 @@ impl LocationUpdateArgs {
.exec()
.await?;
if let Some(ref local_path) = location.local_path {
if location.node_id == ctx.node_local_id {
if let Some(mut metadata) =
SpacedriveLocationMetadataFile::try_load(local_path).await?
SpacedriveLocationMetadataFile::try_load(&location.path).await?
{
metadata.update(ctx.id, self.name.unwrap()).await?;
}
@ -268,9 +264,9 @@ pub async fn scan_location(
ctx: &LibraryContext,
location: indexer_job_location::Data,
) -> Result<(), LocationError> {
if location.local_path.is_none() {
return Err(LocationError::MissingLocalPath(location.id));
};
if location.node_id != ctx.node_local_id {
return Ok(());
}
ctx.queue_job(Job::new(
FullFileIdentifierJobInit {
@ -311,13 +307,13 @@ pub async fn relink_location(
.location()
.update(
location::pub_id::equals(metadata.location_pub_id(ctx.id)?.as_ref().to_vec()),
vec![location::local_path::set(Some(
vec![location::path::set(
location_path
.as_ref()
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
))],
)],
)
.exec()
.await?;
@ -331,49 +327,48 @@ async fn create_location(
location_path: impl AsRef<Path>,
indexer_rules_ids: &[i32],
) -> Result<indexer_job_location::Data, LocationError> {
let db = &ctx.db;
let LibraryContext { db, sync, .. } = &ctx;
let location_name = location_path
.as_ref()
let location_path = location_path.as_ref();
let name = location_path
.file_name()
.unwrap()
.to_str()
.unwrap()
.to_string();
.and_then(OsStr::to_str)
.map(str::to_string)
.unwrap();
let local_path = location_path
.as_ref()
let path = location_path
.to_str()
.expect("Found non-UTF-8 path")
.to_string();
.map(str::to_string)
.expect("Found non-UTF-8 path");
let location = ctx
.sync
let location = sync
.write_op(
db,
ctx.sync.owned_create(
sync.owned_create(
sync::location::SyncId {
pub_id: location_pub_id.as_bytes().to_vec(),
},
[
("node", json!({ "pub_id": ctx.id.as_bytes() })),
("name", json!(location_name)),
("local_path", json!(&local_path)),
("name", json!(&name)),
("path", json!(&path)),
],
),
db.location()
.create(
location_pub_id.as_bytes().to_vec(),
name,
path,
node::id::equals(ctx.node_local_id),
vec![
location::name::set(Some(location_name.clone())),
location::local_path::set(Some(local_path)),
],
vec![],
)
.include(indexer_job_location::include()),
)
.await?;
debug!("created in db");
if !indexer_rules_ids.is_empty() {
link_location_and_indexer_rules(ctx, location.id, indexer_rules_ids).await?;
}
@ -414,8 +409,9 @@ pub async fn delete_location(ctx: &LibraryContext, location_id: i32) -> Result<(
.exec()
.await?;
if let Some(local_path) = location.local_path {
if let Ok(Some(mut metadata)) = SpacedriveLocationMetadataFile::try_load(&local_path).await
if location.node_id == ctx.node_local_id {
if let Ok(Some(mut metadata)) =
SpacedriveLocationMetadataFile::try_load(&location.path).await
{
metadata.remove_library(ctx.id).await?;
}
@ -482,22 +478,22 @@ pub async fn delete_directory(
}
// check if a path exists in our database at that location
pub async fn check_virtual_path_exists(
library_ctx: &LibraryContext,
location_id: i32,
subpath: impl AsRef<Path>,
) -> Result<bool, LocationError> {
let path = subpath.as_ref().to_str().unwrap().to_string();
// pub async fn check_virtual_path_exists(
// library_ctx: &LibraryContext,
// location_id: i32,
// subpath: impl AsRef<Path>,
// ) -> Result<bool, LocationError> {
// let path = subpath.as_ref().to_str().unwrap().to_string();
let file_path = library_ctx
.db
.file_path()
.find_first(vec![
file_path::location_id::equals(location_id),
file_path::materialized_path::equals(path),
])
.exec()
.await?;
// let file_path = library_ctx
// .db
// .file_path()
// .find_first(vec![
// file_path::location_id::equals(location_id),
// file_path::materialized_path::equals(path),
// ])
// .exec()
// .await?;
Ok(file_path.is_some())
}
// Ok(file_path.is_some())
// }

View file

@ -45,22 +45,16 @@ pub async fn get_path_from_location_id(
db: &PrismaClient,
location_id: i32,
) -> Result<PathBuf, JobError> {
let location = db
Ok(db
.location()
.find_unique(location::id::equals(location_id))
.exec()
.await?
.ok_or(JobError::MissingData {
value: String::from("location which matches location_id"),
})?;
location
.local_path
.as_ref()
.map(PathBuf::from)
.ok_or(JobError::MissingData {
value: String::from("path when cast as `PathBuf`"),
})
})?
.path
.into())
}
pub async fn context_menu_fs_info(

View file

@ -41,14 +41,13 @@ impl From<&FilePathIdAndLocationIdCursor> for file_path::UniqueWhereParam {
#[derive(Serialize, Deserialize)]
pub struct FullFileIdentifierJobState {
location: location::Data,
location_path: PathBuf,
cursor: FilePathIdAndLocationIdCursor,
report: FileIdentifierReport,
}
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct FileIdentifierReport {
location_path: String,
location_path: PathBuf,
total_orphan_paths: usize,
total_objects_created: usize,
total_objects_linked: usize,
@ -78,12 +77,6 @@ impl StatefulJob for FullFileIdentifierJob {
.await?
.ok_or(IdentifierJobError::MissingLocation(state.init.location_id))?;
let location_path = location
.local_path
.as_ref()
.map(PathBuf::from)
.ok_or(IdentifierJobError::LocationLocalPath(location_id))?;
let orphan_count = count_orphan_file_paths(&ctx.library_ctx, location_id).await?;
info!("Found {} orphan file paths", orphan_count);
@ -106,12 +99,11 @@ impl StatefulJob for FullFileIdentifierJob {
state.data = Some(FullFileIdentifierJobState {
report: FileIdentifierReport {
location_path: location_path.to_str().unwrap_or("").to_string(),
location_path: location.path.clone().into(),
total_orphan_paths: orphan_count,
..Default::default()
},
location,
location_path,
cursor: FilePathIdAndLocationIdCursor {
file_path_id: first_path_id,
location_id: state.init.location_id,
@ -154,13 +146,9 @@ impl StatefulJob for FullFileIdentifierJob {
data.report.total_orphan_paths
);
let (total_objects_created, total_objects_linked) = identifier_job_step(
&ctx.library_ctx,
&data.location,
&data.location_path,
&file_paths,
)
.await?;
let (total_objects_created, total_objects_linked) =
identifier_job_step(&ctx.library_ctx, &data.location, &file_paths).await?;
data.report.total_objects_created += total_objects_created;
data.report.total_objects_linked += total_objects_linked;

View file

@ -33,8 +33,6 @@ pub enum IdentifierJobError {
MissingLocation(i32),
#[error("Root file path not found: <path = '{0}'>")]
MissingRootFilePath(PathBuf),
#[error("Location without local path: <id = '{0}'>")]
LocationLocalPath(i32),
}
#[derive(Debug, Clone)]
@ -80,13 +78,10 @@ impl FileMetadata {
async fn identifier_job_step(
LibraryContext { db, sync, .. }: &LibraryContext,
location: &location::Data,
location_path: impl AsRef<Path>,
file_paths: &[file_path::Data],
) -> Result<(usize, usize), JobError> {
let location_path = location_path.as_ref();
let file_path_metas = join_all(file_paths.iter().map(|file_path| async move {
FileMetadata::new(location_path, &file_path.materialized_path)
FileMetadata::new(&location.path, &file_path.materialized_path)
.await
.map(|params| (file_path.id, (params, file_path)))
}))

View file

@ -47,12 +47,9 @@ pub enum ThumbnailError {
MissingLocation(i32),
#[error("Root file path not found: <path = '{0}'>")]
MissingRootFilePath(PathBuf),
#[error("Location without local path: <id = '{0}'>")]
LocationLocalPath(i32),
}
file_path::include!(file_path_with_object { object });
file_path::select!(file_path_id_only { id });
#[derive(Debug, Serialize, Deserialize, Clone, Copy)]
enum ThumbnailJobStepKind {
@ -79,15 +76,15 @@ impl StatefulJob for ThumbnailJob {
}
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let LibraryContext { db, .. } = &ctx.library_ctx;
let thumbnail_dir = ctx
.library_ctx
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME);
let location = ctx
.library_ctx
.db
let location = db
.location()
.find_unique(location::id::equals(state.init.location_id))
.exec()
@ -101,9 +98,7 @@ impl StatefulJob for ThumbnailJob {
.expect("Found non-UTF-8 path")
.to_string();
let parent_directory_id = ctx
.library_ctx
.db
let parent_directory_id = db
.file_path()
.find_first(vec![
file_path::location_id::equals(state.init.location_id),
@ -114,7 +109,7 @@ impl StatefulJob for ThumbnailJob {
}),
file_path::is_dir::equals(true),
])
.select(file_path_id_only::select())
.select(file_path::select!({ id }))
.exec()
.await?
.ok_or_else(|| ThumbnailError::MissingRootFilePath(state.init.root_path.clone()))?
@ -127,10 +122,7 @@ impl StatefulJob for ThumbnailJob {
// create all necessary directories if they don't exist
fs::create_dir_all(&thumbnail_dir).await?;
let root_path = location
.local_path
.map(PathBuf::from)
.ok_or(ThumbnailError::LocationLocalPath(location.id))?;
let root_path = location.path.into();
// query database for all image files in this location that need thumbnails
let image_files = get_files_by_extensions(

View file

@ -83,7 +83,7 @@ impl StatefulJob for ObjectValidatorJob {
.unwrap();
state.data = Some(ObjectValidatorJobState {
root_path: location.local_path.as_ref().map(PathBuf::from).unwrap(),
root_path: location.path.into(),
task_count: state.steps.len(),
});

View file

@ -328,6 +328,10 @@ impl SyncManager {
.location()
.create(
id.pub_id,
serde_json::from_value(data.remove("name").unwrap())
.unwrap(),
serde_json::from_value(data.remove("path").unwrap())
.unwrap(),
{
let val: std::collections::HashMap<String, Value> =
from_value(data.remove("node").unwrap()).unwrap();

View file

@ -17,12 +17,12 @@ export type Procedures = {
{ key: "keys.listMounted", input: LibraryArgs<null>, result: string[] } |
{ key: "library.getStatistics", input: LibraryArgs<null>, result: Statistics } |
{ key: "library.list", input: never, result: LibraryConfigWrapped[] } |
{ key: "locations.getById", input: LibraryArgs<number>, result: { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, indexer_rules: IndexerRulesInLocation[] } | null } |
{ key: "locations.getById", input: LibraryArgs<number>, result: { id: number, pub_id: number[], node_id: number, name: string, path: string, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, indexer_rules: IndexerRulesInLocation[] } | null } |
{ key: "locations.getExplorerData", input: LibraryArgs<LocationExplorerArgs>, result: ExplorerData } |
{ key: "locations.indexer_rules.get", input: LibraryArgs<number>, result: IndexerRule } |
{ key: "locations.indexer_rules.list", input: LibraryArgs<null>, result: IndexerRule[] } |
{ key: "locations.indexer_rules.listForLocation", input: LibraryArgs<number>, result: IndexerRule[] } |
{ key: "locations.list", input: LibraryArgs<null>, result: { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, node: Node }[] } |
{ key: "locations.list", input: LibraryArgs<null>, result: { id: number, pub_id: number[], node_id: number, name: string, path: string, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, node: Node }[] } |
{ key: "nodeState", input: never, result: NodeState } |
{ key: "tags.get", input: LibraryArgs<number>, result: Tag | null } |
{ key: "tags.getExplorerData", input: LibraryArgs<number>, result: ExplorerData } |
@ -101,6 +101,11 @@ export type CreateLibraryArgs = { name: string, auth: AuthOption, algorithm: Alg
export type EditLibraryArgs = { id: string, name: string | null, description: string | null }
/**
* This should be used for passing an encrypted key around.
*
 * This is always `ENCRYPTED_KEY_LEN` (which is `KEY_LEN` + `AEAD_TAG_LEN`)
*/
export type EncryptedKey = number[]
export type ExplorerContext = ({ type: "Location" } & Location) | ({ type: "Tag" } & Tag)
@ -170,7 +175,7 @@ export type LibraryConfig = ({ version: string | null }) & { name: string, descr
export type LibraryConfigWrapped = { uuid: string, config: LibraryConfig }
export type Location = { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string }
export type Location = { id: number, pub_id: number[], node_id: number, name: string, path: string, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string }
/**
* `LocationCreateArgs` is the argument received from the client using `rspc` to create a new location.
@ -204,6 +209,11 @@ export type NodeConfig = ({ version: string | null }) & { id: string, name: stri
export type NodeState = (({ version: string | null }) & { id: string, name: string, p2p_port: number | null }) & { data_path: string }
/**
* This should be used for providing a nonce to encrypt/decrypt functions.
*
* You may also generate a nonce for a given algorithm with `Nonce::generate()`
*/
export type Nonce = { XChaCha20Poly1305: number[] } | { Aes256Gcm: number[] }
export type Object = { id: number, pub_id: number[], name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string }
@ -213,7 +223,7 @@ export type ObjectValidatorArgs = { id: number, path: string }
/**
* These parameters define the password-hashing level.
*
* The harder the parameter, the longer the password will take to hash.
* The greater the parameter, the longer the password will take to hash.
*/
export type Params = "Standard" | "Hardened" | "Paranoid"
@ -221,6 +231,11 @@ export type RestoreBackupArgs = { password: string, secret_key: string, path: st
export type RuleKind = "AcceptFilesByGlob" | "RejectFilesByGlob" | "AcceptIfChildrenDirectoriesArePresent" | "RejectIfChildrenDirectoriesArePresent"
/**
* This should be used for passing a salt around.
*
* You may also generate a salt with `Salt::generate()`
*/
export type Salt = number[]
export type SetFavoriteArgs = { id: number, favorite: boolean }
@ -230,12 +245,20 @@ export type SetNoteArgs = { id: number, note: string | null }
export type Statistics = { id: number, date_captured: string, total_object_count: number, library_db_size: string, total_bytes_used: string, total_bytes_capacity: string, total_unique_bytes: string, total_bytes_free: string, preview_media_bytes: string }
/**
* This is a stored key, and can be freely written to Prisma/another database.
* This is a stored key, and can be freely written to the database.
*
* It contains no sensitive information that is not encrypted.
*/
export type StoredKey = { uuid: string, version: StoredKeyVersion, key_type: StoredKeyType, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm, content_salt: Salt, master_key: EncryptedKey, master_key_nonce: Nonce, key_nonce: Nonce, key: number[], salt: Salt, memory_only: boolean, automount: boolean }
/**
* This denotes the type of key. `Root` keys can be used to unlock the key manager, and `User` keys are ordinary keys.
*/
export type StoredKeyType = "User" | "Root"
/**
* This denotes the `StoredKey` version.
*/
export type StoredKeyVersion = "V1"
export type Tag = { id: number, pub_id: number[], name: string | null, color: string | null, total_objects: number | null, redundancy_goal: number | null, date_created: string, date_modified: string }

View file

@ -108,7 +108,7 @@ export const Inspector = ({ data, context, ...elementProps }: Props) => {
{context?.type == 'Location' && data?.type === 'Path' && (
<MetaContainer>
<MetaTitle>URI</MetaTitle>
<MetaValue>{`${context.local_path}/${data.item.materialized_path}`}</MetaValue>
<MetaValue>{`${context.path}/${data.item.materialized_path}`}</MetaValue>
</MetaContainer>
)}
<Divider />

View file

@ -36,7 +36,7 @@ export default function LocationListItem({ location }: LocationListItemProps) {
<h1 className="pt-0.5 text-sm font-semibold">{location.name}</h1>
<p className="text-ink-dull mt-0.5 select-text truncate text-sm">
<span className="bg-app-selected mr-1 rounded py-[1px] px-1">{location.node.name}</span>
{location.local_path}
{location.path}
</p>
</div>
<div className="flex grow" />

View file

@ -37,8 +37,8 @@ export default function EditLocation() {
onSuccess: (data) => {
if (data && !isDirty)
form.reset({
displayName: data.name || undefined,
localPath: data.local_path || undefined,
displayName: data.name,
localPath: data.path,
indexer_rules_ids: data.indexer_rules.map((i) => i.indexer_rule_id.toString()),
generatePreviewMedia: data.generate_preview_media,
syncPreviewMedia: data.sync_preview_media,