Mirror of https://github.com/spacedriveapp/spacedrive
Synced 2024-07-08 07:12:49 +00:00

Replace Location.local_path with path (#571)

* replace Location.local_path with path
* Using more references to avoid unneeded moves and removing unneeded error variants
* remove unnecessary stuff
* location id checks

Co-authored-by: Ericson Soares <ericson.ds999@gmail.com>

This commit is contained in:
parent c7dbc784cd
commit fd39dc3a3d
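Note: the heart of this change is a schema migration: Location.local_path (an optional string) becomes a required path column, and name also becomes required, so Rust call sites stop unwrapping an Option<String> and instead gate filesystem access on whether the location belongs to the current node. The sketch below illustrates that before/after access pattern with hand-written stand-ins; the struct and field names only loosely mirror the generated Prisma client and are not taken from this commit.

// Illustrative stand-ins for the generated Prisma client types (not real spacedrive code).
struct OldLocation {
    id: i32,
    node_id: i32,
    local_path: Option<String>, // before #571: may be absent
}

struct NewLocation {
    id: i32,
    node_id: i32,
    path: String, // after #571: always present
}

// Before: every caller had to handle the missing-path case.
fn old_root(location: &OldLocation) -> Result<&str, String> {
    location
        .local_path
        .as_deref()
        .ok_or_else(|| format!("Missing local_path (id: {})", location.id))
}

// After: the path is always there; callers check instead whether the location
// is managed by the local node before touching the filesystem.
fn new_root(location: &NewLocation, local_node_id: i32) -> Option<&str> {
    (location.node_id == local_node_id).then_some(location.path.as_str())
}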
@@ -48,6 +48,9 @@
     },
     "allowlist": {
       "all": true,
+      "notification": {
+        "all": false
+      },
       "protocol": {
         "assetScope": ["*"]
       },

@@ -84,7 +84,7 @@ function LocationItem({ location, index }: { location: Location & { node: Node }
         </Text>
       </View>
       <Text numberOfLines={1} style={tw`text-ink-dull mt-0.5 text-[10px] font-semibold`}>
-        {location.local_path}
+        {location.path}
       </Text>
     </View>
     <CaretRight color={tw.color('ink-dull')} size={18} />

@@ -95,8 +95,8 @@ model Location {
   id Int @id @default(autoincrement())
   pub_id Bytes @unique
   node_id Int
-  name String?
-  local_path String?
+  name String
+  path String
   total_capacity Int?
   available_capacity Int?
   is_archived Boolean @default(false)
@@ -1,6 +1,12 @@
 use crate::{prisma::file_path, Node};

-use std::{cmp::min, io, path::PathBuf, str::FromStr, sync::Arc};
+use std::{
+    cmp::min,
+    io,
+    path::{Path, PathBuf},
+    str::FromStr,
+    sync::Arc,
+};

 use http_range::HttpRange;
 use httpz::{

@@ -15,7 +21,7 @@ use tokio::{
     fs::{self, File},
     io::{AsyncReadExt, AsyncSeekExt, SeekFrom},
 };
-use tracing::{error, warn};
+use tracing::error;
 use uuid::Uuid;

 // This LRU cache allows us to avoid doing a DB lookup on every request.

@@ -118,14 +124,7 @@ async fn handle_file(
         .ok_or_else(|| HandleCustomUriError::NotFound("object"))?;

     let lru_entry = (
-        PathBuf::from(file_path.location.local_path.ok_or_else(|| {
-            warn!(
-                "Location '{}' doesn't have local path set",
-                file_path.location_id
-            );
-            HandleCustomUriError::BadRequest("Location doesn't have `local_path` set!")
-        })?)
-        .join(&file_path.materialized_path),
+        Path::new(&file_path.location.path).join(&file_path.materialized_path),
         file_path.extension,
     );
     FILE_METADATA_CACHE.insert(lru_cache_key, lru_entry.clone());
@@ -78,6 +78,8 @@ impl JobManager {
             }
         });

+        debug!("JobManager initialized");
+
         this
     }

@@ -8,7 +8,7 @@ use util::secure_temp_keystore::SecureTempKeystore;
 use std::{path::Path, sync::Arc};
 use thiserror::Error;
 use tokio::{fs, sync::broadcast};
-use tracing::{error, info};
+use tracing::{debug, error, info};
 use tracing_subscriber::{prelude::*, EnvFilter};

 pub mod api;

@@ -150,6 +150,8 @@ impl Node {
             }
         }

+        debug!("Watching locations");
+
         // Trying to resume possible paused jobs
         let inner_library_manager = Arc::clone(&library_manager);
         let inner_jobs = Arc::clone(&jobs);

@@ -22,6 +22,7 @@ use std::{
 };
 use thiserror::Error;
 use tokio::sync::RwLock;
+use tracing::debug;
 use uuid::Uuid;

 use super::{LibraryConfig, LibraryConfigWrapped, LibraryContext};

@@ -171,6 +172,8 @@ impl LibraryManager {
             node_context,
         });

+        debug!("LibraryManager initialized");
+
         Ok(this)
     }

@@ -25,8 +25,6 @@ pub enum LocationError {
     NotDirectory(PathBuf),
     #[error("Could not find directory in Location (path: {0:?})")]
     DirectoryNotFound(String),
-    #[error("Missing local_path (id: {0})")]
-    MissingLocalPath(i32),
     #[error("Library exists in the location metadata file, must relink: (old_path: {old_path:?}, new_path: {new_path:?})")]
     NeedRelink {
         old_path: PathBuf,

@@ -72,7 +70,7 @@ impl From<LocationError> for rspc::Error {

             // User's fault errors
             LocationError::NotDirectory(_)
-            | LocationError::MissingLocalPath(_)
+            // | LocationError::MissingLocalPath(_)
             | LocationError::NeedRelink { .. }
             | LocationError::AddLibraryToMetadata(_) => {
                 rspc::Error::with_cause(ErrorCode::BadRequest, err.to_string(), err)

@@ -45,7 +45,6 @@ pub struct IndexerJob;
 location::include!(indexer_job_location {
     indexer_rules: select { indexer_rule }
 });
-file_path::select!(file_path_id_only { id });

 /// `IndexerJobInit` receives a `location::Data` object to be indexed
 #[derive(Serialize, Deserialize)]

@@ -63,7 +62,6 @@ impl Hash for IndexerJobInit {
 /// contains some metadata for logging purposes.
 #[derive(Serialize, Deserialize)]
 pub struct IndexerJobData {
-    location_path: PathBuf,
     db_write_start: DateTime<Utc>,
     scan_read_time: Duration,
     total_paths: usize,
@@ -111,14 +109,6 @@ impl StatefulJob for IndexerJob {

     /// Creates a vector of valid path buffers from a directory, chunked into batches of `BATCH_SIZE`.
     async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
-        let location_path = state
-            .init
-            .location
-            .local_path
-            .as_ref()
-            .map(PathBuf::from)
-            .unwrap();
-
         // grab the next id so we can increment in memory for batch inserting
         let first_file_id = get_max_file_path_id(&ctx.library_ctx).await?;

@@ -136,7 +126,7 @@ impl StatefulJob for IndexerJob {
         let scan_start = Instant::now();
         let inner_ctx = ctx.clone();
         let paths = walk(
-            location_path.clone(),
+            &state.init.location.path,
             &indexer_rules_by_kind,
             move |path, total_entries| {
                 IndexerJobData::on_scan_progress(

@@ -191,7 +181,6 @@ impl StatefulJob for IndexerJob {
         let total_entries = paths_entries.len();

         state.data = Some(IndexerJobData {
-            location_path,
             db_write_start: Utc::now(),
             scan_read_time: scan_start.elapsed(),
             total_paths: total_entries,

@@ -228,14 +217,9 @@ impl StatefulJob for IndexerJob {
         ctx: WorkerContext,
         state: &mut JobState<Self>,
     ) -> Result<(), JobError> {
-        let data = &state
-            .data
-            .as_ref()
-            .expect("critical error: missing data on job state");
         let db = &ctx.library_ctx.db;

-        let location_path = &data.location_path;
-        let location_id = state.init.location.id;
+        let location = &state.init.location;

         let (sync_stuff, paths): (Vec<_>, Vec<_>) = state.steps[0]
             .iter()

@@ -255,7 +239,7 @@ impl StatefulJob for IndexerJob {
                 }
                 let mut materialized_path = entry
                     .path
-                    .strip_prefix(location_path)
+                    .strip_prefix(&location.path)
                     .unwrap()
                     .to_str()
                     .expect("Found non-UTF-8 path")

@@ -286,7 +270,7 @@ impl StatefulJob for IndexerJob {
                     ),
                     file_path::create_unchecked(
                         entry.file_id,
-                        location_id,
+                        location.id,
                         materialized_path,
                         name,
                         vec![

@@ -323,7 +307,7 @@ impl StatefulJob for IndexerJob {
             .expect("critical error: missing data on job state");
         info!(
             "scan of {} completed in {:?}. {:?} files found. db write completed in {:?}",
-            state.init.location.local_path.as_ref().unwrap(),
+            state.init.location.path,
             data.scan_read_time,
             data.total_paths,
             (Utc::now() - data.db_write_start)
@@ -52,10 +52,12 @@ impl Ord for WalkEntry {
 /// a list of accepted entries. There are some useful comments in the implementation of this function
 /// in case of doubts.
 pub(super) async fn walk(
-    root: PathBuf,
+    root: impl AsRef<Path>,
     rules_per_kind: &HashMap<RuleKind, Vec<IndexerRule>>,
     update_notifier: impl Fn(&Path, usize),
 ) -> Result<Vec<WalkEntry>, IndexerError> {
+    let root = root.as_ref().to_path_buf();
+
     let mut to_walk = VecDeque::with_capacity(1);
     to_walk.push_back((root.clone(), None));
     let mut indexed_paths = HashMap::new();
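Note: the walk hunk above is representative of the "more references to avoid unneeded moves" part of this commit. The root parameter changes from an owned PathBuf to impl AsRef<Path>, so the indexer can pass &state.init.location.path (a &String) without cloning. A small self-contained sketch of why that call compiles; print_root is a hypothetical stand-in for walk, not code from this commit.

use std::path::{Path, PathBuf};

// Hypothetical stand-in for `walk`: accepts anything path-like, by reference or by value.
fn print_root(root: impl AsRef<Path>) {
    // Convert once up front, as the new `walk` does with `to_path_buf()`.
    let root: PathBuf = root.as_ref().to_path_buf();
    println!("walking {}", root.display());
}

fn main() {
    let stored_path = String::from("/home/user/files"); // e.g. a `path` value loaded from the DB
    print_root(&stored_path);        // &String works: no clone at the call site
    print_root("/tmp");              // string literal
    print_root(PathBuf::from("/a")); // an owned PathBuf still works too
}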
@@ -20,8 +20,8 @@ const LOCATION_CHECK_INTERVAL: Duration = Duration::from_secs(5);
 pub(super) async fn check_online(location: &location::Data, library_ctx: &LibraryContext) -> bool {
     let pub_id = &location.pub_id;

-    if let Some(ref local_path) = location.local_path {
-        match fs::metadata(local_path).await {
+    if location.node_id == library_ctx.node_local_id {
+        match fs::metadata(&location.path).await {
             Ok(_) => {
                 library_ctx.location_manager().add_online(pub_id).await;
                 true

@@ -53,13 +53,12 @@ pub(super) async fn location_check_sleep(
 pub(super) fn watch_location(
     location: location::Data,
     library_id: LibraryId,
-    location_path: impl AsRef<Path>,
     locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
     locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
 ) {
     let location_id = location.id;
     if let Some(mut watcher) = locations_unwatched.remove(&(location_id, library_id)) {
-        if watcher.check_path(location_path) {
+        if watcher.check_path(&location.path) {
             watcher.watch();
         } else {
             watcher.update_data(location, true);

@@ -72,13 +71,12 @@ pub(super) fn watch_location(
 pub(super) fn unwatch_location(
     location: location::Data,
     library_id: LibraryId,
-    location_path: impl AsRef<Path>,
     locations_watched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
     locations_unwatched: &mut HashMap<LocationAndLibraryKey, LocationWatcher>,
 ) {
     let location_id = location.id;
     if let Some(mut watcher) = locations_watched.remove(&(location_id, library_id)) {
-        if watcher.check_path(location_path) {
+        if watcher.check_path(&location.path) {
             watcher.unwatch();
         } else {
             watcher.update_data(location, false)

@@ -149,11 +147,10 @@ pub(super) async fn handle_remove_location_request(
 ) {
     let key = (location_id, library_ctx.id);
     if let Some(location) = get_location(location_id, &library_ctx).await {
-        if let Some(ref local_path_str) = location.local_path.clone() {
+        if location.node_id == library_ctx.node_local_id {
             unwatch_location(
                 location,
                 library_ctx.id,
-                local_path_str,
                 locations_watched,
                 locations_unwatched,
             );

@@ -207,21 +204,14 @@ pub(super) async fn handle_stop_watcher_request(
                 reason: String::from("failed to fetch location from db"),
             })
             .map(|location| {
-                location
-                    .local_path
-                    .clone()
-                    .ok_or(LocationManagerError::LocationMissingLocalPath(location_id))
-                    .map(|local_path_str| {
-                        unwatch_location(
-                            location,
-                            library_ctx.id,
-                            local_path_str,
-                            locations_watched,
-                            locations_unwatched,
-                        );
-                        forced_unwatch.insert(key);
-                    })
-            })?
+                unwatch_location(
+                    location,
+                    library_ctx.id,
+                    locations_watched,
+                    locations_unwatched,
+                );
+                forced_unwatch.insert(key);
+            })
     } else {
         Ok(())
     }

@@ -262,21 +252,14 @@ pub(super) async fn handle_reinit_watcher_request(
                 reason: String::from("failed to fetch location from db"),
             })
             .map(|location| {
-                location
-                    .local_path
-                    .clone()
-                    .ok_or(LocationManagerError::LocationMissingLocalPath(location_id))
-                    .map(|local_path_str| {
-                        watch_location(
-                            location,
-                            library_ctx.id,
-                            local_path_str,
-                            locations_watched,
-                            locations_unwatched,
-                        );
-                        forced_unwatch.remove(&key);
-                    })
-            })?
+                watch_location(
+                    location,
+                    library_ctx.id,
+                    locations_watched,
+                    locations_unwatched,
+                );
+                forced_unwatch.remove(&key);
+            })
     } else {
         Ok(())
     }
@@ -15,14 +15,11 @@ use tokio::{
         oneshot, RwLock,
     },
 };
-use tracing::error;
+use tracing::{debug, error};

 #[cfg(feature = "location-watcher")]
 use tokio::sync::mpsc;

-#[cfg(feature = "location-watcher")]
-use tracing::debug;
-
 #[cfg(feature = "location-watcher")]
 mod watcher;

@@ -117,6 +114,8 @@ impl LocationManager {
     pub fn new() -> Arc<Self> {
         let online_tx = broadcast::channel(16).0;

+        debug!("LocationManager initialized");
+
         #[cfg(feature = "location-watcher")]
         {
             let (location_management_tx, location_management_rx) = mpsc::channel(128);

@@ -130,8 +129,6 @@ impl LocationManager {
                 stop_rx,
             ));

-            debug!("Location manager initialized");
-
             Arc::new(Self {
                 online_locations: Default::default(),
                 online_tx,

@@ -432,14 +429,13 @@ impl LocationManager {
                     // The time to check came for an already removed library, so we just ignore it
                     to_remove.remove(&key);
                 } else if let Some(location) = get_location(location_id, &library_ctx).await {
-                    if let Some(ref local_path_str) = location.local_path.clone() {
+                    if location.node_id == library_ctx.node_local_id {
                         if check_online(&location, &library_ctx).await
                             && !forced_unwatch.contains(&key)
                         {
                             watch_location(
                                 location,
                                 library_ctx.id,
-                                local_path_str,
                                 &mut locations_watched,
                                 &mut locations_unwatched,
                             );

@@ -447,7 +443,6 @@ impl LocationManager {
                             unwatch_location(
                                 location,
                                 library_ctx.id,
-                                local_path_str,
                                 &mut locations_watched,
                                 &mut locations_unwatched,
                             );
@@ -35,16 +35,16 @@ impl EventHandler for LinuxEventHandler {
         match event.kind {
             EventKind::Access(AccessKind::Close(AccessMode::Write)) => {
                 // If a file was closed with write mode, then it was updated or created
-                file_creation_or_update(location, event, library_ctx).await?;
+                file_creation_or_update(&location, event, library_ctx).await?;
             }
             EventKind::Create(CreateKind::Folder) => {
-                create_dir(location, event, library_ctx.clone()).await?;
+                create_dir(&location, event, library_ctx).await?;
             }
             EventKind::Modify(ModifyKind::Name(RenameMode::Both)) => {
-                rename_both_event(location, event, library_ctx).await?;
+                rename_both_event(&location, event, library_ctx).await?;
             }
             EventKind::Remove(remove_kind) => {
-                remove_event(location, event, remove_kind, library_ctx).await?;
+                remove_event(&location, event, remove_kind, library_ctx).await?;
             }
             other_event_kind => {
                 trace!("Other Linux event that we don't handle for now: {other_event_kind:#?}");

@@ -39,11 +39,11 @@ impl EventHandler for MacOsEventHandler {

         match event.kind {
             EventKind::Create(CreateKind::Folder) => {
-                create_dir(location, event, library_ctx.clone()).await?;
+                create_dir(&location, event, library_ctx).await?;
             }
             EventKind::Modify(ModifyKind::Data(DataChange::Content)) => {
                 // If a file had its content modified, then it was updated or created
-                file_creation_or_update(location, event, library_ctx).await?;
+                file_creation_or_update(&location, event, library_ctx).await?;
             }
             EventKind::Modify(ModifyKind::Name(RenameMode::Any)) => {
                 match self.rename_stack.take() {

@@ -51,14 +51,19 @@ impl EventHandler for MacOsEventHandler {
                         self.rename_stack = Some(event);
                     }
                     Some(from_event) => {
-                        rename(&event.paths[0], &from_event.paths[0], location, library_ctx)
-                            .await?;
+                        rename(
+                            &event.paths[0],
+                            &from_event.paths[0],
+                            &location,
+                            library_ctx,
+                        )
+                        .await?;
                     }
                 }
             }

             EventKind::Remove(remove_kind) => {
-                remove_event(location, event, remove_kind, library_ctx).await?;
+                remove_event(&location, event, remove_kind, library_ctx).await?;
             }
             other_event_kind => {
                 trace!("Other MacOS event that we don't handle for now: {other_event_kind:#?}");
@@ -61,7 +61,6 @@ trait EventHandler {
 #[derive(Debug)]
 pub(super) struct LocationWatcher {
     location: location::Data,
-    path: PathBuf,
     watcher: RecommendedWatcher,
     ignore_path_tx: mpsc::UnboundedSender<IgnorePath>,
     handle: Option<JoinHandle<()>>,

@@ -96,13 +95,6 @@ impl LocationWatcher {
             Config::default(),
         )?;

-        let path = PathBuf::from(
-            location
-                .local_path
-                .as_ref()
-                .ok_or(LocationManagerError::LocationMissingLocalPath(location.id))?,
-        );
-
         let handle = tokio::spawn(Self::handle_watch_events(
             location.id,
             library_ctx,

@@ -113,7 +105,6 @@ impl LocationWatcher {

         Ok(Self {
             location,
-            path,
             watcher,
             ignore_path_tx,
             handle: Some(handle),

@@ -214,56 +205,49 @@ impl LocationWatcher {
     }

     pub(super) fn check_path(&self, path: impl AsRef<Path>) -> bool {
-        self.path == path.as_ref()
+        (self.location.path.as_ref() as &Path) == path.as_ref()
     }

     pub(super) fn watch(&mut self) {
-        if let Err(e) = self.watcher.watch(&self.path, RecursiveMode::Recursive) {
-            error!(
-                "Unable to watch location: (path: {}, error: {e:#?})",
-                self.path.display()
-            );
+        let path = &self.location.path;
+        if let Err(e) = self.watcher.watch(path.as_ref(), RecursiveMode::Recursive) {
+            error!("Unable to watch location: (path: {path}, error: {e:#?})");
         } else {
-            debug!("Now watching location: (path: {})", self.path.display());
+            debug!("Now watching location: (path: {path})");
         }
     }

     pub(super) fn unwatch(&mut self) {
-        if let Err(e) = self.watcher.unwatch(&self.path) {
+        let path = &self.location.path;
+        if let Err(e) = self.watcher.unwatch(path.as_ref()) {
            /**************************************** TODO: ****************************************
             * According to an unit test, this error may occur when a subdirectory is removed     *
             * and we try to unwatch the parent directory then we have to check the implications  *
             * of unwatch error for this case.                                                    *
             **************************************************************************************/
-            error!(
-                "Unable to unwatch location: (path: {}, error: {e:#?})",
-                self.path.display()
-            );
+            error!("Unable to unwatch location: (path: {path}, error: {e:#?})",);
         } else {
-            debug!("Stop watching location: (path: {})", self.path.display());
+            debug!("Stop watching location: (path: {path})");
         }
     }

-    pub(super) fn update_data(&mut self, location: location::Data, to_watch: bool) {
+    pub(super) fn update_data(&mut self, new_location: location::Data, to_watch: bool) {
         assert_eq!(
-            self.location.id, location.id,
+            self.location.id, new_location.id,
             "Updated location data must have the same id"
         );
-        let path = PathBuf::from(location.local_path.as_ref().unwrap_or_else(|| {
-            panic!(
-                "Tried to watch a location without local_path: <id='{}'>",
-                location.id
-            )
-        }));

-        if self.path != path {
+        let new_path = self.location.path != new_location.path;
+
+        if new_path {
             self.unwatch();
-            self.path = path;
-            if to_watch {
-                self.watch();
-            }
         }
-        self.location = location;
+
+        self.location = new_location;

+        if new_path && to_watch {
+            self.watch();
+        }
     }
 }

@@ -19,6 +19,7 @@ use crate::{

 use std::{
     collections::HashSet,
+    ffi::OsStr,
     path::{Path, PathBuf},
     str::FromStr,
 };

@@ -49,79 +50,79 @@ pub(super) fn check_event(event: &Event, ignore_paths: &HashSet<PathBuf>) -> boo
 }

 pub(super) async fn create_dir(
-    location: indexer_job_location::Data,
+    location: &indexer_job_location::Data,
     event: Event,
-    library_ctx: LibraryContext,
+    library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
-    if let Some(ref location_local_path) = location.local_path {
-        trace!(
-            "Location: <root_path ='{location_local_path}'> creating directory: {}",
-            event.paths[0].display()
-        );
-
-        if let Some(subpath) = subtract_location_path(location_local_path, &event.paths[0]) {
-            let parent_directory = get_parent_dir(location.id, &subpath, &library_ctx).await?;
-
-            trace!("parent_directory: {:?}", parent_directory);
-
-            if let Some(parent_directory) = parent_directory {
-                let created_path = create_file_path(
-                    &library_ctx,
-                    location.id,
-                    subpath.to_str().expect("Found non-UTF-8 path").to_string(),
-                    subpath
-                        .file_stem()
-                        .unwrap()
-                        .to_str()
-                        .expect("Found non-UTF-8 path")
-                        .to_string(),
-                    None,
-                    Some(parent_directory.id),
-                    true,
-                )
-                .await?;
-
-                info!("Created path: {}", created_path.materialized_path);
-
-                invalidate_query!(library_ctx, "locations.getExplorerData");
-            } else {
-                warn!("Watcher found a path without parent");
-            }
-        }
+    if location.node_id != library_ctx.node_local_id {
+        return Ok(());
     }

+    trace!(
+        "Location: <root_path ='{}'> creating directory: {}",
+        location.path,
+        event.paths[0].display()
+    );
+
+    let Some(subpath) = subtract_location_path(&location.path, &event.paths[0]) else {
+        return Ok(());
+    };
+
+    let parent_directory = get_parent_dir(location.id, &subpath, library_ctx).await?;
+
+    trace!("parent_directory: {:?}", parent_directory);
+
+    let Some(parent_directory) = parent_directory else {
+        warn!("Watcher found a path without parent");
+        return Ok(())
+    };
+
+    let created_path = create_file_path(
+        library_ctx,
+        location.id,
+        subpath
+            .to_str()
+            .map(str::to_string)
+            .expect("Found non-UTF-8 path"),
+        subpath
+            .file_stem()
+            .and_then(OsStr::to_str)
+            .map(str::to_string)
+            .expect("Found non-UTF-8 path"),
+        None,
+        Some(parent_directory.id),
+        true,
+    )
+    .await?;
+
+    info!("Created path: {}", created_path.materialized_path);
+
+    invalidate_query!(library_ctx, "locations.getExplorerData");
+
     Ok(())
 }

 pub(super) async fn create_file(
-    location: indexer_job_location::Data,
-    event: Event,
-    library_ctx: LibraryContext,
-) -> Result<(), LocationManagerError> {
-    if let Some(ref location_local_path) = location.local_path {
-        inner_create_file(location.id, location_local_path, event, &library_ctx).await
-    } else {
-        Err(LocationManagerError::LocationMissingLocalPath(location.id))
-    }
-}
-
-async fn inner_create_file(
-    location_id: LocationId,
-    location_local_path: &str,
+    location: &indexer_job_location::Data,
     event: Event,
     library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
+    if location.node_id != library_ctx.node_local_id {
+        return Ok(());
+    }
+
     trace!(
-        "Location: <root_path ='{location_local_path}'> creating file: {}",
+        "Location: <root_path ='{}'> creating file: {}",
+        &location.path,
        event.paths[0].display()
    );

    let db = &library_ctx.db;

-    let Some(materialized_path) = subtract_location_path(location_local_path, &event.paths[0]) else { return Ok(()) };
+    let Some(materialized_path) = subtract_location_path(&location.path, &event.paths[0]) else { return Ok(()) };

    let Some(parent_directory) =
-        get_parent_dir(location_id, &materialized_path, library_ctx).await?
+        get_parent_dir(location.id, &materialized_path, library_ctx).await?
    else {
        warn!("Watcher found a path without parent");
        return Ok(())
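Note: the rewritten create_dir above replaces the old nested if-let pyramid with let ... else early returns (stable since Rust 1.65), which is what keeps the new body flat even though it performs the same lookups. Below is a minimal sketch of that control-flow shape; strip_root and lookup_parent are hypothetical helpers standing in for subtract_location_path and get_parent_dir, not code from this commit.

// Hypothetical helpers standing in for the real lookup functions in this file.
fn strip_root<'a>(root: &str, full: &'a str) -> Option<&'a str> {
    full.strip_prefix(root)
}

fn lookup_parent(subpath: &str) -> Option<u32> {
    if subpath.is_empty() { None } else { Some(1) }
}

fn create_dir_sketch(root: &str, full_path: &str) -> Result<(), String> {
    // Each guard bails out early instead of opening another `if let` level.
    let Some(subpath) = strip_root(root, full_path) else {
        return Ok(()); // path is outside this location; nothing to do
    };

    let Some(parent_id) = lookup_parent(subpath) else {
        eprintln!("found a path without parent");
        return Ok(());
    };

    println!("creating {subpath} under parent {parent_id}");
    Ok(())
}

fn main() {
    create_dir_sketch("/library", "/library/photos").unwrap();
}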
@@ -129,7 +130,7 @@ async fn inner_create_file(

     let created_file = create_file_path(
         library_ctx,
-        location_id,
+        location.id,
         materialized_path
             .to_str()
             .expect("Found non-UTF-8 path")

@@ -159,7 +160,7 @@ async fn inner_create_file(
         cas_id,
         kind,
         fs_metadata,
-    } = FileMetadata::new(location_local_path, &created_file.materialized_path).await?;
+    } = FileMetadata::new(&location.path, &created_file.materialized_path).await?;

     let existing_object = db
         .object()

@@ -206,7 +207,7 @@ async fn inner_create_file(

     db.file_path()
         .update(
-            file_path::location_id_id(location_id, created_file.id),
+            file_path::location_id_id(location.id, created_file.id),
             vec![file_path::object_id::set(Some(object.id))],
         )
         .exec()

@@ -225,48 +226,30 @@ async fn inner_create_file(
 }

 pub(super) async fn file_creation_or_update(
-    location: indexer_job_location::Data,
+    location: &indexer_job_location::Data,
     event: Event,
     library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
-    if let Some(ref location_local_path) = location.local_path {
-        if let Some(file_path) =
-            get_existing_file_path(&location, &event.paths[0], false, library_ctx).await?
-        {
-            inner_update_file(
-                &location,
-                location_local_path,
-                file_path,
-                event,
-                library_ctx,
-            )
-            .await
-        } else {
-            // We received None because it is a new file
-            inner_create_file(location.id, location_local_path, event, library_ctx).await
-        }
+    if let Some(ref file_path) =
+        get_existing_file_path(location, &event.paths[0], false, library_ctx).await?
+    {
+        inner_update_file(location, file_path, event, library_ctx).await
     } else {
-        Err(LocationManagerError::LocationMissingLocalPath(location.id))
+        // We received None because it is a new file
+        create_file(location, event, library_ctx).await
     }
 }

 pub(super) async fn update_file(
-    location: indexer_job_location::Data,
+    location: &indexer_job_location::Data,
     event: Event,
     library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
-    if let Some(ref location_local_path) = location.local_path {
-        if let Some(file_path) =
-            get_existing_file_path(&location, &event.paths[0], false, library_ctx).await?
+    if location.node_id == library_ctx.node_local_id {
+        if let Some(ref file_path) =
+            get_existing_file_path(location, &event.paths[0], false, library_ctx).await?
         {
-            let ret = inner_update_file(
-                &location,
-                location_local_path,
-                file_path,
-                event,
-                library_ctx,
-            )
-            .await;
+            let ret = inner_update_file(location, file_path, event, library_ctx).await;
             invalidate_query!(library_ctx, "locations.getExplorerData");
             ret
         } else {
@@ -281,21 +264,21 @@ pub(super) async fn update_file(

 async fn inner_update_file(
     location: &indexer_job_location::Data,
-    location_local_path: &str,
-    file_path: file_path_with_object::Data,
+    file_path: &file_path_with_object::Data,
     event: Event,
     library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
     trace!(
-        "Location: <root_path ='{location_local_path}'> updating file: {}",
+        "Location: <root_path ='{}'> updating file: {}",
+        &location.path,
         event.paths[0].display()
     );

     let FileMetadata {
         cas_id,
-        kind: _,
         fs_metadata,
-    } = FileMetadata::new(location_local_path, &file_path.materialized_path).await?;
+        ..
+    } = FileMetadata::new(&location.path, &file_path.materialized_path).await?;

     if let Some(old_cas_id) = &file_path.cas_id {
         if old_cas_id != &cas_id {

@@ -327,6 +310,7 @@ async fn inner_update_file(

     if file_path
         .object
+        .as_ref()
         .map(|o| o.has_thumbnail)
         .unwrap_or_default()
     {

@@ -344,7 +328,7 @@ async fn inner_update_file(
 }

 pub(super) async fn rename_both_event(
-    location: indexer_job_location::Data,
+    location: &indexer_job_location::Data,
     event: Event,
     library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {

@@ -354,22 +338,21 @@ pub(super) async fn rename_both_event(
 pub(super) async fn rename(
     new_path: impl AsRef<Path>,
     old_path: impl AsRef<Path>,
-    location: indexer_job_location::Data,
+    location: &indexer_job_location::Data,
     library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
-    let mut old_path_materialized = extract_materialized_path(&location, old_path.as_ref())?
+    let mut old_path_materialized = extract_materialized_path(location, old_path.as_ref())?
         .to_str()
         .expect("Found non-UTF-8 path")
         .to_string();

-    let new_path_materialized = extract_materialized_path(&location, new_path.as_ref())?;
+    let new_path_materialized = extract_materialized_path(location, new_path.as_ref())?;
     let mut new_path_materialized_str = new_path_materialized
         .to_str()
         .expect("Found non-UTF-8 path")
         .to_string();

-    if let Some(file_path) =
-        get_existing_file_or_directory(&location, old_path, library_ctx).await?
+    if let Some(file_path) = get_existing_file_or_directory(location, old_path, library_ctx).await?
     {
         // If the renamed path is a directory, we have to update every successor
         if file_path.is_dir {

@@ -426,7 +409,7 @@ pub(super) async fn rename(
 }

 pub(super) async fn remove_event(
-    location: indexer_job_location::Data,
+    location: &indexer_job_location::Data,
     event: Event,
     remove_kind: RemoveKind,
     library_ctx: &LibraryContext,

@@ -435,7 +418,7 @@ pub(super) async fn remove_event(

     // if it doesn't either way, then we don't care
     if let Some(file_path) =
-        get_existing_file_or_directory(&location, &event.paths[0], library_ctx).await?
+        get_existing_file_or_directory(location, &event.paths[0], library_ctx).await?
     {
         // check file still exists on disk
         match fs::metadata(&event.paths[0]).await {

@@ -482,14 +465,7 @@ fn extract_materialized_path(
     location: &indexer_job_location::Data,
     path: impl AsRef<Path>,
 ) -> Result<PathBuf, LocationManagerError> {
-    subtract_location_path(
-        location
-            .local_path
-            .as_ref()
-            .ok_or(LocationManagerError::LocationMissingLocalPath(location.id))?,
-        &path,
-    )
-    .ok_or_else(|| {
+    subtract_location_path(&location.path, &path).ok_or_else(|| {
         LocationManagerError::UnableToExtractMaterializedPath(
             location.id,
             path.as_ref().to_path_buf(),
|
@ -45,16 +45,16 @@ impl EventHandler for WindowsEventHandler {
|
||||||
if metadata.is_file() {
|
if metadata.is_file() {
|
||||||
self.create_file_stack = Some(event);
|
self.create_file_stack = Some(event);
|
||||||
} else {
|
} else {
|
||||||
create_dir(location, event, library_ctx.clone()).await?;
|
create_dir(&location, event, library_ctx).await?;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
EventKind::Modify(ModifyKind::Any) => {
|
EventKind::Modify(ModifyKind::Any) => {
|
||||||
let metadata = fs::metadata(&event.paths[0]).await?;
|
let metadata = fs::metadata(&event.paths[0]).await?;
|
||||||
if metadata.is_file() {
|
if metadata.is_file() {
|
||||||
if let Some(create_file_event) = self.create_file_stack.take() {
|
if let Some(create_file_event) = self.create_file_stack.take() {
|
||||||
create_file(location, create_file_event, library_ctx.clone()).await?;
|
create_file(&location, create_file_event, library_ctx).await?;
|
||||||
} else {
|
} else {
|
||||||
update_file(location, event, library_ctx).await?;
|
update_file(&location, event, library_ctx).await?;
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
warn!("Unexpected Windows modify event on a directory");
|
warn!("Unexpected Windows modify event on a directory");
|
||||||
|
@ -68,10 +68,16 @@ impl EventHandler for WindowsEventHandler {
|
||||||
.rename_stack
|
.rename_stack
|
||||||
.take()
|
.take()
|
||||||
.expect("Unexpectedly missing rename from windows event");
|
.expect("Unexpectedly missing rename from windows event");
|
||||||
rename(&event.paths[0], &from_event.paths[0], location, library_ctx).await?;
|
rename(
|
||||||
|
&event.paths[0],
|
||||||
|
&from_event.paths[0],
|
||||||
|
&location,
|
||||||
|
library_ctx,
|
||||||
|
)
|
||||||
|
.await?;
|
||||||
}
|
}
|
||||||
EventKind::Remove(remove_kind) => {
|
EventKind::Remove(remove_kind) => {
|
||||||
remove_event(location, event, remove_kind, library_ctx).await?;
|
remove_event(&location, event, remove_kind, library_ctx).await?;
|
||||||
}
|
}
|
||||||
|
|
||||||
other_event_kind => {
|
other_event_kind => {
|
||||||
|
|
|
@ -15,6 +15,7 @@ use serde::Deserialize;
|
||||||
use serde_json::json;
|
use serde_json::json;
|
||||||
use std::{
|
use std::{
|
||||||
collections::HashSet,
|
collections::HashSet,
|
||||||
|
ffi::OsStr,
|
||||||
path::{Path, PathBuf},
|
path::{Path, PathBuf},
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -93,7 +94,7 @@ impl LocationCreateArgs {
|
||||||
ctx.id,
|
ctx.id,
|
||||||
uuid,
|
uuid,
|
||||||
&self.path,
|
&self.path,
|
||||||
location.name.as_ref().unwrap().clone(),
|
location.name.clone(),
|
||||||
)
|
)
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
|
@ -129,12 +130,7 @@ impl LocationCreateArgs {
|
||||||
let location = create_location(ctx, uuid, &self.path, &self.indexer_rules_ids).await?;
|
let location = create_location(ctx, uuid, &self.path, &self.indexer_rules_ids).await?;
|
||||||
|
|
||||||
metadata
|
metadata
|
||||||
.add_library(
|
.add_library(ctx.id, uuid, &self.path, location.name.clone())
|
||||||
ctx.id,
|
|
||||||
uuid,
|
|
||||||
&self.path,
|
|
||||||
location.name.as_ref().unwrap().clone(),
|
|
||||||
)
|
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
info!(
|
info!(
|
||||||
|
@ -173,8 +169,8 @@ impl LocationUpdateArgs {
|
||||||
let params = [
|
let params = [
|
||||||
self.name
|
self.name
|
||||||
.clone()
|
.clone()
|
||||||
.filter(|name| location.name.as_ref() != Some(name))
|
.filter(|name| &location.name != name)
|
||||||
.map(|v| location::name::set(Some(v))),
|
.map(location::name::set),
|
||||||
self.generate_preview_media
|
self.generate_preview_media
|
||||||
.map(location::generate_preview_media::set),
|
.map(location::generate_preview_media::set),
|
||||||
self.sync_preview_media
|
self.sync_preview_media
|
||||||
|
@ -192,9 +188,9 @@ impl LocationUpdateArgs {
|
||||||
.exec()
|
.exec()
|
||||||
.await?;
|
.await?;
|
||||||
|
|
||||||
if let Some(ref local_path) = location.local_path {
|
if location.node_id == ctx.node_local_id {
|
||||||
if let Some(mut metadata) =
|
if let Some(mut metadata) =
|
||||||
SpacedriveLocationMetadataFile::try_load(local_path).await?
|
SpacedriveLocationMetadataFile::try_load(&location.path).await?
|
||||||
{
|
{
|
||||||
metadata.update(ctx.id, self.name.unwrap()).await?;
|
metadata.update(ctx.id, self.name.unwrap()).await?;
|
||||||
}
|
}
|
||||||
|
@ -268,9 +264,9 @@ pub async fn scan_location(
|
||||||
ctx: &LibraryContext,
|
ctx: &LibraryContext,
|
||||||
location: indexer_job_location::Data,
|
location: indexer_job_location::Data,
|
||||||
) -> Result<(), LocationError> {
|
) -> Result<(), LocationError> {
|
||||||
if location.local_path.is_none() {
|
if location.node_id != ctx.node_local_id {
|
||||||
return Err(LocationError::MissingLocalPath(location.id));
|
return Ok(());
|
||||||
};
|
}
|
||||||
|
|
||||||
ctx.queue_job(Job::new(
|
ctx.queue_job(Job::new(
|
||||||
FullFileIdentifierJobInit {
|
FullFileIdentifierJobInit {
|
||||||
|
@ -311,13 +307,13 @@ pub async fn relink_location(
|
||||||
.location()
|
.location()
|
||||||
.update(
|
.update(
|
||||||
location::pub_id::equals(metadata.location_pub_id(ctx.id)?.as_ref().to_vec()),
|
location::pub_id::equals(metadata.location_pub_id(ctx.id)?.as_ref().to_vec()),
|
||||||
vec![location::local_path::set(Some(
|
vec![location::path::set(
|
||||||
location_path
|
location_path
|
||||||
.as_ref()
|
.as_ref()
|
||||||
.to_str()
|
.to_str()
|
||||||
.expect("Found non-UTF-8 path")
|
.expect("Found non-UTF-8 path")
|
||||||
.to_string(),
|
.to_string(),
|
||||||
))],
|
)],
|
||||||
)
|
)
|
||||||
.exec()
|
.exec()
|
||||||
.await?;
|
.await?;
|
||||||
|
@@ -331,49 +327,48 @@ async fn create_location(
     location_path: impl AsRef<Path>,
     indexer_rules_ids: &[i32],
 ) -> Result<indexer_job_location::Data, LocationError> {
-    let db = &ctx.db;
+    let LibraryContext { db, sync, .. } = &ctx;

-    let location_name = location_path
-        .as_ref()
+    let location_path = location_path.as_ref();
+    let name = location_path
         .file_name()
-        .unwrap()
-        .to_str()
-        .unwrap()
-        .to_string();
+        .and_then(OsStr::to_str)
+        .map(str::to_string)
+        .unwrap();

-    let local_path = location_path
-        .as_ref()
+    let path = location_path
         .to_str()
-        .expect("Found non-UTF-8 path")
-        .to_string();
+        .map(str::to_string)
+        .expect("Found non-UTF-8 path");

-    let location = ctx
-        .sync
+    let location = sync
         .write_op(
             db,
-            ctx.sync.owned_create(
+            sync.owned_create(
                 sync::location::SyncId {
                     pub_id: location_pub_id.as_bytes().to_vec(),
                 },
                 [
                     ("node", json!({ "pub_id": ctx.id.as_bytes() })),
-                    ("name", json!(location_name)),
-                    ("local_path", json!(&local_path)),
+                    ("name", json!(&name)),
+                    ("path", json!(&path)),
                 ],
             ),
             db.location()
                 .create(
                     location_pub_id.as_bytes().to_vec(),
+                    name,
+                    path,
                     node::id::equals(ctx.node_local_id),
-                    vec![
-                        location::name::set(Some(location_name.clone())),
-                        location::local_path::set(Some(local_path)),
-                    ],
+                    vec![],
                 )
                 .include(indexer_job_location::include()),
         )
         .await?;

+    debug!("created in db");
+
     if !indexer_rules_ids.is_empty() {
         link_location_and_indexer_rules(ctx, location.id, indexer_rules_ids).await?;
     }
@@ -414,8 +409,9 @@ pub async fn delete_location(ctx: &LibraryContext, location_id: i32) -> Result<(
         .exec()
         .await?;

-    if let Some(local_path) = location.local_path {
-        if let Ok(Some(mut metadata)) = SpacedriveLocationMetadataFile::try_load(&local_path).await
+    if location.node_id == ctx.node_local_id {
+        if let Ok(Some(mut metadata)) =
+            SpacedriveLocationMetadataFile::try_load(&location.path).await
         {
             metadata.remove_library(ctx.id).await?;
         }

@@ -482,22 +478,22 @@ pub async fn delete_directory(
 }

 // check if a path exists in our database at that location
-pub async fn check_virtual_path_exists(
-    library_ctx: &LibraryContext,
-    location_id: i32,
-    subpath: impl AsRef<Path>,
-) -> Result<bool, LocationError> {
-    let path = subpath.as_ref().to_str().unwrap().to_string();
+// pub async fn check_virtual_path_exists(
+//     library_ctx: &LibraryContext,
+//     location_id: i32,
+//     subpath: impl AsRef<Path>,
+// ) -> Result<bool, LocationError> {
+//     let path = subpath.as_ref().to_str().unwrap().to_string();

-    let file_path = library_ctx
-        .db
-        .file_path()
-        .find_first(vec![
-            file_path::location_id::equals(location_id),
-            file_path::materialized_path::equals(path),
-        ])
-        .exec()
-        .await?;
+//     let file_path = library_ctx
+//         .db
+//         .file_path()
+//         .find_first(vec![
+//             file_path::location_id::equals(location_id),
+//             file_path::materialized_path::equals(path),
+//         ])
+//         .exec()
+//         .await?;

-    Ok(file_path.is_some())
-}
+//     Ok(file_path.is_some())
+// }
@@ -45,22 +45,16 @@ pub async fn get_path_from_location_id(
     db: &PrismaClient,
     location_id: i32,
 ) -> Result<PathBuf, JobError> {
-    let location = db
+    Ok(db
         .location()
         .find_unique(location::id::equals(location_id))
         .exec()
         .await?
         .ok_or(JobError::MissingData {
             value: String::from("location which matches location_id"),
-        })?;
-
-    location
-        .local_path
-        .as_ref()
-        .map(PathBuf::from)
-        .ok_or(JobError::MissingData {
-            value: String::from("path when cast as `PathBuf`"),
-        })
+        })?
+        .path
+        .into())
 }

 pub async fn context_menu_fs_info(

@@ -41,14 +41,13 @@ impl From<&FilePathIdAndLocationIdCursor> for file_path::UniqueWhereParam {
 #[derive(Serialize, Deserialize)]
 pub struct FullFileIdentifierJobState {
     location: location::Data,
-    location_path: PathBuf,
     cursor: FilePathIdAndLocationIdCursor,
     report: FileIdentifierReport,
 }

 #[derive(Serialize, Deserialize, Debug, Default)]
 pub struct FileIdentifierReport {
-    location_path: String,
+    location_path: PathBuf,
     total_orphan_paths: usize,
     total_objects_created: usize,
     total_objects_linked: usize,
|
@ -78,12 +77,6 @@ impl StatefulJob for FullFileIdentifierJob {
|
||||||
.await?
|
.await?
|
||||||
.ok_or(IdentifierJobError::MissingLocation(state.init.location_id))?;
|
.ok_or(IdentifierJobError::MissingLocation(state.init.location_id))?;
|
||||||
|
|
||||||
let location_path = location
|
|
||||||
.local_path
|
|
||||||
.as_ref()
|
|
||||||
.map(PathBuf::from)
|
|
||||||
.ok_or(IdentifierJobError::LocationLocalPath(location_id))?;
|
|
||||||
|
|
||||||
let orphan_count = count_orphan_file_paths(&ctx.library_ctx, location_id).await?;
|
let orphan_count = count_orphan_file_paths(&ctx.library_ctx, location_id).await?;
|
||||||
info!("Found {} orphan file paths", orphan_count);
|
info!("Found {} orphan file paths", orphan_count);
|
||||||
|
|
||||||
|
@ -106,12 +99,11 @@ impl StatefulJob for FullFileIdentifierJob {
|
||||||
|
|
||||||
state.data = Some(FullFileIdentifierJobState {
|
state.data = Some(FullFileIdentifierJobState {
|
||||||
report: FileIdentifierReport {
|
report: FileIdentifierReport {
|
||||||
location_path: location_path.to_str().unwrap_or("").to_string(),
|
location_path: location.path.clone().into(),
|
||||||
total_orphan_paths: orphan_count,
|
total_orphan_paths: orphan_count,
|
||||||
..Default::default()
|
..Default::default()
|
||||||
},
|
},
|
||||||
location,
|
location,
|
||||||
location_path,
|
|
||||||
cursor: FilePathIdAndLocationIdCursor {
|
cursor: FilePathIdAndLocationIdCursor {
|
||||||
file_path_id: first_path_id,
|
file_path_id: first_path_id,
|
||||||
location_id: state.init.location_id,
|
location_id: state.init.location_id,
|
||||||
|
@ -154,13 +146,9 @@ impl StatefulJob for FullFileIdentifierJob {
|
||||||
data.report.total_orphan_paths
|
data.report.total_orphan_paths
|
||||||
);
|
);
|
||||||
|
|
||||||
let (total_objects_created, total_objects_linked) = identifier_job_step(
|
let (total_objects_created, total_objects_linked) =
|
||||||
&ctx.library_ctx,
|
identifier_job_step(&ctx.library_ctx, &data.location, &file_paths).await?;
|
||||||
&data.location,
|
|
||||||
&data.location_path,
|
|
||||||
&file_paths,
|
|
||||||
)
|
|
||||||
.await?;
|
|
||||||
data.report.total_objects_created += total_objects_created;
|
data.report.total_objects_created += total_objects_created;
|
||||||
data.report.total_objects_linked += total_objects_linked;
|
data.report.total_objects_linked += total_objects_linked;
|
||||||
|
|
||||||
@@ -33,8 +33,6 @@ pub enum IdentifierJobError {
 	MissingLocation(i32),
 	#[error("Root file path not found: <path = '{0}'>")]
 	MissingRootFilePath(PathBuf),
-	#[error("Location without local path: <id = '{0}'>")]
-	LocationLocalPath(i32),
 }

 #[derive(Debug, Clone)]

@@ -80,13 +78,10 @@ impl FileMetadata {
 async fn identifier_job_step(
 	LibraryContext { db, sync, .. }: &LibraryContext,
 	location: &location::Data,
-	location_path: impl AsRef<Path>,
 	file_paths: &[file_path::Data],
 ) -> Result<(usize, usize), JobError> {
-	let location_path = location_path.as_ref();
-
 	let file_path_metas = join_all(file_paths.iter().map(|file_path| async move {
-		FileMetadata::new(location_path, &file_path.materialized_path)
+		FileMetadata::new(&location.path, &file_path.materialized_path)
 			.await
 			.map(|params| (file_path.id, (params, file_path)))
 	}))
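
Dropping the separate `location_path` argument removes the one way this step could receive a root path that disagreed with its `location::Data`. A comment sketch of the call shape after the change (names as they appear in this diff):

    // Callers pass only the location; the root is read from it internally:
    // let (created, linked) =
    //     identifier_job_step(&ctx.library_ctx, &data.location, &file_paths).await?;
    //
    // Inside, each entry resolves relative to the location root:
    // FileMetadata::new(&location.path, &file_path.materialized_path).await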
@@ -47,12 +47,9 @@ pub enum ThumbnailError {
 	MissingLocation(i32),
 	#[error("Root file path not found: <path = '{0}'>")]
 	MissingRootFilePath(PathBuf),
-	#[error("Location without local path: <id = '{0}'>")]
-	LocationLocalPath(i32),
 }

 file_path::include!(file_path_with_object { object });
-file_path::select!(file_path_id_only { id });

 #[derive(Debug, Serialize, Deserialize, Clone, Copy)]
 enum ThumbnailJobStepKind {

@@ -79,15 +76,15 @@ impl StatefulJob for ThumbnailJob {
 	}

 	async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
+		let LibraryContext { db, .. } = &ctx.library_ctx;
+
 		let thumbnail_dir = ctx
 			.library_ctx
 			.config()
 			.data_directory()
 			.join(THUMBNAIL_CACHE_DIR_NAME);

-		let location = ctx
-			.library_ctx
-			.db
+		let location = db
 			.location()
 			.find_unique(location::id::equals(state.init.location_id))
 			.exec()

@@ -101,9 +98,7 @@ impl StatefulJob for ThumbnailJob {
 			.expect("Found non-UTF-8 path")
 			.to_string();

-		let parent_directory_id = ctx
-			.library_ctx
-			.db
+		let parent_directory_id = db
 			.file_path()
 			.find_first(vec![
 				file_path::location_id::equals(state.init.location_id),

@@ -114,7 +109,7 @@ impl StatefulJob for ThumbnailJob {
 				}),
 				file_path::is_dir::equals(true),
 			])
-			.select(file_path_id_only::select())
+			.select(file_path::select!({ id }))
 			.exec()
 			.await?
 			.ok_or_else(|| ThumbnailError::MissingRootFilePath(state.init.root_path.clone()))?
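
With its only remaining consumer inlined, the module-level `file_path_id_only` select is deleted outright. A comment sketch of the two equivalent prisma-client-rust selection forms, as used in this hunk:

    // Named, reusable selection (removed):
    // file_path::select!(file_path_id_only { id });
    // ...
    // .select(file_path_id_only::select())

    // Inline selection (added): the same id-only projection, built at the call site.
    // .select(file_path::select!({ id }))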
@@ -127,10 +122,7 @@ impl StatefulJob for ThumbnailJob {

 		// create all necessary directories if they don't exist
 		fs::create_dir_all(&thumbnail_dir).await?;
-		let root_path = location
-			.local_path
-			.map(PathBuf::from)
-			.ok_or(ThumbnailError::LocationLocalPath(location.id))?;
+		let root_path = location.path.into();

 		// query database for all image files in this location that need thumbnails
 		let image_files = get_files_by_extensions(

@@ -83,7 +83,7 @@ impl StatefulJob for ObjectValidatorJob {
 			.unwrap();

 		state.data = Some(ObjectValidatorJobState {
-			root_path: location.local_path.as_ref().map(PathBuf::from).unwrap(),
+			root_path: location.path.into(),
 			task_count: state.steps.len(),
 		});

@@ -328,6 +328,10 @@ impl SyncManager {
 			.location()
 			.create(
 				id.pub_id,
+				serde_json::from_value(data.remove("name").unwrap())
+					.unwrap(),
+				serde_json::from_value(data.remove("path").unwrap())
+					.unwrap(),
 				{
 					let val: std::collections::HashMap<String, Value> =
 						from_value(data.remove("node").unwrap()).unwrap();
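
The sync manager hunk follows directly from the schema change: once `name` and `path` are required columns, prisma-client-rust's generated `.create(...)` takes them as leading positional arguments rather than optional set-params, so both must be pulled out of the sync payload up front. A comment sketch under that assumption (payload keys from this hunk; the trailing arguments are elided):

    // Required columns become leading positional arguments of create():
    // db.location().create(
    //     id.pub_id,
    //     name, // String deserialized from data["name"]
    //     path, // String deserialized from data["path"]
    //     /* node relation param and remaining optional set-params */
    // )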
@@ -17,12 +17,12 @@ export type Procedures = {
 	{ key: "keys.listMounted", input: LibraryArgs<null>, result: string[] } |
 	{ key: "library.getStatistics", input: LibraryArgs<null>, result: Statistics } |
 	{ key: "library.list", input: never, result: LibraryConfigWrapped[] } |
-	{ key: "locations.getById", input: LibraryArgs<number>, result: { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, indexer_rules: IndexerRulesInLocation[] } | null } |
+	{ key: "locations.getById", input: LibraryArgs<number>, result: { id: number, pub_id: number[], node_id: number, name: string, path: string, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, indexer_rules: IndexerRulesInLocation[] } | null } |
 	{ key: "locations.getExplorerData", input: LibraryArgs<LocationExplorerArgs>, result: ExplorerData } |
 	{ key: "locations.indexer_rules.get", input: LibraryArgs<number>, result: IndexerRule } |
 	{ key: "locations.indexer_rules.list", input: LibraryArgs<null>, result: IndexerRule[] } |
 	{ key: "locations.indexer_rules.listForLocation", input: LibraryArgs<number>, result: IndexerRule[] } |
-	{ key: "locations.list", input: LibraryArgs<null>, result: { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, node: Node }[] } |
+	{ key: "locations.list", input: LibraryArgs<null>, result: { id: number, pub_id: number[], node_id: number, name: string, path: string, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, node: Node }[] } |
 	{ key: "nodeState", input: never, result: NodeState } |
 	{ key: "tags.get", input: LibraryArgs<number>, result: Tag | null } |
 	{ key: "tags.getExplorerData", input: LibraryArgs<number>, result: ExplorerData } |

@@ -101,6 +101,11 @@ export type CreateLibraryArgs = { name: string, auth: AuthOption, algorithm: Alg

 export type EditLibraryArgs = { id: string, name: string | null, description: string | null }

+/**
+ * This should be used for passing an encrypted key around.
+ *
+ * This is always `ENCRYPTED_KEY_LEN` (which is `KEY_LEM` + `AEAD_TAG_LEN`)
+ */
 export type EncryptedKey = number[]

 export type ExplorerContext = ({ type: "Location" } & Location) | ({ type: "Tag" } & Tag)

@@ -170,7 +175,7 @@ export type LibraryConfig = ({ version: string | null }) & { name: string, descr

 export type LibraryConfigWrapped = { uuid: string, config: LibraryConfig }

-export type Location = { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string }
+export type Location = { id: number, pub_id: number[], node_id: number, name: string, path: string, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string }

 /**
  * `LocationCreateArgs` is the argument received from the client using `rspc` to create a new location.

@@ -204,6 +209,11 @@ export type NodeConfig = ({ version: string | null }) & { id: string, name: stri

 export type NodeState = (({ version: string | null }) & { id: string, name: string, p2p_port: number | null }) & { data_path: string }

+/**
+ * This should be used for providing a nonce to encrypt/decrypt functions.
+ *
+ * You may also generate a nonce for a given algorithm with `Nonce::generate()`
+ */
 export type Nonce = { XChaCha20Poly1305: number[] } | { Aes256Gcm: number[] }

 export type Object = { id: number, pub_id: number[], name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string }

@@ -213,7 +223,7 @@ export type ObjectValidatorArgs = { id: number, path: string }

 /**
  * These parameters define the password-hashing level.
  *
- * The harder the parameter, the longer the password will take to hash.
+ * The greater the parameter, the longer the password will take to hash.
  */
 export type Params = "Standard" | "Hardened" | "Paranoid"

@@ -221,6 +231,11 @@ export type RestoreBackupArgs = { password: string, secret_key: string, path: st

 export type RuleKind = "AcceptFilesByGlob" | "RejectFilesByGlob" | "AcceptIfChildrenDirectoriesArePresent" | "RejectIfChildrenDirectoriesArePresent"

+/**
+ * This should be used for passing a salt around.
+ *
+ * You may also generate a salt with `Salt::generate()`
+ */
 export type Salt = number[]

 export type SetFavoriteArgs = { id: number, favorite: boolean }

@@ -230,12 +245,20 @@ export type SetNoteArgs = { id: number, note: string | null }
 export type Statistics = { id: number, date_captured: string, total_object_count: number, library_db_size: string, total_bytes_used: string, total_bytes_capacity: string, total_unique_bytes: string, total_bytes_free: string, preview_media_bytes: string }

 /**
- * This is a stored key, and can be freely written to Prisma/another database.
+ * This is a stored key, and can be freely written to the database.
+ *
+ * It contains no sensitive information that is not encrypted.
  */
 export type StoredKey = { uuid: string, version: StoredKeyVersion, key_type: StoredKeyType, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm, content_salt: Salt, master_key: EncryptedKey, master_key_nonce: Nonce, key_nonce: Nonce, key: number[], salt: Salt, memory_only: boolean, automount: boolean }

+/**
+ * This denotes the type of key. `Root` keys can be used to unlock the key manager, and `User` keys are ordinary keys.
+ */
 export type StoredKeyType = "User" | "Root"

+/**
+ * This denotes the `StoredKey` version.
+ */
 export type StoredKeyVersion = "V1"

 export type Tag = { id: number, pub_id: number[], name: string | null, color: string | null, total_objects: number | null, redundancy_goal: number | null, date_created: string, date_modified: string }

@@ -108,7 +108,7 @@ export const Inspector = ({ data, context, ...elementProps }: Props) => {
 	{context?.type == 'Location' && data?.type === 'Path' && (
 		<MetaContainer>
 			<MetaTitle>URI</MetaTitle>
-			<MetaValue>{`${context.local_path}/${data.item.materialized_path}`}</MetaValue>
+			<MetaValue>{`${context.path}/${data.item.materialized_path}`}</MetaValue>
 		</MetaContainer>
 	)}
 	<Divider />

@@ -36,7 +36,7 @@ export default function LocationListItem({ location }: LocationListItemProps) {
 	<h1 className="pt-0.5 text-sm font-semibold">{location.name}</h1>
 	<p className="text-ink-dull mt-0.5 select-text truncate text-sm">
 		<span className="bg-app-selected mr-1 rounded py-[1px] px-1">{location.node.name}</span>
-		{location.local_path}
+		{location.path}
 	</p>
 </div>
 <div className="flex grow" />

@@ -37,8 +37,8 @@ export default function EditLocation() {
 	onSuccess: (data) => {
 		if (data && !isDirty)
 			form.reset({
-				displayName: data.name || undefined,
-				localPath: data.local_path || undefined,
+				displayName: data.name,
+				localPath: data.path,
 				indexer_rules_ids: data.indexer_rules.map((i) => i.indexer_rule_id.toString()),
 				generatePreviewMedia: data.generate_preview_media,
 				syncPreviewMedia: data.sync_preview_media,