mirror of https://github.com/spacedriveapp/spacedrive
synced 2024-07-02 10:03:28 +00:00
Fix unique constraint on file path table (#585)
* Complying with a pedantic Clippy
* Some task runners for vscode
* Solving duplicated directories events

  When creating a directory through macOS's Finder, for some reason FSEvents receives 2 Create Folder events that we have to handle
* Damn rustfmt
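The macOS fix boils down to remembering the last folder-create event and swallowing an immediate repeat for the same path. A minimal sketch of that deduplication, assuming the notify crate's Event and EventKind types used elsewhere in this diff (the handler and field names mirror the patch; the surrounding watcher plumbing is omitted):

	use notify::event::CreateKind;
	use notify::{Event, EventKind};

	#[derive(Debug, Default)]
	struct MacOsEventHandler {
		// Last folder-create event seen; used to swallow Finder's duplicate.
		latest_created_dir: Option<Event>,
	}

	impl MacOsEventHandler {
		fn handle_event(&mut self, event: Event) {
			if let EventKind::Create(CreateKind::Folder) = event.kind {
				if let Some(prev) = self.latest_created_dir.take() {
					// Finder fires two Create Folder events for one new folder;
					// handling the second would hit the file_path unique constraint.
					if event.paths[0] == prev.paths[0] {
						return;
					}
				}
				// ... insert the directory record into the database here ...
				self.latest_created_dir = Some(event);
			}
		}
	}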
This commit is contained in:
parent ad157f58a5
commit fe215566d6
.vscode/tasks.json vendored Normal file (81 additions)
@@ -0,0 +1,81 @@
+{
+	"version": "2.0.0",
+	"tasks": [
+		{
+			"type": "cargo",
+			"command": "clippy",
+			"problemMatcher": ["$rustc"],
+			"group": {
+				"kind": "build",
+				"isDefault": true
+			},
+			"label": "rust: cargo clippy",
+			"args": ["--all-targets", "--all-features", "--all"]
+		},
+		{
+			"type": "npm",
+			"script": "prep",
+			"label": "pnpm: prep",
+			"group": "none",
+			"problemMatcher": ["$rustc"]
+		},
+		{
+			"type": "shell",
+			"label": "ui:dev",
+			"problemMatcher": {
+				"owner": "vite",
+				"fileLocation": "autoDetect",
+				"pattern": {
+					"regexp": "^([^\\s].*)\\((\\d+|\\d+,\\d+|\\d+,\\d+,\\d+,\\d+)\\):\\s+(error|warning|info)\\s+(TS\\d+)\\s*:\\s*(.*)$",
+					"file": 1,
+					"location": 2,
+					"severity": 3,
+					"code": 4,
+					"message": 5
+				},
+				"background": {
+					"activeOnStart": true,
+					"beginsPattern": "^> @sd\\/root@\\d\\.\\d\\.\\d desktop",
+					"endsPattern": "to show help$"
+				}
+			},
+			"isBackground": true,
+			"command": "pnpm",
+			"args": ["desktop", "vite", "--clearScreen=false", "--mode=development"],
+			"runOptions": {
+				"instanceLimit": 1
+			}
+		},
+		{
+			"type": "shell",
+			"label": "ui:build",
+			"problemMatcher": "$tsc",
+			"command": "pnpm",
+			"args": ["desktop", "vite", "build"]
+		},
+		{
+			"type": "cargo",
+			"command": "run",
+			"args": ["--package", "spacedrive", "--bin", "spacedrive"],
+			"env": {
+				"RUST_BACKTRACE": "short" // Change this if you want more or less backtrace
+			},
+			"problemMatcher": ["$rustc"],
+			"group": "build",
+			"label": "rust: run spacedrive",
+			"dependsOn": ["ui:dev"]
+		},
+		{
+			"type": "cargo",
+			"command": "run",
+			"args": ["--package", "spacedrive", "--bin", "spacedrive", "--release"],
+			"env": {
+				"RUST_BACKTRACE": "short" // Change this if you want more or less backtrace
+			},
+			"problemMatcher": ["$rustc"],
+			"group": "build",
+			"label": "rust: run spacedrive release",
+			"dependsOn": ["ui:build"]
+		}
+	]
+}
@@ -108,7 +108,7 @@ CREATE TABLE "file_path" (
     "location_id" INTEGER NOT NULL,
     "materialized_path" TEXT NOT NULL,
     "name" TEXT NOT NULL,
-    "extension" TEXT COLLATE NOCASE,
+    "extension" TEXT COLLATE NOCASE NOT NULL,
     "object_id" INTEGER,
     "parent_id" INTEGER,
     "key_id" INTEGER,
@@ -174,7 +174,7 @@ model FilePath {
   // the name and extension
   // Must have 'COLLATE NOCASE' in migration
   name      String
-  extension String?
+  extension String

   // the unique Object for this file path
   object_id Int?
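Why making extension required fixes the unique constraint: in SQLite, NULLs are pairwise distinct for UNIQUE purposes, so a nullable extension column lets the same directory row be inserted twice without tripping the index. A minimal, self-contained demonstration using the rusqlite crate (the two-column table and index here are illustrative assumptions standing in for the real file_path schema, whose unique index the commit title implies covers extension):

	use rusqlite::Connection;

	fn main() -> rusqlite::Result<()> {
		let db = Connection::open_in_memory()?;
		// Illustrative stand-in for the file_path table's unique index.
		db.execute_batch(
			"CREATE TABLE file_path (
				name TEXT NOT NULL,
				extension TEXT, -- nullable, as before this commit
				UNIQUE(name, extension)
			);",
		)?;
		// Both inserts succeed: NULL is never equal to NULL, so the duplicated
		// Finder folder-create event silently produces a duplicate row.
		db.execute("INSERT INTO file_path VALUES ('Folder', NULL)", [])?;
		db.execute("INSERT INTO file_path VALUES ('Folder', NULL)", [])?;
		// With `extension TEXT NOT NULL` and '' for directories, the second
		// insert would instead fail with a UNIQUE constraint violation.
		Ok(())
	}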
@@ -54,7 +54,11 @@ pub(crate) fn mount() -> RouterBuilder {
 				// grab the first path and tack on the name
 				let oldest_path = &object.file_paths[0];
 				object.name = Some(oldest_path.name.clone());
-				object.extension = oldest_path.extension.clone();
+				object.extension = if oldest_path.extension.is_empty() {
+					None
+				} else {
+					Some(oldest_path.extension.clone())
+				};
 				// a long term fix for this would be to have the indexer give the Object
 				// a name and extension, sacrificing its own and only store newly found Path
 				// names that differ from the Object name
@@ -27,7 +27,8 @@ use uuid::Uuid;
 // This LRU cache allows us to avoid doing a DB lookup on every request.
 // The main advantage of this LRU cache is for video files. Video files are fetched in multiple chunks and the cache prevents a DB lookup on every chunk, reducing the request time from 15-25ms to 1-10ms.
 type MetadataCacheKey = (Uuid, i32, i32);
-static FILE_METADATA_CACHE: Lazy<Cache<MetadataCacheKey, (PathBuf, Option<String>)>> =
+type NameAndExtension = (PathBuf, String);
+static FILE_METADATA_CACHE: Lazy<Cache<MetadataCacheKey, NameAndExtension>> =
 	Lazy::new(|| Cache::new(100));

 // TODO: We should listen to events when deleting or moving a location and evict the cache accordingly.
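The caching pattern the comment describes is a plain get-or-fetch: consult the in-memory map keyed by (library, location id, path id) before touching the database. A simplified stand-in sketch using std types rather than the bounded Cache type in the diff (db_lookup is a hypothetical placeholder for the real database query):

	use std::collections::HashMap;
	use std::path::PathBuf;
	use std::sync::Mutex;

	use once_cell::sync::Lazy;
	use uuid::Uuid;

	type MetadataCacheKey = (Uuid, i32, i32);
	type NameAndExtension = (PathBuf, String);

	// std HashMap behind a Mutex as a stand-in for the bounded LRU in the diff.
	static FILE_METADATA_CACHE: Lazy<Mutex<HashMap<MetadataCacheKey, NameAndExtension>>> =
		Lazy::new(|| Mutex::new(HashMap::new()));

	fn metadata_for(key: MetadataCacheKey) -> Option<NameAndExtension> {
		if let Some(hit) = FILE_METADATA_CACHE.lock().unwrap().get(&key).cloned() {
			// Subsequent chunks of the same video land here: no DB round trip.
			return Some(hit);
		}
		let fresh = db_lookup(key)?; // hypothetical DB fetch on a cache miss
		FILE_METADATA_CACHE.lock().unwrap().insert(key, fresh.clone());
		Some(fresh)
	}

	// Hypothetical placeholder for the real database query.
	fn db_lookup(_key: MetadataCacheKey) -> Option<NameAndExtension> {
		None
	}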
@@ -145,18 +146,18 @@ async fn handle_file(
 	let metadata = file.metadata().await?;

 	// TODO: This should be determined from magic bytes when the file is indexed and stored in the DB on the file path
-	let (mime_type, is_video) = match extension.as_deref() {
-		Some("mp4") => ("video/mp4", true),
-		Some("webm") => ("video/webm", true),
-		Some("mkv") => ("video/x-matroska", true),
-		Some("avi") => ("video/x-msvideo", true),
-		Some("mov") => ("video/quicktime", true),
-		Some("png") => ("image/png", false),
-		Some("jpg") => ("image/jpeg", false),
-		Some("jpeg") => ("image/jpeg", false),
-		Some("gif") => ("image/gif", false),
-		Some("webp") => ("image/webp", false),
-		Some("svg") => ("image/svg+xml", false),
+	let (mime_type, is_video) = match extension.as_str() {
+		"mp4" => ("video/mp4", true),
+		"webm" => ("video/webm", true),
+		"mkv" => ("video/x-matroska", true),
+		"avi" => ("video/x-msvideo", true),
+		"mov" => ("video/quicktime", true),
+		"png" => ("image/png", false),
+		"jpg" => ("image/jpeg", false),
+		"jpeg" => ("image/jpeg", false),
+		"gif" => ("image/gif", false),
+		"webp" => ("image/webp", false),
+		"svg" => ("image/svg+xml", false),
 		_ => {
 			return Err(HandleCustomUriError::BadRequest(
 				"TODO: This filetype is not supported because of the missing mime type!",
@@ -41,7 +41,7 @@ pub async fn create_file_path(
 	location_id: i32,
 	mut materialized_path: String,
 	name: String,
-	extension: Option<String>,
+	extension: String,
 	parent_id: Option<i32>,
 	is_dir: bool,
 ) -> Result<file_path::Data, QueryError> {

@@ -67,10 +67,10 @@ pub async fn create_file_path(
 			location::id::equals(location_id),
 			materialized_path,
 			name,
+			extension,
 			vec![
 				file_path::parent_id::set(parent_id),
 				file_path::is_dir::set(is_dir),
-				file_path::extension::set(extension),
 			],
 		)
 		.exec()
@@ -230,11 +230,11 @@ impl StatefulJob for IndexerJob {
 			// if 'entry.path' is a directory, set extension to an empty string to
 			// avoid periods in folder names being interpreted as file extensions
 			if entry.is_dir {
-				extension = None;
+				extension = "".to_string();
 				name = extract_name(entry.path.file_name());
 			} else {
 				// if the 'entry.path' is not a directory, then get the extension and name.
-				extension = Some(extract_name(entry.path.extension()).to_lowercase());
+				extension = extract_name(entry.path.extension()).to_lowercase();
 				name = extract_name(entry.path.file_stem());
 			}
 			let mut materialized_path = entry

@@ -273,9 +273,9 @@ impl StatefulJob for IndexerJob {
 				location.id,
 				materialized_path,
 				name,
+				extension,
 				vec![
 					is_dir::set(entry.is_dir),
-					extension::set(extension),
 					parent_id::set(entry.parent_id),
 					date_created::set(entry.created_at.into()),
 				],
@@ -350,28 +350,28 @@ mod tests {

 		#[rustfmt::skip]
 		let expected = [
-			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/.git"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/Cargo.toml"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/src"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/src/main.rs"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/target"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/target/debug"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/target/debug/main"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/.git"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/package.json"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/src"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/src/App.tsx"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/node_modules"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/node_modules/react"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/node_modules/react/package.json"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos/photo1.png"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos/photo2.jpg"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos/photo3.jpeg"), is_dir: false, created_at: any_datetime.clone() },
+			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/.git"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/Cargo.toml"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/src"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/src/main.rs"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/target"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/target/debug"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/target/debug/main"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/.git"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/package.json"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/src"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/src/App.tsx"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/node_modules"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/node_modules/react"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/node_modules/react/package.json"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/photo1.png"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/photo2.jpg"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/photo3.jpeg"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/text.txt"), is_dir: false, created_at: any_datetime },
 		]
 		.into_iter()

@@ -396,10 +396,10 @@ mod tests {

 		#[rustfmt::skip]
 		let expected = [
-			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos/photo1.png"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("photos/photo2.jpg"), is_dir: false, created_at: any_datetime.clone() },
+			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/photo1.png"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/photo2.jpg"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("photos/photo3.jpeg"), is_dir: false, created_at: any_datetime },
 		]
 		.into_iter()

@@ -435,23 +435,23 @@ mod tests {

 		#[rustfmt::skip]
 		let expected = [
-			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/.git"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/Cargo.toml"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/src"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/src/main.rs"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/target"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/target/debug"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/target/debug/main"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/.git"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/package.json"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/src"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/src/App.tsx"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/node_modules"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/node_modules/react"), is_dir: true, created_at: any_datetime.clone() },
+			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/.git"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/Cargo.toml"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/src"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/src/main.rs"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/target"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/target/debug"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/target/debug/main"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/.git"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/package.json"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/src"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/src/App.tsx"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/node_modules"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/node_modules/react"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/node_modules/react/package.json"), is_dir: false, created_at: any_datetime },
 		]
 		.into_iter()

@@ -489,17 +489,17 @@ mod tests {

 		#[rustfmt::skip]
 		let expected = [
-			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/.git"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/Cargo.toml"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/src"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("rust_project/src/main.rs"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/.git"), is_dir: true, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/package.json"), is_dir: false, created_at: any_datetime.clone() },
-			WalkEntry { path: root_path.join("inner/node_project/src"), is_dir: true, created_at: any_datetime.clone() },
+			WalkEntry { path: root_path.to_path_buf(), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/.git"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/Cargo.toml"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/src"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("rust_project/src/main.rs"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/.git"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/package.json"), is_dir: false, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/src"), is_dir: true, created_at: any_datetime },
+			WalkEntry { path: root_path.join("inner/node_project/src/App.tsx"), is_dir: false, created_at: any_datetime },
 		]
 		.into_iter()
@@ -35,16 +35,16 @@ impl EventHandler for LinuxEventHandler {
 		match event.kind {
 			EventKind::Access(AccessKind::Close(AccessMode::Write)) => {
 				// If a file was closed with write mode, then it was updated or created
-				file_creation_or_update(&location, event, library_ctx).await?;
+				file_creation_or_update(&location, &event, library_ctx).await?;
 			}
 			EventKind::Create(CreateKind::Folder) => {
-				create_dir(&location, event, library_ctx).await?;
+				create_dir(&location, &event, library_ctx).await?;
 			}
 			EventKind::Modify(ModifyKind::Name(RenameMode::Both)) => {
-				rename_both_event(&location, event, library_ctx).await?;
+				rename_both_event(&location, &event, library_ctx).await?;
 			}
 			EventKind::Remove(remove_kind) => {
-				remove_event(&location, event, remove_kind, library_ctx).await?;
+				remove_event(&location, &event, remove_kind, library_ctx).await?;
 			}
 			other_event_kind => {
 				trace!("Other Linux event that we don't handle for now: {other_event_kind:#?}");
@@ -17,6 +17,7 @@ use super::{

 #[derive(Debug, Default)]
 pub(super) struct MacOsEventHandler {
+	latest_created_dir: Option<Event>,
 	rename_stack: Option<Event>,
 }

@@ -39,11 +40,22 @@ impl EventHandler for MacOsEventHandler {

 		match event.kind {
 			EventKind::Create(CreateKind::Folder) => {
-				create_dir(&location, event, library_ctx).await?;
+				if let Some(latest_created_dir) = self.latest_created_dir.take() {
+					if event.paths[0] == latest_created_dir.paths[0] {
+						// NOTE: This is a macOS specific event that happens when a folder is created
+						// through Finder. It creates a folder but 2 events are triggered in
+						// FSEvents. So we store and check the latest created folder to avoid
+						// hitting a unique constraint in the database
+						return Ok(());
+					}
+				}
+
+				create_dir(&location, &event, library_ctx).await?;
+				self.latest_created_dir = Some(event);
 			}
 			EventKind::Modify(ModifyKind::Data(DataChange::Content)) => {
 				// If a file had its content modified, then it was updated or created
-				file_creation_or_update(&location, event, library_ctx).await?;
+				file_creation_or_update(&location, &event, library_ctx).await?;
 			}
 			EventKind::Modify(ModifyKind::Name(RenameMode::Any)) => {
 				match self.rename_stack.take() {

@@ -63,7 +75,7 @@ impl EventHandler for MacOsEventHandler {
 			}

 			EventKind::Remove(remove_kind) => {
-				remove_event(&location, event, remove_kind, library_ctx).await?;
+				remove_event(&location, &event, remove_kind, library_ctx).await?;
 			}
 			other_event_kind => {
 				trace!("Other MacOS event that we don't handle for now: {other_event_kind:#?}");
@@ -179,7 +179,7 @@ impl LocationWatcher {
 			.await?
 		else {
 			warn!("Tried to handle event for unknown location: <id='{location_id}'>");
-			return Ok(())
+			return Ok(());
 		};

 		if !library_ctx
@@ -36,22 +36,19 @@ use uuid::Uuid;
 use super::file_path_with_object;

 pub(super) fn check_event(event: &Event, ignore_paths: &HashSet<PathBuf>) -> bool {
-	// if first path includes .DS_Store, ignore
-	if event.paths.iter().any(|p| {
-		p.to_str()
-			.expect("Found non-UTF-8 path")
-			.contains(".DS_Store")
-			|| ignore_paths.contains(p)
-	}) {
-		return false;
-	}
-
-	true
+	// if path includes .DS_Store, .spacedrive or is in the `ignore_paths` set, we ignore
+	!event.paths.iter().any(|p| {
+		let path_str = p.to_str().expect("Found non-UTF-8 path");
+
+		path_str.contains(".DS_Store")
+			|| path_str.contains(".spacedrive")
+			|| ignore_paths.contains(p)
+	})
 }

 pub(super) async fn create_dir(
 	location: &indexer_job_location::Data,
-	event: Event,
+	event: &Event,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
 	if location.node_id != library_ctx.node_local_id {
@@ -89,7 +86,7 @@ pub(super) async fn create_dir(
 			.and_then(OsStr::to_str)
 			.map(str::to_string)
 			.expect("Found non-UTF-8 path"),
-		None,
+		"".to_string(),
 		Some(parent_directory.id),
 		true,
 	)

@@ -104,7 +101,7 @@ pub(super) async fn create_dir(

 pub(super) async fn create_file(
 	location: &indexer_job_location::Data,
-	event: Event,
+	event: &Event,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
 	if location.node_id != library_ctx.node_local_id {
@@ -141,13 +138,10 @@ pub(super) async fn create_file(
 			.to_str()
 			.expect("Found non-UTF-8 path")
 			.to_string(),
-		materialized_path.extension().and_then(|ext| {
-			if ext.is_empty() {
-				None
-			} else {
-				Some(ext.to_str().expect("Found non-UTF-8 path").to_string())
-			}
-		}),
+		materialized_path
+			.extension()
+			.map(|ext| ext.to_str().expect("Found non-UTF-8 path").to_string())
+			.unwrap_or_default(),
 		Some(parent_directory.id),
 		false,
 	)
@@ -214,10 +208,14 @@ pub(super) async fn create_file(
 		.await?;

 	trace!("object: {:#?}", object);
-	if !object.has_thumbnail {
-		if let Some(ref extension) = created_file.extension {
-			generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
-		}
+	if !object.has_thumbnail && !created_file.extension.is_empty() {
+		generate_thumbnail(
+			&created_file.extension,
+			&cas_id,
+			&event.paths[0],
+			library_ctx,
+		)
+		.await;
 	}

 	invalidate_query!(library_ctx, "locations.getExplorerData");
@@ -227,7 +225,7 @@ pub(super) async fn create_file(

 pub(super) async fn file_creation_or_update(
 	location: &indexer_job_location::Data,
-	event: Event,
+	event: &Event,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
 	if let Some(ref file_path) =

@@ -242,7 +240,7 @@ pub(super) async fn file_creation_or_update(

 pub(super) async fn update_file(
 	location: &indexer_job_location::Data,
-	event: Event,
+	event: &Event,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
 	if location.node_id == library_ctx.node_local_id {

@@ -265,7 +263,7 @@ async fn inner_update_file(
 async fn inner_update_file(
 	location: &indexer_job_location::Data,
 	file_path: &file_path_with_object::Data,
-	event: Event,
+	event: &Event,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
 	trace!(
@@ -315,8 +313,9 @@ async fn inner_update_file(
 			.unwrap_or_default()
 		{
 			// if this file had a thumbnail previously, we update it to match the new content
-			if let Some(extension) = &file_path.extension {
-				generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
+			if !file_path.extension.is_empty() {
+				generate_thumbnail(&file_path.extension, &cas_id, &event.paths[0], library_ctx)
+					.await;
 			}
 		}
 	}
@@ -329,7 +328,7 @@ async fn inner_update_file(

 pub(super) async fn rename_both_event(
 	location: &indexer_job_location::Data,
-	event: Event,
+	event: &Event,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
 	rename(&event.paths[1], &event.paths[0], location, library_ctx).await

@@ -396,7 +395,12 @@ pub(super) async fn rename(
 				file_path::extension::set(
 					new_path_materialized
 						.extension()
-						.map(|s| s.to_str().expect("Found non-UTF-8 path").to_string()),
+						.map(|s| {
+							s.to_str()
+								.expect("Found non-UTF-8 extension in path")
+								.to_string()
+						})
+						.unwrap_or_default(),
 				),
 			],
 		)
@@ -410,7 +414,7 @@ pub(super) async fn rename(

 pub(super) async fn remove_event(
 	location: &indexer_job_location::Data,
-	event: Event,
+	event: &Event,
 	remove_kind: RemoveKind,
 	library_ctx: &LibraryContext,
 ) -> Result<(), LocationManagerError> {
@@ -45,16 +45,16 @@ impl EventHandler for WindowsEventHandler {
 				if metadata.is_file() {
 					self.create_file_stack = Some(event);
 				} else {
-					create_dir(&location, event, library_ctx).await?;
+					create_dir(&location, &event, library_ctx).await?;
 				}
 			}
 			EventKind::Modify(ModifyKind::Any) => {
 				let metadata = fs::metadata(&event.paths[0]).await?;
 				if metadata.is_file() {
 					if let Some(create_file_event) = self.create_file_stack.take() {
-						create_file(&location, create_file_event, library_ctx).await?;
+						create_file(&location, &create_file_event, library_ctx).await?;
 					} else {
-						update_file(&location, event, library_ctx).await?;
+						update_file(&location, &event, library_ctx).await?;
 					}
 				} else {
 					warn!("Unexpected Windows modify event on a directory");

@@ -77,7 +77,7 @@ impl EventHandler for WindowsEventHandler {
 					.await?;
 			}
 			EventKind::Remove(remove_kind) => {
-				remove_event(&location, event, remove_kind, library_ctx).await?;
+				remove_event(&location, &event, remove_kind, library_ctx).await?;
 			}

 			other_event_kind => {
@@ -98,6 +98,8 @@ impl LocationCreateArgs {
 			)
 			.await?;

+		ctx.location_manager().add(location.id, ctx.clone()).await?;
+
 		info!("Created location: {location:?}");

 		Ok(location)

@@ -133,6 +135,8 @@ impl LocationCreateArgs {
 			.add_library(ctx.id, uuid, &self.path, location.name.clone())
 			.await?;

+		ctx.location_manager().add(location.id, ctx.clone()).await?;
+
 		info!(
 			"Added library (library_id = {}) to location: {location:?}",
 			ctx.id

@@ -382,8 +386,6 @@ async fn create_location(

 	invalidate_query!(ctx, "locations.list");

-	ctx.location_manager().add(location.id, ctx.clone()).await?;
-
 	Ok(location)
 }

@@ -22,7 +22,7 @@ pub mod error;

 pub mod erase;

-pub const BYTES: &str = "bytes";
+pub const BYTES_EXT: &str = ".bytes";

 #[derive(Serialize, Deserialize, Debug, Clone, Eq, PartialEq)]
 pub enum ObjectType {

@@ -30,8 +30,6 @@ pub enum ObjectType {
 	Directory,
 }

-pub const BYTES_EXT: &str = ".bytes";
-
 #[derive(Serialize, Deserialize, Debug, Clone)]
 pub struct FsInfo {
 	pub path_data: file_path_with_object::Data,
@@ -13,9 +13,12 @@ pub struct Tag {
 }

 impl Tag {
+	#[allow(dead_code)]
 	pub fn new(name: String, color: String) -> Self {
 		Self { name, color }
 	}

+	#[allow(dead_code)]
 	pub async fn save(self, db: &PrismaClient) -> Result<(), QueryError> {
 		db.tag()
 			.create(
@@ -225,6 +225,12 @@ impl SyncManager {
 							.unwrap(),
 						serde_json::from_value(data.remove("name").unwrap())
 							.unwrap(),
+						serde_json::from_value(
+							data.remove("extension").unwrap_or_else(|| {
+								serde_json::Value::String("".to_string())
+							}),
+						)
+						.unwrap(),
 						data.into_iter()
 							.flat_map(|(k, v)| {
 								file_path::SetParam::deserialize(&k, v)

@@ -284,6 +290,12 @@ impl SyncManager {
 							data.remove("name").unwrap(),
 						)
 						.unwrap(),
+						serde_json::from_value(
+							data.remove("extension").unwrap_or_else(|| {
+								serde_json::Value::String("".to_string())
+							}),
+						)
+						.unwrap(),
 						data.into_iter()
 							.flat_map(|(k, v)| {
 								file_path::SetParam::deserialize(&k, v)
@@ -370,7 +370,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(AES_BYTES_EXPECTED[0].to_vec(), ciphertext)
+		assert_eq!(AES_BYTES_EXPECTED[0].to_vec(), ciphertext);
 	}

 	#[tokio::test]

@@ -380,7 +380,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(AES_BYTES_EXPECTED[1].to_vec(), ciphertext)
+		assert_eq!(AES_BYTES_EXPECTED[1].to_vec(), ciphertext);
 	}

 	#[tokio::test]

@@ -395,7 +395,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
+		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().clone());
 	}

 	#[tokio::test]

@@ -410,7 +410,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
+		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().clone());
 	}

 	#[tokio::test]

@@ -497,7 +497,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(XCHACHA_BYTES_EXPECTED[0].to_vec(), ciphertext)
+		assert_eq!(XCHACHA_BYTES_EXPECTED[0].to_vec(), ciphertext);
 	}

 	#[tokio::test]

@@ -512,7 +512,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(XCHACHA_BYTES_EXPECTED[1].to_vec(), ciphertext)
+		assert_eq!(XCHACHA_BYTES_EXPECTED[1].to_vec(), ciphertext);
 	}

 	#[tokio::test]

@@ -527,7 +527,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
+		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().clone());
 	}

 	#[tokio::test]

@@ -542,7 +542,7 @@ mod tests {
 			.await
 			.unwrap();

-		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().to_vec())
+		assert_eq!(PLAINTEXT.to_vec(), plaintext.expose().clone());
 	}

 	#[tokio::test]

@@ -394,7 +394,7 @@ mod tests {

 		FileHeader::from_reader(&mut writer).await.unwrap();

-		assert!(writer.position() == 260)
+		assert!(writer.position() == 260);
 	}

 	#[tokio::test]

@@ -254,7 +254,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
 			.unwrap();

-		assert_eq!(&HASH_ARGON2ID_EXPECTED[0], output.expose())
+		assert_eq!(&HASH_ARGON2ID_EXPECTED[0], output.expose());
 	}

 	#[test]

@@ -263,7 +263,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
 			.unwrap();

-		assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[0], output.expose())
+		assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[0], output.expose());
 	}

 	#[test]

@@ -272,7 +272,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
 			.unwrap();

-		assert_eq!(&HASH_ARGON2ID_EXPECTED[1], output.expose())
+		assert_eq!(&HASH_ARGON2ID_EXPECTED[1], output.expose());
 	}

 	#[test]

@@ -281,7 +281,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
 			.unwrap();

-		assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[1], output.expose())
+		assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[1], output.expose());
 	}

 	#[test]

@@ -290,7 +290,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
 			.unwrap();

-		assert_eq!(&HASH_ARGON2ID_EXPECTED[2], output.expose())
+		assert_eq!(&HASH_ARGON2ID_EXPECTED[2], output.expose());
 	}

 	#[test]

@@ -299,7 +299,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
 			.unwrap();

-		assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[2], output.expose())
+		assert_eq!(&HASH_ARGON2ID_WITH_SECRET_EXPECTED[2], output.expose());
 	}

 	#[test]

@@ -308,7 +308,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
 			.unwrap();

-		assert_eq!(&HASH_B3BALLOON_EXPECTED[0], output.expose())
+		assert_eq!(&HASH_B3BALLOON_EXPECTED[0], output.expose());
 	}

 	#[test]

@@ -317,7 +317,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
 			.unwrap();

-		assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[0], output.expose())
+		assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[0], output.expose());
 	}

 	#[test]

@@ -326,7 +326,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
 			.unwrap();

-		assert_eq!(&HASH_B3BALLOON_EXPECTED[1], output.expose())
+		assert_eq!(&HASH_B3BALLOON_EXPECTED[1], output.expose());
 	}

 	#[test]

@@ -335,7 +335,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
 			.unwrap();

-		assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[1], output.expose())
+		assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[1], output.expose());
 	}

 	#[test]

@@ -344,7 +344,7 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, None)
 			.unwrap();

-		assert_eq!(&HASH_B3BALLOON_EXPECTED[2], output.expose())
+		assert_eq!(&HASH_B3BALLOON_EXPECTED[2], output.expose());
 	}

 	#[test]

@@ -353,13 +353,13 @@ mod tests {
 			.hash(Protected::new(PASSWORD.to_vec()), SALT, Some(SECRET_KEY))
 			.unwrap();

-		assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[2], output.expose())
+		assert_eq!(&HASH_B3BALLOON_WITH_SECRET_EXPECTED[2], output.expose());
 	}

 	#[test]
 	fn derive_b3() {
 		let output = Key::derive(KEY, SALT, TEST_CONTEXT);

-		assert_eq!(&DERIVE_B3_EXPECTED, output.expose())
+		assert_eq!(&DERIVE_B3_EXPECTED, output.expose());
 	}
 }
@@ -265,8 +265,6 @@ extension_category_enum! {

 #[cfg(test)]
 mod test {
-	use std::path::PathBuf;
-	use tokio::fs::File;

 	use super::*;

@@ -126,7 +126,7 @@ export type FileEncryptorJobInit = { location_id: number, path_id: number, key_u

 export type FileEraserJobInit = { location_id: number, path_id: number, passes: string }

-export type FilePath = { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string }
+export type FilePath = { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string }

 export type GenerateThumbsForLocationArgs = { id: number, path: string }

@@ -277,6 +277,6 @@ export type UnlockKeyManagerArgs = { password: string, secret_key: string }

 export type Volume = { name: string, mount_point: string, total_capacity: string, available_capacity: string, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean }

-export type file_path_with_object = { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string, object: Object | null }
+export type file_path_with_object = { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string, object: Object | null }

 export type object_with_file_paths = { id: number, pub_id: number[], name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: FilePath[] }