diff --git a/Cargo.lock b/Cargo.lock index 252ce3401..aaa9b8b39 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -69,7 +69,7 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e8b47f52ea9bae42228d07ec09eb676433d7c4ed1ebdf0f1d1c29ed446f1ab8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.3.0", "cpufeatures", "opaque-debug", @@ -81,7 +81,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "433cfd6710c9986c576a25ca913c39d66a6474107b406f34f91d4a8923395241" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.4.4", "cpufeatures", ] @@ -364,7 +364,7 @@ checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" dependencies = [ "async-lock", "autocfg", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "futures-lite", "log", @@ -575,7 +575,7 @@ checksum = "233d376d6d185f2a3093e58f283f60f880315b6c60075b01f36b3b85154564ca" dependencies = [ "addr2line", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "miniz_oxide 0.6.2", "object", @@ -741,7 +741,7 @@ dependencies = [ "arrayref", "arrayvec 0.7.2", "cc", - "cfg-if 1.0.0", + "cfg-if", "constant_time_eq", "digest 0.10.7", ] @@ -1027,12 +1027,6 @@ dependencies = [ "target-lexicon", ] -[[package]] -name = "cfg-if" -version = "0.1.10" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4785bdd1c96b2a846b2bd7cc02e86b6b3dbf14e7e53446c4f54c92a361040822" - [[package]] name = "cfg-if" version = "1.0.0" @@ -1045,7 +1039,7 @@ version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c80e5460aa66fe3b91d40bcbdab953a597b60053e34d684ac6903f863b680a6" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.3.0", "cpufeatures", "zeroize", @@ -1057,7 +1051,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cipher 0.4.4", "cpufeatures", ] @@ -1414,7 +1408,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1423,7 +1417,7 @@ version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a33c2bf77f2df06183c3aa30d1e96c0695a313d4f9c453cc3762a6db39f99200" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", ] @@ -1433,7 +1427,7 @@ version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ce6fd6f855243022dcecf8702fef0c297d4338e226845fe067f6341ad9fa0cef" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "crossbeam-epoch", "crossbeam-utils", ] @@ -1445,7 +1439,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "46bd5f3f85273295a9d14aedfb86f6aadbff6d8f5295c4a9edb08e819dcf5695" dependencies = [ "autocfg", - "cfg-if 1.0.0", + "cfg-if", "crossbeam-utils", "memoffset 0.8.0", "scopeguard", @@ -1457,7 +1451,7 @@ version = "0.8.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3c063cd8cc95f5c377ed0d4b49a4b21f632396ff690e8470c29b3359b346984b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -1632,7 +1626,7 @@ version = "4.0.0-rc.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d4ba9852b42210c7538b75484f9daa0655e9a3ac04f693747bb0f02cf3cfe16" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", 
"fiat-crypto", "packed_simd_2", "platforms", @@ -1716,7 +1710,7 @@ version = "5.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "907076dfda823b0b36d2a1bb5f90c96660a5bbcd7729e10727f07858f22c4edc" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "hashbrown 0.12.3", "lock_api", "once_cell", @@ -1912,7 +1906,7 @@ version = "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b98cf8ebf19c3d1b223e151f99a4f9f0690dca41414773390fc824184ac833e1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "dirs-sys-next", ] @@ -2099,7 +2093,7 @@ version = "0.8.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071a31f4ee85403370b58aca746f01041ede6f0da2730960ad001edc2b71b394" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] @@ -2323,7 +2317,7 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5cbc844cecaee9d4443931972e1289c8ff485cb4cc2767cb03ca139ed6885153" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.2.16", "windows-sys 0.48.0", @@ -2686,7 +2680,7 @@ version = "0.1.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fc3cb4d91f53b50155bdcfd23f6a4c39ae1969c2ae85982b135750cccaf5fce" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "wasi 0.9.0+wasi-snapshot-preview1", ] @@ -2697,7 +2691,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c85e1d9ab2eadba7e5040d4e09cbd6d072b76a557ad64e797c2cb9d4da21d7e4" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "libc", "wasi 0.11.0+wasi-snapshot-preview1", @@ -2798,7 +2792,7 @@ checksum = "10c6ae9f6fa26f4fb2ac16b528d138d971ead56141de489f8111e259b9df3c4a" dependencies = [ "anyhow", "heck 0.4.1", - "proc-macro-crate 1.1.3", + "proc-macro-crate 1.3.1", "proc-macro-error", "proc-macro2", "quote", @@ -2915,7 +2909,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "684c0456c086e8e7e9af73ec5b84e35938df394712054550e81558d21c44ab0d" dependencies = [ "anyhow", - "proc-macro-crate 1.1.3", + "proc-macro-crate 1.3.1", "proc-macro-error", "proc-macro2", "quote", @@ -3461,27 +3455,25 @@ version = "0.1.12" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", ] [[package]] name = "int-enum" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b1428b2b1abe959e6eedb0a17d0ab12f6ba20e1106cc29fc4874e3ba393c177" +checksum = "cff87d3cc4b79b4559e3c75068d64247284aceb6a038bd4bb38387f3f164476d" dependencies = [ - "cfg-if 0.1.10", "int-enum-impl", ] [[package]] name = "int-enum-impl" -version = "0.4.0" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2c3cecaad8ca1a5020843500c696de2b9a07b63b624ddeef91f85f9bafb3671" +checksum = "df1f2f068675add1a3fc77f5f5ab2e29290c841ee34d151abc007bce902e5d34" dependencies = [ - "cfg-if 0.1.10", - "proc-macro-crate 0.1.5", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn 1.0.109", @@ -3757,7 +3749,7 @@ checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" dependencies = [ "arrayvec 0.5.2", "bitflags", - "cfg-if 1.0.0", + "cfg-if", "ryu", "static_assertions", ] @@ -3795,7 +3787,7 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"b67380fd3b2fbe7527a606e18729d21c6f3951633d0500574c4dc22d2d638b9f" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "winapi", ] @@ -4207,7 +4199,7 @@ version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff50ecb28bb86013e935fb6683ab1f6d3a20016f123c76fd4c27470076ac30f5" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "generator", "scoped-tls", "serde", @@ -4600,11 +4592,11 @@ dependencies = [ [[package]] name = "multihash-derive" -version = "0.8.1" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d6d4752e6230d8ef7adf7bd5d8c4b1f6561c1014c5ba9a37445ccefe18aa1db" +checksum = "fc076939022111618a5026d3be019fd8b366e76314538ff9a1b59ffbcbf98bcd" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate 1.3.1", "proc-macro-error", "proc-macro2", "quote", @@ -4789,7 +4781,7 @@ checksum = "e4916f159ed8e5de0082076562152a76b7a1f64a01fd9d1e0fea002c37624faf" dependencies = [ "bitflags", "cc", - "cfg-if 1.0.0", + "cfg-if", "libc", "memoffset 0.6.5", ] @@ -4801,7 +4793,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fa52e972a9a719cecb6864fb88568781eb706bac2cd1d4f04a648542dbf78069" dependencies = [ "bitflags", - "cfg-if 1.0.0", + "cfg-if", "libc", "memoffset 0.6.5", ] @@ -4813,7 +4805,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfdda3d196821d6af13126e40375cdf7da646a96114af134d5f417a9a1dc8e1a" dependencies = [ "bitflags", - "cfg-if 1.0.0", + "cfg-if", "libc", "static_assertions", ] @@ -5013,7 +5005,7 @@ version = "0.5.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn 1.0.109", @@ -5134,7 +5126,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "12df40a956736488b7b44fe79fe12d4f245bb5b3f5a1f6095e499760015be392" dependencies = [ "bitflags", - "cfg-if 1.0.0", + "cfg-if", "foreign-types", "libc", "once_cell", @@ -5273,7 +5265,7 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1914cd452d8fccd6f9db48147b29fd4ae05bea9dc5d9ad578509f72415de282" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libm 0.1.4", ] @@ -5335,7 +5327,7 @@ version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60a2cfe6f0ad2bfc16aefa463b497d5c7a5ecd44a23efa72aa342d90177356dc" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "instant", "libc", "redox_syscall 0.2.16", @@ -5349,7 +5341,7 @@ version = "0.9.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9069cbb9f99e3a5083476ccb29ceb1de18b9118cafa53e90c9551235de2b9521" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "libc", "redox_syscall 0.2.16", "smallvec", @@ -5662,7 +5654,7 @@ checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" dependencies = [ "autocfg", "bitflags", - "cfg-if 1.0.0", + "cfg-if", "concurrent-queue", "libc", "log", @@ -5698,7 +5690,7 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8419d2b623c7c0896ff2d5d96e2cb4ede590fed28fcc34934f4c33c036e620a1" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "opaque-debug", "universal-hash 0.4.1", @@ -5710,7 +5702,7 @@ version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7ef234e08c11dfcb2e56f79fd70f6f2eb7f025c0ce2333e82f4f0518ecad30c6" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "opaque-debug", "universal-hash 0.5.1", @@ -5739,7 +5731,7 @@ dependencies = [ [[package]] name = "prisma-client-rust" version = "0.6.8" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=687a00812130454613eee2c8e804bc615e755180#687a00812130454613eee2c8e804bc615e755180" +source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2#1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2" dependencies = [ "base64 0.13.1", "bigdecimal", @@ -5772,7 +5764,7 @@ dependencies = [ [[package]] name = "prisma-client-rust-cli" version = "0.6.8" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=687a00812130454613eee2c8e804bc615e755180#687a00812130454613eee2c8e804bc615e755180" +source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2#1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2" dependencies = [ "directories", "flate2", @@ -5792,7 +5784,7 @@ dependencies = [ [[package]] name = "prisma-client-rust-macros" version = "0.6.8" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=687a00812130454613eee2c8e804bc615e755180#687a00812130454613eee2c8e804bc615e755180" +source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2#1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2" dependencies = [ "convert_case 0.6.0", "proc-macro2", @@ -5804,7 +5796,7 @@ dependencies = [ [[package]] name = "prisma-client-rust-sdk" version = "0.6.8" -source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=687a00812130454613eee2c8e804bc615e755180#687a00812130454613eee2c8e804bc615e755180" +source = "git+https://github.com/Brendonovich/prisma-client-rust?rev=1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2#1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2" dependencies = [ "convert_case 0.5.0", "dmmf", @@ -5863,12 +5855,12 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.1.3" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +checksum = "7f4c021e1093a56626774e81216a4ce732a735e5bad4868a03f3ed65ca0c3919" dependencies = [ - "thiserror", - "toml 0.5.11", + "once_cell", + "toml_edit", ] [[package]] @@ -6418,9 +6410,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.8.3" +version = "1.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81ca098a9821bd52d6b24fd8b10bd081f47d39c22778cafaa75a2857a62c6390" +checksum = "d0ab3ca65655bb1e41f2a8c8cd662eb4fb035e67c3f78da1d61dffe89d07300f" dependencies = [ "aho-corasick 1.0.2", "memchr", @@ -6924,6 +6916,7 @@ dependencies = [ "notify", "once_cell", "prisma-client-rust", + "regex", "rmp", "rmp-serde", "rspc", @@ -7429,7 +7422,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "99cd6713db3cf16b6c84e06321e049a9b9f699826e16096d23bbcc44d15d51a6" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -7441,7 +7434,7 @@ version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -7453,7 +7446,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" 
checksum = "4d58a1e1bf39749807d89cf2d98ac2dfa0ff1cb3faa38fbb64dd88ac8013d800" dependencies = [ "block-buffer 0.9.0", - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.9.0", "opaque-debug", @@ -7465,7 +7458,7 @@ version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "cpufeatures", "digest 0.10.7", ] @@ -7985,7 +7978,7 @@ version = "0.28.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c2f3ca6693feb29a89724516f016488e9aafc7f37264f898593ee4b942f31b" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "core-foundation-sys", "libc", "ntapi", @@ -8345,7 +8338,7 @@ version = "3.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b9fbec84f381d5795b08656e4912bec604d162bff9291d6189a78f4c8ab87998" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "fastrand", "redox_syscall 0.3.5", "rustix", @@ -8404,7 +8397,7 @@ version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "once_cell", ] @@ -8642,7 +8635,7 @@ version = "0.1.37" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "log", "pin-project-lite", "tracing-attributes 0.1.24", @@ -8844,7 +8837,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4f7f83d1e4a0e4358ac54c5c3681e5d7da5efc5a7a632c90bb6d6669ddd9bc26" dependencies = [ "async-trait", - "cfg-if 1.0.0", + "cfg-if", "data-encoding", "enum-as-inner", "futures-channel", @@ -8869,7 +8862,7 @@ version = "0.22.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "aff21aa4dcefb0a1afbfac26deb0adc93888c7d295fb63ab273ef276ba2b7cfe" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "futures-util", "ipconfig", "lazy_static", @@ -9248,7 +9241,7 @@ version = "0.2.86" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bba0e8cb82ba49ff4e229459ff22a191bbe9a1cb3a341610c9c33efc27ddf73" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "wasm-bindgen-macro", ] @@ -9273,7 +9266,7 @@ version = "0.4.36" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2d1985d03709c53167ce907ff394f5316aa22cb4e12761295c5dc57dacb6297e" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "js-sys", "wasm-bindgen", "web-sys", @@ -10054,7 +10047,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "76a1a57ff50e9b408431e8f97d5456f2807f8eb2a2cd79b06068fc87f8ecf189" dependencies = [ - "cfg-if 1.0.0", + "cfg-if", "winapi", ] @@ -10311,7 +10304,7 @@ version = "2.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e4ca5e22593eb4212382d60d26350065bf2a02c34b85bc850474a74b589a3de9" dependencies = [ - "proc-macro-crate 1.1.3", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "syn 1.0.109", diff --git a/Cargo.toml b/Cargo.toml index 21e129ef2..8fd6f6cdc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,19 +18,19 @@ edition = "2021" repository = "https://github.com/spacedriveapp/spacedrive" [workspace.dependencies] -prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "687a00812130454613eee2c8e804bc615e755180", features = [ +prisma-client-rust = { 
git = "https://github.com/Brendonovich/prisma-client-rust", rev = "1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2", features = [ "rspc", "sqlite-create-many", "migrations", "sqlite", ] } -prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "687a00812130454613eee2c8e804bc615e755180", features = [ +prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2", features = [ "rspc", "sqlite-create-many", "migrations", "sqlite", ] } -prisma-client-rust-sdk = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "687a00812130454613eee2c8e804bc615e755180", features = [ +prisma-client-rust-sdk = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "1c1a7fb7b436a01ee7763e7f75cfa8f25a5c10e2", features = [ "sqlite", ] } diff --git a/apps/cli/Cargo.toml b/apps/cli/Cargo.toml index 629c444a7..ebdcb92b5 100644 --- a/apps/cli/Cargo.toml +++ b/apps/cli/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "cli" version = "0.1.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/apps/desktop/crates/linux/src/desktop_entry.rs b/apps/desktop/crates/linux/src/desktop_entry.rs index 3da1789fe..2040d551d 100644 --- a/apps/desktop/crates/linux/src/desktop_entry.rs +++ b/apps/desktop/crates/linux/src/desktop_entry.rs @@ -157,10 +157,10 @@ fn parse_file(path: &Path) -> Option { } } -impl TryFrom for DesktopEntry { +impl TryFrom<&PathBuf> for DesktopEntry { type Error = Error; - fn try_from(path: PathBuf) -> Result { - parse_file(&path).ok_or(Error::BadEntry(path)) + fn try_from(path: &PathBuf) -> Result { + parse_file(path).ok_or(Error::BadEntry(path.clone())) } } diff --git a/apps/desktop/crates/linux/src/handler.rs b/apps/desktop/crates/linux/src/handler.rs index 5ca3624ab..be0932d2d 100644 --- a/apps/desktop/crates/linux/src/handler.rs +++ b/apps/desktop/crates/linux/src/handler.rs @@ -41,7 +41,7 @@ impl Handler { } pub fn get_entry(&self) -> Result { - DesktopEntry::try_from(self.get_path()?) + DesktopEntry::try_from(&self.get_path()?) 
} pub fn launch(&self, args: &[&str]) -> Result<()> { diff --git a/apps/desktop/crates/linux/src/system.rs b/apps/desktop/crates/linux/src/system.rs index 6305486c8..b462f2464 100644 --- a/apps/desktop/crates/linux/src/system.rs +++ b/apps/desktop/crates/linux/src/system.rs @@ -14,11 +14,10 @@ pub struct SystemApps(pub HashMap>); impl SystemApps { pub fn get_handlers(&self, handler_type: HandlerType) -> VecDeque { - let mime_db = SharedMimeInfo::new(); match handler_type { HandlerType::Ext(ext) => { let mut handlers: HashSet = HashSet::new(); - for mime in mime_db.get_mime_types_from_file_name(ext.as_str()) { + for mime in SharedMimeInfo::new().get_mime_types_from_file_name(ext.as_str()) { if let Some(mime_handlers) = self.0.get(&mime) { for handler in mime_handlers { handlers.insert(handler.clone()); @@ -40,12 +39,7 @@ impl SystemApps { .list_data_files_once("applications") .into_iter() .filter(|p| p.extension().and_then(|x| x.to_str()) == Some("desktop")) - .filter_map(|p| { - Some(( - p.file_name()?.to_owned(), - DesktopEntry::try_from(p.clone()).ok()?, - )) - })) + .filter_map(|p| Some((p.file_name()?.to_owned(), DesktopEntry::try_from(&p).ok()?)))) } pub fn populate() -> Result { diff --git a/apps/desktop/src-tauri/src/file.rs b/apps/desktop/src-tauri/src/file.rs index 3ee7146f9..d9d6dfc90 100644 --- a/apps/desktop/src-tauri/src/file.rs +++ b/apps/desktop/src-tauri/src/file.rs @@ -1,81 +1,110 @@ -use std::sync::Arc; +use std::{collections::HashMap, sync::Arc}; use sd_core::Node; use serde::Serialize; use specta::Type; +use tracing::error; #[derive(Serialize, Type)] #[serde(tag = "t", content = "c")] pub enum OpenFilePathResult { NoLibrary, - NoFile, - OpenError(String), - AllGood, + NoFile(i32), + OpenError(i32, String), + AllGood(i32), + Internal(String), } #[tauri::command(async)] #[specta::specta] pub async fn open_file_path( library: uuid::Uuid, - id: i32, + ids: Vec, node: tauri::State<'_, Arc>, -) -> Result { +) -> Result, ()> { let res = if let Some(library) = node.library_manager.get_library(library).await { - let Ok(Some(path)) = library - .get_file_path(id) - .await - else { - return Ok(OpenFilePathResult::NoFile) - }; - - opener::open(path) - .map(|_| OpenFilePathResult::AllGood) - .unwrap_or_else(|e| OpenFilePathResult::OpenError(e.to_string())) + library.get_file_paths(ids).await.map_or_else( + |e| vec![OpenFilePathResult::Internal(e.to_string())], + |paths| { + paths + .into_iter() + .map(|(id, maybe_path)| { + if let Some(path) = maybe_path { + opener::open(path) + .map(|_| OpenFilePathResult::AllGood(id)) + .unwrap_or_else(|e| { + OpenFilePathResult::OpenError(id, e.to_string()) + }) + } else { + OpenFilePathResult::NoFile(id) + } + }) + .collect() + }, + ) } else { - OpenFilePathResult::NoLibrary + vec![OpenFilePathResult::NoLibrary] }; Ok(res) } -#[derive(Type, Debug, serde::Serialize)] -pub struct OpenWithApplication { - name: String, - #[cfg(target_os = "linux")] - url: std::path::PathBuf, - #[cfg(not(target_os = "linux"))] - url: String, +#[derive(Serialize, Type)] +#[serde(tag = "t", content = "c")] +#[allow(dead_code)] +pub enum OpenWithApplication { + File { + id: i32, + name: String, + #[cfg(target_os = "linux")] + url: std::path::PathBuf, + #[cfg(not(target_os = "linux"))] + url: String, + }, + Error(i32, String), } #[tauri::command(async)] #[specta::specta] +#[allow(unused_variables)] pub async fn get_file_path_open_with_apps( library: uuid::Uuid, - id: i32, + ids: Vec, node: tauri::State<'_, Arc>, ) -> Result, ()> { let Some(library) = 
node.library_manager.get_library(library).await else { return Err(()) }; - let Ok(Some(path)) = library - .get_file_path(id) - .await - else { - return Err(()) - }; + let Ok(paths) = library.get_file_paths(ids).await.map_err(|e| {error!("{e:#?}");}) + else { + return Err(()); + }; #[cfg(target_os = "macos")] - return Ok(unsafe { - sd_desktop_macos::get_open_with_applications(&path.to_str().unwrap().into()) - } - .as_slice() - .iter() - .map(|app| OpenWithApplication { - name: app.name.to_string(), - url: app.url.to_string(), - }) - .collect()); + return Ok(paths + .into_iter() + .flat_map(|(id, path)| { + if let Some(path) = path { + unsafe { + sd_desktop_macos::get_open_with_applications(&path.to_str().unwrap().into()) + } + .as_slice() + .iter() + .map(|app| OpenWithApplication::File { + id, + name: app.name.to_string(), + url: app.url.to_string(), + }) + .collect::>() + } else { + vec![OpenWithApplication::Error( + id, + "File not found in database".into(), + )] + } + }) + .collect()); #[cfg(target_os = "linux")] { @@ -84,71 +113,131 @@ pub async fn get_file_path_open_with_apps( // TODO: cache this, and only update when the underlying XDG desktop apps changes let system_apps = SystemApps::populate().map_err(|_| ())?; - let handlers = system_apps.get_handlers(HandlerType::Ext( - path.file_name() - .and_then(|name| name.to_str()) - .map(|name| name.to_string()) - .ok_or( - // io::Error::new( - // io::ErrorKind::Other, - // "Missing file name from path", - // ) - (), - )?, - )); + return Ok(paths + .into_iter() + .flat_map(|(id, path)| { + if let Some(path) = path { + let Some(name) = path.file_name() + .and_then(|name| name.to_str()) + .map(|name| name.to_string()) + else { + return vec![OpenWithApplication::Error( + id, + "Failed to extract file name".into(), + )] + }; - let data = handlers - .iter() - .map(|handler| { - let path = handler.get_path().map_err(|_| ())?; - let entry = DesktopEntry::try_from(path.clone()).map_err(|_| ())?; - Ok(OpenWithApplication { - name: entry.name, - url: path, - }) + system_apps + .get_handlers(HandlerType::Ext(name)) + .iter() + .map(|handler| { + handler + .get_path() + .map(|path| { + DesktopEntry::try_from(&path) + .map(|entry| OpenWithApplication::File { + id, + name: entry.name, + url: path, + }) + .unwrap_or_else(|e| { + error!("{e:#?}"); + OpenWithApplication::Error( + id, + "Failed to parse desktop entry".into(), + ) + }) + }) + .unwrap_or_else(|e| { + error!("{e:#?}"); + OpenWithApplication::Error( + id, + "Failed to get path from desktop entry".into(), + ) + }) + }) + .collect::>() + } else { + vec![OpenWithApplication::Error( + id, + "File not found in database".into(), + )] + } }) - .collect::, _>>()?; - - return Ok(data); + .collect()); } #[allow(unreachable_code)] Err(()) } +type FileIdAndUrl = (i32, String); + #[tauri::command(async)] #[specta::specta] +#[allow(unused_variables)] pub async fn open_file_path_with( library: uuid::Uuid, - id: i32, - url: String, + file_ids_and_urls: Vec, node: tauri::State<'_, Arc>, ) -> Result<(), ()> { let Some(library) = node.library_manager.get_library(library).await else { return Err(()) }; - let Ok(Some(path)) = library - .get_file_path(id) - .await - else { - return Err(()) - }; + let url_by_id = file_ids_and_urls.into_iter().collect::>(); + let ids = url_by_id.keys().copied().collect::>(); #[cfg(target_os = "macos")] - unsafe { - sd_desktop_macos::open_file_path_with( - &path.to_str().ok_or(())?.into(), - &url.as_str().into(), - ) - }; + { + library + .get_file_paths(ids) + .await + .map(|paths| { + 
paths.iter().for_each(|(id, path)| { + if let Some(path) = path { + unsafe { + sd_desktop_macos::open_file_path_with( + &path.to_str().unwrap().into(), + &url_by_id + .get(id) + .expect("we just created this hashmap") + .as_str() + .into(), + ) + } + } + }) + }) + .map_err(|e| { + error!("{e:#?}"); + }) + } #[cfg(target_os = "linux")] { - sd_desktop_linux::Handler::assume_valid(url.into()) - .open(&[path.to_str().ok_or(())?]) - .map_err(|_| ())?; + library + .get_file_paths(ids) + .await + .map(|paths| { + paths.iter().for_each(|(id, path)| { + if let Some(path) = path.as_ref().and_then(|path| path.to_str()) { + if let Err(e) = sd_desktop_linux::Handler::assume_valid( + url_by_id + .get(id) + .expect("we just created this hashmap") + .as_str() + .into(), + ) + .open(&[path]) + { + error!("{e:#?}"); + } + } + }) + }) + .map_err(|e| { + error!("{e:#?}"); + }) } - - Ok(()) } diff --git a/apps/desktop/src-tauri/src/theme.rs b/apps/desktop/src-tauri/src/theme.rs index d79ef31e3..44b062b2d 100644 --- a/apps/desktop/src-tauri/src/theme.rs +++ b/apps/desktop/src-tauri/src/theme.rs @@ -10,6 +10,7 @@ pub enum AppThemeType { #[tauri::command(async)] #[specta::specta] +#[allow(unused_variables)] pub async fn lock_app_theme(theme_type: AppThemeType) { #[cfg(target_os = "macos")] unsafe { diff --git a/apps/desktop/src/commands.ts b/apps/desktop/src/commands.ts index f5b1fc6cc..7102021f7 100644 --- a/apps/desktop/src/commands.ts +++ b/apps/desktop/src/commands.ts @@ -22,22 +22,22 @@ export function openLogsDir() { return invoke()("open_logs_dir") } -export function openFilePath(library: string, id: number) { - return invoke()("open_file_path", { library,id }) +export function openFilePath(library: string, ids: number[]) { + return invoke()("open_file_path", { library,ids }) } -export function getFilePathOpenWithApps(library: string, id: number) { - return invoke()("get_file_path_open_with_apps", { library,id }) +export function getFilePathOpenWithApps(library: string, ids: number[]) { + return invoke()("get_file_path_open_with_apps", { library,ids }) } -export function openFilePathWith(library: string, id: number, url: string) { - return invoke()("open_file_path_with", { library,id,url }) +export function openFilePathWith(library: string, fileIdsAndUrls: ([number, string])[]) { + return invoke()("open_file_path_with", { library,fileIdsAndUrls }) } export function lockAppTheme(themeType: AppThemeType) { return invoke()("lock_app_theme", { themeType }) } -export type OpenWithApplication = { name: string; url: string } -export type OpenFilePathResult = { t: "NoLibrary" } | { t: "NoFile" } | { t: "OpenError"; c: string } | { t: "AllGood" } +export type OpenWithApplication = { t: "File"; c: { id: number; name: string; url: string } } | { t: "Error"; c: [number, string] } export type AppThemeType = "Auto" | "Light" | "Dark" +export type OpenFilePathResult = { t: "NoLibrary" } | { t: "NoFile"; c: number } | { t: "OpenError"; c: [number, string] } | { t: "AllGood"; c: number } | { t: "Internal"; c: string } diff --git a/apps/mobile/crates/android/Cargo.toml b/apps/mobile/crates/android/Cargo.toml index 3bbb47bc9..88bcf978c 100644 --- a/apps/mobile/crates/android/Cargo.toml +++ b/apps/mobile/crates/android/Cargo.toml @@ -2,9 +2,9 @@ name = "sd-mobile-android" version = "0.1.0" rust-version = "1.64.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [lib] # Android can use dynamic 
linking since all FFI is done via JNI diff --git a/apps/mobile/crates/core/Cargo.toml b/apps/mobile/crates/core/Cargo.toml index 29d96dc10..d1fbe19d1 100644 --- a/apps/mobile/crates/core/Cargo.toml +++ b/apps/mobile/crates/core/Cargo.toml @@ -2,16 +2,16 @@ name = "sd-mobile-core" version = "0.1.0" rust-version = "1.64.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [dependencies] once_cell = "1.17.2" sd-core = { path = "../../../../core", features = [ "mobile", ], default-features = false } -rspc.workspace = true +rspc = { workspace = true } serde_json = "1.0.96" tokio = { workspace = true } openssl = { version = "0.10.53", features = [ diff --git a/apps/mobile/crates/ios/Cargo.toml b/apps/mobile/crates/ios/Cargo.toml index a4a108074..6504b6cc1 100644 --- a/apps/mobile/crates/ios/Cargo.toml +++ b/apps/mobile/crates/ios/Cargo.toml @@ -2,9 +2,9 @@ name = "sd-mobile-ios" version = "0.1.0" rust-version = "1.64.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [lib] diff --git a/apps/server/Cargo.toml b/apps/server/Cargo.toml index 28e1dc5ba..48d9faf31 100644 --- a/apps/server/Cargo.toml +++ b/apps/server/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "server" version = "0.1.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [features] assets = [] diff --git a/core/Cargo.toml b/core/Cargo.toml index b3f53a0a5..16b6299c7 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -3,10 +3,10 @@ name = "sd-core" version = "0.1.0" description = "Virtual distributed filesystem engine that powers Spacedrive." 
authors = ["Spacedrive Technology Inc."] -rust-version = "1.68.1" -license.workspace = true -repository.workspace = true -edition.workspace = true +rust-version = "1.70.0" +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [features] default = [] @@ -88,8 +88,9 @@ normpath = { version = "1.1.1", features = ["localization"] } tracing-appender = { git = "https://github.com/tokio-rs/tracing", rev = "29146260fb4615d271d2e899ad95a753bb42915e" } # Unreleased changes for log deletion strum = { version = "0.24", features = ["derive"] } strum_macros = "0.24" +regex = "1.8.4" hex = "0.4.3" -int-enum = "0.4.0" +int-enum = "0.5.0" [target.'cfg(windows)'.dependencies.winapi-util] version = "0.1.5" diff --git a/core/src/api/files.rs b/core/src/api/files.rs index 8fbb3d126..49d4f84e8 100644 --- a/core/src/api/files.rs +++ b/core/src/api/files.rs @@ -1,20 +1,30 @@ use crate::{ api::utils::library, invalidate_query, - location::{file_path_helper::IsolatedFilePathData, find_location, LocationError}, + library::Library, + location::{ + file_path_helper::{ + file_path_to_isolate, file_path_to_isolate_with_id, FilePathError, IsolatedFilePathData, + }, + find_location, LocationError, + }, object::fs::{ copy::FileCopierJobInit, cut::FileCutterJobInit, delete::FileDeleterJobInit, erase::FileEraserJobInit, }, - prisma::{location, object}, + prisma::{file_path, location, object}, }; +use std::path::Path; + use chrono::Utc; +use futures::future::try_join_all; +use regex::Regex; use rspc::{alpha::AlphaRouter, ErrorCode}; use serde::Deserialize; use specta::Type; -use std::path::Path; use tokio::fs; +use tracing::error; use super::{Ctx, R}; @@ -86,20 +96,6 @@ pub(crate) fn mount() -> AlphaRouter { Ok(()) }) }) - .procedure("delete", { - R.with2(library()) - .mutation(|(_, library), id: i32| async move { - library - .db - .object() - .delete(object::id::equals(id)) - .exec() - .await?; - - invalidate_query!(library, "search.paths"); - Ok(()) - }) - }) .procedure("updateAccessTime", { R.with2(library()) .mutation(|(_, library), id: i32| async move { @@ -119,12 +115,12 @@ pub(crate) fn mount() -> AlphaRouter { }) .procedure("removeAccessTime", { R.with2(library()) - .mutation(|(_, library), id: i32| async move { + .mutation(|(_, library), object_ids: Vec| async move { library .db .object() - .update( - object::id::equals(id), + .update_many( + vec![object::id::in_vec(object_ids)], vec![object::date_accessed::set(None)], ) .exec() @@ -178,52 +174,244 @@ pub(crate) fn mount() -> AlphaRouter { }) .procedure("renameFile", { #[derive(Type, Deserialize)] - pub struct RenameFileArgs { - pub location_id: i32, - pub file_name: String, - pub new_file_name: String, + pub struct FromPattern { + pub pattern: String, + pub replace_all: bool, } - R.with2(library()).mutation( - |(_, library), - RenameFileArgs { - location_id, - file_name, - new_file_name, - }: RenameFileArgs| async move { - let location = find_location(&library, location_id) + #[derive(Type, Deserialize)] + pub struct RenameOne { + pub from_file_path_id: file_path::id::Type, + pub to: String, + } + + #[derive(Type, Deserialize)] + pub struct RenameMany { + pub from_pattern: FromPattern, + pub to_pattern: String, + pub from_file_path_ids: Vec, + } + + #[derive(Type, Deserialize)] + pub enum RenameKind { + One(RenameOne), + Many(RenameMany), + } + + #[derive(Type, Deserialize)] + pub struct RenameFileArgs { + pub location_id: location::id::Type, + pub kind: RenameKind, + } + + impl RenameFileArgs { + pub async fn rename_one( 
+ RenameOne { + from_file_path_id, + to, + }: RenameOne, + location_path: impl AsRef, + library: &Library, + ) -> Result<(), rspc::Error> { + let location_path = location_path.as_ref(); + let iso_file_path = IsolatedFilePathData::from( + library + .db + .file_path() + .find_unique(file_path::id::equals(from_file_path_id)) + .select(file_path_to_isolate::select()) + .exec() + .await? + .ok_or(LocationError::FilePath(FilePathError::IdNotFound( + from_file_path_id, + )))?, + ); + + if iso_file_path.full_name() == to { + return Ok(()); + } + + let (new_file_name, new_extension) = + IsolatedFilePathData::separate_name_and_extension_from_str(&to) + .map_err(LocationError::FilePath)?; + + let mut new_file_full_path = location_path.join(iso_file_path.parent()); + new_file_full_path.push(new_file_name); + if !new_extension.is_empty() { + new_file_full_path.set_extension(new_extension); + } + + match fs::metadata(&new_file_full_path).await { + Ok(_) => { + return Err(rspc::Error::new( + ErrorCode::Conflict, + "File already exists".to_string(), + )) + } + Err(e) => { + if e.kind() != std::io::ErrorKind::NotFound { + return Err(rspc::Error::with_cause( + ErrorCode::InternalServerError, + "Failed to check if file exists".to_string(), + e, + )); + } + } + } + + fs::rename(location_path.join(&iso_file_path), new_file_full_path) + .await + .map_err(|e| { + rspc::Error::with_cause( + ErrorCode::Conflict, + "Failed to rename file".to_string(), + e, + ) + })?; + + library + .db + .file_path() + .update( + file_path::id::equals(from_file_path_id), + vec![ + file_path::name::set(new_file_name.to_string()), + file_path::extension::set(new_extension.to_string()), + ], + ) + .exec() + .await?; + + Ok(()) + } + + pub async fn rename_many( + RenameMany { + from_pattern, + to_pattern, + from_file_path_ids, + }: RenameMany, + location_path: impl AsRef, + library: &Library, + ) -> Result<(), rspc::Error> { + let location_path = location_path.as_ref(); + + let Ok(from_regex) = Regex::new(&from_pattern.pattern) else { + return Err(rspc::Error::new( + rspc::ErrorCode::BadRequest, + "Invalid `from` regex pattern".into(), + )); + }; + + let to_update = try_join_all( + library + .db + .file_path() + .find_many(vec![file_path::id::in_vec(from_file_path_ids)]) + .select(file_path_to_isolate_with_id::select()) + .exec() + .await? 
+ .into_iter() + .map(|file_path| (file_path.id, IsolatedFilePathData::from(file_path))) + .map(|(file_path_id, iso_file_path)| { + let from = location_path.join(&iso_file_path); + let mut to = location_path.join(iso_file_path.parent()); + let full_name = iso_file_path.full_name(); + let replaced_full_name = if from_pattern.replace_all { + from_regex.replace_all(&full_name, &to_pattern) + } else { + from_regex.replace(&full_name, &to_pattern) + } + .to_string(); + + to.push(&replaced_full_name); + + async move { + if !IsolatedFilePathData::accept_file_name(&replaced_full_name) + { + Err(rspc::Error::new( + ErrorCode::BadRequest, + "Invalid file name".to_string(), + )) + } else { + fs::rename(&from, &to) + .await + .map_err(|e| { + error!( + "Failed to rename file from: '{}' to: '{}'", + from.display(), + to.display() + ); + rspc::Error::with_cause( + ErrorCode::Conflict, + "Failed to rename file".to_string(), + e, + ) + }) + .map(|_| { + let (name, extension) = + IsolatedFilePathData::separate_name_and_extension_from_str( + &replaced_full_name, + ) + .expect("we just built this full name and validated it"); + + ( + file_path_id, + (name.to_string(), extension.to_string()), + ) + }) + } + } + }), + ) + .await?; + + // TODO: dispatch sync update events + + library + .db + ._batch( + to_update + .into_iter() + .map(|(file_path_id, (new_name, new_extension))| { + library.db.file_path().update( + file_path::id::equals(file_path_id), + vec![ + file_path::name::set(new_name), + file_path::extension::set(new_extension), + ], + ) + }) + .collect::>(), + ) + .await?; + + Ok(()) + } + } + + R.with2(library()) + .mutation(|(_, library), args: RenameFileArgs| async move { + let location_path = find_location(&library, args.location_id) .select(location::select!({ path })) .exec() .await? - .ok_or(LocationError::IdNotFound(location_id))?; + .ok_or(LocationError::IdNotFound(args.location_id))? + .path + .ok_or(LocationError::MissingPath(args.location_id))?; - let Some(location_path) = location.path.as_ref().map(Path::new) else { - Err(LocationError::MissingPath)? 
- }; - - fs::rename( - location_path.join(IsolatedFilePathData::from_relative_str( - location_id, - &file_name, - )), - location_path.join(IsolatedFilePathData::from_relative_str( - location_id, - &new_file_name, - )), - ) - .await - .map_err(|e| { - rspc::Error::with_cause( - ErrorCode::Conflict, - "Failed to rename file".to_string(), - e, - ) - })?; + let res = match args.kind { + RenameKind::One(one) => { + RenameFileArgs::rename_one(one, location_path, &library).await + } + RenameKind::Many(many) => { + RenameFileArgs::rename_many(many, location_path, &library).await + } + }; invalidate_query!(library, "search.objects"); - Ok(()) - }, - ) + res + }) }) } diff --git a/core/src/api/jobs.rs b/core/src/api/jobs.rs index 435ecc40f..bdda0e4f8 100644 --- a/core/src/api/jobs.rs +++ b/core/src/api/jobs.rs @@ -6,12 +6,14 @@ use crate::{ preview::thumbnailer_job::ThumbnailerJobInit, validation::validator_job::ObjectValidatorJobInit, }, + prisma::location, }; +use std::path::PathBuf; + use rspc::alpha::AlphaRouter; use serde::Deserialize; use specta::Type; -use std::path::PathBuf; use uuid::Uuid; use super::{utils::library, CoreEvent, Ctx, R}; @@ -46,7 +48,7 @@ pub(crate) fn mount() -> AlphaRouter { .procedure("generateThumbsForLocation", { #[derive(Type, Deserialize)] pub struct GenerateThumbsForLocationArgs { - pub id: i32, + pub id: location::id::Type, pub path: PathBuf, } @@ -69,7 +71,7 @@ pub(crate) fn mount() -> AlphaRouter { .procedure("objectValidator", { #[derive(Type, Deserialize)] pub struct ObjectValidatorArgs { - pub id: i32, + pub id: location::id::Type, pub path: PathBuf, } @@ -92,7 +94,7 @@ pub(crate) fn mount() -> AlphaRouter { .procedure("identifyUniqueFiles", { #[derive(Type, Deserialize)] pub struct IdentifyUniqueFilesArgs { - pub id: i32, + pub id: location::id::Type, pub path: PathBuf, } diff --git a/core/src/api/locations.rs b/core/src/api/locations.rs index 29a80bf03..96d6078ab 100644 --- a/core/src/api/locations.rs +++ b/core/src/api/locations.rs @@ -9,10 +9,11 @@ use crate::{ util::AbortOnDrop, }; +use std::path::PathBuf; + use rspc::{self, alpha::AlphaRouter, ErrorCode}; use serde::{Deserialize, Serialize}; use specta::Type; -use std::path::PathBuf; use super::{utils::library, Ctx, R}; @@ -67,7 +68,7 @@ pub(crate) fn mount() -> AlphaRouter { }) .procedure("get", { R.with2(library()) - .query(|(_, library), location_id: i32| async move { + .query(|(_, library), location_id: location::id::Type| async move { Ok(library .db .location() @@ -78,7 +79,7 @@ pub(crate) fn mount() -> AlphaRouter { }) .procedure("getWithRules", { R.with2(library()) - .query(|(_, library), location_id: i32| async move { + .query(|(_, library), location_id: location::id::Type| async move { Ok(library .db .location() @@ -106,12 +107,13 @@ pub(crate) fn mount() -> AlphaRouter { }) }) .procedure("delete", { - R.with2(library()) - .mutation(|(_, library), location_id: i32| async move { + R.with2(library()).mutation( + |(_, library), location_id: location::id::Type| async move { delete_location(&library, location_id).await?; invalidate_query!(library, "locations.list"); Ok(()) - }) + }, + ) }) .procedure("relink", { R.with2(library()) @@ -132,8 +134,8 @@ pub(crate) fn mount() -> AlphaRouter { }) }) .procedure("fullRescan", { - R.with2(library()) - .mutation(|(_, library), location_id: i32| async move { + R.with2(library()).mutation( + |(_, library), location_id: location::id::Type| async move { // rescan location scan_location( &library, @@ -145,12 +147,13 @@ pub(crate) fn mount() -> AlphaRouter { ) 
.await .map_err(Into::into) - }) + }, + ) }) .procedure("quickRescan", { #[derive(Clone, Serialize, Deserialize, Type, Debug)] pub struct LightScanArgs { - pub location_id: i32, + pub location_id: location::id::Type, pub sub_path: String, } @@ -275,7 +278,7 @@ fn mount_indexer_rule_routes() -> AlphaRouter { // list indexer rules for location, returning the indexer rule .procedure("listForLocation", { R.with2(library()) - .query(|(_, library), location_id: i32| async move { + .query(|(_, library), location_id: location::id::Type| async move { library .db .indexer_rule() diff --git a/core/src/api/search.rs b/core/src/api/search.rs index 2859eb709..cd7ad42be 100644 --- a/core/src/api/search.rs +++ b/core/src/api/search.rs @@ -1,7 +1,18 @@ use crate::{ - location::file_path_helper::{check_file_path_exists, IsolatedFilePathData}, + api::{ + locations::{file_path_with_object, object_with_file_paths, ExplorerItem}, + utils::library, + }, + library::Library, + location::{ + file_path_helper::{check_file_path_exists, IsolatedFilePathData}, + find_location, LocationError, + }, object::preview::get_thumb_key, + prisma::{self, file_path, location, object, tag, tag_on_object}, + util::db::chain_optional_iter, }; + use std::collections::BTreeSet; use chrono::{DateTime, FixedOffset, Utc}; @@ -10,17 +21,6 @@ use rspc::{alpha::AlphaRouter, ErrorCode}; use serde::{Deserialize, Serialize}; use specta::Type; -use crate::{ - api::{ - locations::{file_path_with_object, object_with_file_paths, ExplorerItem}, - utils::library, - }, - library::Library, - location::{find_location, LocationError}, - prisma::{self, file_path, object, tag, tag_on_object}, - util::db::chain_optional_iter, -}; - use super::{Ctx, R}; #[derive(Serialize, Type, Debug)] @@ -109,7 +109,7 @@ impl MaybeNot { #[serde(rename_all = "camelCase")] struct FilePathFilterArgs { #[specta(optional)] - location_id: Option, + location_id: Option, #[serde(default)] search: String, #[specta(optional)] @@ -166,11 +166,11 @@ enum ObjectHiddenFilter { Include, } -impl Into> for ObjectHiddenFilter { - fn into(self) -> Option { - match self { - Self::Exclude => Some(object::hidden::not(true)), - Self::Include => None, +impl From for Option { + fn from(value: ObjectHiddenFilter) -> Self { + match value { + ObjectHiddenFilter::Exclude => Some(object::hidden::not(true)), + ObjectHiddenFilter::Include => None, } } } diff --git a/core/src/api/tags.rs b/core/src/api/tags.rs index 1846593ca..979347683 100644 --- a/core/src/api/tags.rs +++ b/core/src/api/tags.rs @@ -8,7 +8,7 @@ use uuid::Uuid; use crate::{ invalidate_query, library::Library, - prisma::{object, tag, tag_on_object}, + prisma::{tag, tag_on_object}, sync, }; @@ -45,19 +45,6 @@ pub(crate) fn mount() -> AlphaRouter { .await?) 
}) }) - // .library_mutation("create", |t| { - // #[derive(Type, Deserialize)] - // pub struct TagCreateArgs { - // pub name: String, - // pub color: String, - // } - // t(|_, args: TagCreateArgs, library| async move { - // let created_tag = Tag::new(args.name, args.color); - // created_tag.save(&library.db).await?; - // invalidate_query!(library, "tags.list"); - // Ok(created_tag) - // }) - // }) .procedure("create", { #[derive(Type, Deserialize)] pub struct TagCreateArgs { @@ -101,7 +88,7 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> { .procedure("assign", { #[derive(Debug, Type, Deserialize)] pub struct TagAssignArgs { - pub object_id: i32, + pub object_ids: Vec<i32>, pub tag_id: i32, pub unassign: bool, } @@ -112,17 +99,29 @@ pub(crate) fn mount() -> AlphaRouter<Ctx> { library .db .tag_on_object() - .delete(tag_on_object::tag_id_object_id(args.tag_id, args.object_id)) + .delete_many( + args.object_ids + .iter() + .map(|&object_id| { + tag_on_object::tag_id_object_id(args.tag_id, object_id) + }) + .collect(), + ) .exec() .await?; } else { library .db .tag_on_object() - .create( - tag::id::equals(args.tag_id), - object::id::equals(args.object_id), - vec![], + .create_many( + args.object_ids + .iter() + .map(|&object_id| tag_on_object::CreateUnchecked { + tag_id: args.tag_id, + object_id, + _params: vec![], + }) + .collect(), ) .exec() .await?; diff --git a/core/src/custom_uri.rs b/core/src/custom_uri.rs index 0c5388064..764bc4fa6 100644 --- a/core/src/custom_uri.rs +++ b/core/src/custom_uri.rs @@ -1,6 +1,6 @@ use crate::{ location::file_path_helper::{file_path_to_handle_custom_uri, IsolatedFilePathData}, - prisma::file_path, + prisma::{file_path, location}, util::error::FileIOError, Node, }; @@ -34,7 +34,7 @@ use uuid::Uuid; // This LRU cache allows us to avoid doing a DB lookup on every request. // The main advantage of this LRU Cache is for video files. Video files are fetched in multiple chunks and the cache prevents a DB lookup on every chunk, reducing the request time from 15-25ms to 1-10ms. -type MetadataCacheKey = (Uuid, i32); +type MetadataCacheKey = (Uuid, file_path::id::Type); type NameAndExtension = (PathBuf, String); static FILE_METADATA_CACHE: Lazy<Cache<MetadataCacheKey, NameAndExtension>> = Lazy::new(|| Cache::new(100)); @@ -160,14 +160,14 @@ async fn handle_file( let location_id = path .get(2) - .and_then(|id| id.parse::<i32>().ok()) + .and_then(|id| id.parse::<location::id::Type>().ok()) .ok_or_else(|| { HandleCustomUriError::BadRequest("Invalid number of parameters. Missing location_id!") })?; let file_path_id = path .get(3) - .and_then(|id| id.parse::<i32>().ok()) + .and_then(|id| id.parse::<file_path::id::Type>().ok()) .ok_or_else(|| { HandleCustomUriError::BadRequest("Invalid number of parameters.
Missing file_path_id!") })?; diff --git a/core/src/job/job_manager.rs b/core/src/job/job_manager.rs index fcb5b16b8..2d861381b 100644 --- a/core/src/job/job_manager.rs +++ b/core/src/job/job_manager.rs @@ -48,9 +48,6 @@ pub enum JobManagerError { #[error("Failed to fetch job data from database: {0}")] Database(#[from] prisma_client_rust::QueryError), - - #[error("Job error: {0}")] - Job(#[from] JobError), } impl From<JobManagerError> for rspc::Error { @@ -66,11 +63,6 @@ impl From<JobManagerError> for rspc::Error { "Error accessing the database".to_string(), value, ), - JobManagerError::Job(_) => Self::with_cause( - rspc::ErrorCode::InternalServerError, - "Job error".to_string(), - value, - ), } } } @@ -221,12 +213,12 @@ impl JobManager { } } - pub async fn resume_jobs(self: Arc<Self>, library: &Library) -> Result<(), JobManagerError> { + pub async fn resume_jobs(self: Arc<Self>, library: &Library) -> Result<(), JobError> { library .db .job() .delete_many(vec![job::name::not_in_vec( - ALL_JOB_NAMES.into_iter().map(|s| s.to_string()).collect(), + ALL_JOB_NAMES.iter().map(|s| s.to_string()).collect(), )]) .exec() .await?; @@ -547,7 +539,7 @@ fn get_background_info_by_job_name(name: &str) -> bool { fn get_resumable_job( job_report: JobReport, next_jobs: VecDeque<Box<dyn DynJob>>, -) -> Result<Box<dyn DynJob>, JobManagerError> { +) -> Result<Box<dyn DynJob>, JobError> { dispatch_call_to_job_by_name!( job_report.name.as_str(), T -> Job::resume(job_report, T {}, next_jobs), @@ -569,7 +561,6 @@ fn get_resumable_job( FileEraserJob, ] ) - .map_err(Into::into) } const ALL_JOB_NAMES: &[&str] = &[ diff --git a/core/src/job/mod.rs b/core/src/job/mod.rs index 276d9c846..f57af9fb7 100644 --- a/core/src/job/mod.rs +++ b/core/src/job/mod.rs @@ -1,21 +1,25 @@ use crate::{ library::Library, - location::indexer::IndexerError, - object::{file_identifier::FileIdentifierJobError, preview::ThumbnailerError}, + location::{indexer::IndexerError, LocationError}, + object::{ + file_identifier::FileIdentifierJobError, fs::error::FileSystemJobsError, + preview::ThumbnailerError, + }, util::error::FileIOError, }; +use sd_crypto::Error as CryptoError; + use std::{ collections::{hash_map::DefaultHasher, VecDeque}, fmt::Debug, hash::{Hash, Hasher}, mem, - path::PathBuf, sync::Arc, }; +use prisma_client_rust::QueryError; use rmp_serde::{decode::Error as DecodeError, encode::Error as EncodeError}; -use sd_crypto::Error as CryptoError; use serde::{de::DeserializeOwned, Deserialize, Serialize}; use thiserror::Error; use tracing::{debug, error, info, warn}; @@ -31,16 +35,16 @@ pub use worker::*; pub enum JobError { // General errors #[error("database error: {0}")] - DatabaseError(#[from] prisma_client_rust::QueryError), + Database(#[from] QueryError), #[error("Failed to join Tokio spawn blocking: {0}")] - JoinTaskError(#[from] tokio::task::JoinError), - #[error("Job state encode error: {0}")] + JoinTask(#[from] tokio::task::JoinError), + #[error("job state encode error: {0}")] StateEncode(#[from] EncodeError), - #[error("Job state decode error: {0}")] + #[error("job state decode error: {0}")] StateDecode(#[from] DecodeError), - #[error("Job metadata serialization error: {0}")] + #[error("job metadata serialization error: {0}")] MetadataSerialization(#[from] serde_json::Error), - #[error("Tried to resume a job with unknown name: job <name='{1}', uuid='{0}'>")] + #[error("tried to resume a job with unknown name: job <name='{1}', uuid='{0}'>")] UnknownJobName(Uuid, String), #[error( "Tried to resume a job that doesn't have saved state data: job <name='{name}', uuid='{id}'>" )] MissingReport { id: Uuid, name: String }, #[error("missing some job data: '{value}'")]
MissingData { value: String }, - #[error("error converting/handling OS strings")] - OsStr, #[error("error converting/handling paths")] Path, #[error("invalid job status integer: {0}")] InvalidJobStatusInt(i32), #[error(transparent)] FileIO(#[from] FileIOError), + #[error("Location error: {0}")] + Location(#[from] LocationError), // Specific job errors - #[error("Indexer error: {0}")] - IndexerError(#[from] IndexerError), - #[error("Thumbnailer error: {0}")] + #[error(transparent)] + Indexer(#[from] IndexerError), ThumbnailError(#[from] ThumbnailerError), - #[error("Identifier error: {0}")] + #[error(transparent)] IdentifierError(#[from] FileIdentifierJobError), - #[error("Crypto error: {0}")] + #[error(transparent)] + FileSystemJobsError(#[from] FileSystemJobsError), + #[error(transparent)] CryptoError(#[from] CryptoError), - #[error("source and destination path are the same: {}", .0.display())] - MatchingSrcDest(PathBuf), - #[error("action would overwrite another file: {}", .0.display())] - WouldOverwrite(PathBuf), #[error("item of type '{0}' with id '{1}' is missing from the db")] MissingFromDb(&'static str, String), #[error("the cas id is not set on the path data")] @@ -469,3 +471,23 @@ impl<SJob: StatefulJob> DynJob for Job<SJob> { Ok(()) } } + +#[macro_export] +macro_rules! extract_job_data { + ($state:ident) => {{ + $state + .data + .as_ref() + .expect("critical error: missing data on job state") + }}; +} + +#[macro_export] +macro_rules! extract_job_data_mut { + ($state:ident) => {{ + $state + .data + .as_mut() + .expect("critical error: missing data on job state") + }}; +} diff --git a/core/src/library/library.rs b/core/src/library/library.rs index 961bd1cf8..1cd805276 100644 --- a/core/src/library/library.rs +++ b/core/src/library/library.rs @@ -14,6 +14,7 @@ use crate::{ }; use std::{ + collections::HashMap, fmt::{Debug, Formatter}, path::{Path, PathBuf}, sync::Arc, }; @@ -100,27 +101,42 @@ impl Library { } /// Returns the full path of a file - pub async fn get_file_path(&self, id: i32) -> Result<Option<PathBuf>, LibraryManagerError> { - Ok(self - .db - .file_path() - .find_first(vec![ - file_path::location::is(vec![location::node_id::equals(Some(self.node_local_id))]), - file_path::id::equals(id), - ]) - .select(file_path_to_full_path::select()) - .exec() - .await? - .map(|record| { - record - .location - .path - .as_ref() - .map(|p| { - Path::new(p).join(IsolatedFilePathData::from((record.location.id, &record))) - }) - .ok_or_else(|| LibraryManagerError::NoPath(record.location.id)) - }) - .transpose()?) + pub async fn get_file_paths( + &self, + ids: Vec<file_path::id::Type>, + ) -> Result<HashMap<file_path::id::Type, Option<PathBuf>>, LibraryManagerError> { + let mut out = ids + .iter() + .copied() + .map(|id| (id, None)) + .collect::<HashMap<_, _>>(); + + out.extend( + self.db + .file_path() + .find_many(vec![ + file_path::location::is(vec![location::node_id::equals(Some( + self.node_local_id, + ))]), + file_path::id::in_vec(ids), + ]) + .select(file_path_to_full_path::select()) + .exec() + .await?
+ .into_iter() + .map(|file_path| { + ( + file_path.id, + file_path.location.path.as_ref().map(|location_path| { + Path::new(&location_path).join(IsolatedFilePathData::from(( + file_path.location.id, + &file_path, + ))) + }), + ) + }), + ); + + Ok(out) } } diff --git a/core/src/location/error.rs b/core/src/location/error.rs index cc3dac7f7..b27bacb3f 100644 --- a/core/src/location/error.rs +++ b/core/src/location/error.rs @@ -1,4 +1,4 @@ -use crate::util::error::FileIOError; +use crate::{prisma::location, util::error::FileIOError}; use std::path::PathBuf; @@ -19,7 +19,7 @@ pub enum LocationError { #[error("location not found ")] UuidNotFound(Uuid), #[error("location not found ")] - IdNotFound(i32), + IdNotFound(location::id::Type), // User errors #[error("location not a directory ", .0.display())] @@ -49,25 +49,25 @@ pub enum LocationError { // Internal Errors #[error(transparent)] - LocationMetadataError(#[from] LocationMetadataError), + LocationMetadata(#[from] LocationMetadataError), #[error("failed to read location path metadata info: {0}")] LocationPathFilesystemMetadataAccess(FileIOError), #[error("missing metadata file for location ", .0.display())] MissingMetadataFile(PathBuf), #[error("failed to open file from local OS: {0}")] - FileReadError(FileIOError), + FileRead(FileIOError), #[error("failed to read mounted volumes from local OS: {0}")] VolumeReadError(String), #[error("database error: {0}")] - DatabaseError(#[from] prisma_client_rust::QueryError), + Database(#[from] prisma_client_rust::QueryError), #[error(transparent)] - LocationManagerError(#[from] LocationManagerError), + LocationManager(#[from] LocationManagerError), #[error(transparent)] - FilePathError(#[from] FilePathError), + FilePath(#[from] FilePathError), #[error(transparent)] FileIO(#[from] FileIOError), - #[error("missing-path")] - MissingPath, + #[error("location missing path ")] + MissingPath(location::id::Type), } impl From for rspc::Error { diff --git a/core/src/location/file_path_helper/isolated_file_path_data.rs b/core/src/location/file_path_helper/isolated_file_path_data.rs index 60aa3d794..197aa7bd1 100644 --- a/core/src/location/file_path_helper/isolated_file_path_data.rs +++ b/core/src/location/file_path_helper/isolated_file_path_data.rs @@ -1,19 +1,30 @@ -use crate::{location::LocationId, prisma::file_path, util::error::NonUtf8PathError}; +use crate::{ + prisma::{file_path, location}, + util::error::NonUtf8PathError, +}; -use std::{borrow::Cow, fmt, path::Path}; +use std::{ + borrow::Cow, + fmt, + path::{Path, MAIN_SEPARATOR}, + sync::OnceLock, +}; +use regex::RegexSet; use serde::{Deserialize, Serialize}; use super::{ file_path_for_file_identifier, file_path_for_object_validator, file_path_for_thumbnailer, file_path_to_full_path, file_path_to_handle_custom_uri, file_path_to_isolate, - file_path_with_object, FilePathError, + file_path_to_isolate_with_id, file_path_with_object, FilePathError, }; +static FORBIDDEN_FILE_NAMES: OnceLock = OnceLock::new(); + #[derive(Serialize, Deserialize, Debug, Hash, Eq, PartialEq)] #[non_exhaustive] pub struct IsolatedFilePathData<'a> { - pub(in crate::location) location_id: LocationId, + pub(in crate::location) location_id: location::id::Type, pub(in crate::location) materialized_path: Cow<'a, str>, pub(in crate::location) is_dir: bool, pub(in crate::location) name: Cow<'a, str>, @@ -23,7 +34,7 @@ pub struct IsolatedFilePathData<'a> { impl IsolatedFilePathData<'static> { pub fn new( - location_id: LocationId, + location_id: location::id::Type, location_path: impl 
AsRef<Path>, full_path: impl AsRef<Path>, is_dir: bool, @@ -67,10 +78,18 @@ impl IsolatedFilePathData<'static> { } impl<'a> IsolatedFilePathData<'a> { - pub fn location_id(&self) -> LocationId { + pub fn location_id(&self) -> location::id::Type { self.location_id } + pub fn name(&'a self) -> &'a str { + &self.name + } + + pub fn extension(&'a self) -> &'a str { + &self.extension + } + pub fn parent(&'a self) -> Self { let (parent_path_str, name, relative_path) = if self.materialized_path == "/" { ("/", "", "") @@ -97,7 +116,10 @@ impl<'a> IsolatedFilePathData<'a> { } } - pub fn from_relative_str(location_id: LocationId, relative_file_path_str: &'a str) -> Self { + pub fn from_relative_str( + location_id: location::id::Type, + relative_file_path_str: &'a str, + ) -> Self { let is_dir = relative_file_path_str.ends_with('/'); let (materialized_path, maybe_name, maybe_extension) = @@ -113,6 +135,14 @@ impl<'a> IsolatedFilePathData<'a> { } } + pub fn full_name(&self) -> String { + if self.extension.is_empty() { + self.name.to_string() + } else { + format!("{}.{}", self.name, self.extension) + } + } + pub fn materialized_path_for_children(&self) -> Option<String> { if self.materialized_path == "/" && self.name.is_empty() && self.is_dir { // We're at the root file_path @@ -123,6 +153,53 @@ impl<'a> IsolatedFilePathData<'a> { } } + pub fn separate_name_and_extension_from_str( + source: &'a str, + ) -> Result<(&'a str, &'a str), FilePathError> { + if source.contains(MAIN_SEPARATOR) { + return Err(FilePathError::InvalidFilenameAndExtension( + source.to_string(), + )); + } + + if let Some(last_dot_idx) = source.rfind('.') { + if last_dot_idx == 0 { + // The dot is the first character, so it's a hidden file + Ok((source, "")) + } else { + Ok((&source[..last_dot_idx], &source[last_dot_idx + 1..])) + } + } else { + // It's a file without extension + Ok((source, "")) + } + } + + pub fn accept_file_name(name: &str) -> bool { + let reg = { + // Maybe we should enforce windows more restrictive rules on all platforms? + #[cfg(target_os = "windows")] + { + FORBIDDEN_FILE_NAMES.get_or_init(|| { + RegexSet::new([ + r"(?i)^(CON|PRN|AUX|NUL|COM[1-9]|LPT[1-9])(\.\w+)*$", + r#"[<>:"/\\|?*\u0000-\u0031]"#, + ]) + .expect("this regex should always be valid") + }) + } + + #[cfg(not(target_os = "windows"))] + { + FORBIDDEN_FILE_NAMES.get_or_init(|| { + RegexSet::new([r"/|\x00"]).expect("this regex should always be valid") + }) + } + }; + + !reg.is_match(name) + } + pub fn separate_path_name_and_extension_from_str( source: &'a str, is_dir: bool, @@ -178,7 +255,7 @@ impl<'a> IsolatedFilePathData<'a> { } fn from_db_data( - location_id: LocationId, + location_id: location::id::Type, db_materialized_path: &'a str, db_is_dir: bool, db_name: &'a str, @@ -313,13 +390,13 @@ mod macros { macro_rules! impl_from_db_without_location_id { ($($file_path_kind:ident),+ $(,)?)
=> { $( - impl ::std::convert::From<($crate::location::LocationId, $file_path_kind::Data)> for $crate:: + impl ::std::convert::From<($crate::prisma::location::id::Type, $file_path_kind::Data)> for $crate:: location:: file_path_helper:: isolated_file_path_data:: IsolatedFilePathData<'static> { - fn from((location_id, path): ($crate::location::LocationId, $file_path_kind::Data)) -> Self { + fn from((location_id, path): ($crate::prisma::location::id::Type, $file_path_kind::Data)) -> Self { Self { location_id, relative_path: Cow::Owned( @@ -342,13 +419,13 @@ mod macros { } } - impl<'a> ::std::convert::From<($crate::location::LocationId, &'a $file_path_kind::Data)> for $crate:: + impl<'a> ::std::convert::From<($crate::prisma::location::id::Type, &'a $file_path_kind::Data)> for $crate:: location:: file_path_helper:: isolated_file_path_data:: IsolatedFilePathData<'a> { - fn from((location_id, path): ($crate::location::LocationId, &'a $file_path_kind::Data)) -> Self { + fn from((location_id, path): ($crate::prisma::location::id::Type, &'a $file_path_kind::Data)) -> Self { Self::from_db_data( location_id, &path.materialized_path, @@ -363,7 +440,12 @@ mod macros { } } -impl_from_db!(file_path, file_path_to_isolate, file_path_with_object); +impl_from_db!( + file_path, + file_path_to_isolate, + file_path_to_isolate_with_id, + file_path_with_object +); impl_from_db_without_location_id!( file_path_for_file_identifier, @@ -374,7 +456,7 @@ impl_from_db_without_location_id!( ); fn extract_relative_path( - location_id: LocationId, + location_id: location::id::Type, location_path: impl AsRef<Path>, path: impl AsRef<Path>, ) -> Result<String, FilePathError> { @@ -396,7 +478,7 @@ /// This function separates a file path from a location path, and normalizes replacing '\' with '/' /// to be consistent between Windows and Unix like systems pub fn extract_normalized_materialized_path_str( - location_id: LocationId, + location_id: location::id::Type, location_path: impl AsRef<Path>, path: impl AsRef<Path>, ) -> Result<String, FilePathError> { @@ -439,6 +521,7 @@ fn assemble_relative_path( } #[cfg(test)] +#[allow(clippy::unwrap_used)] mod tests { use super::*; diff --git a/core/src/location/file_path_helper/mod.rs b/core/src/location/file_path_helper/mod.rs index c1be30961..21c0ce6e3 100644 --- a/core/src/location/file_path_helper/mod.rs +++ b/core/src/location/file_path_helper/mod.rs @@ -1,5 +1,5 @@ use crate::{ - prisma::{file_path, PrismaClient}, + prisma::{file_path, location, PrismaClient}, util::error::{FileIOError, NonUtf8PathError}, }; @@ -20,8 +20,6 @@ pub mod isolated_file_path_data; pub use isolated_file_path_data::IsolatedFilePathData; -use super::LocationId; - // File Path selectables!
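The `get_file_paths` rework visible in library.rs is worth calling out as a pattern: instead of one query per id, the map is pre-seeded with `None` for every requested id and the single `find_many` result then overwrites the hits, so callers can tell "row not found" apart from "row found, but its location has no path" without a second pass. A minimal standalone sketch of that shape (simplified `i32`/`String` types, and a hypothetical `fetch_found` standing in for the Prisma query):

```rust
use std::collections::HashMap;

fn batched_lookup(
    ids: Vec<i32>,
    fetch_found: impl Fn(&[i32]) -> Vec<(i32, String)>,
) -> HashMap<i32, Option<String>> {
    // Seed: every requested id is present in the output, initially `None`.
    let mut out = ids
        .iter()
        .copied()
        .map(|id| (id, None))
        .collect::<HashMap<_, _>>();

    // One batched fetch; found rows overwrite their seeded entries.
    out.extend(
        fetch_found(&ids)
            .into_iter()
            .map(|(id, path)| (id, Some(path))),
    );

    out
}

fn main() {
    // Pretend only even ids exist in the database.
    let found = |ids: &[i32]| -> Vec<(i32, String)> {
        ids.iter()
            .filter(|&&id| id % 2 == 0)
            .map(|&id| (id, format!("/tmp/{id}")))
            .collect()
    };

    let out = batched_lookup(vec![1, 2, 3], found);
    assert_eq!(out.len(), 3);
    assert!(out[&2].is_some() && out[&1].is_none());
}
```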
file_path::select!(file_path_just_pub_id { pub_id }); file_path::select!(file_path_just_pub_id_materialized_path { @@ -63,6 +61,14 @@ file_path::select!(file_path_to_isolate { name extension }); +file_path::select!(file_path_to_isolate_with_id { + id + location_id + materialized_path + is_dir + name + extension +}); file_path::select!(file_path_to_handle_custom_uri { materialized_path is_dir @@ -74,6 +80,7 @@ file_path::select!(file_path_to_handle_custom_uri { } }); file_path::select!(file_path_to_full_path { + id materialized_path is_dir name @@ -98,10 +105,12 @@ pub struct FilePathMetadata { #[derive(Error, Debug)] pub enum FilePathError { + #[error("file path not found: ")] + IdNotFound(file_path::id::Type), #[error("file Path not found: ", .0.display())] NotFound(Box), #[error("location '{0}' not found")] - LocationNotFound(i32), + LocationNotFound(location::id::Type), #[error("received an invalid sub path: ", .location_path.display(), .sub_path.display())] InvalidSubPath { location_path: Box, @@ -115,12 +124,12 @@ pub enum FilePathError { .sub_path.display() )] SubPathParentNotInLocation { - location_id: LocationId, + location_id: location::id::Type, sub_path: Box, }, #[error("unable to extract materialized path from location: ", .location_id, .path.display())] UnableToExtractMaterializedPath { - location_id: LocationId, + location_id: location::id::Type, path: Box, }, #[error("database error: {0}")] @@ -130,6 +139,8 @@ pub enum FilePathError { FileIO(#[from] FileIOError), #[error(transparent)] NonUtf8Path(#[from] NonUtf8PathError), + #[error("received an invalid filename and extension: ")] + InvalidFilenameAndExtension(String), } #[cfg(feature = "location-watcher")] @@ -146,7 +157,7 @@ pub async fn create_file_path( cas_id: Option, metadata: FilePathMetadata, ) -> Result { - use crate::{prisma::location, sync, util::db::uuid_to_bytes}; + use crate::{sync, util::db::uuid_to_bytes}; use serde_json::json; use uuid::Uuid; diff --git a/core/src/location/indexer/indexer_job.rs b/core/src/location/indexer/indexer_job.rs index 251cc4e92..017ac87c2 100644 --- a/core/src/location/indexer/indexer_job.rs +++ b/core/src/location/indexer/indexer_job.rs @@ -1,5 +1,5 @@ use crate::{ - file_paths_db_fetcher_fn, + extract_job_data_mut, file_paths_db_fetcher_fn, job::{JobError, JobInitData, JobResult, JobState, StatefulJob, WorkerContext}, location::file_path_helper::{ ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location, @@ -180,10 +180,7 @@ impl StatefulJob for IndexerJob { mut ctx: WorkerContext, state: &mut JobState, ) -> Result<(), JobError> { - let data = state - .data - .as_mut() - .expect("critical error: missing data on job state"); + let data = extract_job_data_mut!(state); match &state.steps[0] { IndexerJobStepInput::Save(step) => { @@ -288,6 +285,6 @@ impl StatefulJob for IndexerJob { return Err(JobError::MissingPath) }; - finalize_indexer(&location_path, state, ctx) + finalize_indexer(location_path, state, ctx) } } diff --git a/core/src/location/indexer/mod.rs b/core/src/location/indexer/mod.rs index 2ce8192f3..9965155f0 100644 --- a/core/src/location/indexer/mod.rs +++ b/core/src/location/indexer/mod.rs @@ -1,8 +1,8 @@ use crate::{ - invalidate_query, + extract_job_data, invalidate_query, job::{JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext}, library::Library, - prisma::{file_path, PrismaClient}, + prisma::{file_path, location, PrismaClient}, sync, util::{db::uuid_to_bytes, error::FileIOError}, }; @@ -21,7 +21,7 @@ use tracing::info; use 
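// Aside on the selectables above: each `file_path::select!(...)` invocation
// generates a dedicated module whose `Data` struct carries only the listed
// columns, so a query like `file_path_to_isolate_with_id` pulls six narrow
// fields instead of whole rows. Conceptually (a simplified sketch; the real
// macro also generates query plumbing, and field types follow the Prisma
// schema rather than these illustrative ones):
//
//     pub mod file_path_to_isolate_with_id {
//         pub struct Data {
//             pub id: i32,
//             pub location_id: i32,
//             pub materialized_path: String,
//             pub is_dir: bool,
//             pub name: String,
//             pub extension: String,
//         }
//     }
//
// The new `id` field (absent from `file_path_to_isolate`) is what lets the
// reworked fs jobs and `get_file_paths` map query results back to their
// database rows.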
super::{ file_path_helper::{file_path_just_pub_id, FilePathError, IsolatedFilePathData}, - location_with_indexer_rules, LocationId, + location_with_indexer_rules, }; pub mod indexer_job; @@ -215,10 +215,7 @@ where Init: Serialize + DeserializeOwned + Send + Sync + Hash, Step: Serialize + DeserializeOwned + Send + Sync, { - let data = state - .data - .as_ref() - .expect("critical error: missing data on job state"); + let data = extract_job_data!(state); info!( "scan of {} completed in {:?}. {} new files found, \ @@ -250,7 +247,7 @@ fn update_notifier_fn(batch_size: usize, ctx: &mut WorkerContext) -> impl FnMut( } fn iso_file_path_factory( - location_id: LocationId, + location_id: location::id::Type, location_path: &Path, ) -> impl Fn(&Path, bool) -> Result, IndexerError> + '_ { move |path, is_dir| { diff --git a/core/src/location/indexer/rules.rs b/core/src/location/indexer/rules.rs index 0096fd437..19f336716 100644 --- a/core/src/location/indexer/rules.rs +++ b/core/src/location/indexer/rules.rs @@ -7,6 +7,12 @@ use crate::{ }, }; +use std::{ + collections::{HashMap, HashSet}, + marker::PhantomData, + path::Path, +}; + use chrono::{DateTime, Utc}; use futures::future::try_join_all; use globset::{Glob, GlobSet, GlobSetBuilder}; @@ -14,11 +20,6 @@ use rmp_serde::{self, decode, encode}; use rspc::ErrorCode; use serde::{de, ser, Deserialize, Serialize}; use specta::Type; -use std::{ - collections::{HashMap, HashSet}, - marker::PhantomData, - path::Path, -}; use thiserror::Error; use tokio::fs; use tracing::debug; @@ -453,17 +454,6 @@ pub struct IndexerRule { } impl IndexerRule { - pub fn new(name: String, default: bool, rules: Vec) -> Self { - Self { - id: None, - name, - default, - rules, - date_created: Utc::now(), - date_modified: Utc::now(), - } - } - pub async fn apply( &self, source: impl AsRef, @@ -777,7 +767,7 @@ mod seeder { ] .into_iter() .flatten() - ).unwrap(), + ).expect("this is hardcoded and should always work"), ], } } @@ -786,7 +776,8 @@ mod seeder { SystemIndexerRule { name: "No Hidden", default: true, - rules: vec![RulePerKind::new_reject_files_by_globs_str(["**/.*"]).unwrap()], + rules: vec![RulePerKind::new_reject_files_by_globs_str(["**/.*"]) + .expect("this is hardcoded and should always work")], } } @@ -807,7 +798,7 @@ mod seeder { rules: vec![RulePerKind::new_accept_files_by_globs_str([ "*.{avif,bmp,gif,ico,jpeg,jpg,png,svg,tif,tiff,webp}", ]) - .unwrap()], + .expect("this is hardcoded and should always work")], } } } @@ -815,11 +806,25 @@ mod seeder { pub use seeder::*; #[cfg(test)] +#[allow(clippy::unwrap_used)] mod tests { use super::*; use tempfile::tempdir; use tokio::fs; + impl IndexerRule { + pub fn new(name: String, default: bool, rules: Vec) -> Self { + Self { + id: None, + name, + default, + rules, + date_created: Utc::now(), + date_modified: Utc::now(), + } + } + } + async fn check_rule(indexer_rule: &IndexerRule, path: impl AsRef) -> bool { indexer_rule .apply(path) diff --git a/core/src/location/indexer/shallow.rs b/core/src/location/indexer/shallow.rs index dbb750f5e..b81d634a1 100644 --- a/core/src/location/indexer/shallow.rs +++ b/core/src/location/indexer/shallow.rs @@ -2,9 +2,12 @@ use crate::{ file_paths_db_fetcher_fn, invalidate_query, job::JobError, library::Library, - location::file_path_helper::{ - check_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location, - IsolatedFilePathData, + location::{ + file_path_helper::{ + check_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location, + 
IsolatedFilePathData, + }, + LocationError, }, to_remove_db_fetcher_fn, }; @@ -30,7 +33,7 @@ pub async fn shallow( ) -> Result<(), JobError> { let location_id = location.id; let Some(location_path) = location.path.as_ref().map(PathBuf::from) else { - panic!(); + return Err(JobError::Location(LocationError::MissingPath(location_id))); }; let db = library.db.clone(); @@ -100,10 +103,10 @@ pub async fn shallow( .collect::>(); for step in steps { - execute_indexer_save_step(&location, &step, &library).await?; + execute_indexer_save_step(location, &step, library).await?; } - invalidate_query!(&library, "search.paths"); + invalidate_query!(library, "search.paths"); library.orphan_remover.invoke().await; diff --git a/core/src/location/indexer/walk.rs b/core/src/location/indexer/walk.rs index 4941f7e76..e27a66a85 100644 --- a/core/src/location/indexer/walk.rs +++ b/core/src/location/indexer/walk.rs @@ -608,6 +608,7 @@ where } #[cfg(test)] +#[allow(clippy::unwrap_used, clippy::panic)] mod tests { use super::super::rules::RulePerKind; use super::*; diff --git a/core/src/location/manager/helpers.rs b/core/src/location/manager/helpers.rs index fae1df23f..392330823 100644 --- a/core/src/location/manager/helpers.rs +++ b/core/src/location/manager/helpers.rs @@ -10,10 +10,10 @@ use tokio::{fs, io::ErrorKind, sync::oneshot, time::sleep}; use tracing::{error, warn}; use uuid::Uuid; -use super::{watcher::LocationWatcher, LocationId, LocationManagerError}; +use super::{watcher::LocationWatcher, LocationManagerError}; type LibraryId = Uuid; -type LocationAndLibraryKey = (LocationId, LibraryId); +type LocationAndLibraryKey = (location::id::Type, LibraryId); const LOCATION_CHECK_INTERVAL: Duration = Duration::from_secs(5); @@ -25,7 +25,7 @@ pub(super) async fn check_online( let location_path = location.path.as_ref(); let Some(location_path) = location_path.map(Path::new) else { - return Err(LocationManagerError::MissingPath) + return Err(LocationManagerError::MissingPath(location.id)) }; if location.node_id == Some(library.node_local_id) { @@ -51,9 +51,9 @@ pub(super) async fn check_online( } pub(super) async fn location_check_sleep( - location_id: LocationId, + location_id: location::id::Type, library: Library, -) -> (LocationId, Library) { +) -> (location::id::Type, Library) { sleep(LOCATION_CHECK_INTERVAL).await; (location_id, library) } @@ -101,7 +101,7 @@ pub(super) fn unwatch_location( } pub(super) fn drop_location( - location_id: LocationId, + location_id: location::id::Type, library_id: LibraryId, message: &str, locations_watched: &mut HashMap, @@ -115,7 +115,10 @@ pub(super) fn drop_location( } } -pub(super) async fn get_location(location_id: i32, library: &Library) -> Option { +pub(super) async fn get_location( + location_id: location::id::Type, + library: &Library, +) -> Option { library .db .location() @@ -129,7 +132,7 @@ pub(super) async fn get_location(location_id: i32, library: &Library) -> Option< } pub(super) async fn handle_remove_location_request( - location_id: LocationId, + location_id: location::id::Type, library: Library, response_tx: oneshot::Sender>, forced_unwatch: &mut HashSet, @@ -169,7 +172,7 @@ pub(super) async fn handle_remove_location_request( } pub(super) async fn handle_stop_watcher_request( - location_id: LocationId, + location_id: location::id::Type, library: Library, response_tx: oneshot::Sender>, forced_unwatch: &mut HashSet, @@ -177,7 +180,7 @@ pub(super) async fn handle_stop_watcher_request( locations_unwatched: &mut HashMap, ) { async fn inner( - location_id: LocationId, 
+ location_id: location::id::Type, library: Library, forced_unwatch: &mut HashSet, locations_watched: &mut HashMap, @@ -212,7 +215,7 @@ pub(super) async fn handle_stop_watcher_request( } pub(super) async fn handle_reinit_watcher_request( - location_id: LocationId, + location_id: location::id::Type, library: Library, response_tx: oneshot::Sender>, forced_unwatch: &mut HashSet, @@ -220,7 +223,7 @@ pub(super) async fn handle_reinit_watcher_request( locations_unwatched: &mut HashMap, ) { async fn inner( - location_id: LocationId, + location_id: location::id::Type, library: Library, forced_unwatch: &mut HashSet, locations_watched: &mut HashMap, @@ -255,7 +258,7 @@ pub(super) async fn handle_reinit_watcher_request( } pub(super) fn handle_ignore_path_request( - location_id: LocationId, + location_id: location::id::Type, library: Library, path: PathBuf, ignore: bool, diff --git a/core/src/location/manager/mod.rs b/core/src/location/manager/mod.rs index 2a8f05fda..2edb3b98d 100644 --- a/core/src/location/manager/mod.rs +++ b/core/src/location/manager/mod.rs @@ -1,4 +1,4 @@ -use crate::{job::JobManagerError, library::Library, util::error::FileIOError}; +use crate::{job::JobManagerError, library::Library, prisma::location, util::error::FileIOError}; use std::{ collections::BTreeSet, @@ -18,7 +18,7 @@ use tracing::{debug, error}; use tokio::sync::mpsc; use uuid::Uuid; -use super::{file_path_helper::FilePathError, LocationId}; +use super::file_path_helper::FilePathError; #[cfg(feature = "location-watcher")] mod watcher; @@ -36,7 +36,7 @@ enum ManagementMessageAction { #[derive(Debug)] #[allow(dead_code)] pub struct LocationManagementMessage { - location_id: LocationId, + location_id: location::id::Type, library: Library, action: ManagementMessageAction, response_tx: oneshot::Sender>, @@ -53,7 +53,7 @@ enum WatcherManagementMessageAction { #[derive(Debug)] #[allow(dead_code)] pub struct WatcherManagementMessage { - location_id: LocationId, + location_id: location::id::Type, library: Library, action: WatcherManagementMessageAction, response_tx: oneshot::Sender>, @@ -84,10 +84,10 @@ pub enum LocationManagerError { FailedToStopOrReinitWatcher { reason: String }, #[error("Missing location from database: ")] - MissingLocation(LocationId), + MissingLocation(location::id::Type), #[error("Non local location: ")] - NonLocalLocation(LocationId), + NonLocalLocation(location::id::Type), #[error("failed to move file '{}' for reason: {reason}", .path.display())] MoveError { path: Box, reason: String }, @@ -102,8 +102,8 @@ pub enum LocationManagerError { CorruptedLocationPubId(#[from] uuid::Error), #[error("Job Manager error: (error: {0})")] JobManager(#[from] JobManagerError), - #[error("missing-location-path")] - MissingPath, + #[error("location missing location path: ")] + MissingPath(location::id::Type), #[error("invalid inode")] InvalidInode, @@ -170,7 +170,7 @@ impl LocationManager { #[allow(unused_variables)] async fn location_management_message( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, action: ManagementMessageAction, ) -> Result<(), LocationManagerError> { @@ -198,7 +198,7 @@ impl LocationManager { #[allow(unused_variables)] async fn watcher_management_message( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, action: WatcherManagementMessageAction, ) -> Result<(), LocationManagerError> { @@ -224,7 +224,7 @@ impl LocationManager { pub async fn add( &self, - location_id: LocationId, + location_id: location::id::Type, library: 
Library, ) -> Result<(), LocationManagerError> { self.location_management_message(location_id, library, ManagementMessageAction::Add) @@ -233,7 +233,7 @@ impl LocationManager { pub async fn remove( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, ) -> Result<(), LocationManagerError> { self.location_management_message(location_id, library, ManagementMessageAction::Remove) @@ -242,7 +242,7 @@ impl LocationManager { pub async fn stop_watcher( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, ) -> Result<(), LocationManagerError> { self.watcher_management_message(location_id, library, WatcherManagementMessageAction::Stop) @@ -251,7 +251,7 @@ impl LocationManager { pub async fn reinit_watcher( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, ) -> Result<(), LocationManagerError> { self.watcher_management_message( @@ -264,7 +264,7 @@ impl LocationManager { pub async fn temporary_stop( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, ) -> Result { self.stop_watcher(location_id, library.clone()).await?; @@ -278,7 +278,7 @@ impl LocationManager { pub async fn temporary_ignore_events_for_path( &self, - location_id: LocationId, + location_id: location::id::Type, library: Library, path: impl AsRef, ) -> Result { @@ -553,7 +553,7 @@ impl Drop for LocationManager { #[must_use = "this `StopWatcherGuard` must be held for some time, so the watcher is stopped"] pub struct StopWatcherGuard<'m> { manager: &'m LocationManager, - location_id: LocationId, + location_id: location::id::Type, library: Option, } @@ -575,7 +575,7 @@ impl Drop for StopWatcherGuard<'_> { pub struct IgnoreEventsForPathGuard<'m> { manager: &'m LocationManager, path: Option, - location_id: LocationId, + location_id: location::id::Type, library: Option, } diff --git a/core/src/location/manager/watcher/linux.rs b/core/src/location/manager/watcher/linux.rs index 2f4b4d43f..90b16c70b 100644 --- a/core/src/location/manager/watcher/linux.rs +++ b/core/src/location/manager/watcher/linux.rs @@ -7,7 +7,7 @@ //! a Create Dir event, this one is actually ok at least. 
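The `LocationId` → `location::id::Type` rewrite threaded through every signature here deserves a note: `pub type LocationId = i32;` was a hand-rolled alias that could silently drift from the Prisma schema, whereas prisma-client-rust already generates a per-field type alias that tracks it. Roughly the following shape, much simplified (the real generated module carries query builders and more):

```rust
// What the generated client provides, in miniature:
pub mod location {
    pub mod id {
        // Generated from `id Int @id` in the Prisma schema; if the column
        // ever migrates to BigInt, this alias becomes i64 and every
        // signature referencing it follows automatically.
        pub type Type = i32;
    }
}

// Call sites borrow the generated alias instead of a local type alias:
fn find_location(location_id: location::id::Type) -> location::id::Type {
    location_id
}

fn main() {
    assert_eq!(find_location(42), 42);
}
```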
use crate::{ - invalidate_query, library::Library, location::manager::LocationManagerError, + invalidate_query, library::Library, location::manager::LocationManagerError, prisma::location, util::error::FileIOError, }; @@ -26,12 +26,12 @@ use tracing::{error, trace}; use super::{ utils::{create_dir, file_creation_or_update, remove, rename}, - EventHandler, LocationId, HUNDRED_MILLIS, + EventHandler, HUNDRED_MILLIS, }; #[derive(Debug)] pub(super) struct LinuxEventHandler<'lib> { - location_id: LocationId, + location_id: location::id::Type, library: &'lib Library, last_check_rename: Instant, rename_from: HashMap, @@ -41,7 +41,7 @@ pub(super) struct LinuxEventHandler<'lib> { #[async_trait] impl<'lib> EventHandler<'lib> for LinuxEventHandler<'lib> { - fn new(location_id: LocationId, library: &'lib Library) -> Self { + fn new(location_id: location::id::Type, library: &'lib Library) -> Self { Self { location_id, library, diff --git a/core/src/location/manager/watcher/macos.rs b/core/src/location/manager/watcher/macos.rs index 40c3c8c9a..ca776e3e3 100644 --- a/core/src/location/manager/watcher/macos.rs +++ b/core/src/location/manager/watcher/macos.rs @@ -15,8 +15,8 @@ use crate::{ location::{ file_path_helper::{check_existing_file_path, get_inode_and_device, IsolatedFilePathData}, manager::LocationManagerError, - LocationId, }, + prisma::location, util::error::FileIOError, }; @@ -43,7 +43,7 @@ use super::{ #[derive(Debug)] pub(super) struct MacOsEventHandler<'lib> { - location_id: LocationId, + location_id: location::id::Type, library: &'lib Library, recently_created_files: BTreeMap, last_check_created_files: Instant, @@ -56,7 +56,7 @@ pub(super) struct MacOsEventHandler<'lib> { #[async_trait] impl<'lib> EventHandler<'lib> for MacOsEventHandler<'lib> { - fn new(location_id: LocationId, library: &'lib Library) -> Self + fn new(location_id: location::id::Type, library: &'lib Library) -> Self where Self: Sized, { diff --git a/core/src/location/manager/watcher/mod.rs b/core/src/location/manager/watcher/mod.rs index 29bacd657..ae4ba89a5 100644 --- a/core/src/location/manager/watcher/mod.rs +++ b/core/src/location/manager/watcher/mod.rs @@ -1,4 +1,4 @@ -use crate::{library::Library, location::LocationId, prisma::location}; +use crate::{library::Library, prisma::location}; use std::{ collections::HashSet, @@ -47,7 +47,7 @@ const HUNDRED_MILLIS: Duration = Duration::from_millis(100); #[async_trait] trait EventHandler<'lib> { - fn new(location_id: LocationId, library: &'lib Library) -> Self + fn new(location_id: location::id::Type, library: &'lib Library) -> Self where Self: Sized; @@ -107,7 +107,7 @@ impl LocationWatcher { )); let Some(path) = location.path else { - return Err(LocationManagerError::MissingPath) + return Err(LocationManagerError::MissingPath(location.id)) }; Ok(Self { @@ -121,7 +121,7 @@ impl LocationWatcher { } async fn handle_watch_events( - location_id: LocationId, + location_id: location::id::Type, location_pub_id: Uuid, library: Library, mut events_rx: mpsc::UnboundedReceiver>, @@ -181,7 +181,7 @@ impl LocationWatcher { } async fn handle_single_event<'lib>( - location_id: LocationId, + location_id: location::id::Type, location_pub_id: Uuid, event: Event, event_handler: &mut impl EventHandler<'lib>, @@ -261,9 +261,7 @@ impl Drop for LocationWatcher { // FIXME: change this Drop to async drop in the future if let Some(handle) = self.handle.take() { - if let Err(e) = - block_in_place(move || Handle::current().block_on(async move { handle.await })) - { + if let Err(e) = block_in_place(move 
|| Handle::current().block_on(handle)) { error!("Failed to join watcher task: {e:#?}") } } @@ -344,6 +342,7 @@ impl Drop for LocationWatcher { * * ***************************************************************************************************/ #[cfg(test)] +#[allow(clippy::unwrap_used, clippy::panic)] mod tests { use std::{ io::ErrorKind, @@ -358,7 +357,7 @@ mod tests { use tempfile::{tempdir, TempDir}; use tokio::{fs, io::AsyncWriteExt, sync::mpsc, time::sleep}; use tracing::{debug, error}; - use tracing_test::traced_test; + // use tracing_test::traced_test; #[cfg(target_os = "macos")] use notify::event::DataChange; @@ -424,7 +423,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn create_file_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; @@ -462,7 +461,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn create_dir_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; @@ -492,7 +491,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn update_file_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; @@ -543,7 +542,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn update_file_rename_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; @@ -592,7 +591,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn update_dir_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; @@ -643,7 +642,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn delete_file_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; @@ -675,7 +674,7 @@ mod tests { } #[tokio::test] - #[traced_test] + // #[traced_test] async fn delete_dir_event() { let (root_dir, mut watcher, events_rx) = setup_watcher().await; diff --git a/core/src/location/manager/watcher/utils.rs b/core/src/location/manager/watcher/utils.rs index 7363d2b9a..0c8e9a892 100644 --- a/core/src/location/manager/watcher/utils.rs +++ b/core/src/location/manager/watcher/utils.rs @@ -12,7 +12,7 @@ use crate::{ }, find_location, location_with_indexer_rules, manager::LocationManagerError, - scan_location_sub_path, LocationId, + scan_location_sub_path, }, object::{ file_identifier::FileMetadata, @@ -62,7 +62,7 @@ pub(super) fn check_event(event: &Event, ignore_paths: &HashSet) -> boo } pub(super) async fn create_dir( - location_id: LocationId, + location_id: location::id::Type, path: impl AsRef, metadata: &Metadata, library: &Library, @@ -76,7 +76,7 @@ pub(super) async fn create_dir( let path = path.as_ref(); let Some(location_path) = &location.path else { - return Err(LocationManagerError::MissingPath) + return Err(LocationManagerError::MissingPath(location_id)) }; trace!( @@ -85,7 +85,7 @@ pub(super) async fn create_dir( path.display() ); - let materialized_path = IsolatedFilePathData::new(location.id, &location_path, path, true)?; + let materialized_path = IsolatedFilePathData::new(location.id, location_path, path, true)?; let (inode, device) = { #[cfg(target_family = "unix")] @@ -135,7 +135,7 @@ pub(super) async fn create_dir( } pub(super) async fn create_file( - location_id: LocationId, + location_id: location::id::Type, path: impl AsRef, metadata: &Metadata, library: &Library, @@ -248,7 +248,7 @@ pub(super) async fn create_file( } pub(super) async fn create_dir_or_file( - location_id: LocationId, + location_id: location::id::Type, path: impl AsRef, library: &Library, ) 
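// On the watcher `Drop` simplification above: `block_on(handle)` replaces
// `block_on(async move { handle.await })` because a tokio `JoinHandle` is
// itself a `Future`, so the wrapping async block only added a redundant
// state machine. The surrounding pattern is the standard escape hatch for
// joining an async task from a synchronous `Drop`:
//
//     if let Some(handle) = self.handle.take() {
//         // block_in_place lets this worker thread block without starving
//         // the runtime; block_on then drives the JoinHandle to completion.
//         if let Err(e) = block_in_place(move || Handle::current().block_on(handle)) {
//             error!("Failed to join watcher task: {e:#?}")
//         }
//     }
//
// Caveat: `block_in_place` panics on a current-thread runtime, which is part
// of why the source keeps its FIXME about moving this to async drop once
// that exists.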
-> Result { @@ -266,7 +266,7 @@ pub(super) async fn create_dir_or_file( } pub(super) async fn file_creation_or_update( - location_id: LocationId, + location_id: location::id::Type, full_path: impl AsRef, library: &Library, ) -> Result<(), LocationManagerError> { @@ -299,7 +299,7 @@ pub(super) async fn file_creation_or_update( } pub(super) async fn update_file( - location_id: LocationId, + location_id: location::id::Type, full_path: impl AsRef, library: &Library, ) -> Result<(), LocationManagerError> { @@ -329,7 +329,7 @@ pub(super) async fn update_file( } async fn inner_update_file( - location_id: LocationId, + location_id: location::id::Type, file_path: &file_path_with_object::Data, full_path: impl AsRef, library @ Library { db, sync, .. }: &Library, @@ -343,7 +343,7 @@ async fn inner_update_file( .ok_or_else(|| LocationManagerError::MissingLocation(location_id))?; let Some(location_path) = location.path.map(PathBuf::from) else { - return Err(LocationManagerError::MissingPath) + return Err(LocationManagerError::MissingPath(location_id)) }; trace!( @@ -469,7 +469,7 @@ async fn inner_update_file( } pub(super) async fn rename( - location_id: LocationId, + location_id: location::id::Type, new_path: impl AsRef, old_path: impl AsRef, library: &Library, @@ -552,7 +552,7 @@ pub(super) async fn rename( } pub(super) async fn remove( - location_id: LocationId, + location_id: location::id::Type, full_path: impl AsRef, library: &Library, ) -> Result<(), LocationManagerError> { @@ -574,7 +574,7 @@ pub(super) async fn remove( } pub(super) async fn remove_by_file_path( - location_id: LocationId, + location_id: location::id::Type, path: impl AsRef, file_path: &file_path::Data, library: &Library, @@ -671,7 +671,7 @@ async fn generate_thumbnail( } pub(super) async fn extract_inode_and_device_from_path( - location_id: LocationId, + location_id: location::id::Type, path: impl AsRef, library: &Library, ) -> Result { @@ -683,7 +683,7 @@ pub(super) async fn extract_inode_and_device_from_path( .ok_or(LocationManagerError::MissingLocation(location_id))?; let Some(location_path) = &location.path else { - return Err(LocationManagerError::MissingPath) + return Err(LocationManagerError::MissingPath(location_id)) }; library @@ -715,7 +715,7 @@ pub(super) async fn extract_inode_and_device_from_path( } pub(super) async fn extract_location_path( - location_id: LocationId, + location_id: location::id::Type, library: &Library, ) -> Result { find_location(library, location_id) @@ -729,7 +729,7 @@ pub(super) async fn extract_location_path( location .path .map(PathBuf::from) - .ok_or(LocationManagerError::MissingPath) + .ok_or(LocationManagerError::MissingPath(location_id)) }, ) } diff --git a/core/src/location/manager/watcher/windows.rs b/core/src/location/manager/watcher/windows.rs index e040de308..bcf982c76 100644 --- a/core/src/location/manager/watcher/windows.rs +++ b/core/src/location/manager/watcher/windows.rs @@ -10,9 +10,8 @@ use crate::{ invalidate_query, library::Library, - location::{ - file_path_helper::get_inode_and_device_from_path, manager::LocationManagerError, LocationId, - }, + location::{file_path_helper::get_inode_and_device_from_path, manager::LocationManagerError}, + prisma::location, util::error::FileIOError, }; @@ -37,7 +36,7 @@ use super::{ /// Windows file system event handler #[derive(Debug)] pub(super) struct WindowsEventHandler<'lib> { - location_id: LocationId, + location_id: location::id::Type, library: &'lib Library, last_check_recently_files: Instant, recently_created_files: BTreeMap, @@ -50,7 
+49,7 @@ pub(super) struct WindowsEventHandler<'lib> { #[async_trait] impl<'lib> EventHandler<'lib> for WindowsEventHandler<'lib> { - fn new(location_id: LocationId, library: &'lib Library) -> Self + fn new(location_id: location::id::Type, library: &'lib Library) -> Self where Self: Sized, { diff --git a/core/src/location/mod.rs b/core/src/location/mod.rs index 031938e92..1cae4a124 100644 --- a/core/src/location/mod.rs +++ b/core/src/location/mod.rs @@ -1,6 +1,6 @@ use crate::{ invalidate_query, - job::{Job, JobManagerError}, + job::{Job, JobError, JobManagerError}, library::Library, object::{ file_identifier::{self, file_identifier_job::FileIdentifierJobInit}, @@ -37,8 +37,6 @@ use indexer::IndexerJobInit; pub use manager::{LocationManager, LocationManagerError}; use metadata::SpacedriveLocationMetadataFile; -pub type LocationId = i32; - // Location includes! location::include!(location_with_indexer_rules { indexer_rules: select { indexer_rule } @@ -211,7 +209,7 @@ impl LocationCreateArgs { /// Old rules that aren't in this vector will be purged. #[derive(Type, Deserialize)] pub struct LocationUpdateArgs { - pub id: i32, + pub id: location::id::Type, pub name: Option, pub generate_preview_media: Option, pub sync_preview_media: Option, @@ -336,7 +334,10 @@ impl LocationUpdateArgs { } } -pub fn find_location(library: &Library, location_id: i32) -> location::FindUniqueQuery { +pub fn find_location( + library: &Library, + location_id: location::id::Type, +) -> location::FindUniqueQuery { library .db .location() @@ -345,7 +346,7 @@ pub fn find_location(library: &Library, location_id: i32) -> location::FindUniqu async fn link_location_and_indexer_rules( library: &Library, - location_id: i32, + location_id: location::id::Type, rules_ids: &[i32], ) -> Result<(), LocationError> { library @@ -432,7 +433,7 @@ pub async fn light_scan_location( library: Library, location: location_with_indexer_rules::Data, sub_path: impl AsRef, -) -> Result<(), JobManagerError> { +) -> Result<(), JobError> { let sub_path = sub_path.as_ref().to_path_buf(); if location.node_id != Some(library.node_local_id) { @@ -620,7 +621,10 @@ async fn create_location( })) } -pub async fn delete_location(library: &Library, location_id: i32) -> Result<(), LocationError> { +pub async fn delete_location( + library: &Library, + location_id: location::id::Type, +) -> Result<(), LocationError> { let Library { db, .. } = library; library @@ -663,7 +667,7 @@ pub async fn delete_location(library: &Library, location_id: i32) -> Result<(), /// this function is used to delete a location and when ingesting directory deletion events pub async fn delete_directory( library: &Library, - location_id: i32, + location_id: location::id::Type, parent_materialized_path: Option, ) -> Result<(), QueryError> { let Library { db, .. 
} = library; diff --git a/core/src/object/file_identifier/file_identifier_job.rs b/core/src/object/file_identifier/file_identifier_job.rs index 4504be01d..16a9a946a 100644 --- a/core/src/object/file_identifier/file_identifier_job.rs +++ b/core/src/object/file_identifier/file_identifier_job.rs @@ -1,4 +1,5 @@ use crate::{ + extract_job_data, extract_job_data_mut, job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, @@ -47,7 +48,7 @@ impl Hash for FileIdentifierJobInit { #[derive(Serialize, Deserialize)] pub struct FileIdentifierJobState { - cursor: i32, + cursor: file_path::id::Type, report: FileIdentifierReport, maybe_sub_iso_file_path: Option>, } @@ -119,10 +120,7 @@ impl StatefulJob for FileIdentifierJob { maybe_sub_iso_file_path, }); - let data = state - .data - .as_mut() - .expect("critical error: missing data on job state"); + let data = extract_job_data_mut!(state); if orphan_count == 0 { return Err(JobError::EarlyFinish { @@ -170,10 +168,7 @@ impl StatefulJob for FileIdentifierJob { ref mut cursor, ref mut report, ref maybe_sub_iso_file_path, - } = state - .data - .as_mut() - .expect("critical error: missing data on job state"); + } = extract_job_data_mut!(state); let step_number = state.step_number; let location = &state.init.location; @@ -223,11 +218,7 @@ impl StatefulJob for FileIdentifierJob { } async fn finalize(&mut self, _: WorkerContext, state: &mut JobState) -> JobResult { - let report = &state - .data - .as_ref() - .expect("critical error: missing data on job state") - .report; + let report = &extract_job_data!(state).report; info!("Finalizing identifier job: {report:?}"); @@ -236,8 +227,8 @@ impl StatefulJob for FileIdentifierJob { } fn orphan_path_filters( - location_id: i32, - file_path_id: Option, + location_id: location::id::Type, + file_path_id: Option, maybe_sub_iso_file_path: &Option>, ) -> Vec { chain_optional_iter( @@ -262,7 +253,7 @@ fn orphan_path_filters( async fn count_orphan_file_paths( db: &PrismaClient, - location_id: i32, + location_id: location::id::Type, maybe_sub_materialized_path: &Option>, ) -> Result { db.file_path() @@ -278,8 +269,8 @@ async fn count_orphan_file_paths( async fn get_orphan_file_paths( db: &PrismaClient, - location_id: i32, - file_path_id: i32, + location_id: location::id::Type, + file_path_id: file_path::id::Type, maybe_sub_materialized_path: &Option>, ) -> Result, prisma_client_rust::QueryError> { info!( diff --git a/core/src/object/file_identifier/mod.rs b/core/src/object/file_identifier/mod.rs index 90bf49288..111c0b3a8 100644 --- a/core/src/object/file_identifier/mod.rs +++ b/core/src/object/file_identifier/mod.rs @@ -14,13 +14,14 @@ use crate::{ use sd_file_ext::{extensions::Extension, kind::ObjectKind}; use sd_sync::CRDTOperation; -use futures::future::join_all; -use serde::{Deserialize, Serialize}; -use serde_json::json; use std::{ collections::{HashMap, HashSet}, path::{Path, PathBuf}, }; + +use futures::future::join_all; +use serde::{Deserialize, Serialize}; +use serde_json::json; use thiserror::Error; use tokio::{fs, io}; use tracing::{error, info}; @@ -345,7 +346,7 @@ async fn process_identifier_file_paths( location: &location::Data, file_paths: &[file_path_for_file_identifier::Data], step_number: usize, - cursor: &mut i32, + cursor: &mut file_path::id::Type, library: &Library, orphan_count: usize, ) -> Result<(usize, usize), JobError> { @@ -356,7 +357,7 @@ async fn process_identifier_file_paths( orphan_count ); - let counts = identifier_job_step(&library, location, 
file_paths).await?; + let counts = identifier_job_step(library, location, file_paths).await?; // set the step data cursor to the last row of this chunk if let Some(last_row) = file_paths.last() { diff --git a/core/src/object/file_identifier/shallow.rs b/core/src/object/file_identifier/shallow.rs index 7b1e01167..3432c3e88 100644 --- a/core/src/object/file_identifier/shallow.rs +++ b/core/src/object/file_identifier/shallow.rs @@ -19,7 +19,7 @@ use super::{process_identifier_file_paths, FileIdentifierJobError, CHUNK_SIZE}; #[derive(Serialize, Deserialize)] pub struct ShallowFileIdentifierJobState { - cursor: i32, + cursor: file_path::id::Type, sub_iso_file_path: IsolatedFilePathData<'static>, } @@ -83,7 +83,7 @@ pub async fn shallow( .select(file_path::select!({ id })) .exec() .await? - .unwrap(); // SAFETY: We already validated before that there are orphans `file_path`s + .expect("We already validated before that there are orphans `file_path`s"); // Initializing `state.data` here because we need a complete state in case of early finish let mut data = ShallowFileIdentifierJobState { @@ -106,7 +106,7 @@ pub async fn shallow( &file_paths, step_number, cursor, - &library, + library, orphan_count, ) .await?; @@ -118,8 +118,8 @@ pub async fn shallow( } fn orphan_path_filters( - location_id: i32, - file_path_id: Option, + location_id: location::id::Type, + file_path_id: Option, sub_iso_file_path: &IsolatedFilePathData<'_>, ) -> Vec { chain_optional_iter( @@ -139,7 +139,7 @@ fn orphan_path_filters( async fn count_orphan_file_paths( db: &PrismaClient, - location_id: i32, + location_id: location::id::Type, sub_iso_file_path: &IsolatedFilePathData<'_>, ) -> Result { db.file_path() @@ -151,8 +151,8 @@ async fn count_orphan_file_paths( async fn get_orphan_file_paths( db: &PrismaClient, - location_id: i32, - file_path_id_cursor: i32, + location_id: location::id::Type, + file_path_id_cursor: file_path::id::Type, sub_iso_file_path: &IsolatedFilePathData<'_>, ) -> Result, prisma_client_rust::QueryError> { info!( diff --git a/core/src/object/fs/copy.rs b/core/src/object/fs/copy.rs index 53e954203..e387b3de1 100644 --- a/core/src/object/fs/copy.rs +++ b/core/src/object/fs/copy.rs @@ -1,8 +1,11 @@ use crate::{ - invalidate_query, + extract_job_data, invalidate_query, job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, + library::Library, + location::file_path_helper::IsolatedFilePathData, + prisma::{file_path, location}, util::error::FileIOError, }; @@ -10,46 +13,34 @@ use std::{hash::Hash, path::PathBuf}; use serde::{Deserialize, Serialize}; use specta::Type; -use tokio::fs; +use tokio::{fs, io}; use tracing::{trace, warn}; -use super::{context_menu_fs_info, get_location_path_from_location_id, osstr_to_string, FsInfo}; +use super::{ + construct_target_filename, error::FileSystemJobsError, fetch_source_and_target_location_paths, + get_file_data_from_isolated_file_path, get_many_files_datas, FileData, +}; pub struct FileCopierJob {} #[derive(Serialize, Deserialize, Debug, Clone)] pub struct FileCopierJobState { - pub target_path: PathBuf, // target dir prefix too - pub source_fs_info: FsInfo, + sources_location_path: PathBuf, } #[derive(Serialize, Deserialize, Hash, Type)] pub struct FileCopierJobInit { - pub source_location_id: i32, - pub source_path_id: i32, - pub target_location_id: i32, - pub target_path: PathBuf, + pub source_location_id: location::id::Type, + pub target_location_id: location::id::Type, + pub sources_file_path_ids: Vec, + pub 
target_location_relative_directory_path: PathBuf, pub target_file_name_suffix: Option, } #[derive(Serialize, Deserialize, Debug, Clone)] -pub enum FileCopierJobStep { - Directory { path: PathBuf }, - File { path: PathBuf }, -} - -impl From for FileCopierJobStep { - fn from(value: FsInfo) -> Self { - if value.path_data.is_dir { - Self::Directory { - path: value.fs_path, - } - } else { - Self::File { - path: value.fs_path, - } - } - } +pub struct FileCopierJobStep { + pub source_file_data: FileData, + pub target_full_path: PathBuf, } impl JobInitData for FileCopierJobInit { @@ -69,50 +60,43 @@ impl StatefulJob for FileCopierJob { } async fn init(&self, ctx: WorkerContext, state: &mut JobState) -> Result<(), JobError> { - let source_fs_info = context_menu_fs_info( - &ctx.library.db, - state.init.source_location_id, - state.init.source_path_id, + let Library { db, .. } = &ctx.library; + + let (sources_location_path, targets_location_path) = + fetch_source_and_target_location_paths( + db, + state.init.source_location_id, + state.init.target_location_id, + ) + .await?; + + state.steps = get_many_files_datas( + db, + &sources_location_path, + &state.init.sources_file_path_ids, ) - .await?; + .await? + .into_iter() + .map(|file_data| { + // add the currently viewed subdirectory to the location root + let mut full_target_path = + targets_location_path.join(&state.init.target_location_relative_directory_path); - let mut full_target_path = - get_location_path_from_location_id(&ctx.library.db, state.init.target_location_id) - .await?; - - // add the currently viewed subdirectory to the location root - full_target_path.push(&state.init.target_path); - - // extension wizardry for cloning and such - // if no suffix has been selected, just use the file name - // if a suffix is provided and it's a directory, use the directory name + suffix - // if a suffix is provided and it's a file, use the (file name + suffix).extension - let file_name = osstr_to_string(source_fs_info.fs_path.file_name())?; - - let target_file_name = state.init.target_file_name_suffix.as_ref().map_or_else( - || Ok::<_, JobError>(file_name.clone()), - |suffix| { - Ok(if source_fs_info.path_data.is_dir { - format!("{file_name}{suffix}") - } else { - osstr_to_string(source_fs_info.fs_path.file_stem())? - + suffix + &source_fs_info.fs_path.extension().map_or_else( - || Ok(String::new()), - |ext| ext.to_str().map(|e| format!(".{e}")).ok_or(JobError::OsStr), - )? 
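// The "extension wizardry" being removed around this point is centralized in
// `construct_target_filename`, whose body lies outside this diff. From the
// deleted branches and the call sites in copy.rs and cut.rs, its contract is:
// no suffix -> keep the full file name; a suffix on a directory or an
// extensionless file -> append it to the name; a suffix on a regular file ->
// splice it between stem and extension. A hypothetical sketch (the exact
// signature and field names are assumptions inferred from the call sites):
//
//     fn construct_target_filename(
//         source_file_data: &FileData,
//         target_file_name_suffix: &Option<String>,
//     ) -> String {
//         let fp = &source_file_data.file_path; // narrow DB row: name/extension/is_dir
//         match target_file_name_suffix {
//             None if fp.extension.is_empty() => fp.name.clone(),
//             None => format!("{}.{}", fp.name, fp.extension),
//             Some(s) if fp.is_dir || fp.extension.is_empty() => format!("{}{s}", fp.name),
//             Some(s) => format!("{}{s}.{}", fp.name, fp.extension),
//         }
//     }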
- }) - }, - )?; - - full_target_path.push(target_file_name); + full_target_path.push(construct_target_filename( + &file_data, + &state.init.target_file_name_suffix, + )); + FileCopierJobStep { + source_file_data: file_data, + target_full_path: full_target_path, + } + }) + .collect(); state.data = Some(FileCopierJobState { - target_path: full_target_path, - source_fs_info: source_fs_info.clone(), + sources_location_path, }); - state.steps.push_back(source_fs_info.into()); - ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); Ok(()) } @@ -123,109 +107,95 @@ impl StatefulJob for FileCopierJob { ctx: WorkerContext, state: &mut JobState<Self>, ) -> Result<(), JobError> { - let data = state - .data - .as_ref() - .expect("critical error: missing data on job state"); + let FileCopierJobStep { + source_file_data, + target_full_path, + } = &state.steps[0]; - match &state.steps[0] { - FileCopierJobStep::File { path } => { - let mut target_path = data.target_path.clone(); + let data = extract_job_data!(state); - if data.source_fs_info.path_data.is_dir { - // if root type is a dir, we need to preserve structure by making paths relative - target_path.push( - path.strip_prefix(&data.source_fs_info.fs_path) - .map_err(|_| JobError::Path)?, - ); - } + if source_file_data.file_path.is_dir { + fs::create_dir_all(target_full_path) + .await + .map_err(|e| FileIOError::from((target_full_path, e)))?; - let parent_path = path.parent().ok_or(JobError::Path)?; - let parent_target_path = target_path.parent().ok_or(JobError::Path)?; + let mut read_dir = fs::read_dir(&source_file_data.full_path) + .await + .map_err(|e| FileIOError::from((&source_file_data.full_path, e)))?; - if fs::canonicalize(parent_path) - .await - .map_err(|e| FileIOError::from((parent_path, e)))? - == fs::canonicalize(parent_target_path) - .await - .map_err(|e| FileIOError::from((parent_target_path, e)))? - { - return Err(JobError::MatchingSrcDest(path.clone())); - } + // Can't use the `steps` borrow from here onwards, or you feel the wrath of the borrow checker + while let Some(children_entry) = read_dir + .next_entry() + .await + .map_err(|e| FileIOError::from((&state.steps[0].source_file_data.full_path, e)))? + { + let children_path = children_entry.path(); + let target_children_full_path = state.steps[0].target_full_path.join( + children_path + .strip_prefix(&state.steps[0].source_file_data.full_path) + .map_err(|_| JobError::Path)?, + ); - if fs::metadata(&target_path).await.is_ok() { + // Currently not supporting file_name suffixes for children files in a directory being copied + state.steps.push_back(FileCopierJobStep { + target_full_path: target_children_full_path, + source_file_data: get_file_data_from_isolated_file_path( + &ctx.library.db, + &data.sources_location_path, + &IsolatedFilePathData::new( + state.init.source_location_id, + &data.sources_location_path, + &children_path, + children_entry + .metadata() + .await + .map_err(|e| FileIOError::from((&children_path, e)))? + .is_dir(), + ) + .map_err(FileSystemJobsError::from)?, + ) + .await?, + }); + + ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); + } + } else { + if source_file_data.full_path.parent().ok_or(JobError::Path)? + == target_full_path.parent().ok_or(JobError::Path)?
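// The overwrite handling just below is the other half of this rework. The
// old code used `if fs::metadata(&target_path).await.is_ok()`, which lumped
// every metadata failure (permissions, unreadable parent, dropped mount)
// together with "target does not exist" and proceeded with the copy. The new
// three-way match keeps the skip-with-warning behaviour, lets only a genuine
// `io::ErrorKind::NotFound` proceed, and surfaces anything else as a
// `FileIOError`:
//
//     match fs::metadata(target_full_path).await {
//         Ok(_) => warn!("Skipping {} as it would be overwritten", ...),
//         Err(e) if e.kind() == io::ErrorKind::NotFound => { /* fs::copy(...) */ }
//         Err(e) => return Err(FileIOError::from((target_full_path, e)).into()),
//     }
//
// It is still a check-then-act probe, so a file created between the check
// and `fs::copy` would be clobbered; the guard is best-effort, not atomic.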
+ { + return Err(FileSystemJobsError::MatchingSrcDest( + source_file_data.full_path.clone().into_boxed_path(), + ) + .into()); + } + + match fs::metadata(target_full_path).await { + Ok(_) => { // only skip as it could be half way through a huge directory copy and run into an issue warn!( "Skipping {} as it would be overwritten", - target_path.display() + target_full_path.display() ); - // TODO(brxken128): could possibly return an error if the skipped file was the *only* file to be copied? - } else { + } + Err(e) if e.kind() == io::ErrorKind::NotFound => { trace!( "Copying from {} to {}", - path.display(), - target_path.display() + source_file_data.full_path.display(), + target_full_path.display() ); - fs::copy(&path, &target_path) + fs::copy(&source_file_data.full_path, &target_full_path) .await - .map_err(|e| FileIOError::from((&target_path, e)))?; + .map_err(|e| FileIOError::from((target_full_path, e)))?; } + Err(e) => return Err(FileIOError::from((target_full_path, e)).into()), } - FileCopierJobStep::Directory { path } => { - // if this is the very first path, create the target dir - // fixes copying dirs with no child directories - if data.source_fs_info.path_data.is_dir && &data.source_fs_info.fs_path == path { - fs::create_dir_all(&data.target_path) - .await - .map_err(|e| FileIOError::from((&data.target_path, e)))?; - } - - let path = path.clone(); // To appease the borrowck - - let mut dir = fs::read_dir(&path) - .await - .map_err(|e| FileIOError::from((&path, e)))?; - - while let Some(entry) = dir - .next_entry() - .await - .map_err(|e| FileIOError::from((&path, e)))? - { - let entry_path = entry.path(); - if entry - .metadata() - .await - .map_err(|e| FileIOError::from((&entry_path, e)))? - .is_dir() - { - state - .steps - .push_back(FileCopierJobStep::Directory { path: entry.path() }); - - let full_path = data.target_path.join( - entry_path - .strip_prefix(&data.source_fs_info.fs_path) - .map_err(|_| JobError::Path)?, - ); - - fs::create_dir_all(&full_path) - .await - .map_err(|e| FileIOError::from((full_path, e)))?; - } else { - state - .steps - .push_back(FileCopierJobStep::File { path: entry.path() }); - }; - - ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); - } - } - }; + } ctx.progress(vec![JobReportUpdate::CompletedTaskCount( state.step_number + 1, )]); + Ok(()) } diff --git a/core/src/object/fs/create.rs b/core/src/object/fs/create.rs index 81fe58364..9f9857cf9 100644 --- a/core/src/object/fs/create.rs +++ b/core/src/object/fs/create.rs @@ -10,7 +10,7 @@ // // if the location is remote, we queue a job for that client specifically // // the actual create_folder function should be an option on an enum for all vfs actions // pub async fn create_folder( -// location_id: i32, +// location_id: location::id::Type, // path: &str, // name: Option<&str>, // library: &LibraryContext, diff --git a/core/src/object/fs/cut.rs b/core/src/object/fs/cut.rs index 2c409c454..2fbf71380 100644 --- a/core/src/object/fs/cut.rs +++ b/core/src/object/fs/cut.rs @@ -1,8 +1,11 @@ use crate::{ - invalidate_query, + extract_job_data, invalidate_query, job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, + library::Library, + object::fs::{construct_target_filename, error::FileSystemJobsError}, + prisma::{file_path, location}, util::error::FileIOError, }; @@ -10,25 +13,24 @@ use std::{hash::Hash, path::PathBuf}; use serde::{Deserialize, Serialize}; use specta::Type; -use tokio::fs; +use tokio::{fs, io}; use tracing::{trace, warn}; -use 
super::{context_menu_fs_info, get_location_path_from_location_id, FsInfo}; +use super::{fetch_source_and_target_location_paths, get_many_files_datas, FileData}; pub struct FileCutterJob {} #[derive(Serialize, Deserialize, Hash, Type)] pub struct FileCutterJobInit { - pub source_location_id: i32, - pub source_path_id: i32, - pub target_location_id: i32, - pub target_path: PathBuf, + pub source_location_id: location::id::Type, + pub target_location_id: location::id::Type, + pub sources_file_path_ids: Vec, + pub target_location_relative_directory_path: PathBuf, } -#[derive(Serialize, Deserialize, Debug)] -pub struct FileCutterJobStep { - pub source_fs_info: FsInfo, - pub target_directory: PathBuf, +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct FileCutterJobState { + full_target_directory_path: PathBuf, } impl JobInitData for FileCutterJobInit { @@ -38,8 +40,8 @@ impl JobInitData for FileCutterJobInit { #[async_trait::async_trait] impl StatefulJob for FileCutterJob { type Init = FileCutterJobInit; - type Data = (); - type Step = FileCutterJobStep; + type Data = FileCutterJobState; + type Step = FileData; const NAME: &'static str = "file_cutter"; @@ -48,23 +50,30 @@ impl StatefulJob for FileCutterJob { } async fn init(&self, ctx: WorkerContext, state: &mut JobState) -> Result<(), JobError> { - let source_fs_info = context_menu_fs_info( - &ctx.library.db, - state.init.source_location_id, - state.init.source_path_id, - ) - .await?; + let Library { db, .. } = &ctx.library; - let mut full_target_path = - get_location_path_from_location_id(&ctx.library.db, state.init.target_location_id) - .await?; - full_target_path.push(&state.init.target_path); + let (sources_location_path, mut targets_location_path) = + fetch_source_and_target_location_paths( + db, + state.init.source_location_id, + state.init.target_location_id, + ) + .await?; - state.steps.push_back(FileCutterJobStep { - source_fs_info, - target_directory: full_target_path, + targets_location_path.push(&state.init.target_location_relative_directory_path); + + state.data = Some(FileCutterJobState { + full_target_directory_path: targets_location_path, }); + state.steps = get_many_files_datas( + db, + &sources_location_path, + &state.init.sources_file_path_ids, + ) + .await? + .into(); + ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); Ok(()) @@ -75,50 +84,52 @@ impl StatefulJob for FileCutterJob { ctx: WorkerContext, state: &mut JobState, ) -> Result<(), JobError> { + let data = extract_job_data!(state); + let step = &state.steps[0]; - let source_info = &step.source_fs_info; - let full_output = step - .target_directory - .join(source_info.fs_path.file_name().ok_or(JobError::OsStr)?); + let full_output = data + .full_target_directory_path + .join(construct_target_filename(step, &None)); - let parent_source = source_info.fs_path.parent().ok_or(JobError::Path)?; - - let parent_output = full_output.parent().ok_or(JobError::Path)?; - - if fs::canonicalize(parent_source) - .await - .map_err(|e| FileIOError::from((parent_source, e)))? - == fs::canonicalize(parent_output) - .await - .map_err(|e| FileIOError::from((parent_output, e)))? + if step.full_path.parent().ok_or(JobError::Path)? + == full_output.parent().ok_or(JobError::Path)? 
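// Same-parent detection in the cut job (and in copy.rs above) drops the old
// pair of `fs::canonicalize` calls in favour of comparing the two
// `Path::parent()` values directly:
//
//     if step.full_path.parent().ok_or(JobError::Path)?
//         == full_output.parent().ok_or(JobError::Path)?
//
// Both paths are now assembled from the same location roots returned by
// `fetch_source_and_target_location_paths`, so they are already in a
// comparable form without touching the filesystem, and canonicalize's
// failure mode on not-yet-existing targets disappears. The trade-off: a
// plain component comparison does not resolve symlinks, so a source and
// destination that alias the same directory through a link are no longer
// caught by this guard.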
{ - return Err(JobError::MatchingSrcDest(source_info.fs_path.clone())); + return Err(FileSystemJobsError::MatchingSrcDest( + step.full_path.clone().into_boxed_path(), + ) + .into()); } - if fs::metadata(&full_output).await.is_ok() { - warn!( - "Skipping {} as it would be overwritten", - full_output.display() - ); + match fs::metadata(&full_output).await { + Ok(_) => { + warn!( + "Skipping {} as it would be overwritten", + full_output.display() + ); - return Err(JobError::WouldOverwrite(full_output)); + Err(FileSystemJobsError::WouldOverwrite(full_output.into_boxed_path()).into()) + } + Err(e) if e.kind() == io::ErrorKind::NotFound => { + trace!( + "Cutting {} to {}", + step.full_path.display(), + full_output.display() + ); + + fs::rename(&step.full_path, &full_output) + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))?; + + ctx.progress(vec![JobReportUpdate::CompletedTaskCount( + state.step_number + 1, + )]); + + Ok(()) + } + + Err(e) => Err(FileIOError::from((&full_output, e)).into()), } - - trace!( - "Cutting {} to {}", - source_info.fs_path.display(), - full_output.display() - ); - - fs::rename(&source_info.fs_path, &full_output) - .await - .map_err(|e| FileIOError::from((&source_info.fs_path, e)))?; - - ctx.progress(vec![JobReportUpdate::CompletedTaskCount( - state.step_number + 1, - )]); - Ok(()) } async fn finalize(&mut self, ctx: WorkerContext, state: &mut JobState) -> JobResult { diff --git a/core/src/object/fs/decrypt.rs b/core/src/object/fs/decrypt.rs index ef587975f..6e9a30f73 100644 --- a/core/src/object/fs/decrypt.rs +++ b/core/src/object/fs/decrypt.rs @@ -1,38 +1,32 @@ -// use sd_crypto::{crypto::Decryptor, header::file::FileHeader, Protected}; -// use serde::{Deserialize, Serialize}; -// use specta::Type; -// use std::path::PathBuf; -// use tokio::fs::File; - // use crate::{ // invalidate_query, // job::{ // JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, // }, +// library::Library, +// location::{file_path_helper:: location::id::Type}, // util::error::FileIOError, // }; -// use super::{context_menu_fs_info, FsInfo, BYTES_EXT}; +// use sd_crypto::{crypto::Decryptor, header::file::FileHeader, Protected}; + +// use serde::{Deserialize, Serialize}; +// use specta::Type; +// use tokio::fs::File; + +// use super::{get_location_path_from_location_id, get_many_files_datas, FileData, BYTES_EXT}; // pub struct FileDecryptorJob; -// #[derive(Serialize, Deserialize, Debug)] -// pub struct FileDecryptorJobState {} // // decrypt could have an option to restore metadata (and another specific option for file name? 
diff --git a/core/src/object/fs/decrypt.rs b/core/src/object/fs/decrypt.rs index ef587975f..6e9a30f73 100644 --- a/core/src/object/fs/decrypt.rs +++ b/core/src/object/fs/decrypt.rs @@ -1,38 +1,32 @@ -// use sd_crypto::{crypto::Decryptor, header::file::FileHeader, Protected}; -// use serde::{Deserialize, Serialize}; -// use specta::Type; -// use std::path::PathBuf; -// use tokio::fs::File; - // use crate::{ // invalidate_query, // job::{ // JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, // }, +// library::Library, +// location::{file_path_helper:: location::id::Type}, // util::error::FileIOError, // }; -// use super::{context_menu_fs_info, FsInfo, BYTES_EXT}; +// use sd_crypto::{crypto::Decryptor, header::file::FileHeader, Protected}; + +// use serde::{Deserialize, Serialize}; +// use specta::Type; +// use tokio::fs::File; + +// use super::{get_location_path_from_location_id, get_many_files_datas, FileData, BYTES_EXT}; // pub struct FileDecryptorJob; -// #[derive(Serialize, Deserialize, Debug)] -// pub struct FileDecryptorJobState {} // // decrypt could have an option to restore metadata (and another specific option for file name? - would turn "output file" into "output path" in the UI) // #[derive(Serialize, Deserialize, Debug, Type, Hash)] // pub struct FileDecryptorJobInit { -// pub location_id: i32, -// pub path_id: i32, +// pub location_id: location::id::Type, +// pub file_path_ids: Vec<file_path::id::Type>, // pub mount_associated_key: bool, -// pub output_path: Option<PathBuf>, // pub password: Option<Protected<String>>, // if this is set, we can assume the user chose password decryption // pub save_to_library: Option<bool>, // } -// #[derive(Serialize, Deserialize, Debug)] -// pub struct FileDecryptorJobStep { -// pub fs_info: FsInfo, -// } - // impl JobInitData for FileDecryptorJobInit { // type Job = FileDecryptorJob; // } @@ -40,8 +34,8 @@ // #[async_trait::async_trait] // impl StatefulJob for FileDecryptorJob { // type Init = FileDecryptorJobInit; -// type Data = FileDecryptorJobState; -// type Step = FileDecryptorJobStep; +// type Data = (); +// type Step = FileData; // const NAME: &'static str = "file_decryptor"; @@ -50,13 +44,15 @@ // } // async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> { -// // enumerate files to decrypt -// // populate the steps with them (local file paths) -// let fs_info = -// context_menu_fs_info(&ctx.library.db, state.init.location_id, state.init.path_id) -// .await?; +// let Library { db, .. } = &ctx.library; -// state.steps.push_back(FileDecryptorJobStep { fs_info }); +// state.steps = get_many_files_datas( +// db, +// get_location_path_from_location_id(db, state.init.location_id).await?, +// &state.init.file_path_ids, +// ) +// .await? +// .into(); // ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); @@ -68,29 +64,26 @@ // ctx: WorkerContext, // state: &mut JobState<Self>, // ) -> Result<(), JobError> { -// let info = &&state.steps[0].fs_info; +// let step = &state.steps[0]; // let key_manager = &ctx.library.key_manager; // // handle overwriting checks, and making sure there's enough available space -// let output_path = state.init.output_path.clone().map_or_else( -// || { -// let mut path = info.fs_path.clone(); -// let extension = path.extension().map_or("decrypted", |ext| { -// if ext == BYTES_EXT { -// "" -// } else { -// "decrypted" -// } -// }); -// path.set_extension(extension); -// path -// }, -// |p| p, -// ); +// let output_path = { +// let mut path = step.full_path.clone(); +// let extension = path.extension().map_or("decrypted", |ext| { +// if ext == BYTES_EXT { +// "" +// } else { +// "decrypted" +// } +// }); +// path.set_extension(extension); +// path +// }; -// let mut reader = File::open(info.fs_path.clone()) +// let mut reader = File::open(&step.full_path) // .await -// .map_err(|e| FileIOError::from((&info.fs_path, e)))?; +// .map_err(|e| FileIOError::from((&step.full_path, e)))?; // let mut writer = File::create(&output_path) // .await // .map_err(|e| FileIOError::from((output_path, e)))?; diff --git a/core/src/object/fs/delete.rs b/core/src/object/fs/delete.rs index d71d6c938..b00f87c3b 100644 --- a/core/src/object/fs/delete.rs +++ b/core/src/object/fs/delete.rs @@ -3,6 +3,8 @@ use crate::{ job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, + library::Library, + prisma::{file_path, location}, util::error::FileIOError, }; @@ -10,18 +12,16 @@ use std::hash::Hash; use serde::{Deserialize, Serialize}; use specta::Type; +use tokio::fs; -use super::{context_menu_fs_info, FsInfo}; +use super::{get_location_path_from_location_id, get_many_files_datas, FileData}; pub struct FileDeleterJob {} -#[derive(Serialize,
Deserialize, Debug)] -pub struct FileDeleterJobState {} - #[derive(Serialize, Deserialize, Hash, Type)] pub struct FileDeleterJobInit { - pub location_id: i32, - pub path_id: i32, + pub location_id: location::id::Type, + pub file_path_ids: Vec<file_path::id::Type>, } impl JobInitData for FileDeleterJobInit { @@ -31,8 +31,8 @@ impl JobInitData for FileDeleterJobInit { #[async_trait::async_trait] impl StatefulJob for FileDeleterJob { type Init = FileDeleterJobInit; - type Data = FileDeleterJobState; - type Step = FsInfo; + type Data = (); + type Step = FileData; const NAME: &'static str = "file_deleter"; @@ -41,11 +41,16 @@ impl StatefulJob for FileDeleterJob { } async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> { - let fs_info = - context_menu_fs_info(&ctx.library.db, state.init.location_id, state.init.path_id) - .await?; + let Library { db, .. } = &ctx.library; - state.steps.push_back(fs_info); + state.steps = get_many_files_datas( + db, + get_location_path_from_location_id(db, state.init.location_id).await?, + &state.init.file_path_ids, + ) + .await? + .into_iter() + .collect(); ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); @@ -57,21 +62,19 @@ impl StatefulJob for FileDeleterJob { ctx: WorkerContext, state: &mut JobState<Self>, ) -> Result<(), JobError> { - let info = &state.steps[0]; + let step = &state.steps[0]; - // need to handle stuff such as querying prisma for all paths of a file, and deleting all of those if requested (with a checkbox in the ui) - // maybe a files.countOccurances/and or files.getPath(location_id, path_id) to show how many of these files would be deleted (and where?) - - if info.path_data.is_dir { - tokio::fs::remove_dir_all(&info.fs_path).await + if step.file_path.is_dir { + fs::remove_dir_all(&step.full_path).await } else { - tokio::fs::remove_file(&info.fs_path).await + fs::remove_file(&step.full_path).await } - .map_err(|e| FileIOError::from((&info.fs_path, e)))?; + .map_err(|e| FileIOError::from((&step.full_path, e)))?; ctx.progress(vec![JobReportUpdate::CompletedTaskCount( state.step_number + 1, )]); + Ok(()) } diff --git a/core/src/object/fs/encrypt.rs b/core/src/object/fs/encrypt.rs index cafda5e96..7ecb1aeb8 100644 --- a/core/src/object/fs/encrypt.rs +++ b/core/src/object/fs/encrypt.rs @@ -1,38 +1,48 @@ -// use crate::{invalidate_query, job::*, library::Library, util::error::FileIOError}; +// use crate::{ +// invalidate_query, +// job::*, +// library::Library, +// location::{file_path_helper:: location::id::Type}, +// util::error::{FileIOError, NonUtf8PathError}, +// }; -// use std::path::PathBuf; - -// use chrono::FixedOffset; // use sd_crypto::{ // crypto::Encryptor, // header::{file::FileHeader, keyslot::Keyslot}, // primitives::{LATEST_FILE_HEADER, LATEST_KEYSLOT, LATEST_METADATA, LATEST_PREVIEW_MEDIA}, // types::{Algorithm, Key}, // }; + +// use chrono::FixedOffset; // use serde::{Deserialize, Serialize}; // use specta::Type; -// use tokio::{fs::File, io::AsyncReadExt}; +// use tokio::{ +// fs::{self, File}, +// io, +// }; // use tracing::{error, warn}; // use uuid::Uuid; -// use super::{context_menu_fs_info, FsInfo, BYTES_EXT}; +// use super::{ +// error::FileSystemJobsError, get_location_path_from_location_id, get_many_files_datas, FileData, +// BYTES_EXT, +// }; // pub struct FileEncryptorJob; // #[derive(Serialize, Deserialize, Type, Hash)] // pub struct FileEncryptorJobInit { -// pub location_id: i32, -// pub path_id: i32, +// pub location_id: location::id::Type, +// pub file_path_ids: Vec<file_path::id::Type>, // pub key_uuid: Uuid, //
pub algorithm: Algorithm, // pub metadata: bool, // pub preview_media: bool, -// pub output_path: Option<PathBuf>, // } // #[derive(Serialize, Deserialize)] // pub struct Metadata { -// pub path_id: i32, +// pub file_path_id: file_path::id::Type, // pub name: String, // pub hidden: bool, // pub favorite: bool, @@ -49,7 +59,7 @@ // impl StatefulJob for FileEncryptorJob { // type Init = FileEncryptorJobInit; // type Data = (); -// type Step = FsInfo; +// type Step = FileData; // const NAME: &'static str = "file_encryptor"; @@ -58,13 +68,15 @@ // } // async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> { -// state.steps.push_back( -// context_menu_fs_info(&ctx.library.db, state.init.location_id, state.init.path_id) -// .await -// .map_err(|_| JobError::MissingData { -// value: String::from("file_path that matches both location id and path id"), -// })?, -// ); +// let Library { db, .. } = &ctx.library; + +// state.steps = get_many_files_datas( +// db, +// get_location_path_from_location_id(db, state.init.location_id).await?, +// &state.init.file_path_ids, +// ) +// .await? +// .into(); // ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); @@ -76,11 +88,11 @@ // ctx: WorkerContext, // state: &mut JobState<Self>, // ) -> Result<(), JobError> { -// let info = &state.steps[0]; +// let step = &state.steps[0]; // let Library { key_manager, .. } = &ctx.library; -// if !info.path_data.is_dir { +// if !step.file_path.is_dir { // // handle overwriting checks, and making sure there's enough available space // let user_key = key_manager @@ -90,30 +102,23 @@ // let user_key_details = key_manager.access_keystore(state.init.key_uuid).await?; -// let output_path = state.init.output_path.clone().map_or_else( -// || { -// let mut path = info.fs_path.clone(); -// let extension = path.extension().map_or_else( -// || Ok("bytes".to_string()), -// |extension| { -// Ok::<String, JobError>( -// extension -// .to_str() -// .ok_or(JobError::MissingData { -// value: String::from( -// "path contents when converted to string", -// ), -// })? -// .to_string() + BYTES_EXT, -// ) -// }, -// )?; +// let output_path = { +// let mut path = step.full_path.clone(); +// let extension = path.extension().map_or_else( +// || Ok("bytes".to_string()), +// |extension| { +// Ok::<String, JobError>(format!( +// "{}{BYTES_EXT}", +// extension.to_str().ok_or(FileSystemJobsError::FilePath( +// NonUtf8PathError(step.full_path.clone().into_boxed_path()).into() +// ))?
+// )) +// }, +// )?; -// path.set_extension(extension); -// Ok::<PathBuf, JobError>(path) -// }, -// Ok, -// )?; +// path.set_extension(extension); +// path +// }; // let _guard = ctx // .library @@ -135,9 +140,9 @@ // Some, // ); -// let mut reader = File::open(&info.fs_path) +// let mut reader = File::open(&step.full_path) // .await -// .map_err(|e| FileIOError::from((&info.fs_path, e)))?; +// .map_err(|e| FileIOError::from((&step.full_path, e)))?; // let mut writer = File::create(&output_path) // .await // .map_err(|e| FileIOError::from((output_path, e)))?; @@ -162,11 +167,11 @@ // if state.init.metadata || state.init.preview_media { // // if any are requested, we can make the query as it'll be used at least once -// if let Some(ref object) = info.path_data.object { +// if let Some(ref object) = step.file_path.object { // if state.init.metadata { // let metadata = Metadata { -// path_id: state.init.path_id, -// name: info.path_data.materialized_path.clone(), +// file_path_id: step.file_path.id, +// name: step.file_path.materialized_path.clone(), // hidden: object.hidden, // favorite: object.favorite, // important: object.important, @@ -188,38 +193,37 @@ // // && (object.has_thumbnail // // || object.has_video_preview || object.has_thumbstrip) -// // may not be the best - pvm isn't guaranteed to be webp -// let pvm_path = ctx +// // may not be the best - preview media (thumbnail) isn't guaranteed to be webp +// let thumbnail_path = ctx // .library // .config() // .data_directory() // .join("thumbnails") // .join( -// info.path_data +// step.file_path // .cas_id // .as_ref() // .ok_or(JobError::MissingCasId)?, // ) // .with_extension("wepb"); -// if tokio::fs::metadata(&pvm_path).await.is_ok() { -// let mut pvm_bytes = Vec::new(); -// let mut pvm_file = File::open(&pvm_path) -// .await -// .map_err(|e| FileIOError::from((&pvm_path, e)))?; -// pvm_file -// .read_to_end(&mut pvm_bytes) -// .await -// .map_err(|e| FileIOError::from((pvm_path, e)))?; - -// header -// .add_preview_media( -// LATEST_PREVIEW_MEDIA, -// state.init.algorithm, -// master_key.clone(), -// &pvm_bytes, -// ) -// .await?; +// match fs::read(&thumbnail_path).await { +// Ok(thumbnail_bytes) => { +// header +// .add_preview_media( +// LATEST_PREVIEW_MEDIA, +// state.init.algorithm, +// master_key.clone(), +// &thumbnail_bytes, +// ) +// .await?; +// } +// Err(e) if e.kind() == io::ErrorKind::NotFound => { +// // If the file just doesn't exist, then we don't care +// } +// Err(e) => { +// return Err(FileIOError::from((thumbnail_path, e)).into()); +// } // } // } else { // // should use container encryption if it's a directory @@ -236,8 +240,8 @@ // .await?; // } else { // warn!( -// "encryption is skipping {} as it isn't a file", -// info.path_data.materialized_path +// "encryption is skipping {}/{} as it isn't a file", +// step.file_path.materialized_path, step.file_path.name // ) // } diff --git a/core/src/object/fs/erase.rs b/core/src/object/fs/erase.rs index fd84ea327..ef0d69314 100644 --- a/core/src/object/fs/erase.rs +++ b/core/src/object/fs/erase.rs @@ -1,62 +1,58 @@ use crate::{ - invalidate_query, + extract_job_data_mut, invalidate_query, job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, + library::Library, + location::file_path_helper::IsolatedFilePathData, + prisma::{file_path, location}, util::error::FileIOError, }; use std::{hash::Hash, path::PathBuf}; +use futures::future::try_join_all; use serde::{Deserialize, Serialize}; use serde_with::{serde_as, DisplayFromStr}; use
specta::Type; -use tokio::{fs::OpenOptions, io::AsyncWriteExt}; +use tokio::{ + fs::{self, OpenOptions}, + io::AsyncWriteExt, +}; use tracing::trace; -use super::{context_menu_fs_info, FsInfo}; +use super::{ + error::FileSystemJobsError, get_file_data_from_isolated_file_path, + get_location_path_from_location_id, get_many_files_datas, FileData, +}; pub struct FileEraserJob {} #[serde_as] #[derive(Serialize, Deserialize, Hash, Type)] pub struct FileEraserJobInit { - pub location_id: i32, - pub path_id: i32, + pub location_id: location::id::Type, + pub file_path_ids: Vec<file_path::id::Type>, #[specta(type = String)] #[serde_as(as = "DisplayFromStr")] pub passes: usize, } -#[derive(Serialize, Deserialize, Debug, Clone)] -pub enum FileEraserJobStep { - Directory { path: PathBuf }, - File { path: PathBuf }, -} - -impl From<FsInfo> for FileEraserJobStep { - fn from(value: FsInfo) -> Self { - if value.path_data.is_dir { - Self::Directory { - path: value.fs_path, - } - } else { - Self::File { - path: value.fs_path, - } - } - } -} - impl JobInitData for FileEraserJobInit { type Job = FileEraserJob; } +#[derive(Serialize, Deserialize)] +pub struct FileEraserJobData { + location_path: PathBuf, + directories_to_remove: Vec<PathBuf>, +} + #[async_trait::async_trait] impl StatefulJob for FileEraserJob { type Init = FileEraserJobInit; - type Data = FsInfo; - type Step = FileEraserJobStep; + type Data = FileEraserJobData; + type Step = FileData; const NAME: &'static str = "file_eraser"; @@ -65,13 +61,18 @@ impl StatefulJob for FileEraserJob { } async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> { - let fs_info = - context_menu_fs_info(&ctx.library.db, state.init.location_id, state.init.path_id) - .await?; + let Library { db, .. } = &ctx.library; - state.data = Some(fs_info.clone()); + let location_path = get_location_path_from_location_id(db, state.init.location_id).await?; - state.steps.push_back(fs_info.into()); + state.steps = get_many_files_datas(db, &location_path, &state.init.file_path_ids) + .await? + .into(); + + state.data = Some(FileEraserJobData { + location_path, + directories_to_remove: vec![], + }); ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); @@ -86,84 +87,96 @@ impl StatefulJob for FileEraserJob { // need to handle stuff such as querying prisma for all paths of a file, and deleting all of those if requested (with a checkbox in the ui) // maybe a files.countOccurances/and or files.getPath(location_id, path_id) to show how many of these files would be erased (and where?) - match &state.steps[0] { - FileEraserJobStep::File { path } => { - let mut file = OpenOptions::new() - .read(true) - .write(true) - .open(path) - .await - .map_err(|e| FileIOError::from((path, e)))?; - let file_len = file - .metadata() - .await - .map_err(|e| FileIOError::from((path, e)))?
- .len(); + let step = &state.steps[0]; - sd_crypto::fs::erase::erase(&mut file, file_len as usize, state.init.passes) - .await?; + // Had to use `state.steps[0]` all over the place to appease the borrow checker + if step.file_path.is_dir { + let data = extract_job_data_mut!(state); - file.set_len(0) - .await - .map_err(|e| FileIOError::from((path, e)))?; - file.flush() - .await - .map_err(|e| FileIOError::from((path, e)))?; - drop(file); + let mut dir = tokio::fs::read_dir(&step.full_path) + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))?; - trace!("Erasing file: {:?}", path); + // Can't use the `step` borrow from here onwards, or you feel the wrath of the borrow checker + while let Some(children_entry) = dir + .next_entry() + .await + .map_err(|e| FileIOError::from((&state.steps[0].full_path, e)))? + { + let children_path = children_entry.path(); - tokio::fs::remove_file(path) - .await - .map_err(|e| FileIOError::from((path, e)))?; + state.steps.push_back( + get_file_data_from_isolated_file_path( + &ctx.library.db, + &data.location_path, + &IsolatedFilePathData::new( + state.init.location_id, + &data.location_path, + &children_path, + children_entry + .metadata() + .await + .map_err(|e| FileIOError::from((&children_path, e)))? + .is_dir(), + ) + .map_err(FileSystemJobsError::from)?, + ) + .await?, + ); + + ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); } - FileEraserJobStep::Directory { path } => { - let path = path.clone(); // To appease the borrowck + data.directories_to_remove + .push(state.steps[0].full_path.clone()); + } else { + let mut file = OpenOptions::new() + .read(true) + .write(true) + .open(&step.full_path) + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))?; + let file_len = file + .metadata() + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))? + .len(); - let mut dir = tokio::fs::read_dir(&path) - .await - .map_err(|e| FileIOError::from((&path, e)))?; + sd_crypto::fs::erase::erase(&mut file, file_len as usize, state.init.passes).await?; - while let Some(entry) = dir - .next_entry() - .await - .map_err(|e| FileIOError::from((&path, e)))? - { - let entry_path = entry.path(); - state.steps.push_back( - if entry - .metadata() - .await - .map_err(|e| FileIOError::from((&entry_path, e)))? - .is_dir() - { - FileEraserJobStep::Directory { path: entry_path } - } else { - FileEraserJobStep::File { path: entry_path } - }, - ); + file.set_len(0) + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))?; + file.flush() + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))?; + drop(file); - ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]); - } - } - }; + trace!("Erasing file: {}", step.full_path.display()); + + fs::remove_file(&step.full_path) + .await + .map_err(|e| FileIOError::from((&step.full_path, e)))?; + } ctx.progress(vec![JobReportUpdate::CompletedTaskCount( state.step_number + 1, )]); + Ok(()) }
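
Worth noting the control flow the rewrite adopts: directories are never erased in place — their children are appended to the job's step queue, and the directory itself is parked in `directories_to_remove` until `finalize`. A framework-free sketch of that queue-driven walk (hypothetical function; the real job erases file contents with `sd_crypto::fs::erase::erase` before removal):

```rust
use std::{collections::VecDeque, io, path::PathBuf};
use tokio::fs;

/// Split a tree into files to erase now and directories to remove later,
/// in the same queue-driven style as FileEraserJob::execute_step.
async fn plan_erase(root: PathBuf) -> io::Result<(Vec<PathBuf>, Vec<PathBuf>)> {
    let mut steps = VecDeque::from([root]);
    let (mut files, mut directories_to_remove) = (Vec::new(), Vec::new());

    while let Some(path) = steps.pop_front() {
        if fs::metadata(&path).await?.is_dir() {
            let mut dir = fs::read_dir(&path).await?;
            while let Some(entry) = dir.next_entry().await? {
                // Children become future steps, just like state.steps.push_back above.
                steps.push_back(entry.path());
            }
            // Deferred: only removable once all of its children have been erased.
            directories_to_remove.push(path);
        } else {
            files.push(path);
        }
    }

    Ok((files, directories_to_remove))
}
```
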
async fn finalize(&mut self, ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult { - let data = state - .data - .as_ref() - .expect("critical error: missing data on job state"); - if data.path_data.is_dir { - tokio::fs::remove_dir_all(&data.fs_path) - .await - .map_err(|e| FileIOError::from((&data.fs_path, e)))?; - } + try_join_all( + extract_job_data_mut!(state) + .directories_to_remove + .drain(..) + .map(|data| async { + fs::remove_dir_all(&data) + .await + .map_err(|e| FileIOError::from((data, e))) + }), + ) + .await?; invalidate_query!(ctx.library, "search.paths"); diff --git a/core/src/object/fs/error.rs b/core/src/object/fs/error.rs index d48d00be9..f0e01cd40 100644 --- a/core/src/object/fs/error.rs +++ b/core/src/object/fs/error.rs @@ -1,14 +1,31 @@ +use crate::{ + location::{file_path_helper::FilePathError, LocationError}, + prisma::file_path, + util::error::FileIOError, +}; + +use std::path::Path; + +use prisma_client_rust::QueryError; use thiserror::Error; -use crate::location::LocationError; - -/// Error type for location related errors +/// Error type for file system related job errors #[derive(Error, Debug)] -pub enum VirtualFSError { +pub enum FileSystemJobsError { #[error("Location error: {0}")] - LocationError(#[from] LocationError), - #[error("Failed to create file or folder on disk at path (path: {0:?})")] - CreateFileOrFolder(#[from] std::io::Error), - #[error("Database error (error: {0:?})")] - DatabaseError(#[from] prisma_client_rust::QueryError), + Location(#[from] LocationError), + #[error("file_path not in database: <path='{}'>", .0.display())] + FilePathNotFound(Box<Path>), + #[error("file_path id not in database: <id='{0}'>")] + FilePathIdNotFound(file_path::id::Type), + #[error("failed to create file or folder on disk")] + CreateFileOrFolder(FileIOError), + #[error("database error: {0}")] + Database(#[from] QueryError), + #[error(transparent)] + FilePath(#[from] FilePathError), + #[error("source and destination path are the same: {}", .0.display())] + MatchingSrcDest(Box<Path>), + #[error("action would overwrite another file: {}", .0.display())] + WouldOverwrite(Box<Path>), } diff --git a/core/src/object/fs/mod.rs b/core/src/object/fs/mod.rs index 34d6abcd5..b192f35f1 100644 --- a/core/src/object/fs/mod.rs +++ b/core/src/object/fs/mod.rs @@ -1,25 +1,28 @@ use crate::{ - job::JobError, - location::file_path_helper::{file_path_with_object, IsolatedFilePathData}, + location::{ + file_path_helper::{file_path_with_object, IsolatedFilePathData}, + LocationError, + }, prisma::{file_path, location, PrismaClient}, }; -use std::{ffi::OsStr, path::PathBuf}; +use std::path::{Path, PathBuf}; use serde::{Deserialize, Serialize}; pub mod create; +pub mod delete; +pub mod erase; pub mod copy; pub mod cut; // pub mod decrypt; -pub mod delete; // pub mod encrypt; pub mod error; -pub mod erase; +use error::FileSystemJobsError; // pub const BYTES_EXT: &str = ".bytes"; @@ -30,54 +33,144 @@ pub enum ObjectType { } #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct FsInfo { - pub path_data: file_path_with_object::Data, - pub fs_path: PathBuf, -} - -pub fn osstr_to_string(os_str: Option<&OsStr>) -> Result<String, JobError> { - os_str - .and_then(OsStr::to_str) - .map(str::to_string) - .ok_or(JobError::OsStr) +pub struct FileData { + pub file_path: file_path_with_object::Data, + pub full_path: PathBuf, } pub async fn get_location_path_from_location_id( db: &PrismaClient, - location_id: i32, -) -> Result<PathBuf, JobError> { - Ok(db - .location() + location_id: file_path::id::Type, +) -> Result<PathBuf, FileSystemJobsError> { db.location() .find_unique(location::id::equals(location_id)) .exec() .await? - .ok_or(JobError::MissingData { - value: String::from("location which matches location_id"), - })? - .path - .ok_or(JobError::MissingPath)?
- .into()) + .and_then(|location| location.path.map(PathBuf::from)) + .ok_or(FileSystemJobsError::Location(LocationError::IdNotFound( + location_id, + ))) } -pub async fn context_menu_fs_info( +pub async fn get_many_files_datas( db: &PrismaClient, - location_id: i32, - file_path_id: i32, -) -> Result<FsInfo, JobError> { - let path_data = db - .file_path() - .find_unique(file_path::id::equals(file_path_id)) + location_path: impl AsRef<Path>, + file_path_ids: &[file_path::id::Type], +) -> Result<Vec<FileData>, FileSystemJobsError> { + let location_path = location_path.as_ref(); + + db._batch( + file_path_ids + .iter() + .map(|file_path_id| { + db.file_path() + .find_unique(file_path::id::equals(*file_path_id)) + .include(file_path_with_object::include()) + }) + // FIXME:(fogodev -> Brendonovich) this collect is a workaround to a weird higher-ranked lifetime error on + // the _batch function; it should be removed once the error is fixed + .collect::<Vec<_>>(), + ) + .await? + .into_iter() + .zip(file_path_ids.iter()) + .map(|(maybe_file_path, file_path_id)| { + maybe_file_path + .ok_or(FileSystemJobsError::FilePathIdNotFound(*file_path_id)) + .map(|path_data| FileData { + full_path: location_path.join(IsolatedFilePathData::from(&path_data)), + file_path: path_data, + }) + }) + .collect::<Result<Vec<_>, _>>() +} + +pub async fn get_file_data_from_isolated_file_path( + db: &PrismaClient, + location_path: impl AsRef<Path>, + iso_file_path: &IsolatedFilePathData<'_>, +) -> Result<FileData, FileSystemJobsError> { + db.file_path() + .find_unique(iso_file_path.into()) .include(file_path_with_object::include()) .exec() .await? - .ok_or(JobError::MissingData { - value: String::from("file_path that matches both location id and path id"), - })?; - - Ok(FsInfo { - fs_path: get_location_path_from_location_id(db, location_id) - .await? - .join(IsolatedFilePathData::from(&path_data)), - path_data, - }) + .ok_or_else(|| { + FileSystemJobsError::FilePathNotFound( + AsRef::<Path>::as_ref(iso_file_path) + .to_path_buf() + .into_boxed_path(), + ) + }) + .map(|path_data| FileData { + full_path: location_path + .as_ref() + .join(IsolatedFilePathData::from(&path_data)), + file_path: path_data, + }) +}
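
The zip-with-ids step in `get_many_files_datas` above is what turns a missing row into a typed `FilePathIdNotFound` instead of a silent skip. The same shape, reduced to its essentials (assumed generic types, not tied to Prisma):

```rust
/// Pair each requested id with its fetched row; the first missing row
/// short-circuits into an error carrying the offending id.
fn zip_rows_with_ids<T>(ids: &[i32], rows: Vec<Option<T>>) -> Result<Vec<T>, i32> {
    rows.into_iter()
        .zip(ids)
        .map(|(row, id)| row.ok_or(*id))
        .collect() // collecting into Result short-circuits on the first Err
}
```
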
+ +pub async fn fetch_source_and_target_location_paths( + db: &PrismaClient, + source_location_id: location::id::Type, + target_location_id: location::id::Type, +) -> Result<(PathBuf, PathBuf), FileSystemJobsError> { + match db + ._batch(( + db.location() + .find_unique(location::id::equals(source_location_id)), + db.location() + .find_unique(location::id::equals(target_location_id)), + )) + .await? + { + (Some(source_location), Some(target_location)) => Ok(( + source_location + .path + .map(PathBuf::from) + .ok_or(FileSystemJobsError::Location(LocationError::MissingPath( + source_location_id, + )))?, + target_location + .path + .map(PathBuf::from) + .ok_or(FileSystemJobsError::Location(LocationError::MissingPath( + target_location_id, + )))?, + )), + (None, _) => Err(FileSystemJobsError::Location(LocationError::IdNotFound( + source_location_id, + ))), + (_, None) => Err(FileSystemJobsError::Location(LocationError::IdNotFound( + target_location_id, + ))), + } +} + +fn construct_target_filename( + source_file_data: &FileData, + target_file_name_suffix: &Option<String>, +) -> String { + // extension wizardry for cloning and such + // if no suffix has been selected, just use the file name + // if a suffix is provided and it's a directory, use the directory name + suffix + // if a suffix is provided and it's a file, use the (file name + suffix).extension + + if let Some(ref suffix) = target_file_name_suffix { + if source_file_data.file_path.is_dir { + format!("{}{suffix}", source_file_data.file_path.name) + } else { + format!( + "{}{suffix}.{}", + source_file_data.file_path.name, source_file_data.file_path.extension, + ) + } + } else if source_file_data.file_path.is_dir { + source_file_data.file_path.name.clone() + } else { + format!( + "{}.{}", + source_file_data.file_path.name, source_file_data.file_path.extension + ) + } +}
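
Spelled out, the suffix rules above amount to a small pure function. A standalone restatement with plain parameters and example expectations (hypothetical inputs; the job reads `name`, `extension`, and `is_dir` straight off `FileData` instead):

```rust
/// Restatement of construct_target_filename's suffix rules.
fn target_filename(name: &str, extension: &str, is_dir: bool, suffix: Option<&str>) -> String {
    match (is_dir, suffix) {
        (true, Some(s)) => format!("{name}{s}"),              // directory name + suffix
        (false, Some(s)) => format!("{name}{s}.{extension}"), // (file name + suffix).extension
        (true, None) => name.to_string(),
        (false, None) => format!("{name}.{extension}"),
    }
}

#[test]
fn suffix_rules() {
    assert_eq!(target_filename("photo", "png", false, Some(" copy")), "photo copy.png");
    assert_eq!(target_filename("assets", "", true, Some(" copy")), "assets copy");
    assert_eq!(target_filename("photo", "png", false, None), "photo.png");
}
```
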
diff --git a/core/src/object/preview/thumbnail/mod.rs b/core/src/object/preview/thumbnail/mod.rs index ceee46044..7ea7a45db 100644 --- a/core/src/object/preview/thumbnail/mod.rs +++ b/core/src/object/preview/thumbnail/mod.rs @@ -3,10 +3,7 @@ use crate::{ invalidate_query, job::{JobError, JobReportUpdate, JobResult, JobState, WorkerContext}, library::Library, - location::{ - file_path_helper::{file_path_for_thumbnailer, FilePathError, IsolatedFilePathData}, - LocationId, - }, + location::file_path_helper::{file_path_for_thumbnailer, FilePathError, IsolatedFilePathData}, prisma::location, util::{error::FileIOError, version_manager::VersionManagerError}, }; @@ -106,7 +103,7 @@ pub enum ThumbnailerError { #[derive(Debug, Serialize, Deserialize)] pub struct ThumbnailerJobReport { - location_id: LocationId, + location_id: location::id::Type, path: PathBuf, thumbnails_created: u32, } @@ -242,7 +239,7 @@ async fn process_step( .expect("critical error: missing data on job state"); let step_result = inner_process_step( - &step, + step, &data.location_path, &data.thumbnail_dir, &state.init.location, @@ -261,12 +258,14 @@ async fn process_step( pub async fn inner_process_step( step: &ThumbnailerJobStep, - location_path: &PathBuf, - thumbnail_dir: &PathBuf, + location_path: impl AsRef<Path>, + thumbnail_dir: impl AsRef<Path>, location: &location::Data, library: &Library, ) -> Result<(), JobError> { let ThumbnailerJobStep { file_path, kind } = step; + let location_path = location_path.as_ref(); + let thumbnail_dir = thumbnail_dir.as_ref(); // assemble the file path let path = location_path.join(IsolatedFilePathData::from((location.id, file_path))); diff --git a/core/src/object/preview/thumbnail/shallow.rs b/core/src/object/preview/thumbnail/shallow.rs index 2decb1aae..dfb5f79f1 100644 --- a/core/src/object/preview/thumbnail/shallow.rs +++ b/core/src/object/preview/thumbnail/shallow.rs @@ -5,12 +5,9 @@ use crate::{ invalidate_query, job::JobError, library::Library, - location::{ - file_path_helper::{ - ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location, - file_path_for_thumbnailer, IsolatedFilePathData, - }, - LocationId, + location::file_path_helper::{ + ensure_file_path_exists, ensure_sub_path_is_directory, ensure_sub_path_is_in_location, + file_path_for_thumbnailer, IsolatedFilePathData, }, object::preview::thumbnail, prisma::{file_path, location, PrismaClient}, @@ -117,7 +114,7 @@ pub async fn shallow_thumbnailer( .flatten(); for file in all_files { - thumbnail::inner_process_step(&file, &location_path, &thumbnail_dir, location, &library) + thumbnail::inner_process_step(&file, &location_path, &thumbnail_dir, location, library) .await?; } @@ -128,7 +125,7 @@ pub async fn shallow_thumbnailer( async fn get_files_by_extensions( db: &PrismaClient, - location_id: LocationId, + location_id: location::id::Type, parent_isolated_file_path_data: &IsolatedFilePathData<'_>, extensions: &[Extension], kind: ThumbnailerJobStepKind, diff --git a/core/src/object/preview/thumbnail/thumbnailer_job.rs b/core/src/object/preview/thumbnail/thumbnailer_job.rs index 95b98fc59..24b0f240a 100644 --- a/core/src/object/preview/thumbnail/thumbnailer_job.rs +++ b/core/src/object/preview/thumbnail/thumbnailer_job.rs @@ -1,4 +1,5 @@ use crate::{ + extract_job_data, job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, @@ -161,13 +162,7 @@ impl StatefulJob for ThumbnailerJob { } async fn finalize(&mut self, ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult { - finalize_thumbnailer( - state - .data - .as_ref() - .expect("critical error: missing data on job state"), - ctx, - ) + finalize_thumbnailer(extract_job_data!(state), ctx) } }
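
`extract_job_data!` and `extract_job_data_mut!` are defined elsewhere in the crate and don't appear in this diff; judging by the `state.data.as_ref().expect(...)` boilerplate they replace at every call site, a plausible shape is the following (an assumption reconstructed from the call sites, not the crate's actual macro):

```rust
// Hypothetical reconstruction — the real definitions live outside this diff.
macro_rules! extract_job_data {
    ($state:ident) => {
        $state
            .data
            .as_ref()
            .expect("critical error: missing data on job state")
    };
}

macro_rules! extract_job_data_mut {
    ($state:ident) => {
        $state
            .data
            .as_mut()
            .expect("critical error: missing data on job state")
    };
}
```
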
diff --git a/core/src/object/validation/validator_job.rs b/core/src/object/validation/validator_job.rs index f77643899..b4789e77e 100644 --- a/core/src/object/validation/validator_job.rs +++ b/core/src/object/validation/validator_job.rs @@ -1,10 +1,11 @@ use crate::{ + extract_job_data, job::{ JobError, JobInitData, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext, }, library::Library, location::file_path_helper::{file_path_for_object_validator, IsolatedFilePathData}, - prisma::file_path, + prisma::{file_path, location}, sync, util::error::FileIOError, }; @@ -32,7 +33,7 @@ pub struct ObjectValidatorJobState { // The validator can #[derive(Serialize, Deserialize, Debug, Hash)] pub struct ObjectValidatorJobInit { - pub location_id: i32, + pub location_id: location::id::Type, pub path: PathBuf, pub background: bool, } @@ -86,10 +87,7 @@ impl StatefulJob for ObjectValidatorJob { let Library { db, sync, .. } = &ctx.library; let file_path = &state.steps[0]; - let data = state - .data - .as_ref() - .expect("critical error: missing data on job state"); + let data = extract_job_data!(state); // this is to skip files that already have checksums // i'm unsure what the desired behaviour is in this case @@ -129,10 +127,7 @@ impl StatefulJob for ObjectValidatorJob { } async fn finalize(&mut self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult { - let data = state - .data - .as_ref() - .expect("critical error: missing data on job state"); + let data = extract_job_data!(state); info!( "finalizing validator job at {}: {} tasks", data.root_path.display(), diff --git a/core/src/util/migrator.rs b/core/src/util/migrator.rs index 42613969b..83a61f8c2 100644 --- a/core/src/util/migrator.rs +++ b/core/src/util/migrator.rs @@ -54,7 +54,7 @@ pub trait Migrate: Sized + DeserializeOwned + Serialize + Default { }; if let Some(obj) = y.as_object_mut() { - if let Some(_) = obj.get("version").and_then(|v| v.as_str()) { + if obj.contains_key("version") { return Err(MigratorError::HasSuperLegacyConfig); // This is just to make the error nicer } else { return Err(err.into()); @@ -74,7 +74,7 @@ pub trait Migrate: Sized + DeserializeOwned + Serialize + Default { let is_latest = cfg.version == Self::CURRENT_VERSION; for v in (cfg.version + 1)..=Self::CURRENT_VERSION { cfg.version = v; - match Self::migrate(v, &mut cfg.other, &ctx).await { + match Self::migrate(v, &mut cfg.other, ctx).await { Ok(()) => (), Err(err) => { file.write_all(serde_json::to_string(&cfg)?.as_bytes())?; // Writes updated version @@ -133,6 +133,7 @@ pub enum MigratorError { } #[cfg(test)] +#[allow(clippy::unwrap_used, clippy::panic)] mod test { use std::{fs, io::Read, path::PathBuf}; diff --git a/crates/crypto/Cargo.toml b/crates/crypto/Cargo.toml index 7e02c7582..b98b5a808 100644 --- a/crates/crypto/Cargo.toml +++ b/crates/crypto/Cargo.toml @@ -5,9 +5,9 @@ authors = ["Jake Robinson "] readme = "README.md" description = "A library to handle cryptographic functions within Spacedrive" rust-version = "1.67.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [features] rspc = ["dep:rspc", "dep:specta"] diff --git a/crates/deps-generator/Cargo.toml b/crates/deps-generator/Cargo.toml index 92f306f62..405b425db 100644 --- a/crates/deps-generator/Cargo.toml +++ b/crates/deps-generator/Cargo.toml @@ -3,9 +3,9 @@ name = "deps-generator" version = "0.0.0" authors = ["Jake Robinson "] description = "A tool to compile all Spacedrive dependencies and their respective licenses" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [dependencies] reqwest = { version = "0.11.18", features = ["blocking"] } diff --git a/crates/ffmpeg/Cargo.toml b/crates/ffmpeg/Cargo.toml index 8e0275a67..254aa72dc 100644 --- a/crates/ffmpeg/Cargo.toml +++ b/crates/ffmpeg/Cargo.toml @@ -5,9 +5,9 @@ authors = ["Ericson Soares "] readme = "README.md" description = "A simple library to generate video thumbnails using ffmpeg with the webp format" rust-version = "1.64.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } # See more keys and their definitions at
https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/crates/file-ext/Cargo.toml b/crates/file-ext/Cargo.toml index 6a2071418..4023b1a28 100644 --- a/crates/file-ext/Cargo.toml +++ b/crates/file-ext/Cargo.toml @@ -5,9 +5,9 @@ authors = [ "Brendan Allen ", "Jamie Pine ", ] -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] diff --git a/crates/heif/Cargo.toml b/crates/heif/Cargo.toml index 80e7f28c6..8014223b1 100644 --- a/crates/heif/Cargo.toml +++ b/crates/heif/Cargo.toml @@ -2,9 +2,9 @@ name = "sd-heif" version = "0.1.0" authors = ["Jake Robinson "] -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [dependencies] libheif-rs = "0.19.2" diff --git a/crates/macos/Cargo.toml b/crates/macos/Cargo.toml index c2b97f3a3..157d4f6c8 100644 --- a/crates/macos/Cargo.toml +++ b/crates/macos/Cargo.toml @@ -1,14 +1,14 @@ [package] name = "sd-macos" version = "0.1.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] -swift-rs.workspace = true +swift-rs = { workspace = true } [build-dependencies] swift-rs = { workspace = true, features = ["build"] } diff --git a/crates/p2p/Cargo.toml b/crates/p2p/Cargo.toml index 0d0516905..245515acc 100644 --- a/crates/p2p/Cargo.toml +++ b/crates/p2p/Cargo.toml @@ -3,9 +3,9 @@ name = "sd-p2p" version = "0.1.0" description = "Rust Peer to Peer Networking Library" authors = ["Oscar Beaumont "] -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [features] default = [] diff --git a/crates/prisma-cli/Cargo.toml b/crates/prisma-cli/Cargo.toml index 241be2f4e..b1e2f99fb 100644 --- a/crates/prisma-cli/Cargo.toml +++ b/crates/prisma-cli/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "prisma-cli" version = "0.1.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [dependencies] prisma-client-rust-cli = { workspace = true } diff --git a/crates/sync-generator/Cargo.toml b/crates/sync-generator/Cargo.toml index 03e45db51..9fcc97031 100644 --- a/crates/sync-generator/Cargo.toml +++ b/crates/sync-generator/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "sd-sync-generator" version = "0.1.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html diff --git a/crates/sync/Cargo.toml b/crates/sync/Cargo.toml index b9c9f062a..d1a7ee168 100644 --- a/crates/sync/Cargo.toml +++ b/crates/sync/Cargo.toml @@ -1,9 +1,9 @@ [package] name = "sd-sync" version = "0.1.0" -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } 
+edition = { workspace = true } [dependencies] rand = "0.8.5" diff --git a/crates/sync/example/Cargo.toml b/crates/sync/example/Cargo.toml index 2313ec0e7..c83e2a47d 100644 --- a/crates/sync/example/Cargo.toml +++ b/crates/sync/example/Cargo.toml @@ -3,9 +3,9 @@ name = "sd-sync-example" version = "0.1.0" rust-version = "1.64" publish = false -license.workspace = true -repository.workspace = true -edition.workspace = true +license = { workspace = true } +repository = { workspace = true } +edition = { workspace = true } [dependencies] serde_json = "1.0.85" diff --git a/interface/app/$libraryId/Explorer/AssignTagMenuItems.tsx b/interface/app/$libraryId/Explorer/AssignTagMenuItems.tsx index 629900790..f9aff433a 100644 --- a/interface/app/$libraryId/Explorer/AssignTagMenuItems.tsx +++ b/interface/app/$libraryId/Explorer/AssignTagMenuItems.tsx @@ -87,7 +87,7 @@ export default (props: { objectId: number }) => { e.preventDefault(); assignTag.mutate({ tag_id: tag.id, - object_id: props.objectId, + object_ids: [props.objectId], unassign: active }); }} diff --git a/interface/app/$libraryId/Explorer/ContextMenu.tsx b/interface/app/$libraryId/Explorer/ContextMenu.tsx index d76ba02c6..4849c5e6d 100644 --- a/interface/app/$libraryId/Explorer/ContextMenu.tsx +++ b/interface/app/$libraryId/Explorer/ContextMenu.tsx @@ -92,9 +92,9 @@ export default (props: PropsWithChildren) => { params.path && copyFiles.mutate({ source_location_id: store.cutCopyState.sourceLocationId, - source_path_id: store.cutCopyState.sourcePathId, + sources_file_path_ids: [store.cutCopyState.sourcePathId], target_location_id: store.locationId, - target_path: params.path, + target_location_relative_directory_path: params.path, target_file_name_suffix: null }); } else { @@ -102,9 +102,9 @@ export default (props: PropsWithChildren) => { params.path && cutFiles.mutate({ source_location_id: store.cutCopyState.sourceLocationId, - source_path_id: store.cutCopyState.sourcePathId, + sources_file_path_ids: [store.cutCopyState.sourcePathId], target_location_id: store.locationId, - target_path: params.path + target_location_relative_directory_path: params.path }); } }} diff --git a/interface/app/$libraryId/Explorer/File/ContextMenu.tsx b/interface/app/$libraryId/Explorer/File/ContextMenu.tsx index 30de88a8d..326820c14 100644 --- a/interface/app/$libraryId/Explorer/File/ContextMenu.tsx +++ b/interface/app/$libraryId/Explorer/File/ContextMenu.tsx @@ -82,7 +82,7 @@ export default ({ data }: Props) => { - data.item.object_id && removeFromRecents.mutate(data.item.object_id) + data.item.object_id && removeFromRecents.mutate([data.item.object_id]) } /> )} @@ -129,9 +129,9 @@ export default ({ data }: Props) => { copyFiles.mutate({ source_location_id: store.locationId!, - source_path_id: data.item.id, + sources_file_path_ids: [data.item.id], target_location_id: store.locationId!, - target_path: params.path, + target_location_relative_directory_path: params.path, target_file_name_suffix: ' copy' }); }} @@ -303,7 +303,9 @@ const OpenOrDownloadOptions = (props: { data: ExplorerItem }) => { props.data.type === 'Path' && props.data.item.object_id && updateAccessTime.mutate(props.data.item.object_id); - openFilePath(library.uuid, filePath.id); + + // FIXME: treat error properly + openFilePath(library.uuid, [filePath.id]); }} /> )} diff --git a/interface/app/$libraryId/Explorer/File/ContextMenu/OpenWith.tsx b/interface/app/$libraryId/Explorer/File/ContextMenu/OpenWith.tsx index b1fa2002d..0533a45bb 100644 --- 
a/interface/app/$libraryId/Explorer/File/ContextMenu/OpenWith.tsx +++ b/interface/app/$libraryId/Explorer/File/ContextMenu/OpenWith.tsx @@ -34,29 +34,29 @@ }) => { const { library } = useLibraryContext(); - const items = useQuery( + const items = useQuery( ['openWith', filePath.id], - () => actions.getFilePathOpenWithApps(library.uuid, filePath.id), + () => actions.getFilePathOpenWithApps(library.uuid, [filePath.id]), { suspense: true } ); return ( <> - {items.data?.map((d) => ( + {items.data?.map((data) => ( { try { - await actions.openFilePathWith(library.uuid, filePath.id, d.url); + await actions.openFilePathWith(library.uuid, [[filePath.id, data.url]]); } catch { showAlertDialog({ title: 'Error', - value: `Failed to open file, with: ${d.url}` + value: `Failed to open file, with: ${data.url}` }); } }} > - {d.name} + {data.name} )) ??

No apps available

} diff --git a/interface/app/$libraryId/Explorer/File/DecryptDialog.tsx b/interface/app/$libraryId/Explorer/File/DecryptDialog.tsx index 4e768a4dd..995cee306 100644 --- a/interface/app/$libraryId/Explorer/File/DecryptDialog.tsx +++ b/interface/app/$libraryId/Explorer/File/DecryptDialog.tsx @@ -69,7 +69,7 @@ // onSubmit={form.handleSubmit((data) => // decryptFile.mutateAsync({ // location_id: props.location_id, -// path_id: props.path_id, +// file_path_ids: [props.path_id], // output_path: data.outputPath !== '' ? data.outputPath : null, // mount_associated_key: data.mountAssociatedKey, // password: data.type === 'password' ? data.password : null, diff --git a/interface/app/$libraryId/Explorer/File/DeleteDialog.tsx b/interface/app/$libraryId/Explorer/File/DeleteDialog.tsx index 36e4b3b03..7e5679458 100644 --- a/interface/app/$libraryId/Explorer/File/DeleteDialog.tsx +++ b/interface/app/$libraryId/Explorer/File/DeleteDialog.tsx @@ -18,7 +18,7 @@ export default (props: Propps) => { onSubmit={form.handleSubmit(() => deleteFile.mutateAsync({ location_id: props.location_id, - path_id: props.path_id + file_path_ids: [props.path_id] }) )} dialog={useDialog(props)} diff --git a/interface/app/$libraryId/Explorer/File/EncryptDialog.tsx b/interface/app/$libraryId/Explorer/File/EncryptDialog.tsx index b2459c576..9daf21a7a 100644 --- a/interface/app/$libraryId/Explorer/File/EncryptDialog.tsx +++ b/interface/app/$libraryId/Explorer/File/EncryptDialog.tsx @@ -71,10 +71,9 @@ // algorithm: data.encryptionAlgo as Algorithm, // key_uuid: data.key, // location_id: props.location_id, -// path_id: props.path_id, +// file_path_ids: [props.path_id], // metadata: data.metadata, -// preview_media: data.previewMedia, -// output_path: data.outputPath || null +// preview_media: data.previewMedia // }) // )} // dialog={useDialog(props)} diff --git a/interface/app/$libraryId/Explorer/File/EraseDialog.tsx b/interface/app/$libraryId/Explorer/File/EraseDialog.tsx index ce3b74b20..4ffe13e20 100644 --- a/interface/app/$libraryId/Explorer/File/EraseDialog.tsx +++ b/interface/app/$libraryId/Explorer/File/EraseDialog.tsx @@ -30,7 +30,7 @@ export default (props: Props) => { onSubmit={form.handleSubmit((data) => eraseFile.mutateAsync({ location_id: props.location_id, - path_id: props.path_id, + file_path_ids: [props.path_id], passes: data.passes.toString() }) )} diff --git a/interface/app/$libraryId/Explorer/File/RenameTextBox.tsx b/interface/app/$libraryId/Explorer/File/RenameTextBox.tsx index 32313d08c..95bcaa79a 100644 --- a/interface/app/$libraryId/Explorer/File/RenameTextBox.tsx +++ b/interface/app/$libraryId/Explorer/File/RenameTextBox.tsx @@ -58,8 +58,12 @@ export default ({ if (newName !== oldName) { renameFile.mutate({ location_id: filePathData.location_id, - file_name: oldName, - new_file_name: newName + kind: { + One: { + from_file_path_id: filePathData.id, + to: newName + } + } }); } } diff --git a/interface/app/$libraryId/Explorer/File/Thumb.tsx b/interface/app/$libraryId/Explorer/File/Thumb.tsx index aa6b42df1..edda0613b 100644 --- a/interface/app/$libraryId/Explorer/File/Thumb.tsx +++ b/interface/app/$libraryId/Explorer/File/Thumb.tsx @@ -51,13 +51,13 @@ const Thumbnail = memo( videoBarsSize ? size && size.height >= size.width ? 
{ - borderLeftWidth: videoBarsSize, - borderRightWidth: videoBarsSize - } + borderLeftWidth: videoBarsSize, + borderRightWidth: videoBarsSize + } : { - borderTopWidth: videoBarsSize, - borderBottomWidth: videoBarsSize - } + borderTopWidth: videoBarsSize, + borderBottomWidth: videoBarsSize + } : {} } onLoad={props.onLoad} @@ -75,11 +75,11 @@ const Thumbnail = memo( props.cover ? {} : size - ? { + ? { marginTop: Math.floor(size.height / 2) - 2, marginLeft: Math.floor(size.width / 2) - 2 - } - : { display: 'none' } + } + : { display: 'none' } } className={clsx( props.cover @@ -200,9 +200,9 @@ function FileThumb({ size, cover, ...props }: ThumbProps) { className={clsx( 'relative flex shrink-0 items-center justify-center', size && - kind !== 'Video' && - thumbType !== ThumbType.Icon && - 'border-2 border-transparent', + kind !== 'Video' && + thumbType !== ThumbType.Icon && + 'border-2 border-transparent', size || ['h-full', cover ? 'w-full overflow-hidden' : 'w-[90%]'], props.className )} @@ -299,9 +299,9 @@ function FileThumb({ size, cover, ...props }: ThumbProps) { 'shadow shadow-black/30' ], size && - (kind === 'Video' - ? 'border-x-0 border-black' - : size > 60 && 'border-2 border-app-line'), + (kind === 'Video' + ? 'border-x-0 border-black' + : size > 60 && 'border-2 border-app-line'), props.className )} crossOrigin={ThumbType.Original && 'anonymous'} // Here it is ok, because it is not a react attr diff --git a/interface/app/$libraryId/Explorer/View/index.tsx b/interface/app/$libraryId/Explorer/View/index.tsx index 935daf3ca..09d10e446 100644 --- a/interface/app/$libraryId/Explorer/View/index.tsx +++ b/interface/app/$libraryId/Explorer/View/index.tsx @@ -61,7 +61,7 @@ export const ViewItem = ({ data, children, ...props }: ViewItemProps) => { updateAccessTime.mutate(data.item.object_id); } - openFilePath(library.uuid, filePath.id); + openFilePath(library.uuid, [filePath.id]); } else { const { kind } = getExplorerItemData(data); diff --git a/interface/app/$libraryId/Layout/Sidebar/JobManager/Job.tsx b/interface/app/$libraryId/Layout/Sidebar/JobManager/Job.tsx index b647267ca..c38d12da1 100644 --- a/interface/app/$libraryId/Layout/Sidebar/JobManager/Job.tsx +++ b/interface/app/$libraryId/Layout/Sidebar/JobManager/Job.tsx @@ -32,16 +32,17 @@ const getNiceData = ( name: isGroup ? 'Indexing paths' : job.metadata?.location_path - ? `Indexed paths at ${job.metadata?.location_path} ` - : `Processing added location...`, + ? `Indexed paths at ${job.metadata?.location_path} ` + : `Processing added location...`, icon: Folder, subtext: `${numberWithCommas(job.metadata?.total_paths || 0)} ${appendPlural(job, 'path')}` }, thumbnailer: { - name: `${job.status === 'Running' || job.status === 'Queued' - ? 'Generating thumbnails' - : 'Generated thumbnails' - }`, + name: `${ + job.status === 'Running' || job.status === 'Queued' + ? 'Generating thumbnails' + : 'Generated thumbnails' + }`, icon: Camera, subtext: `${numberWithCommas(job.completed_task_count)} of ${numberWithCommas( job.task_count @@ -53,10 +54,11 @@ const getNiceData = ( subtext: `${numberWithCommas(job.task_count)} ${appendPlural(job, 'item')}` }, file_identifier: { - name: `${job.status === 'Running' || job.status === 'Queued' - ? 'Extracting metadata' - : 'Extracted metadata' - }`, + name: `${ + job.status === 'Running' || job.status === 'Queued' + ? 
'Extracting metadata' + : 'Extracted metadata' + }`, icon: Eye, subtext: job.message || diff --git a/interface/app/$libraryId/Layout/Sidebar/JobManager/JobGroup.tsx b/interface/app/$libraryId/Layout/Sidebar/JobManager/JobGroup.tsx index ce9bb8a3c..7642a3ee8 100644 --- a/interface/app/$libraryId/Layout/Sidebar/JobManager/JobGroup.tsx +++ b/interface/app/$libraryId/Layout/Sidebar/JobManager/JobGroup.tsx @@ -72,8 +72,9 @@ function JobGroup({ data, clearJob }: JobGroupProps) {

{allJobsCompleted - ? `Added location "${data.metadata.init.location.name || '' - }"` + ? `Added location "${ + data.metadata.init.location.name || '' + }"` : `Indexing "${data.metadata.init.location.name || ''}"`}

diff --git a/interface/app/$libraryId/Layout/Sidebar/index.tsx b/interface/app/$libraryId/Layout/Sidebar/index.tsx index 093821d85..914ae7521 100644 --- a/interface/app/$libraryId/Layout/Sidebar/index.tsx +++ b/interface/app/$libraryId/Layout/Sidebar/index.tsx @@ -14,7 +14,7 @@ export default () => {

{showControls && } diff --git a/interface/app/$libraryId/location/$id.tsx b/interface/app/$libraryId/location/$id.tsx index 6794eeaf8..c4efee057 100644 --- a/interface/app/$libraryId/location/$id.tsx +++ b/interface/app/$libraryId/location/$id.tsx @@ -1,3 +1,6 @@ +import { useInfiniteQuery } from '@tanstack/react-query'; +import { useEffect, useMemo } from 'react'; +import { z } from 'zod'; import { ExplorerItem, useLibraryContext, @@ -6,9 +9,6 @@ import { useRspcLibraryContext } from '@sd/client'; import { Folder } from '~/components/Folder'; -import { useInfiniteQuery } from '@tanstack/react-query'; -import { useEffect, useMemo } from 'react'; -import { z } from 'zod'; import { getExplorerStore, useExplorerStore, @@ -57,14 +57,16 @@ export const Component = () => { <> +
{path ? getLastSectionOfPath(path) : location.data?.name} - {location.data && } + {location.data && ( + + )}
} right={ diff --git a/interface/app/$libraryId/location/LocationOptions.tsx b/interface/app/$libraryId/location/LocationOptions.tsx index e23a657a8..079728d38 100644 --- a/interface/app/$libraryId/location/LocationOptions.tsx +++ b/interface/app/$libraryId/location/LocationOptions.tsx @@ -58,7 +58,7 @@ export default function LocationOptions({ location, path }: { location: Location autoFocus className="mb-2" value={currentPath ?? ''} - onChange={() => {}} + onChange={() => { }} right={