remove unused code + cleanup Rust imports

Oscar Beaumont 2022-05-30 22:26:59 +08:00
parent 562efe3c6b
commit 6e0c944d2e
57 changed files with 328 additions and 1178 deletions

Cargo.lock generated (264 changed lines)
View file

@ -131,7 +131,7 @@ dependencies = [
"actix-utils",
"futures-core",
"futures-util",
"mio 0.8.3",
"mio",
"num_cpus",
"socket2",
"tokio",
@ -336,7 +336,7 @@ version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d52a9bb7ec0cf484c551830a7ce27bd20d67eac647e1befb56b0be4ee39a55d2"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -431,7 +431,7 @@ dependencies = [
"slab",
"socket2",
"waker-fn",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -469,7 +469,7 @@ dependencies = [
"libc",
"once_cell",
"signal-hook",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -623,7 +623,7 @@ checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
dependencies = [
"hermit-abi",
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -1055,7 +1055,7 @@ dependencies = [
"num-traits",
"serde",
"time 0.1.44",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -1141,7 +1141,7 @@ checksum = "b3616f750b84d8f0de8a58bda93e08e2a81ad3f523089b05f1dffecab48c6cbd"
dependencies = [
"atty",
"lazy_static",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -1586,7 +1586,7 @@ checksum = "de0a745c25b32caa56b82a3950f5fec7893a960f4c10ca3b02060b0c38d8c2ce"
dependencies = [
"libc",
"libdbus-sys",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -1698,7 +1698,7 @@ checksum = "1b1d1d91c932ef41c0f2663aa8b0ca0342d444d842c06914aa0a7e352d0bada6"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -1709,7 +1709,7 @@ checksum = "4ebda144c4fe02d1f7ea1a7d9641b6fc6b580adcfa024ae48797ecdeb6825b4d"
dependencies = [
"libc",
"redox_users",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -2009,7 +2009,7 @@ dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.2.13",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -2155,41 +2155,6 @@ version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2022715d62ab30faffd124d40b76f4134a550a87792276512b18d63272333394"
[[package]]
name = "fsevent"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ab7d1bd1bd33cc98b0889831b72da23c0aa4df9cec7e0702f46ecea04b35db6"
dependencies = [
"bitflags",
"fsevent-sys",
]
[[package]]
name = "fsevent-sys"
version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f41b048a94555da0f42f1d632e2e19510084fb8e303b0daa2816e733fb3644a0"
dependencies = [
"libc",
]
[[package]]
name = "fuchsia-zircon"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
dependencies = [
"bitflags",
"fuchsia-zircon-sys",
]
[[package]]
name = "fuchsia-zircon-sys"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7"
[[package]]
name = "funty"
version = "1.2.0"
@ -2419,7 +2384,7 @@ dependencies = [
"libc",
"log",
"rustversion",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -2518,7 +2483,7 @@ dependencies = [
"gobject-sys",
"libc",
"system-deps 6.0.2",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -2800,17 +2765,7 @@ checksum = "3c731c3e10504cc8ed35cfe2f1db4c9274c3d35fa486e3b31df46f068ef3e867"
dependencies = [
"libc",
"match_cfg",
"winapi 0.3.9",
]
[[package]]
name = "hotwatch"
version = "0.4.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39301670a6f5798b75f36a1b149a379a50df5aa7c71be50f4b41ec6eab445cb8"
dependencies = [
"log",
"notify",
"winapi",
]
[[package]]
@ -2944,7 +2899,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cbc0fa01ffc752e9dbc72818cdb072cd028b86be5e09dd04c5a643704fe101a9"
dependencies = [
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -3078,26 +3033,6 @@ dependencies = [
"adler32",
]
[[package]]
name = "inotify"
version = "0.7.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4816c66d2c8ae673df83366c18341538f234a26d65a9ecea5c348b453ac1d02f"
dependencies = [
"bitflags",
"inotify-sys",
"libc",
]
[[package]]
name = "inotify-sys"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e05c02b5e89bff3b946cedeca278abc628fe811e604f027c45a8aa3cf793d0eb"
dependencies = [
"libc",
]
[[package]]
name = "instant"
version = "0.1.12"
@ -3130,15 +3065,6 @@ dependencies = [
"syn",
]
[[package]]
name = "iovec"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2b3ea6ff95e175473f8ffe6a7eb7c00d054240321b84c57051175fe3c1e075e"
dependencies = [
"libc",
]
[[package]]
name = "ipconfig"
version = "0.3.0"
@ -3147,7 +3073,7 @@ checksum = "723519edce41262b05d4143ceb95050e4c614f483e78e9fd9e39a8275a84ad98"
dependencies = [
"socket2",
"widestring",
"winapi 0.3.9",
"winapi",
"winreg 0.7.0",
]
@ -3279,16 +3205,6 @@ version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9b7d56ba4a8344d6be9729995e6b06f928af29998cdf79fe390cbf6b1fee838"
[[package]]
name = "kernel32-sys"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7507624b29483431c0ba2d82aece8ca6cdba9382bff4ddd0f7490560c056098d"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "kuchiki"
version = "0.8.1"
@ -3429,7 +3345,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efbc0f03f9a775e9f6aed295c6a1ba2253c5757a9e03d55c6caa46a681abcddd"
dependencies = [
"cfg-if 1.0.0",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -4248,25 +4164,6 @@ dependencies = [
"adler",
]
[[package]]
name = "mio"
version = "0.6.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4afd66f5b91bf2a3bc13fad0e21caedac168ca4c707504e75585648ae80e4cc4"
dependencies = [
"cfg-if 0.1.10",
"fuchsia-zircon",
"fuchsia-zircon-sys",
"iovec",
"kernel32-sys",
"libc",
"log",
"miow",
"net2",
"slab",
"winapi 0.2.8",
]
[[package]]
name = "mio"
version = "0.8.3"
@ -4279,30 +4176,6 @@ dependencies = [
"windows-sys",
]
[[package]]
name = "mio-extras"
version = "2.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "52403fe290012ce777c4626790c8951324a2b9e3316b3143779c72b029742f19"
dependencies = [
"lazycell",
"log",
"mio 0.6.23",
"slab",
]
[[package]]
name = "miow"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebd808424166322d4a38da87083bfddd3ac4c131334ed55856112eb06d46944d"
dependencies = [
"kernel32-sys",
"net2",
"winapi 0.2.8",
"ws2_32-sys",
]
[[package]]
name = "mobc"
version = "0.7.3"
@ -4498,7 +4371,7 @@ dependencies = [
"futures-util",
"lazy_static",
"lru",
"mio 0.8.3",
"mio",
"mysql_common",
"native-tls",
"once_cell",
@ -4618,17 +4491,6 @@ dependencies = [
"jni-sys",
]
[[package]]
name = "net2"
version = "0.2.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "391630d12b68002ae1e25e8f974306474966550ad82dac6886fb8910c19568ae"
dependencies = [
"cfg-if 0.1.10",
"libc",
"winapi 0.3.9",
]
[[package]]
name = "netlink-packet-core"
version = "0.4.2"
@ -4735,24 +4597,6 @@ dependencies = [
"minimal-lexical",
]
[[package]]
name = "notify"
version = "4.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ae03c8c853dba7bfd23e571ff0cff7bc9dceb40a4cd684cd1681824183f45257"
dependencies = [
"bitflags",
"filetime",
"fsevent",
"fsevent-sys",
"inotify",
"libc",
"mio 0.6.23",
"mio-extras",
"walkdir",
"winapi 0.3.9",
]
[[package]]
name = "notify-rust"
version = "4.5.8"
@ -4770,7 +4614,7 @@ version = "0.3.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c28774a7fd2fbb4f0babd8237ce554b73af68021b5f695a3cebd6c59bac0980f"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -4949,7 +4793,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e0524af9508f9b5c4eb41dce095860456727748f63b478d625f119a70e0d764a"
dependencies = [
"pathdiff",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -5043,7 +4887,7 @@ checksum = "0eca3ecae1481e12c3d9379ec541b238a16f0b75c9a409942daa8ec20dbfdb62"
dependencies = [
"log",
"serde",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -5053,7 +4897,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2c92f2b54f081d635c77e7120862d48db8e91f7f21cef23ab1b4fe9971c59f55"
dependencies = [
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -5144,7 +4988,7 @@ dependencies = [
"libc",
"redox_syscall 0.1.57",
"smallvec",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -5158,7 +5002,7 @@ dependencies = [
"libc",
"redox_syscall 0.2.13",
"smallvec",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -5486,7 +5330,7 @@ dependencies = [
"libc",
"log",
"wepoll-ffi",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -6105,7 +5949,7 @@ version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3acd125665422973a33ac9d3dd2df85edad0f4ae9b00dafb1a05e43a9f5ef8e7"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -6212,7 +6056,7 @@ dependencies = [
"spin 0.5.2",
"untrusted",
"web-sys",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -6227,7 +6071,7 @@ dependencies = [
"spin 0.5.2",
"untrusted",
"web-sys",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -6451,7 +6295,6 @@ dependencies = [
"fs_extra",
"futures",
"hostname 0.3.1",
"hotwatch",
"image",
"include_dir",
"int-enum",
@ -6461,7 +6304,6 @@ dependencies = [
"ring 0.17.0-alpha.11",
"serde",
"serde_json",
"swift-rs",
"sysinfo",
"thiserror",
"tokio",
@ -6785,7 +6627,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b0d94659ad3c2137fef23ae75b03d5241d633f8acded53d672decfa0e6e0caef"
dependencies = [
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -6870,7 +6712,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0"
dependencies = [
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -7080,7 +6922,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0c2e86926081dda636c546d8c5e641661049d7562a68f5488be4a1f7f66f6086"
dependencies = [
"libc",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -7135,7 +6977,7 @@ dependencies = [
"ntapi",
"once_cell",
"rayon",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -7445,7 +7287,7 @@ dependencies = [
"libc",
"redox_syscall 0.2.13",
"remove_dir_all",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -7563,7 +7405,7 @@ checksum = "6db9e6914ab8b1ae1c260a4ae7a49b6c5611b40328a735b21862567685e73255"
dependencies = [
"libc",
"wasi 0.10.0+wasi-snapshot-preview1",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -7608,7 +7450,7 @@ dependencies = [
"bytes",
"libc",
"memchr",
"mio 0.8.3",
"mio",
"num_cpus",
"once_cell",
"parking_lot 0.12.0",
@ -7616,7 +7458,7 @@ dependencies = [
"signal-hook-registry",
"socket2",
"tokio-macros",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -8147,7 +7989,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "808cf2735cd4b6866113f648b791c6adc5714537bc222d9347bb203386ffda56"
dependencies = [
"same-file",
"winapi 0.3.9",
"winapi",
"winapi-util",
]
@ -8421,12 +8263,6 @@ version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c48bd20df7e4ced539c12f570f937c6b4884928a87fee70a479d72f031d4e0"
[[package]]
name = "winapi"
version = "0.2.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "167dc9d6949a9b857f3451275e911c3f44255842c1f7a76f33c55103a909087a"
[[package]]
name = "winapi"
version = "0.3.9"
@ -8437,12 +8273,6 @@ dependencies = [
"winapi-x86_64-pc-windows-gnu",
]
[[package]]
name = "winapi-build"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2d315eee3b34aca4797b2da6b13ed88266e6d612562a0c46390af8299fc699bc"
[[package]]
name = "winapi-i686-pc-windows-gnu"
version = "0.4.0"
@ -8455,7 +8285,7 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -8474,7 +8304,7 @@ dependencies = [
"byteorder",
"md5",
"rand 0.7.3",
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -8736,7 +8566,7 @@ version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0120db82e8a1e0b9fb3345a539c478767c0048d842860994d96113d5b667bd69"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -8745,7 +8575,7 @@ version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "80d0f4e272c85def139476380b12f9ac60926689dd2e01d4923222f40580869d"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -8774,7 +8604,7 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7daf138b6b14196e3830a588acf1e86966c694d3e8fb026fb105b8b5dca07e6e"
dependencies = [
"winapi 0.3.9",
"winapi",
]
[[package]]
@ -8809,16 +8639,6 @@ dependencies = [
"windows-implement",
]
[[package]]
name = "ws2_32-sys"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d59cefebd0c892fa2dd6de581e937301d8552cb44489cdff035c6187cb63fa5e"
dependencies = [
"winapi 0.2.8",
"winapi-build",
]
[[package]]
name = "wyz"
version = "0.4.0"

View file

@ -1,6 +1,5 @@
[workspace]
members = [
"apps/debug",
"apps/desktop/src-tauri",
"core",
"core/prisma",

View file

@ -1 +0,0 @@
/target/

View file

@ -1,19 +0,0 @@
[package]
name = "debug"
version = "0.1.0"
edition = "2021"
[dependencies]
# anyhow = "1.0.56"
# data-encoding = "2.3.2"
# kamadak-exif = "0.5.4"
# ring = "0.16.20"
# thumbnailer = "0.4.0"
# mime = "0.3.16"
# sha256 = "1.0.3"
# ffmpeg-next = "5.0.3"
# sdcore = { path = "../../core" }
# uhlc = "0.4.1"
# matroska = "0.11.0"
tokio = { version = "1.17.0", features = ["sync", "rt"] }
# chrono = "0.4.19"

View file

@ -1,11 +0,0 @@
#![allow(dead_code)]
// use sdcore::{prisma, sync::engine::test, sync::FakeCoreContext};
use std::fs::File;
fn main() {
let file = File::open("/Users/james/Desktop/Cloud/preview.mp4").unwrap();
println!("{:?}", file.metadata().unwrap())
}

View file

@ -1,58 +0,0 @@
// // DEPRECATE EVERYTHING IN THIS FILE
// use anyhow::Result;
// use sdcore::{
// file::{indexer, retrieve, retrieve::Directory, watcher::watch_dir},
// state::{client, client::ClientState},
// sys,
// sys::{volumes, volumes::Volume},
// };
// #[tauri::command(async)]
// pub async fn scan_dir(path: String) -> Result<(), String> {
// let files = indexer::scan(&path).await.map_err(|e| e.to_string());
// println!("file: {:?}", files);
// Ok(())
// }
// #[tauri::command(async)]
// pub async fn get_files(path: String) -> Result<Directory, String> {
// Ok(
// retrieve::get_dir_with_contents(&path)
// .await
// .map_err(|e| e.to_string())?,
// )
// }
// #[tauri::command]
// pub fn get_config() -> ClientState {
// client::get()
// }
// #[tauri::command]
// pub fn get_mounts() -> Result<Vec<Volume>, String> {
// Ok(volumes::get().unwrap())
// }
// #[tauri::command(async)]
// pub async fn test_scan() -> Result<(), String> {
// Ok(
// indexer::test_scan("/Users/jamie")
// .await
// .map_err(|e| e.to_string())?,
// )
// }
// #[tauri::command]
// pub async fn start_watcher(path: &str) -> Result<(), String> {
// println!("starting watcher for: {:?}", path);
// watch_dir(&path);
// Ok(())
// }
// #[tauri::command]
// pub async fn create_location(path: &str) -> Result<(), String> {
// let _location = sys::locations::create_location(path);
// Ok(())
// }

View file

@ -11,7 +11,6 @@ edition = "2021"
p2p = ["dep:libp2p"] # This feature controlls whether the Spacedrive Core contains the Peer to Peer syncing engine (It isn't required for the hosted core so we can disable it).
[dependencies]
hotwatch = "0.4.6"
hostname = "0.3.1"
# Universal Dependencies
@ -43,7 +42,4 @@ image = "0.24.1"
webp = "0.2.2"
uhlc = "0.4.1"
ffmpeg-next = "5.0.3"
fs_extra = "1.2.0"
[target.'cfg(target_os = "macos")'.dependencies]
swift-rs = "0.2.3"
fs_extra = "1.2.0"

View file

@ -1,13 +0,0 @@
use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum EncryptionAlgorithm {
None = 0,
AES128 = 1,
AES192 = 2,
AES256 = 3,
}

View file

@ -1 +0,0 @@
pub mod encryption;

View file

@ -1,20 +0,0 @@
use crate::{prisma, prisma::PrismaClient};
use thiserror::Error;
pub mod migrate;
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Failed to connect to database")]
MissingConnection,
#[error("Unable find current_library in the client config")]
MalformedConfig,
#[error("Unable to initialize the Prisma client")]
ClientError(#[from] prisma::NewClientError),
}
pub async fn create_connection(path: &str) -> Result<PrismaClient, DatabaseError> {
println!("Creating database connection: {:?}", path);
let client = prisma::new_client_with_url(&format!("file:{}", &path)).await?;
Ok(client)
}

View file

@ -1,7 +1,5 @@
extern crate ffmpeg_next as ffmpeg;
use chrono::NaiveDateTime;
use ffmpeg::{dictionary::Iter, format};
use std::{ffi::OsStr, path::Path};
use ffmpeg::format;
#[derive(Default, Debug)]
pub struct MediaItem {
@ -25,8 +23,8 @@ pub struct Stream {
#[derive(Debug)]
pub enum StreamKind {
Video(VideoStream),
Audio(AudioStream),
// Video(VideoStream),
// Audio(AudioStream),
}
#[derive(Debug)]
@ -46,91 +44,91 @@ pub struct AudioStream {
pub rate: u32,
}
fn extract(iter: &mut Iter, key: &str) -> Option<String> {
iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
}
// fn extract(iter: &mut Iter, key: &str) -> Option<String> {
// iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
// }
pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> {
ffmpeg::init().unwrap();
// pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> {
// ffmpeg::init().unwrap();
let mut name = Path::new(path)
.file_name()
.and_then(OsStr::to_str)
.map(ToString::to_string)
.unwrap_or(String::new());
// let mut name = Path::new(path)
// .file_name()
// .and_then(OsStr::to_str)
// .map(ToString::to_string)
// .unwrap_or(String::new());
// strip to exact potential date length and attempt to parse
name = name.chars().take(19).collect();
// specifically OBS uses this format for time, other checks could be added
let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
// // strip to exact potential date length and attempt to parse
// name = name.chars().take(19).collect();
// // specifically OBS uses this format for time, other checks could be added
// let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
match ffmpeg::format::input(&path) {
Ok(context) => {
let mut media_item = MediaItem::default();
let metadata = context.metadata();
let mut iter = metadata.iter();
// match ffmpeg::format::input(&path) {
// Ok(context) => {
// let mut media_item = MediaItem::default();
// let metadata = context.metadata();
// let mut iter = metadata.iter();
// creation_time is usually the creation date of the file
media_item.created_at = extract(&mut iter, "creation_time");
// apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
media_item.created_at = extract(&mut iter, "creationdate");
// fallback to potential time if exists
if media_item.created_at.is_none() {
media_item.created_at = potential_date.map(|d| d.to_string()).ok();
}
// origin metadata
media_item.brand = extract(&mut iter, "major_brand");
media_item.brand = extract(&mut iter, "make");
media_item.model = extract(&mut iter, "model");
// // creation_time is usually the creation date of the file
// media_item.created_at = extract(&mut iter, "creation_time");
// // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
// media_item.created_at = extract(&mut iter, "creationdate");
// // fallback to potential time if exists
// if media_item.created_at.is_none() {
// media_item.created_at = potential_date.map(|d| d.to_string()).ok();
// }
// // origin metadata
// media_item.brand = extract(&mut iter, "major_brand");
// media_item.brand = extract(&mut iter, "make");
// media_item.model = extract(&mut iter, "model");
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
media_item.best_video_stream_index = stream.index();
}
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
media_item.best_audio_stream_index = stream.index();
}
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
media_item.best_subtitle_stream_index = stream.index();
}
media_item.duration_seconds =
context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
// if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
// media_item.best_video_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
// media_item.best_audio_stream_index = stream.index();
// }
// if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
// media_item.best_subtitle_stream_index = stream.index();
// }
// media_item.duration_seconds =
// context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
for stream in context.streams() {
let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
// for stream in context.streams() {
// let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
let mut stream_item = Stream {
codec: codec.id().name().to_string(),
frames: stream.frames() as f64,
duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
kind: None,
};
// let mut stream_item = Stream {
// codec: codec.id().name().to_string(),
// frames: stream.frames() as f64,
// duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
// kind: None,
// };
if codec.medium() == ffmpeg::media::Type::Video {
if let Ok(video) = codec.decoder().video() {
stream_item.kind = Some(StreamKind::Video(VideoStream {
bitrate: video.bit_rate(),
format: video.format(),
width: video.width(),
height: video.height(),
aspect_ratio: video.aspect_ratio().to_string(),
}));
}
} else if codec.medium() == ffmpeg::media::Type::Audio {
if let Ok(audio) = codec.decoder().audio() {
stream_item.kind = Some(StreamKind::Audio(AudioStream {
channels: audio.channels(),
bitrate: audio.bit_rate(),
rate: audio.rate(),
format: audio.format(),
}));
}
}
media_item.steams.push(stream_item);
}
println!("{:#?}", media_item);
}
// if codec.medium() == ffmpeg::media::Type::Video {
// if let Ok(video) = codec.decoder().video() {
// stream_item.kind = Some(StreamKind::Video(VideoStream {
// bitrate: video.bit_rate(),
// format: video.format(),
// width: video.width(),
// height: video.height(),
// aspect_ratio: video.aspect_ratio().to_string(),
// }));
// }
// } else if codec.medium() == ffmpeg::media::Type::Audio {
// if let Ok(audio) = codec.decoder().audio() {
// stream_item.kind = Some(StreamKind::Audio(AudioStream {
// channels: audio.channels(),
// bitrate: audio.bit_rate(),
// rate: audio.rate(),
// format: audio.format(),
// }));
// }
// }
// media_item.steams.push(stream_item);
// }
// println!("{:#?}", media_item);
// }
Err(error) => println!("error: {}", error),
}
Ok(())
}
// Err(error) => println!("error: {}", error),
// }
// Ok(())
// }

View file

@ -1,2 +1,5 @@
pub mod metadata;
pub mod thumb;
mod metadata;
mod thumb;
pub use metadata::*;
pub use thumb::*;
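The two-line change above is the pattern this commit applies across the crate: submodules become private (`mod`) and their public items are re-exported (`pub use`), so call sites drop a path segment (for example `encode::thumb::ThumbnailJob` becomes `encode::ThumbnailJob`). A minimal, self-contained sketch of the idea, with illustrative names only:

```rust
// Sketch of the `mod` + `pub use` flattening used throughout this commit;
// module and item names here are illustrative, not the crate's real ones.
mod encode {
    // The submodule stays private...
    mod thumb {
        pub struct ThumbnailJob {
            pub location_id: i32,
        }
    }

    // ...and its items are re-exported, so callers write
    // `encode::ThumbnailJob` instead of `encode::thumb::ThumbnailJob`.
    pub use self::thumb::*;
}

fn main() {
    let job = encode::ThumbnailJob { location_id: 1 };
    println!("thumbnailing location {}", job.location_id);
}
```

The trade-off is that the deeper path is no longer reachable from outside the parent module, which is exactly the narrowing of the public surface this cleanup is after.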

View file

@ -1,7 +1,7 @@
use crate::job::jobs::JobReportUpdate;
use crate::node::state;
use crate::job::JobReportUpdate;
use crate::node::get_nodestate;
use crate::{
job::{jobs::Job, worker::WorkerContext},
job::{Job, WorkerContext},
prisma::file_path,
CoreContext,
};
@ -30,9 +30,9 @@ impl Job for ThumbnailJob {
"file_identifier"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
let config = state::get();
let config = get_nodestate();
let core_ctx = ctx.core_ctx.clone();
let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
let location = sys::get_location(&core_ctx, self.location_id).await?;
// create all necessary directories if they don't exist
fs::create_dir_all(

View file

@ -1 +0,0 @@

View file

@ -1 +0,0 @@

View file

@ -4,13 +4,7 @@ use data_encoding::HEXLOWER;
use ring::digest::{Context, SHA256};
use std::convert::TryInto;
use std::fs::File;
use std::io::{BufReader, Read};
#[cfg(target_family = "unix")]
use std::os::unix::prelude::FileExt;
#[cfg(target_family = "windows")]
use std::os::windows::prelude::*;
use std::path::PathBuf;
static SAMPLE_COUNT: u64 = 4;
@ -20,10 +14,16 @@ fn read_at(file: &File, offset: u64, size: u64) -> Result<Vec<u8>> {
let mut buf = vec![0u8; size as usize];
#[cfg(target_family = "unix")]
file.read_exact_at(&mut buf, offset)?;
{
use std::os::unix::prelude::FileExt;
file.read_exact_at(&mut buf, offset)?;
}
#[cfg(target_family = "windows")]
file.seek_read(&mut buf, offset)?;
{
use std::os::windows::prelude::*;
file.seek_read(&mut buf, offset)?;
}
Ok(buf)
}
@ -58,21 +58,21 @@ pub fn generate_cas_id(path: PathBuf, size: u64) -> Result<String> {
Ok(hex)
}
pub fn full_checksum(path: &str) -> Result<String> {
// read file as buffer and convert to digest
let mut reader = BufReader::new(File::open(path).unwrap());
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
context.update(&buffer[..count]);
}
let digest = context.finish();
// create a lowercase hash from
let hex = HEXLOWER.encode(digest.as_ref());
// pub fn full_checksum(path: &str) -> Result<String> {
// // read file as buffer and convert to digest
// let mut reader = BufReader::new(File::open(path).unwrap());
// let mut context = Context::new(&SHA256);
// let mut buffer = [0; 1024];
// loop {
// let count = reader.read(&mut buffer)?;
// if count == 0 {
// break;
// }
// context.update(&buffer[..count]);
// }
// let digest = context.finish();
// // create a lowercase hash from
// let hex = HEXLOWER.encode(digest.as_ref());
Ok(hex)
}
// Ok(hex)
// }
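The `read_at` hunk above moves each platform-specific `use` inside its `#[cfg]` block, so the extension trait is only imported on the platform that actually compiles that branch. A self-contained sketch of the resulting shape, with `std::io::Result` standing in for the crate's `anyhow::Result`:

```rust
use std::fs::File;
use std::io;

/// Read `size` bytes at `offset` without moving the file cursor. Each
/// platform's extension trait is imported inside its own `#[cfg]` block,
/// so the other platform never sees an unused import.
fn read_at(file: &File, offset: u64, size: u64) -> io::Result<Vec<u8>> {
    let mut buf = vec![0u8; size as usize];

    #[cfg(target_family = "unix")]
    {
        use std::os::unix::fs::FileExt;
        file.read_exact_at(&mut buf, offset)?;
    }

    #[cfg(target_family = "windows")]
    {
        use std::os::windows::fs::FileExt;
        // Note: seek_read may return fewer bytes than requested.
        file.seek_read(&mut buf, offset)?;
    }

    Ok(buf)
}

fn main() -> io::Result<()> {
    let file = File::open("Cargo.toml")?; // any readable file will do
    println!("{:?}", read_at(&file, 0, 8)?);
    Ok(())
}
```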

View file

@ -1,11 +1,11 @@
use std::fs;
use std::path::Path;
use crate::job::jobs::JobReportUpdate;
use crate::sys::locations::get_location;
use crate::job::JobReportUpdate;
use crate::sys::get_location;
use crate::{
file::FileError,
job::{jobs::Job, worker::WorkerContext},
job::{Job, WorkerContext},
prisma::file_path,
CoreContext,
};

View file

@ -1,2 +1,5 @@
pub mod checksum;
pub mod identifier;
mod checksum;
mod identifier;
pub use checksum::*;
pub use identifier::*;

View file

@ -1 +1,3 @@
pub mod open;
mod open;
pub use open::*;

View file

@ -1,9 +1,9 @@
use crate::{
encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, File, FileError, FilePath},
node::state,
prisma::{file, file_path},
sys::locations::get_location,
encode::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, FileError, FilePath},
node::get_nodestate,
prisma::file_path,
sys::get_location,
CoreContext,
};
use std::path::Path;
@ -14,7 +14,7 @@ pub async fn open_dir(
path: &str,
) -> Result<DirectoryWithContents, FileError> {
let db = &ctx.database;
let config = state::get();
let config = get_nodestate();
// get location
let location = get_location(ctx, location_id.clone()).await?;

View file

@ -1,14 +1,12 @@
use crate::job::{
jobs::{Job, JobReportUpdate},
worker::WorkerContext,
};
use crate::job::{Job, JobReportUpdate, WorkerContext};
use anyhow::Result;
use self::scan::ScanProgress;
pub mod pathctx;
pub mod scan;
mod scan;
pub use {pathctx::PathContext, scan::scan_path};
pub use scan::*;
pub use scan::scan_path;
#[derive(Debug)]
pub struct IndexerJob {
@ -37,3 +35,17 @@ impl Job for IndexerJob {
Ok(())
}
}
// // PathContext provides the indexer with instruction to handle particular directory structures and identify rich context.
// pub struct PathContext {
// // an app specific key "com.github.repo"
// pub key: String,
// pub name: String,
// pub is_dir: bool,
// // possible file extensions for this path
// pub extensions: Vec<String>,
// // sub-paths that must be found
// pub must_contain_sub_paths: Vec<String>,
// // sub-paths that are ignored
// pub always_ignored_sub_paths: Option<String>,
// }
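`IndexerJob` above implements the core's `Job` trait and is executed by a worker. The real trait and `WorkerContext` live elsewhere in the crate; the sketch below only assumes the shape visible in this diff (a name plus an async `run`), with stand-in types and `async_trait`:

```rust
use anyhow::Result;
use async_trait::async_trait;

// Stand-in for the core's real worker context; only the shape matters here.
pub struct WorkerContext;

// Hedged sketch of a job abstraction like the one IndexerJob implements:
// a name for reporting plus an async run method driven by a worker.
#[async_trait]
pub trait Job: Send + Sync {
    fn name(&self) -> &'static str;
    async fn run(&self, ctx: WorkerContext) -> Result<()>;
}

pub struct IndexerJob {
    pub path: String,
}

#[async_trait]
impl Job for IndexerJob {
    fn name(&self) -> &'static str {
        "indexer"
    }

    async fn run(&self, _ctx: WorkerContext) -> Result<()> {
        println!("indexing {}", self.path);
        Ok(())
    }
}

#[tokio::main]
async fn main() -> Result<()> {
    let job = IndexerJob { path: ".".into() };
    println!("running job: {}", job.name());
    job.run(WorkerContext).await
}
```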

View file

@ -1,13 +0,0 @@
// PathContext provides the indexer with instruction to handle particular directory structures and identify rich context.
pub struct PathContext {
// an app specific key "com.github.repo"
pub key: String,
pub name: String,
pub is_dir: bool,
// possible file extensions for this path
pub extensions: Vec<String>,
// sub-paths that must be found
pub must_contain_sub_paths: Vec<String>,
// sub-paths that are ignored
pub always_ignored_sub_paths: Option<String>,
}

View file

@ -1,4 +1,4 @@
use crate::sys::locations::{create_location, LocationResource};
use crate::sys::{create_location, LocationResource};
use crate::CoreContext;
use anyhow::{anyhow, Result};
use prisma_client_rust::prisma_models::PrismaValue;

View file

@ -10,7 +10,6 @@ use crate::{
pub mod cas;
pub mod explorer;
pub mod indexer;
pub mod watcher;
// A unique file
#[derive(Debug, Clone, Serialize, Deserialize, TS)]

View file

@ -1,25 +0,0 @@
use std::path::Path;
use hotwatch::{
blocking::{Flow, Hotwatch},
Event,
};
pub fn watch_dir(path: &str) {
let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!");
hotwatch
.watch(&path, |event: Event| {
if let Event::Write(path) = event {
println!("{:?} changed!", path);
// Flow::Exit
Flow::Continue
} else {
Flow::Continue
}
})
.expect("failed to watch file!");
hotwatch.run();
println!("watching directory {:?}", Path::new(&path));
}

View file

@ -3,9 +3,8 @@ use super::{
JobError,
};
use crate::{
node::state,
node::get_nodestate,
prisma::{job, node},
sync::{crdt::Replicate, engine::SyncContext},
CoreContext,
};
use anyhow::Result;
@ -148,7 +147,7 @@ impl JobReport {
}
}
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
let config = state::get();
let config = get_nodestate();
ctx.database
.job()
.create(
@ -179,17 +178,6 @@ impl JobReport {
}
}
#[derive(Clone)]
pub struct JobReportCreate {}
#[async_trait::async_trait]
impl Replicate for JobReport {
type Create = JobReportCreate;
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]

View file

@ -3,8 +3,11 @@ use thiserror::Error;
use crate::prisma;
pub mod jobs;
pub mod worker;
mod jobs;
mod worker;
pub use jobs::*;
pub use worker::*;
#[derive(Error, Debug)]
pub enum JobError {

View file

@ -1,5 +1,8 @@
use super::jobs::{JobReport, JobReportUpdate, JobStatus};
use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent, Job};
use super::{
jobs::{JobReport, JobReportUpdate, JobStatus},
Job,
};
use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent};
use std::{sync::Arc, time::Duration};
use tokio::{
sync::{

View file

@ -1,8 +1,8 @@
use crate::{
file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
node::state::NodeState,
file::cas::FileIdentifierJob, library::get_library_path, node::NodeState,
util::db::create_connection,
};
use job::jobs::{Job, JobReport, Jobs};
use job::{Job, JobReport, Jobs};
use prisma::PrismaClient;
use serde::{Deserialize, Serialize};
use std::{fs, sync::Arc};
@ -13,23 +13,16 @@ use tokio::sync::{
};
use ts_rs::TS;
use crate::encode::thumb::ThumbnailJob;
use crate::encode::ThumbnailJob;
// init modules
pub mod crypto;
pub mod db;
pub mod encode;
pub mod file;
pub mod job;
pub mod library;
pub mod node;
#[cfg(target_os = "p2p")]
pub mod p2p;
pub mod prisma;
pub mod sync;
pub mod sys;
pub mod util;
// pub mod native;
mod encode;
mod file;
mod job;
mod library;
mod node;
mod prisma;
mod sys;
mod util;
// a wrapper around external input with a returning sender channel for core to respond
#[derive(Debug)]
@ -109,7 +102,7 @@ impl CoreContext {
pub struct Node {
state: NodeState,
jobs: job::jobs::Jobs,
jobs: job::Jobs,
database: Arc<PrismaClient>,
// filetype_registry: library::TypeRegistry,
// extension_registry: library::ExtensionRegistry,
@ -154,7 +147,7 @@ impl Node {
// connect to default library
let database = Arc::new(
db::create_connection(&get_library_path(&data_dir))
create_connection(&get_library_path(&data_dir))
.await
.unwrap(),
);
@ -229,16 +222,14 @@ impl Node {
let ctx = self.get_context();
if self.state.libraries.len() == 0 {
match library::loader::create(&ctx, None).await {
match library::create(&ctx, None).await {
Ok(library) => println!("Created new library: {:?}", library),
Err(e) => println!("Error creating library: {:?}", e),
}
} else {
for library in self.state.libraries.iter() {
// init database for library
match library::loader::load(&ctx, &library.library_path, &library.library_uuid)
.await
{
match library::load(&ctx, &library.library_path, &library.library_uuid).await {
Ok(library) => println!("Loaded library: {:?}", library),
Err(e) => println!("Error loading library: {:?}", e),
}
@ -257,7 +248,7 @@ impl Node {
Ok(match cmd {
// CRUD for locations
ClientCommand::LocCreate { path } => {
let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
let loc = sys::new_location_and_scan(&ctx, &path).await?;
// ctx.queue_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc)
}
@ -291,8 +282,11 @@ impl Node {
// fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
// CoreResponse::Success(())
// }
ClientCommand::IdentifyUniqueFiles { id, path } => {
ctx.spawn_job(Box::new(FileIdentifierJob { location_id: id, path}));
ClientCommand::IdentifyUniqueFiles { id, path } => {
ctx.spawn_job(Box::new(FileIdentifierJob {
location_id: id,
path,
}));
CoreResponse::Success(())
}
})
@ -300,22 +294,18 @@ impl Node {
// query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
#[cfg(fdebug_assertions)]
println!("Core query: {:?}", query);
let ctx = self.get_context();
Ok(match query {
// return the client state from memory
ClientQuery::NodeGetState => CoreResponse::NodeGetState(self.state.clone()),
// get system volumes without saving to library
ClientQuery::SysGetVolumes => {
CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?)
}
ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?),
ClientQuery::SysGetLocations => {
CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?)
CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?)
}
// get location from library
ClientQuery::SysGetLocation { id } => {
CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?)
CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?)
}
// return contents of a directory for the explorer
ClientQuery::LibGetExplorerDir {
@ -323,7 +313,7 @@ impl Node {
location_id,
limit: _,
} => CoreResponse::LibGetExplorerDir(
file::explorer::open::open_dir(&ctx, &location_id, &path).await?,
file::explorer::open_dir(&ctx, &location_id, &path).await?,
),
ClientQuery::LibGetTags => todo!(),
ClientQuery::JobGetRunning => {
@ -332,9 +322,9 @@ impl Node {
ClientQuery::JobGetHistory => {
CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?)
}
ClientQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics(
library::statistics::Statistics::calculate(&ctx).await?,
),
ClientQuery::GetLibraryStatistics => {
CoreResponse::GetLibraryStatistics(library::Statistics::calculate(&ctx).await?)
}
ClientQuery::GetNodes => todo!(),
})
}
@ -409,15 +399,15 @@ pub enum CoreEvent {
#[ts(export)]
pub enum CoreResponse {
Success(()),
SysGetVolumes(Vec<sys::volumes::Volume>),
SysGetLocation(sys::locations::LocationResource),
SysGetLocations(Vec<sys::locations::LocationResource>),
SysGetVolumes(Vec<sys::Volume>),
SysGetLocation(sys::LocationResource),
SysGetLocations(Vec<sys::LocationResource>),
LibGetExplorerDir(file::DirectoryWithContents),
NodeGetState(NodeState),
LocCreate(sys::locations::LocationResource),
LocCreate(sys::LocationResource),
JobGetRunning(Vec<JobReport>),
JobGetHistory(Vec<JobReport>),
GetLibraryStatistics(library::statistics::Statistics),
GetLibraryStatistics(library::Statistics),
}
#[derive(Error, Debug)]
@ -441,7 +431,7 @@ pub enum CoreError {
pub enum CoreResource {
Client,
Library,
Location(sys::locations::LocationResource),
Location(sys::LocationResource),
File(file::File),
Job(JobReport),
Tag,

View file

@ -1,11 +1,10 @@
use anyhow::Result;
use uuid::Uuid;
use crate::node::state::LibraryState;
use crate::{db::migrate, node::state, prisma::library};
use crate::{CoreContext, Node};
use super::LibraryError;
use crate::node::{get_nodestate, LibraryState};
use crate::prisma::library;
use crate::util::db::run_migrations;
use crate::CoreContext;
pub static LIBRARY_DB_NAME: &str = "library.db";
pub static DEFAULT_NAME: &str = "My Library";
@ -15,35 +14,35 @@ pub fn get_library_path(data_path: &str) -> String {
format!("{}/{}", path, LIBRARY_DB_NAME)
}
pub async fn get(core: &Node) -> Result<library::Data, LibraryError> {
let config = state::get();
let db = &core.database;
// pub async fn get(core: &Node) -> Result<library::Data, LibraryError> {
// let config = get_nodestate();
// let db = &core.database;
let library_state = config.get_current_library();
// let library_state = config.get_current_library();
println!("{:?}", library_state);
// println!("{:?}", library_state);
// get library from db
let library = match db
.library()
.find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
.exec()
.await?
{
Some(library) => Ok(library),
None => {
// update config library state to offline
// config.libraries
// // get library from db
// let library = match db
// .library()
// .find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
// .exec()
// .await?
// {
// Some(library) => Ok(library),
// None => {
// // update config library state to offline
// // config.libraries
Err(anyhow::anyhow!("library_not_found"))
}
};
// Err(anyhow::anyhow!("library_not_found"))
// }
// };
Ok(library.unwrap())
}
// Ok(library.unwrap())
// }
pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Result<()> {
let mut config = state::get();
let mut config = get_nodestate();
println!("Initializing library: {} {}", &library_id, library_path);
@ -52,13 +51,13 @@ pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Re
config.save();
}
// create connection with library database & run migrations
migrate::run_migrations(&ctx).await?;
run_migrations(&ctx).await?;
// if doesn't exist, mark as offline
Ok(())
}
pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<()> {
let mut config = state::get();
let mut config = get_nodestate();
let uuid = Uuid::new_v4().to_string();
@ -70,7 +69,7 @@ pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<()> {
..LibraryState::default()
};
migrate::run_migrations(&ctx).await?;
run_migrations(&ctx).await?;
config.libraries.push(library_state);

View file

@ -1,5 +1,8 @@
pub mod loader;
pub mod statistics;
mod loader;
mod statistics;
pub use loader::*;
pub use statistics::*;
use thiserror::Error;

View file

@ -1,7 +1,7 @@
use crate::{
node::state,
node::get_nodestate,
prisma::{library, library_statistics::*},
sys::volumes::Volume,
sys::Volume,
CoreContext,
};
use fs_extra::dir::get_size;
@ -53,7 +53,7 @@ impl Default for Statistics {
impl Statistics {
pub async fn retrieve(ctx: &CoreContext) -> Result<Statistics, LibraryError> {
let config = state::get();
let config = get_nodestate();
let db = &ctx.database;
let library_data = config.get_current_library();
@ -71,7 +71,7 @@ impl Statistics {
}
pub async fn calculate(ctx: &CoreContext) -> Result<Statistics, LibraryError> {
let config = state::get();
let config = get_nodestate();
let db = &ctx.database;
// get library from client state
let library_data = config.get_current_library();
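`Statistics::calculate` above leans on `fs_extra::dir::get_size` (imported at the top of this hunk) to measure how much space the library's data directory uses. In isolation, that call looks roughly like:

```rust
use fs_extra::dir::get_size;

// get_size walks a directory tree and returns its total size in bytes;
// the path here is just an example.
fn main() {
    match get_size(".") {
        Ok(bytes) => println!("directory uses {} bytes", bytes),
        Err(e) => println!("failed to measure directory: {e}"),
    }
}
```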

View file

@ -1,33 +0,0 @@
// This file must use the following macros to select specific native bindings.
// #[cfg(target_os = "macos")]
// #[cfg(target_os = "linux")]
// #[cfg(target_os = "windows")]
#[cfg(target_os = "macos")]
use super::swift;
use crate::library::volumes::Volume;
use swift_rs::types::{SRObjectArray, SRString};
pub fn get_file_thumbnail_base64(path: &str) -> SRString {
#[cfg(target_os = "macos")]
unsafe {
swift::get_file_thumbnail_base64_(path.into())
}
}
pub fn get_mounts() -> SRObjectArray<Volume> {
#[cfg(target_os = "macos")]
unsafe {
swift::get_mounts_()
}
// #[cfg(target_os = "macos")]
// println!("getting mounts..");
// let mut mounts: Vec<Volume> = Vec::new();
// let swift_mounts = unsafe { swift::get_mounts_() };
// println!("mounts: {:?}", swift_mounts);
// for mount in swift_mounts.iter() {
// println!("mount: {:?}", *mount);
// // mounts.push((&**mount).clone());
// }
}

View file

@ -1,5 +0,0 @@
// This module contains the native bindings to the core library.
pub mod methods;
#[cfg(target_os = "macos")]
pub mod swift;

View file

@ -1,10 +0,0 @@
use crate::library::volumes::Volume;
pub use swift_rs::types::{SRObjectArray, SRString};
extern "C" {
#[link_name = "get_file_thumbnail_base64"]
pub fn get_file_thumbnail_base64_(path: SRString) -> SRString;
#[link_name = "get_mounts"]
pub fn get_mounts_() -> SRObjectArray<Volume>;
}

View file

@ -1,6 +1,6 @@
use crate::{
prisma::{self, node},
CoreContext, Node,
Node,
};
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
@ -9,7 +9,9 @@ use std::env;
use thiserror::Error;
use ts_rs::TS;
pub mod state;
mod state;
pub use state::*;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
@ -39,7 +41,7 @@ pub enum Platform {
impl LibraryNode {
pub async fn create(node: &Node) -> Result<(), NodeError> {
println!("Creating node...");
let mut config = state::get();
let mut config = state::get_nodestate();
let db = &node.database;
@ -83,19 +85,17 @@ impl LibraryNode {
Ok(())
}
pub async fn get_nodes(ctx: &CoreContext) -> Result<Vec<node::Data>, NodeError> {
let db = &ctx.database;
// pub async fn get_nodes(ctx: &CoreContext) -> Result<Vec<node::Data>, NodeError> {
// let db = &ctx.database;
let _node = db.node().find_many(vec![]).exec().await?;
// let _node = db.node().find_many(vec![]).exec().await?;
Ok(_node)
}
// Ok(_node)
// }
}
#[derive(Error, Debug)]
pub enum NodeError {
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("Client not found error")]
ClientNotFound,
}

View file

@ -39,7 +39,7 @@ lazy_static! {
static ref CONFIG: RwLock<Option<NodeState>> = RwLock::new(None);
}
pub fn get() -> NodeState {
pub fn get_nodestate() -> NodeState {
match CONFIG.read() {
Ok(guard) => guard.clone().unwrap_or(NodeState::default()),
Err(_) => return NodeState::default(),
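The rename above is the whole change: the node config lives in a `lazy_static` `RwLock<Option<NodeState>>`, and the accessor gets a name that stays unambiguous once it is re-exported through `pub use state::*`. A self-contained sketch of the pattern, with a stand-in `NodeState`:

```rust
use lazy_static::lazy_static;
use std::sync::RwLock;

// Stand-in for the crate's real NodeState.
#[derive(Clone, Default, Debug)]
pub struct NodeState {
    pub data_path: String,
}

lazy_static! {
    static ref CONFIG: RwLock<Option<NodeState>> = RwLock::new(None);
}

// A descriptive name survives the glob re-export without clashing the way
// a bare `get()` would.
pub fn get_nodestate() -> NodeState {
    match CONFIG.read() {
        Ok(guard) => guard.clone().unwrap_or_default(),
        Err(_) => NodeState::default(),
    }
}

fn main() {
    *CONFIG.write().unwrap() = Some(NodeState {
        data_path: "/tmp/spacedrive".into(),
    });
    println!("{:?}", get_nodestate());
}
```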

View file

@ -1,36 +0,0 @@
use std::net::{TcpListener, TcpStream};
use std::thread;
use autodiscover_rs::{self, Method};
use env_logger;
fn handle_client(stream: std::io::Result<TcpStream>) {
println!("Got a connection from {:?}", stream.unwrap().peer_addr());
}
pub fn listen() -> std::io::Result<()> {
env_logger::init();
// make sure to bind before announcing ready
let listener = TcpListener::bind(":::0")?;
// get the port we were bound too; note that the trailing :0 above gives us a random unused port
let socket = listener.local_addr()?;
thread::spawn(move || {
// this function blocks forever; running it a separate thread
autodiscover_rs::run(
&socket,
Method::Multicast("[ff0e::1]:1337".parse().unwrap()),
|s| {
// change this to task::spawn if using async_std or tokio
thread::spawn(|| handle_client(s));
},
)
.unwrap();
});
let mut incoming = listener.incoming();
while let Some(stream) = incoming.next() {
// if you are using an async library, such as async_std or tokio, you can convert the stream to the
// appropriate type before using task::spawn from your library of choice.
thread::spawn(|| handle_client(stream));
}
Ok(())
}

View file

@ -1,48 +0,0 @@
use futures::StreamExt;
use libp2p::{
identity, ping,
swarm::{Swarm, SwarmEvent},
Multiaddr, PeerId,
};
use std::error::Error;
pub async fn listen(port: Option<u32>) -> Result<(), Box<dyn Error>> {
let local_key = identity::Keypair::generate_ed25519();
let local_peer_id = PeerId::from(local_key.public());
println!("Local peer id: {:?}", local_peer_id);
let transport = libp2p::development_transport(local_key).await?;
// Create a ping network behavior.
//
// For illustrative purposes, the ping protocol is configured to
// keep the connection alive, so a continuous sequence of pings
// can be observed.
let behavior = ping::Behaviour::new(ping::Config::new().with_keep_alive(true));
let mut swarm = Swarm::new(transport, behavior, local_peer_id);
// Tell the swarm to listen on all interfaces and a random, OS-assigned
// port.
swarm.listen_on("/ip4/0.0.0.0/tcp/0".parse()?)?;
// Dial the peer identified by the multi-address given as the second
// command-line argument, if any.
if port.is_some() {
let addr = format!("{:?}{:?}", "/ip4/127.0.0.1/tcp/", port);
let remote: Multiaddr = addr.parse()?;
swarm.dial(remote)?;
println!("Dialed {}", addr)
}
loop {
match swarm.select_next_some().await {
SwarmEvent::NewListenAddr { address, .. } => {
println!("Listening on {:?}", address)
}
SwarmEvent::Behaviour(event) => println!("{:?}", event),
_ => {}
}
}
}

View file

@ -1,10 +0,0 @@
use tokio::sync::mpsc;
pub mod listener;
pub mod pool;
pub struct PeerConnection {
pub client_uuid: String,
pub tcp_address: String,
pub message_sender: mpsc::Sender<String>,
}

View file

@ -1,5 +0,0 @@
use crate::client::Client;
pub struct ClientPool {
pub clients: Vec<Client>,
}

View file

@ -1 +0,0 @@

View file

@ -1 +0,0 @@

View file

@ -1,18 +0,0 @@
pub mod operation;
pub mod replicate;
use serde::{Deserialize, Serialize};
pub use self::{
operation::{PoMethod, PropertyOperation},
replicate::{Replicate, ReplicateMethod},
};
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename = "cr")]
pub struct CrdtCtx<T> {
#[serde(rename = "u")]
pub uuid: String,
#[serde(rename = "r")]
pub resource: T,
}

View file

@ -1,35 +0,0 @@
use crate::sync::engine::SyncContext;
use serde::{Deserialize, Serialize};
#[async_trait::async_trait]
pub trait PropertyOperation {
type Create: Clone;
type Update: Clone;
async fn create(data: Self::Create, ctx: SyncContext)
where
Self: Sized;
async fn update(data: Self::Update, ctx: SyncContext)
where
Self: Sized;
async fn delete(ctx: SyncContext)
where
Self: Sized;
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum PoMethod<T: PropertyOperation + Clone> {
Create(T::Create),
Update(T::Update),
}
impl<T: PropertyOperation + Clone> PoMethod<T> {
pub fn apply(self, ctx: SyncContext) {
match self {
Self::Create(data) => T::create(data, ctx),
Self::Update(data) => T::update(data, ctx),
};
}
}

View file

@ -1,28 +0,0 @@
use crate::sync::engine::SyncContext;
use serde::{Deserialize, Serialize};
#[async_trait::async_trait]
pub trait Replicate {
type Create: Clone;
async fn create(data: Self::Create, ctx: SyncContext)
where
Self: Sized;
async fn delete(ctx: SyncContext)
where
Self: Sized;
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum ReplicateMethod<T: Replicate + Clone> {
Create(T::Create),
}
impl<T: Replicate + Clone> ReplicateMethod<T> {
pub fn apply(self, ctx: SyncContext) {
match self {
Self::Create(data) => T::create(data, ctx),
};
}
}

View file

@ -1,119 +0,0 @@
#![allow(dead_code)]
use futures::{channel::mpsc, SinkExt};
use serde::{Deserialize, Serialize};
use super::{
crdt::PoMethod, examples::tag::TagCreate, CrdtCtx, FakeCoreContext, PropertyOperation,
SyncMethod,
};
pub struct SyncEngine {
uhlc: uhlc::HLC, // clock
client_pool_sender: mpsc::Sender<SyncEvent>,
ctx: SyncContext,
}
#[derive(Clone)]
pub struct SyncContext {
// pub database: Arc<PrismaClient>,
}
#[derive(Serialize, Deserialize, Debug)]
#[serde(rename = "se")]
pub struct SyncEvent {
#[serde(rename = "u")]
pub client_uuid: String, // client that created change
#[serde(rename = "t")]
pub timestamp: uhlc::Timestamp, // unique hybrid logical clock timestamp
#[serde(rename = "m")]
pub method: SyncMethod, // the CRDT resource
#[serde(rename = "s")]
pub transport: SyncTransport, // method of data transport
}
impl SyncEvent {
pub fn new(client_uuid: String, timestamp: uhlc::Timestamp, method: SyncMethod) -> Self {
Self {
client_uuid,
timestamp,
method,
transport: SyncTransport::Message,
}
}
}
#[derive(Serialize, Deserialize, Debug)]
pub enum SyncTransport {
Message,
Binary,
}
impl SyncEngine {
pub fn new(_core_ctx: &FakeCoreContext) -> Self {
let (client_pool_sender, _client_pool_receiver) = mpsc::channel(10);
SyncEngine {
uhlc: uhlc::HLC::default(),
client_pool_sender,
ctx: SyncContext {
// database: core_ctx.database.clone(),
},
}
}
pub fn exec_event(&mut self, event: SyncEvent) {
let ctx = self.ctx.clone();
let time = self.uhlc.update_with_timestamp(&event.timestamp);
if time.is_err() {
println!("Time drift detected: {:?}", time);
return;
}
match event.method {
SyncMethod::PropertyOperation(operation) => PropertyOperation::apply(operation, ctx),
SyncMethod::Replicate(_) => todo!(),
}
}
pub async fn new_operation(&self, uuid: String, property_operation: PropertyOperation) {
// create an operation for this resource
let operation = SyncMethod::PropertyOperation(CrdtCtx {
uuid: uuid.clone(),
resource: property_operation,
});
// wrap in a sync event
let event = SyncEvent::new(uuid, self.uhlc.new_timestamp(), operation);
self.create_sync_event(event).await;
}
pub async fn create_sync_event(&self, event: SyncEvent) {
// let ctx = self.ctx.clone();
let mut sender = self.client_pool_sender.clone();
// run locally first
// if that worked, write sync event to database
// ctx.database;
println!("{}", serde_json::to_string_pretty(&event).unwrap());
// finally send to client pool
sender.send(event).await.unwrap();
}
// pub dn
}
pub async fn test(ctx: &FakeCoreContext) {
let engine = SyncEngine::new(&ctx);
let uuid = "12345".to_string();
let name = "test".to_string();
engine
.new_operation(
uuid,
PropertyOperation::Tag(PoMethod::Create(TagCreate { name })),
)
.await;
}

View file

@ -1,42 +0,0 @@
use serde::{Deserialize, Serialize};
use crate::sync::{
crdt::{PropertyOperation, Replicate},
engine::SyncContext,
};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct File {
pub id: i32,
pub uuid: String,
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FileCreate {
pub uuid: String,
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum FileUpdate {
Name(String),
}
#[async_trait::async_trait]
impl PropertyOperation for File {
type Create = FileCreate;
type Update = FileUpdate;
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn update(_data: Self::Update, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}
#[async_trait::async_trait]
impl Replicate for File {
type Create = FileCreate;
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}

View file

@ -1,2 +0,0 @@
pub mod file;
pub mod tag;

View file

@ -1,35 +0,0 @@
// this is a test for sync
use serde::{Deserialize, Serialize};
use crate::sync::{crdt::PropertyOperation, engine::SyncContext};
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct Tag {
pub id: String,
pub uuid: String,
pub name: String,
pub description: String,
pub color: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct TagCreate {
pub name: String,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum TagUpdate {
Name(String),
Description(String),
Color(String),
}
#[async_trait::async_trait]
impl PropertyOperation for Tag {
type Create = TagCreate;
type Update = TagUpdate;
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn update(_data: Self::Update, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}

View file

@ -1,39 +0,0 @@
use core_derive::PropertyOperationApply;
use serde::{Deserialize, Serialize};
use self::{
crdt::{CrdtCtx, PoMethod, ReplicateMethod},
examples::{file::File, tag::Tag},
};
pub mod crdt;
pub mod engine;
pub mod examples;
// Property Operation
#[derive(PropertyOperationApply, Serialize, Deserialize, Debug, Clone)]
#[serde(rename = "po")]
pub enum PropertyOperation {
Tag(PoMethod<Tag>),
File(PoMethod<File>),
// Job(PoMethod<Job>),
}
// Resource Replicate
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum Replicate {
FilePath(ReplicateMethod<File>),
// Job(ReplicateMethod<Job>),
}
#[derive(Serialize, Deserialize, Debug)]
pub enum SyncMethod {
// performs a property level operation on a resource
// - records the change data in the database
PropertyOperation(CrdtCtx<PropertyOperation>),
// replicates the latest version of a resource by querying the database
// - records timestamp in the database
Replicate(CrdtCtx<Replicate>),
}
pub struct FakeCoreContext {}

View file

@ -1,5 +1,6 @@
use crate::{
file::indexer::IndexerJob, node::state, prisma::location, ClientQuery, CoreContext, CoreEvent,
file::indexer::IndexerJob, node::get_nodestate, prisma::location, ClientQuery, CoreContext,
CoreEvent,
};
use anyhow::Result;
use serde::{Deserialize, Serialize};
@ -49,15 +50,15 @@ static DOTFILE_NAME: &str = ".spacedrive";
// checks to see if a location is:
// - accessible on from the local filesystem
// - already exists in the database
pub async fn check_location(path: &str) -> Result<DotSpacedrive, LocationError> {
let dotfile: DotSpacedrive = match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME))
{
Ok(file) => serde_json::from_reader(file).unwrap_or(DotSpacedrive::default()),
Err(e) => return Err(LocationError::DotfileReadFailure(e)),
};
// pub async fn check_location(path: &str) -> Result<DotSpacedrive, LocationError> {
// let dotfile: DotSpacedrive = match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME))
// {
// Ok(file) => serde_json::from_reader(file).unwrap_or(DotSpacedrive::default()),
// Err(e) => return Err(LocationError::DotfileReadFailure(e)),
// };
Ok(dotfile)
}
// Ok(dotfile)
// }
pub async fn get_location(
ctx: &CoreContext,
@ -110,7 +111,7 @@ pub async fn get_locations(ctx: &CoreContext) -> Result<Vec<LocationResource>, S
pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationResource, SysError> {
let db = &ctx.database;
let config = state::get();
let config = get_nodestate();
// check if we have access to this location
if !Path::new(path).exists() {

View file

@ -1,11 +1,13 @@
pub mod locations;
pub mod volumes;
mod locations;
mod volumes;
pub use locations::*;
pub use volumes::*;
use thiserror::Error;
use crate::{job, prisma};
use self::locations::LocationError;
#[derive(Error, Debug)]
pub enum SysError {
#[error("Location error")]

View file

@ -1,5 +1,5 @@
// use crate::native;
use crate::{node::state, prisma::volume::*};
use crate::{node::get_nodestate, prisma::volume::*};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
// #[cfg(not(target_os = "macos"))]
@ -28,7 +28,7 @@ pub struct Volume {
impl Volume {
pub async fn save(ctx: &CoreContext) -> Result<(), SysError> {
let db = &ctx.database;
let config = state::get();
let config = get_nodestate();
let volumes = Self::get_volumes()?;

View file

@ -1,44 +0,0 @@
use chrono::{DateTime, Utc};
use prisma_client_rust::SerializeQuery;
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::state;
// an SQL commit to be sent to connected clients
#[derive(Serialize, Deserialize)]
pub struct Commit {
pub id: String,
pub timestamp: DateTime<Utc>,
pub client_uuid: String,
pub library_uuid: String,
pub query: String,
}
impl Commit {
pub fn new(query: String) -> Self {
let client = state::client::get();
let id = Uuid::new_v4().to_string();
let timestamp = Utc::now();
Self {
id,
sql,
client_uuid: client.client_id,
library_uuid: client.current_library_id,
timestamp,
}
}
pub fn from_query<T: SerializeQuery>(query: T) -> Self {
Self::new(query.serialize_query())
}
}
// example
fn do_something() {
Commit::new(db.client().create(
Client::pub_id().set(config.client_id.clone()),
Client::name().set(hostname.clone()),
vec![],
))
}

View file

@ -1,4 +1,4 @@
use crate::prisma::migration;
use crate::prisma::{self, migration, PrismaClient};
use crate::CoreContext;
use anyhow::Result;
use data_encoding::HEXLOWER;
@ -7,11 +7,24 @@ use prisma_client_rust::raw;
use ring::digest::{Context, Digest, SHA256};
use std::ffi::OsStr;
use std::io::{BufReader, Read};
use thiserror::Error;
const INIT_MIGRATION: &str =
include_str!("../../prisma/migrations/migration_table/migration.sql");
const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_table/migration.sql");
static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations");
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Unable to initialize the Prisma client")]
ClientError(#[from] prisma::NewClientError),
}
pub async fn create_connection(path: &str) -> Result<PrismaClient, DatabaseError> {
println!("Creating database connection: {:?}", path);
let client = prisma::new_client_with_url(&format!("file:{}", &path)).await?;
Ok(client)
}
pub fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest> {
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
@ -36,8 +49,6 @@ pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
{
Ok(data) => {
if data.len() == 0 {
#[cfg(debug_assertions)]
println!("Migration table does not exist");
// execute migration
match client._execute_raw(raw!(INIT_MIGRATION)).await {
Ok(_) => {}
@ -55,9 +66,6 @@ pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
#[cfg(debug_assertions)]
println!("Migration table created: {:?}", value);
} else {
#[cfg(debug_assertions)]
println!("Migration table exists: {:?}", data);
}
let mut migration_subdirs = MIGRATIONS_DIR
@ -120,8 +128,6 @@ pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
for (i, step) in steps.iter().enumerate() {
match client._execute_raw(raw!(*step)).await {
Ok(_) => {
#[cfg(debug_assertions)]
println!("Step {} ran successfully", i);
client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
@ -139,9 +145,6 @@ pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
#[cfg(debug_assertions)]
println!("Migration {} recorded successfully", name);
} else {
#[cfg(debug_assertions)]
println!("Migration {} already exists", name);
}
}
}
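Several of the error enums touched in this commit (`DatabaseError` here, `SysError`, `JobError`, `NodeError` elsewhere) follow the same thiserror shape: a display message per variant plus `#[from]` so `?` converts the source error automatically. A minimal, self-contained sketch of that conversion, using `std::io::Error` as the source instead of the Prisma client error:

```rust
use thiserror::Error;

// Same shape as the error enums in this commit: a message per variant and
// #[from] so `?` converts the underlying error into this enum.
#[derive(Error, Debug)]
enum DatabaseError {
    #[error("unable to open the database file")]
    Io(#[from] std::io::Error),
}

fn open_db(path: &str) -> Result<Vec<u8>, DatabaseError> {
    // std::io::Error converts into DatabaseError::Io via the #[from] impl.
    let bytes = std::fs::read(path)?;
    Ok(bytes)
}

fn main() {
    match open_db("library.db") {
        Ok(bytes) => println!("read {} bytes", bytes.len()),
        Err(e) => println!("error: {e}"),
    }
}
```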

View file

@ -1 +1 @@
pub mod db;