diff --git a/.github/scripts/setup-system.ps1 b/.github/scripts/setup-system.ps1
index 9a8a206b5..29efe7a36 100644
--- a/.github/scripts/setup-system.ps1
+++ b/.github/scripts/setup-system.ps1
@@ -1,10 +1,181 @@
-Write-Host "This script is currently being used by CI and will need some more work before anyone can use it like the 'setup-system.sh' script for macOS and Linux!"
+# Get ci parameter to check if running with ci
+param(
+    [Parameter()]
+    [Switch]$ci
+)
-$VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
-Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
-Invoke-WebRequest "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-full-shared.7z" -OutFile ffmpeg-release-full-shared.7z
-7z x ffmpeg-release-full-shared.7z
-mkdir ffmpeg
-mv ffmpeg-*/* ffmpeg/
-Add-Content $env:GITHUB_ENV "FFMPEG_DIR=${pwd}\ffmpeg`n"
-Add-Content $env:GITHUB_PATH "${pwd}\ffmpeg\bin`n"
\ No newline at end of file
+# Get temp folder
+$temp = [System.IO.Path]::GetTempPath()
+
+# Get current running dir
+$currentLocation = $((Get-Location).path)
+
+# Check to see if a command exists (e.g. if an app is installed)
+Function CheckCommand {
+
+    Param ($command)
+
+    $oldPreference = $ErrorActionPreference
+
+    $ErrorActionPreference = 'stop'
+
+    try { if (Get-Command $command) { RETURN $true } }
+
+    Catch { RETURN $false }
+
+    Finally { $ErrorActionPreference = $oldPreference }
+
+}
+
+Write-Host "Spacedrive Development Environment Setup" -ForegroundColor Magenta
+Write-Host @"
+
+To set up your machine for Spacedrive development, this script will do the following:
+
+1) Check for Rust and Cargo
+
+2) Install pnpm (if not installed)
+
+3) Install the latest version of Node.js using pnpm
+
+4) Install LLVM (needed to build ffmpeg-rust)
+
+5) Download ffmpeg and set it as an environment variable
+
+"@
+
+Write-Host "Checking for Rust and Cargo..." -ForegroundColor Yellow
+Start-Sleep -Milliseconds 150
+
+$cargoCheck = CheckCommand cargo
+
+if ($cargoCheck -eq $false) {
+    Write-Host @"
+Cargo is not installed.
+
+To use Spacedrive on Windows, Cargo needs to be installed.
+The Visual Studio C++ Build tools are also required.
+Instructions can be found here:
+
+https://tauri.app/v1/guides/getting-started/prerequisites/#setting-up-windows
+
+Once you have installed Cargo, re-run this script.
+
+"@
+    Exit
+}
+else {
+    Write-Host "Cargo is installed."
+}
+
+Write-Host
+Write-Host "Checking for pnpm..." -ForegroundColor Yellow
+Start-Sleep -Milliseconds 150
+
+$pnpmCheck = CheckCommand pnpm
+if ($pnpmCheck -eq $false) {
+
+    Write-Host "pnpm is not installed. Installing now."
+    Write-Host "Running the pnpm installer..."
+
+    # pnpm installer taken from https://pnpm.io
+    Invoke-WebRequest https://get.pnpm.io/install.ps1 -useb | Invoke-Expression
+
+    # Reset the PATH env variables to make sure pnpm is accessible
+    $env:PNPM_HOME = [System.Environment]::GetEnvironmentVariable("PNPM_HOME", "User")
+    $env:Path = [System.Environment]::ExpandEnvironmentVariables([System.Environment]::GetEnvironmentVariable("Path", "User"))
+
+}
+else {
+    Write-Host "pnpm is installed."
+}
+
+# A GitHub Action takes care of installing node, so this isn't necessary if running in the ci.
+if ($ci -eq $True) {
+    Write-Host
+    Write-Host "Running with CI, skipping Node install." -ForegroundColor Yellow
+}
+else {
+    Write-Host
+    Write-Host "Using pnpm to install the latest version of Node..." -ForegroundColor Yellow
+    Write-Host "This will set your global Node version to the latest!"
+    Start-Sleep -Milliseconds 150
+
+    # Runs the pnpm command to use the latest version of node, which also installs it
+    Start-Process -Wait -FilePath "pnpm" -ArgumentList "env use --global latest" -PassThru -Verb runAs
+}
+
+
+# The ci has LLVM installed already, so we instead just set the env variables.
+if ($ci -eq $True) {
+    Write-Host
+    Write-Host "Running with CI, skipping LLVM install." -ForegroundColor Yellow
+
+    $VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
+    Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
+
+} else {
+    Write-Host
+    Write-Host "Downloading the LLVM installer..." -ForegroundColor Yellow
+    # Downloads latest installer for LLVM
+    $filenamePattern = "*-win64.exe"
+    $releasesUri = "https://api.github.com/repos/llvm/llvm-project/releases/latest"
+    $downloadUri = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).browser_download_url
+
+    Start-BitsTransfer -Source $downloadUri -Destination "$temp\llvm.exe"
+
+    Write-Host
+    Write-Host "Running the LLVM installer..." -ForegroundColor Yellow
+    Write-Host "Please follow the instructions to install LLVM."
+    Write-Host "Ensure you add LLVM to your PATH."
+
+    Start-Process "$temp\llvm.exe" -Wait
+}
+
+
+Write-Host
+Write-Host "Downloading the latest ffmpeg build..." -ForegroundColor Yellow
+
+# Downloads the latest shared build of ffmpeg from GitHub
+$filenamePattern = "*-full_build-shared.zip"
+$releasesUri = "https://api.github.com/repos/GyanD/codexffmpeg/releases/latest"
+$downloadUri = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).browser_download_url
+$filename = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).name
+$remove = ".zip"
+$foldername = $filename.Substring(0, ($filename.Length - $remove.Length))
+
+Start-BitsTransfer -Source $downloadUri -Destination "$temp\ffmpeg.zip"
+
+Write-Host
+Write-Host "Expanding ffmpeg zip..." -ForegroundColor Yellow
+
+Expand-Archive "$temp\ffmpeg.zip" $HOME -ErrorAction SilentlyContinue
+
+Remove-Item "$temp\ffmpeg.zip"
+
+Write-Host
+Write-Host "Setting environment variables..." -ForegroundColor Yellow
+
+if ($ci -eq $True) {
+    # If running in ci, we need to use GITHUB_ENV and GITHUB_PATH instead of the normal PATH env variables, so we set them here
+    Add-Content $env:GITHUB_ENV "FFMPEG_DIR=$HOME\$foldername`n"
+    Add-Content $env:GITHUB_PATH "$HOME\$foldername\bin`n"
+}
+else {
+    # Sets environment variable for ffmpeg
+    [System.Environment]::SetEnvironmentVariable('FFMPEG_DIR', "$HOME\$foldername", [System.EnvironmentVariableTarget]::User)
+}
+
+Write-Host
+Write-Host "Copying required .dll files..." -ForegroundColor Yellow
+
+# Create target\debug folder, continue if already exists
+New-Item -Path $currentLocation\target\debug -ItemType Directory -ErrorAction SilentlyContinue
+
+# Copies all .dll required for rust-ffmpeg to target\debug folder
+Get-ChildItem "$HOME\$foldername\bin" -recurse -filter *.dll | Copy-Item -Destination "$currentLocation\target\debug"
+
+Write-Host
+Write-Host "Your machine has been set up for Spacedrive development!"
diff --git a/.github/scripts/setup-system.sh b/.github/scripts/setup-system.sh index 68d0df0ab..a3e557318 100755 --- a/.github/scripts/setup-system.sh +++ b/.github/scripts/setup-system.sh @@ -23,8 +23,13 @@ fi if [[ "$OSTYPE" == "linux-gnu"* ]]; then if which apt-get &> /dev/null; then echo "Detected 'apt' based distro!" + + if [[ "$(lsb_release -si)" == "Pop" ]]; then + DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies + else + DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies + fi DEBIAN_TAURI_DEPS="libwebkit2gtk-4.0-dev build-essential curl wget libssl-dev libgtk-3-dev libappindicator3-dev librsvg2-dev" # Tauri dependencies - DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies DEBIAN_BINDGEN_DEPS="pkg-config clang" # Bindgen dependencies - it's used by a dependency of Spacedrive sudo apt-get -y update diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 013354f45..d78d7b382 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -36,7 +36,7 @@ jobs: id: pnpm-cache run: | echo "::set-output name=pnpm_cache_dir::$(pnpm store path)" - + - uses: actions/cache@v3 name: Setup pnpm cache with: @@ -44,7 +44,7 @@ jobs: key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }} restore-keys: | ${{ runner.os }}-pnpm-store- - + - name: Install pnpm dependencies run: pnpm --frozen-lockfile i @@ -81,7 +81,7 @@ jobs: with: version: 7 run_install: false - + - name: Install Rust stable uses: actions-rs/toolchain@v1 with: @@ -89,7 +89,7 @@ jobs: profile: minimal override: true components: rustfmt, rust-src - + - name: Cache Rust Dependencies uses: Swatinem/rust-cache@v1 with: @@ -98,10 +98,10 @@ jobs: - name: Run 'setup-system.sh' script if: matrix.platform == 'ubuntu-latest' || matrix.platform == 'macos-latest' run: ./.github/scripts/setup-system.sh - + - name: Run 'setup-system.ps1' script if: matrix.platform == 'windows-latest' - run: ./.github/scripts/setup-system.ps1 + run: ./.github/scripts/setup-system.ps1 -ci - name: Get pnpm store directory id: pnpm-cache @@ -116,7 +116,7 @@ jobs: ${{ runner.os }}-pnpm-store- - name: Install pnpm dependencies run: pnpm --frozen-lockfile i - + - name: Cache Prisma codegen id: cache-prisma uses: actions/cache@v3 @@ -127,13 +127,13 @@ jobs: - name: Generate Prisma client working-directory: core if: steps.cache-prisma.outputs.cache-hit != 'true' - run: cargo run --frozen -p prisma-cli --release -- generate + run: cargo run -p prisma-cli --release -- generate - name: Cargo fetch run: cargo fetch - name: Check Core - run: cargo check --frozen -p sdcore --release + run: cargo check -p sdcore --release - name: Bundle Desktop run: pnpm desktop tauri build @@ -141,7 +141,7 @@ jobs: - name: Build Server if: matrix.platform == 'ubuntu-latest' run: | - cargo build --frozen -p server --release + cargo build -p server --release cp ./target/release/server ./apps/server/server - name: Determine image name & tag diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 942c66d63..7c7f383f2 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -41,6 +41,8 @@ This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/instal - `$ cd spacedrive` - For Linux or MacOS 
users run: `./.github/scripts/setup-system.sh` - This will install FFMPEG and any other required dependencies for Spacedrive to build. +- For Windows users run using PowerShell: `.\.github\scripts\setup-system.ps1` + - This will install pnpm, LLVM, FFMPEG and any other required dependencies for Spacedrive to build. - `$ pnpm i` - `$ pnpm prep` - Runs all necessary codegen & builds required dependencies. diff --git a/Cargo.lock b/Cargo.lock index d0af4fd42..2df75e10d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -51,9 +51,9 @@ dependencies = [ [[package]] name = "actix-http" -version = "3.1.0" +version = "3.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd2e9f6794b5826aff6df65e3a0d0127b271d1c03629c774238f3582e903d4e4" +checksum = "6f9ffb6db08c1c3a1f4aef540f1a63193adc73c4fbd40b75a95fc8c5258f6e51" dependencies = [ "actix-codec", "actix-rt", @@ -195,7 +195,7 @@ dependencies = [ "serde_urlencoded", "smallvec", "socket2", - "time 0.3.9", + "time 0.3.11", "url", ] @@ -306,9 +306,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc" +checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704" [[package]] name = "arrayvec" @@ -419,9 +419,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "backtrace" -version = "0.3.65" +version = "0.3.66" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61" +checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7" dependencies = [ "addr2line", "cc", @@ -588,7 +588,7 @@ dependencies = [ "serde", "serde_bytes", "serde_json", - "time 0.3.9", + "time 0.3.11", "uuid 0.8.2", ] @@ -615,9 +615,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" [[package]] name = "bytemuck" -version = "1.9.1" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cdead85bdec19c194affaeeb670c0e41fe23de31459efd1c174d049269cf02cc" +checksum = "c53dfa917ec274df8ed3c572698f381a24eef2efba9492d797301b72b6db408a" [[package]] name = "byteorder" @@ -642,9 +642,9 @@ dependencies = [ [[package]] name = "cairo-rs" -version = "0.15.11" +version = "0.15.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62be3562254e90c1c6050a72aa638f6315593e98c5cdaba9017cedbabf0a5dee" +checksum = "c76ee391b03d35510d9fa917357c7f1855bd9a6659c95a1b392e33f49b3369bc" dependencies = [ "bitflags", "cairo-sys-rs", @@ -882,7 +882,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94d4706de1b0fa5b132270cddffa8585166037822e260a944fe161acd137ca05" dependencies = [ "percent-encoding", - "time 0.3.9", + "time 0.3.11", "version_check", ] @@ -964,17 +964,17 @@ dependencies = [ "crossbeam-deque", "crossbeam-epoch", "crossbeam-queue 0.3.5", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.10", ] [[package]] name = "crossbeam-channel" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" +checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.10", ] [[package]] @@ -985,20 +985,20 @@ 
checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.10", ] [[package]] name = "crossbeam-epoch" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" +checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d" dependencies = [ "autocfg", "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", - "lazy_static", + "crossbeam-utils 0.8.10", "memoffset", + "once_cell", "scopeguard", ] @@ -1020,7 +1020,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.10", ] [[package]] @@ -1036,19 +1036,19 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83" dependencies = [ "cfg-if 1.0.0", - "lazy_static", + "once_cell", ] [[package]] name = "crypto-common" -version = "0.1.3" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +checksum = "2ccfd8c0ee4cce11e45b3fd6f9d5e69e0cc62912aa6a0cb1bf4617b0eba5a12f" dependencies = [ "generic-array 0.14.5", "typenum", @@ -1186,9 +1186,9 @@ dependencies = [ [[package]] name = "dbus" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de0a745c25b32caa56b82a3950f5fec7893a960f4c10ca3b02060b0c38d8c2ce" +checksum = "6f8bcdd56d2e5c4ed26a529c5a9029f5db8290d433497506f958eae3be148eb6" dependencies = [ "libc", "libdbus-sys", @@ -1377,9 +1377,9 @@ dependencies = [ [[package]] name = "either" -version = "1.6.1" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457" +checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" [[package]] name = "embed-resource" @@ -1598,14 +1598,14 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c" +checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" dependencies = [ "cfg-if 1.0.0", "libc", "redox_syscall 0.2.13", - "winapi", + "windows-sys", ] [[package]] @@ -2010,9 +2010,9 @@ dependencies = [ [[package]] name = "gif" -version = "0.11.3" +version = "0.11.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b" +checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06" dependencies = [ "color_quant", "weezl", @@ -2026,9 +2026,9 @@ checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" [[package]] name = "gio" -version = "0.15.11" +version = "0.15.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f132be35e05d9662b9fa0fee3f349c6621f7782e0105917f4cc73c1bf47eceb" +checksum = 
"68fdbc90312d462781a395f7a16d96a2b379bb6ef8cd6310a2df272771c4283b" dependencies = [ "bitflags", "futures-channel", @@ -2056,9 +2056,9 @@ dependencies = [ [[package]] name = "glib" -version = "0.15.11" +version = "0.15.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd124026a2fa8c33a3d17a3fe59c103f2d9fa5bd92c19e029e037736729abeab" +checksum = "edb0306fbad0ab5428b0ca674a23893db909a98582969c9b537be4ced78c505d" dependencies = [ "bitflags", "futures-channel", @@ -2228,13 +2228,19 @@ dependencies = [ "ahash", ] +[[package]] +name = "hashbrown" +version = "0.12.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022" + [[package]] name = "hashlink" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf" dependencies = [ - "hashbrown", + "hashbrown 0.11.2", ] [[package]] @@ -2359,9 +2365,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4" [[package]] name = "hyper" -version = "0.14.19" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "42dc3c131584288d375f2d07f822b0cb012d8c6fb899a5b9fdb3cb7eb9b6004f" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ "bytes", "futures-channel", @@ -2427,7 +2433,7 @@ version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d" dependencies = [ - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.10", "globset", "lazy_static", "log", @@ -2492,12 +2498,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "1.8.2" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a" +checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e" dependencies = [ "autocfg", - "hashbrown", + "hashbrown 0.12.2", "serde", ] @@ -2866,10 +2872,19 @@ dependencies = [ ] [[package]] -name = "linked-hash-map" -version = "0.5.4" +name = "line-wrap" +version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9" +dependencies = [ + "safemem", +] + +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "local-channel" @@ -2934,11 +2949,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.7.6" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8015d95cb7b2ddd3c0d32ca38283ceb1eea09b4713ee380bceb942d85a244228" +checksum = "c84e6fe5655adc6ce00787cf7dcaf8dc4f998a0565d23eafc207a8b08ca3349a" dependencies = [ - "hashbrown", + "hashbrown 0.11.2", ] [[package]] @@ -2958,15 +2973,15 @@ checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4" [[package]] name = "mac-notification-sys" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "042f74a606175d72ca483e14e0873fe0f6c003f7af45865b17b16fdaface7203" +checksum = 
"47a4acb83c904844ca12aafeac6fff6f781cf9e220a985c1db94fd94123993aa" dependencies = [ "cc", "dirs-next", "objc-foundation", "objc_id", - "time 0.3.9", + "time 0.3.11", ] [[package]] @@ -3093,9 +3108,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log", @@ -3284,7 +3299,7 @@ dependencies = [ "smallvec", "subprocess", "thiserror", - "time 0.3.9", + "time 0.3.11", "uuid 0.8.2", ] @@ -3439,9 +3454,9 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg", "num-integer", @@ -3538,18 +3553,18 @@ dependencies = [ [[package]] name = "object" -version = "0.28.4" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424" +checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225" +checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1" [[package]] name = "opaque-debug" @@ -3569,9 +3584,9 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.40" +version = "0.10.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e" +checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0" dependencies = [ "bitflags", "cfg-if 1.0.0", @@ -3601,9 +3616,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.74" +version = "0.9.75" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "835363342df5fba8354c5b453325b110ffd54044e588c539cf2f20a8014e4cb1" +checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f" dependencies = [ "autocfg", "cc", @@ -3980,18 +3995,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74" dependencies = [ "proc-macro2", "quote", @@ -4016,6 +4031,20 @@ version = "0.3.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" +[[package]] +name = "plist" +version = "1.3.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225" +dependencies = [ + "base64 0.13.0", + "indexmap", + "line-wrap", + "serde", + "time 0.3.11", + "xml-rs", +] + [[package]] name = "png" version = "0.11.0" @@ -4247,9 +4276,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" -version = "1.0.39" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f" +checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" dependencies = [ "unicode-ident", ] @@ -4361,9 +4390,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.18" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1" +checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" dependencies = [ "proc-macro2", ] @@ -4484,7 +4513,7 @@ checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.10", "num_cpus", ] @@ -4516,9 +4545,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.5.6" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1" +checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b" dependencies = [ "aho-corasick", "memchr", @@ -4536,9 +4565,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.6.26" +version = "0.6.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64" +checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244" [[package]] name = "remove_dir_all" @@ -4736,7 +4765,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.10", + "semver 1.0.12", ] [[package]] @@ -4772,9 +4801,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.6" +version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" +checksum = "a0a5f7c728f5d284929a1cccb5bc19884422bfe6ef4d6c409da2c41838983fcf" [[package]] name = "ryu" @@ -4782,6 +4811,12 @@ version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +[[package]] +name = "safemem" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072" + [[package]] name = "same-file" version = "1.0.6" @@ -4862,7 +4897,6 @@ dependencies = [ "image", "include_dir", "int-enum", - "lazy_static", "log", "prisma-client-rust", "ring 0.17.0-alpha.11", @@ -4872,7 +4906,7 @@ dependencies = [ "thiserror", "tokio", "ts-rs", - "uuid 1.1.2", + "uuid 0.8.2", "walkdir", "webp", ] @@ -4940,9 +4974,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.10" +version = "1.0.12" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c" +checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1" dependencies = [ "serde", ] @@ -4964,9 +4998,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.137" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1" +checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6" dependencies = [ "serde_derive", ] @@ -4982,9 +5016,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.137" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be" +checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb" dependencies = [ "proc-macro2", "quote", @@ -4993,9 +5027,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.81" +version = "1.0.82" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" +checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7" dependencies = [ "indexmap", "itoa 1.0.2", @@ -5204,9 +5238,9 @@ checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" [[package]] name = "smallvec" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] name = "socket2" @@ -5442,9 +5476,9 @@ dependencies = [ [[package]] name = "syn" -version = "1.0.96" +version = "1.0.98" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf" +checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" dependencies = [ "proc-macro2", "quote", @@ -5500,9 +5534,9 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60" [[package]] name = "tao" -version = "0.11.2" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bfe4c782f0543f667ee3b732d026b2f1c64af39cd52e726dec1ea1f2d8f6b80" +checksum = "a71c32c2fa7bba46b01becf9cf470f6a781573af7e376c5e317a313ecce27545" dependencies = [ "bitflags", "cairo-rs", @@ -5537,7 +5571,6 @@ dependencies = [ "raw-window-handle", "scopeguard", "serde", - "tao-core-video-sys", "unicode-segmentation", "uuid 0.8.2", "windows 0.37.0", @@ -5545,18 +5578,6 @@ dependencies = [ "x11-dl", ] -[[package]] -name = "tao-core-video-sys" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271450eb289cb4d8d0720c6ce70c72c8c858c93dd61fc625881616752e6b98f6" -dependencies = [ - "cfg-if 1.0.0", - "core-foundation-sys", - "libc", - "objc", -] - [[package]] name = "tap" version = "1.0.1" @@ -5576,9 +5597,9 @@ dependencies = [ [[package]] name = "tauri" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1ebb60bb8f246d5351ff9b7728fdfa7a6eba72baa722ab6021d553981caba1" +checksum = "d61fc211e0bd2c04c0aecd202d2cd72dd797a89da02989a39e1b9691462386d6" dependencies = [ "anyhow", "attohttpc", @@ -5605,7 +5626,7 @@ dependencies = [ 
"raw-window-handle", "regex", "rfd", - "semver 1.0.10", + "semver 1.0.12", "serde", "serde_json", "serde_repr", @@ -5629,14 +5650,15 @@ dependencies = [ [[package]] name = "tauri-build" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e7b26eb3523e962b90012fedbfb744ca153d9be85e7981e00737e106d5323941" +checksum = "2f2b32e551ec810ba4ab2ad735de5e3576e54bf0322ab0f4b7ce41244bc65ecf" dependencies = [ "anyhow", "cargo_toml", "heck 0.4.0", - "semver 1.0.10", + "json-patch", + "semver 1.0.12", "serde_json", "tauri-utils", "winres", @@ -5644,32 +5666,35 @@ dependencies = [ [[package]] name = "tauri-codegen" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9468c5189188c820ef605dfe4937c768cb2918e9460c8093dc4ee2cbd717b262" +checksum = "f6f1f7928dd040fc03c94207adfad506c0cf5b152982fd1dc0a621f7fd777e22" dependencies = [ "base64 0.13.0", "brotli", "ico", + "json-patch", + "plist", "png 0.17.5", "proc-macro2", "quote", "regex", - "semver 1.0.10", + "semver 1.0.12", "serde", "serde_json", "sha2", "tauri-utils", "thiserror", + "time 0.3.11", "uuid 1.1.2", "walkdir", ] [[package]] name = "tauri-macros" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40e3ffddd7a274fc7baaa260888c971a0d95d2ef403aa16600c878b8b1c00ffe" +checksum = "e50b9f52871c088857360319a37472d59f4644f1ed004489599d62831a1b6996" dependencies = [ "heck 0.4.0", "proc-macro2", @@ -5681,14 +5706,15 @@ dependencies = [ [[package]] name = "tauri-runtime" -version = "0.9.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb7dc4db360bb40584187b6cb7834da736ce4ef2ab0914e2be98014444fa9920" +checksum = "4e4cff3b4d9469727fa2107c4b3d2eda110df1ba45103fb420178e536362fae4" dependencies = [ "gtk", "http", "http-range", "infer", + "raw-window-handle", "serde", "serde_json", "tauri-utils", @@ -5700,14 +5726,15 @@ dependencies = [ [[package]] name = "tauri-runtime-wry" -version = "0.9.0" +version = "0.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c876fb3a6e7c6fe2ac466b2a6ecd83658528844b4df0914558a9bc1501b31cf3" +checksum = "3fa8c4edaf01d8b556e7172c844b1b4dd3399adcd1a606bd520fc3e65f698546" dependencies = [ "cocoa", "gtk", "percent-encoding", "rand 0.8.5", + "raw-window-handle", "tauri-runtime", "tauri-utils", "uuid 1.1.2", @@ -5719,9 +5746,9 @@ dependencies = [ [[package]] name = "tauri-utils" -version = "1.0.0" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "727145cb55b8897fa9f2bcea4fad31dc39394703d037c9669b40f2d1c0c2d7f3" +checksum = "12ff4b68d9faeb57c9c727bf58c9c9768d2b67d8e84e62ce6146e7859a2e9c6b" dependencies = [ "brotli", "ctor", @@ -5734,13 +5761,14 @@ dependencies = [ "phf 0.10.1", "proc-macro2", "quote", - "semver 1.0.10", + "semver 1.0.12", "serde", "serde_json", "serde_with", "thiserror", "url", "walkdir", + "windows 0.37.0", ] [[package]] @@ -5877,9 +5905,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.9" +version = "0.3.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +checksum = "72c91f41dcb2f096c05f0873d667dceec1087ce5bcf984ec8ffb19acddbb3217" dependencies = [ "itoa 1.0.2", "libc", @@ -6031,9 +6059,9 @@ dependencies = [ [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" @@ -6050,9 +6078,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.21" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c" +checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2" dependencies = [ "proc-macro2", "quote", @@ -6061,9 +6089,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921" +checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7" dependencies = [ "once_cell", "valuable", @@ -6105,13 +6133,13 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.11" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596" +checksum = "3a713421342a5a666b7577783721d3117f1b69a393df803ee17bb73b1e122a59" dependencies = [ "ansi_term", - "lazy_static", "matchers", + "once_cell", "regex", "sharded-slab", "smallvec", @@ -6190,6 +6218,7 @@ dependencies = [ "chrono", "thiserror", "ts-rs-macros", + "uuid 0.8.2", ] [[package]] @@ -6235,9 +6264,9 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987" [[package]] name = "ucd-trie" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c" +checksum = "89570599c4fe5585de2b388aab47e99f7fa4e9238a1399f707a02e356058141c" [[package]] name = "unicode-bidi" @@ -6253,9 +6282,9 @@ checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6" dependencies = [ "tinyvec", ] @@ -6341,19 +6370,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f" dependencies = [ "getrandom 0.2.7", - "serde", - "uuid-macro-internal", -] - -[[package]] -name = "uuid-macro-internal" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "548f7181a5990efa50237abb7ebca410828b57a8955993334679f8b50b35c97d" -dependencies = [ - "proc-macro2", - "quote", - "syn", ] [[package]] @@ -6602,9 +6618,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.3" +version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf" +checksum = "f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf" dependencies = [ "webpki", ] @@ -6648,9 +6664,9 @@ dependencies = [ [[package]] name = "weezl" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9c97e489d8f836838d497091de568cf16b117486d529ec5579233521065bd5e4" +checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" [[package]] name = "widestring" @@ -6660,9 +6676,9 @@ checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983" [[package]] name = "wildmatch" -version = "2.1.0" +version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6c48bd20df7e4ced539c12f570f937c6b4884928a87fee70a479d72f031d4e0" +checksum = "ee583bdc5ff1cf9db20e9db5bb3ff4c3089a8f6b8b31aff265c9aba85812db86" [[package]] name = "winapi" @@ -6924,9 +6940,9 @@ dependencies = [ [[package]] name = "wry" -version = "0.18.3" +version = "0.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b1ba327c7dd4292f46bf8e6ba8e6ec2db4443b2973c9d304a359d95e0aa856" +checksum = "ce19dddbd3ce01dc8f14eb6d4c8f914123bf8379aaa838f6da4f981ff7104a3f" dependencies = [ "block", "cocoa", diff --git a/LICENSE b/LICENSE index a6509367e..b3ce5ea17 100644 --- a/LICENSE +++ b/LICENSE @@ -1,25 +1,23 @@ Copyright (c) 2021-present Spacedrive Technology Inc. + + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. + Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble - The GNU General Public License is a free, copyleft license for -software and other kinds of works. + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to +our General Public Licenses are intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. +software for all its users. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you @@ -28,44 +26,34 @@ them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. 
And you must show them these terms so they -know their rights. + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. The precise terms and conditions for copying, distribution and modification follow. @@ -74,7 +62,7 @@ modification follow. 0. Definitions. - "This License" refers to version 3 of the GNU General Public License. + "This License" refers to version 3 of the GNU Affero General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. 
@@ -551,35 +539,45 @@ to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. - 13. Use with the GNU Affero General Public License. + 13. Remote Network Interaction; Use with the GNU General Public License. + + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single +under version 3 of the GNU General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General +Program specifies that a certain numbered version of the GNU Affero General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published +GNU Affero General Public License, you may choose any version ever published by the Free Software Foundation. If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's +versions of the GNU Affero General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. @@ -637,40 +635,29 @@ the "copyright" line and a pointer to where the full notice is found. 
Copyright (C) This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. + GNU Affero General Public License for more details. - You should have received a copy of the GNU General Public License - along with this program. If not, see . + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . Also add information on how to contact you by electronic and paper mail. - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - Copyright (C) - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. +For more information on this, and how to apply and follow the GNU AGPL, see +. 
\ No newline at end of file diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs index 3dcad488c..3c6210e46 100644 --- a/apps/desktop/src-tauri/src/main.rs +++ b/apps/desktop/src-tauri/src/main.rs @@ -1,16 +1,15 @@ use std::time::{Duration, Instant}; use dotenvy::dotenv; -use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node}; -use tauri::api::path; -use tauri::Manager; +use sdcore::{ClientCommand, ClientQuery, CoreEvent, CoreResponse, Node, NodeController}; +use tauri::{api::path, Manager}; #[cfg(target_os = "macos")] mod macos; mod menu; #[tauri::command(async)] async fn client_query_transport( - core: tauri::State<'_, CoreController>, + core: tauri::State<'_, NodeController>, data: ClientQuery, ) -> Result { match core.query(data).await { @@ -24,7 +23,7 @@ async fn client_query_transport( #[tauri::command(async)] async fn client_command_transport( - core: tauri::State<'_, CoreController>, + core: tauri::State<'_, NodeController>, data: ClientCommand, ) -> Result { match core.command(data).await { @@ -48,17 +47,11 @@ async fn main() { dotenv().ok(); env_logger::init(); - let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./")); + let mut data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./")); + data_dir = data_dir.join("spacedrive"); // create an instance of the core - let (mut node, mut event_receiver) = Node::new(data_dir).await; - // run startup tasks - node.initializer().await; - // extract the node controller - let controller = node.get_controller(); - // throw the node into a dedicated thread - tokio::spawn(async move { - node.start().await; - }); + let (controller, mut event_receiver, node) = Node::new(data_dir).await; + tokio::spawn(node.start()); // create tauri app tauri::Builder::default() // pass controller to the tauri state manager diff --git a/apps/desktop/src-tauri/tauri.linux.conf.json b/apps/desktop/src-tauri/tauri.linux.conf.json index 51b5a339d..5fc781e7f 100644 --- a/apps/desktop/src-tauri/tauri.linux.conf.json +++ b/apps/desktop/src-tauri/tauri.linux.conf.json @@ -15,7 +15,13 @@ "active": true, "targets": "all", "identifier": "com.spacedrive.desktop", - "icon": ["icons/icon.icns"], + "icon": [ + "icons/32x32.png", + "icons/128x128.png", + "icons/128x128@2x.png", + "icons/icon.icns", + "icons/icon.ico" + ], "resources": [], "externalBin": [], "copyright": "Spacedrive Technology Inc.", diff --git a/apps/landing/src/pages/team.page.tsx b/apps/landing/src/pages/team.page.tsx index 08294e26e..301816da0 100644 --- a/apps/landing/src/pages/team.page.tsx +++ b/apps/landing/src/pages/team.page.tsx @@ -197,7 +197,7 @@ function Page() { style={{ transform: 'scale(2)' }} />
-

+

We believe file management should be universal.

diff --git a/apps/server/k8s/infrastructure.yaml b/apps/server/k8s/infrastructure.yaml deleted file mode 100644 index a5e44b4ee..000000000 --- a/apps/server/k8s/infrastructure.yaml +++ /dev/null @@ -1,42 +0,0 @@ -# Infrastructure setups up the Kubernetes cluster for Spacedrive! -# -# To get the service account token use the following: -# ```bash -# TOKENNAME=`kubectl -n spacedrive get sa/spacedrive-ci -o jsonpath='{.secrets[0].name}'` -# kubectl -n spacedrive get secret $TOKENNAME -o jsonpath='{.data.token}' | base64 -d -# ``` - -apiVersion: v1 -kind: Namespace -metadata: - name: spacedrive ---- -apiVersion: v1 -kind: ServiceAccount -metadata: - name: spacedrive-ci - namespace: spacedrive ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: Role -metadata: - name: spacedrive-ns-full - namespace: spacedrive -rules: - - apiGroups: ['apps'] - resources: ['deployments'] - verbs: ['get', 'patch'] ---- -apiVersion: rbac.authorization.k8s.io/v1 -kind: RoleBinding -metadata: - name: spacedrive-ci-rb - namespace: spacedrive -subjects: - - kind: ServiceAccount - name: spacedrive-ci - namespace: spacedrive -roleRef: - apiGroup: rbac.authorization.k8s.io - kind: Role - name: spacedrive-ns-full diff --git a/apps/server/k8s/sdserver.yaml b/apps/server/k8s/sdserver.yaml deleted file mode 100644 index 00f02c1c1..000000000 --- a/apps/server/k8s/sdserver.yaml +++ /dev/null @@ -1,118 +0,0 @@ -# This will deploy the Spacedrive Server container to the `spacedrive`` namespace on Kubernetes. - -apiVersion: networking.k8s.io/v1 -kind: Ingress -metadata: - name: sdserver-ingress - namespace: spacedrive - labels: - app.kubernetes.io/name: sdserver - app.kubernetes.io/component: webserver - annotations: - traefik.ingress.kubernetes.io/router.tls.certresolver: le - traefik.ingress.kubernetes.io/router.middlewares: kube-system-antiseo@kubernetescrd -spec: - rules: - - host: spacedrive.otbeaumont.me - http: - paths: - - path: / - pathType: Prefix - backend: - service: - name: sdserver-service - port: - number: 8080 ---- -apiVersion: v1 -kind: Service -metadata: - name: sdserver-service - namespace: spacedrive - labels: - app.kubernetes.io/name: sdserver - app.kubernetes.io/component: webserver -spec: - ports: - - port: 8080 - targetPort: 8080 - protocol: TCP - selector: - app.kubernetes.io/name: sdserver - app.kubernetes.io/component: webserver ---- -apiVersion: v1 -kind: PersistentVolumeClaim -metadata: - name: sdserver-pvc - namespace: spacedrive -spec: - accessModes: - - ReadWriteOnce - storageClassName: local-path - resources: - requests: - storage: 512M ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: sdserver-deployment - namespace: spacedrive - labels: - app.kubernetes.io/name: sdserver - app.kubernetes.io/component: webserver -spec: - replicas: 1 - selector: - matchLabels: - app.kubernetes.io/name: sdserver - app.kubernetes.io/component: webserver - template: - metadata: - labels: - app.kubernetes.io/name: sdserver - app.kubernetes.io/component: webserver - spec: - restartPolicy: Always - # refer to Dockerfile to find securityContext values - securityContext: - runAsUser: 101 - runAsGroup: 101 - fsGroup: 101 - containers: - - name: sdserver - image: ghcr.io/oscartbeaumont/spacedrive/server:staging - imagePullPolicy: Always - ports: - - containerPort: 8080 - volumeMounts: - - name: data-volume - mountPath: /data - securityContext: - allowPrivilegeEscalation: false - resources: - limits: - memory: 100Mi - cpu: 100m - requests: - memory: 5Mi - cpu: 10m - readinessProbe: - httpGet: - path: /health - 
port: 8080 - initialDelaySeconds: 10 - failureThreshold: 4 - periodSeconds: 5 - livenessProbe: - httpGet: - path: /health - port: 8080 - initialDelaySeconds: 20 - failureThreshold: 3 - periodSeconds: 10 - volumes: - - name: data-volume - persistentVolumeClaim: - claimName: sdserver-pvc diff --git a/apps/server/src/main.rs b/apps/server/src/main.rs index 7e9c4683e..5d7c85331 100644 --- a/apps/server/src/main.rs +++ b/apps/server/src/main.rs @@ -1,4 +1,4 @@ -use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node}; +use sdcore::{ClientCommand, ClientQuery, CoreEvent, CoreResponse, Node, NodeController}; use std::{env, path::Path}; use actix::{ @@ -19,7 +19,7 @@ const DATA_DIR_ENV_VAR: &'static str = "DATA_DIR"; /// Define HTTP actor struct Socket { _event_receiver: web::Data>, - core: web::Data, + core: web::Data, } impl Actor for Socket { @@ -52,7 +52,15 @@ impl StreamHandler> for Socket { match msg { Ok(ws::Message::Ping(msg)) => ctx.pong(&msg), Ok(ws::Message::Text(text)) => { - let msg: SocketMessage = serde_json::from_str(&text).unwrap(); + let msg = serde_json::from_str::(&text); + + let msg = match msg { + Ok(msg) => msg, + Err(err) => { + println!("Error parsing message: {}", err); + return; + }, + }; let core = self.core.clone(); @@ -133,7 +141,7 @@ async fn ws_handler( req: HttpRequest, stream: web::Payload, event_receiver: web::Data>, - controller: web::Data, + controller: web::Data, ) -> Result { let resp = ws::start( Socket { @@ -178,7 +186,7 @@ async fn main() -> std::io::Result<()> { async fn setup() -> ( web::Data>, - web::Data, + web::Data, ) { let data_dir_path = match env::var(DATA_DIR_ENV_VAR) { Ok(path) => Path::new(&path).to_path_buf(), @@ -196,15 +204,8 @@ async fn setup() -> ( }, }; - let (mut node, event_receiver) = Node::new(data_dir_path).await; - - node.initializer().await; - - let controller = node.get_controller(); - - tokio::spawn(async move { - node.start().await; - }); + let (controller, event_receiver, node) = Node::new(data_dir_path).await; + tokio::spawn(node.start()); (web::Data::new(event_receiver), web::Data::new(controller)) } diff --git a/apps/web/src/App.tsx b/apps/web/src/App.tsx index 72560e3ff..f53220197 100644 --- a/apps/web/src/App.tsx +++ b/apps/web/src/App.tsx @@ -1,20 +1,47 @@ import { BaseTransport } from '@sd/client'; -import { ClientCommand, ClientQuery, CoreEvent } from '@sd/core'; +import { ClientCommand, ClientQuery } from '@sd/core'; import SpacedriveInterface from '@sd/interface'; import React, { useEffect } from 'react'; -const websocket = new WebSocket(import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws'); +const timeouts = [1000, 2000, 5000, 10000]; // In milliseconds const randomId = () => Math.random().toString(36).slice(2); // bind state to core via Tauri class Transport extends BaseTransport { + websocket: WebSocket; requestMap = new Map void>(); constructor() { super(); + this.websocket = new WebSocket( + import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws' + ); + this.attachEventListeners(); + } - websocket.addEventListener('message', (event) => { + async reconnect(timeoutIndex = 0) { + let timeout = + (timeouts[timeoutIndex] ?? 
timeouts[timeouts.length - 1]) + + (Math.floor(Math.random() * 5000 /* 5 Seconds */) + 1); + + setTimeout(() => { + let ws = new WebSocket(import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws'); + new Promise(function (resolve, reject) { + ws.addEventListener('open', () => resolve(null)); + ws.addEventListener('close', reject); + }) + .then(() => { + this.websocket = ws; + this.attachEventListeners(); + console.log('Reconnected!'); + }) + .catch((err) => this.reconnect(timeoutIndex++)); + }, timeout); + } + + attachEventListeners() { + this.websocket.addEventListener('message', (event) => { if (!event.data) return; const { id, payload } = JSON.parse(event.data); @@ -29,8 +56,24 @@ class Transport extends BaseTransport { } } }); + + this.websocket.addEventListener('close', () => { + console.log('GONE'); + this.reconnect(); + }); } + async query(query: ClientQuery) { + if (websocket.readyState == 0) { + let resolve: () => void; + const promise = new Promise((res) => { + resolve = () => res(undefined); + }); + // @ts-ignore + websocket.addEventListener('open', resolve); + await promise; + } + const id = randomId(); let resolve: (data: any) => void; @@ -41,7 +84,7 @@ class Transport extends BaseTransport { // @ts-ignore this.requestMap.set(id, resolve); - websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } })); + this.websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } })); return await promise; } @@ -56,12 +99,14 @@ class Transport extends BaseTransport { // @ts-ignore this.requestMap.set(id, resolve); - websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } })); + this.websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } })); return await promise; } } +const transport = new Transport(); + function App() { useEffect(() => { window.parent.postMessage('spacedrive-hello', '*'); @@ -72,7 +117,7 @@ function App() { {/*

*/} } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "NodeGetState", data: NodeState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array } | { key: "JobGetHistory", data: Array } | { key: "GetLibraryStatistics", data: Statistics }; \ No newline at end of file +export type CoreResponse = { key: "Success", data: null } | { key: "Error", data: string } | { key: "NodeGetLibraries", data: Array } | { key: "SysGetVolumes", data: Array } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "NodeGetState", data: NodeState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array } | { key: "JobGetHistory", data: Array } | { key: "GetLibraryStatistics", data: Statistics }; \ No newline at end of file diff --git a/core/bindings/LibraryCommand.ts b/core/bindings/LibraryCommand.ts new file mode 100644 index 000000000..713fc8989 --- /dev/null +++ b/core/bindings/LibraryCommand.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export type LibraryCommand = { key: "FileReadMetaData", params: { id: number, } } | { key: "FileSetNote", params: { id: number, note: string | null, } } | { key: "FileDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "LocRescan", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles", params: { id: number, path: string, } }; \ No newline at end of file diff --git a/core/bindings/LibraryConfig.ts b/core/bindings/LibraryConfig.ts new file mode 100644 index 000000000..8a371014b --- /dev/null +++ b/core/bindings/LibraryConfig.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export interface LibraryConfig { version: string | null, name: string, description: string, } \ No newline at end of file diff --git a/core/bindings/LibraryConfigWrapped.ts b/core/bindings/LibraryConfigWrapped.ts new file mode 100644 index 000000000..ee5b5ccfe --- /dev/null +++ b/core/bindings/LibraryConfigWrapped.ts @@ -0,0 +1,4 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. +import type { LibraryConfig } from "./LibraryConfig"; + +export interface LibraryConfigWrapped { uuid: string, config: LibraryConfig, } \ No newline at end of file diff --git a/core/bindings/LibraryQuery.ts b/core/bindings/LibraryQuery.ts new file mode 100644 index 000000000..2aa14279c --- /dev/null +++ b/core/bindings/LibraryQuery.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
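
(Editor's note: the `core/bindings/*.ts` files in this patch are machine-generated by ts-rs from the Rust enums further down in `core/src/lib.rs`. As a rough, hedged sketch of how such a binding comes to exist — `ExampleQuery` is a made-up name, not part of this diff, and this assumes the usual ts-rs workflow where exports are written when the crate's tests run:)

```rust
// Minimal ts-rs sketch: an adjacently-tagged serde enum with #[ts(export)]
// produces a discriminated TS union like the generated bindings above.
// Requires the `serde` and `ts-rs` crates; `ExampleQuery` is hypothetical.
use serde::{Deserialize, Serialize};
use ts_rs::TS;

#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)] // running `cargo test` writes an ExampleQuery.ts binding file
pub enum ExampleQuery {
    GetTags,
    GetLocation { id: i32 },
}
```
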
+ +export type LibraryQuery = { key: "LibGetTags" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" }; \ No newline at end of file diff --git a/core/bindings/NodeConfig.ts b/core/bindings/NodeConfig.ts new file mode 100644 index 000000000..512f0202c --- /dev/null +++ b/core/bindings/NodeConfig.ts @@ -0,0 +1,3 @@ +// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. + +export interface NodeConfig { version: string | null, id: string, name: string, p2p_port: number | null, } \ No newline at end of file diff --git a/core/bindings/NodeState.ts b/core/bindings/NodeState.ts index 6fc2d5c22..978fb3103 100644 --- a/core/bindings/NodeState.ts +++ b/core/bindings/NodeState.ts @@ -1,4 +1,3 @@ // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. -import type { LibraryState } from "./LibraryState"; -export interface NodeState { node_pub_id: string, node_id: number, node_name: string, data_path: string, tcp_port: number, libraries: Array, current_library_uuid: string, } \ No newline at end of file +export interface NodeState { version: string | null, id: string, name: string, p2p_port: number | null, data_path: string, } \ No newline at end of file diff --git a/core/index.ts b/core/index.ts index 85eee6629..60cc1bc54 100644 --- a/core/index.ts +++ b/core/index.ts @@ -2,6 +2,7 @@ export * from './bindings/Client'; export * from './bindings/ClientCommand'; export * from './bindings/ClientQuery'; export * from './bindings/ClientState'; +export * from './bindings/ConfigMetadata'; export * from './bindings/CoreEvent'; export * from './bindings/CoreResource'; export * from './bindings/CoreResponse'; @@ -12,9 +13,14 @@ export * from './bindings/FileKind'; export * from './bindings/FilePath'; export * from './bindings/JobReport'; export * from './bindings/JobStatus'; +export * from './bindings/LibraryCommand'; +export * from './bindings/LibraryConfig'; +export * from './bindings/LibraryConfigWrapped'; export * from './bindings/LibraryNode'; +export * from './bindings/LibraryQuery'; export * from './bindings/LibraryState'; export * from './bindings/LocationResource'; +export * from './bindings/NodeConfig'; export * from './bindings/NodeState'; export * from './bindings/Platform'; export * from './bindings/Statistics'; diff --git a/core/prisma/migrations/20220625180107_remove_library/migration.sql b/core/prisma/migrations/20220625180107_remove_library/migration.sql new file mode 100644 index 000000000..63e4f056f --- /dev/null +++ b/core/prisma/migrations/20220625180107_remove_library/migration.sql @@ -0,0 +1,29 @@ +/* + Warnings: + + - You are about to drop the `libraries` table. If the table is not empty, all the data it contains will be lost. + - You are about to drop the `library_statistics` table. If the table is not empty, all the data it contains will be lost. 
+ +*/ +-- DropTable +PRAGMA foreign_keys=off; +DROP TABLE "libraries"; +PRAGMA foreign_keys=on; + +-- DropTable +PRAGMA foreign_keys=off; +DROP TABLE "library_statistics"; +PRAGMA foreign_keys=on; + +-- CreateTable +CREATE TABLE "statistics" ( + "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT, + "date_captured" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP, + "total_file_count" INTEGER NOT NULL DEFAULT 0, + "library_db_size" TEXT NOT NULL DEFAULT '0', + "total_bytes_used" TEXT NOT NULL DEFAULT '0', + "total_bytes_capacity" TEXT NOT NULL DEFAULT '0', + "total_unique_bytes" TEXT NOT NULL DEFAULT '0', + "total_bytes_free" TEXT NOT NULL DEFAULT '0', + "preview_media_bytes" TEXT NOT NULL DEFAULT '0' +); diff --git a/core/prisma/schema.prisma b/core/prisma/schema.prisma index e8f911004..130151f62 100644 --- a/core/prisma/schema.prisma +++ b/core/prisma/schema.prisma @@ -35,21 +35,9 @@ model SyncEvent { @@map("sync_events") } -model Library { - id Int @id @default(autoincrement()) - pub_id String @unique - name String - is_primary Boolean @default(true) - date_created DateTime @default(now()) - timezone String? - - @@map("libraries") -} - -model LibraryStatistics { +model Statistics { id Int @id @default(autoincrement()) date_captured DateTime @default(now()) - library_id Int @unique total_file_count Int @default(0) library_db_size String @default("0") total_bytes_used String @default("0") @@ -58,7 +46,7 @@ model LibraryStatistics { total_bytes_free String @default("0") preview_media_bytes String @default("0") - @@map("library_statistics") + @@map("statistics") } model Node { diff --git a/core/src/encode/thumb.rs b/core/src/encode/thumb.rs index 989859506..b20d8f728 100644 --- a/core/src/encode/thumb.rs +++ b/core/src/encode/thumb.rs @@ -1,8 +1,8 @@ +use crate::library::LibraryContext; use crate::{ - job::{Job, JobReportUpdate, WorkerContext}, - node::get_nodestate, + job::{Job, JobReportUpdate, JobResult, WorkerContext}, prisma::file_path, - sys, CoreContext, CoreEvent, + sys, CoreEvent, }; use image::{self, imageops, DynamicImage, GenericImageView}; use log::{error, info}; @@ -28,11 +28,15 @@ impl Job for ThumbnailJob { fn name(&self) -> &'static str { "thumbnailer" } + async fn run(&self, ctx: WorkerContext) -> JobResult { + let library_ctx = ctx.library_ctx(); + let thumbnail_dir = library_ctx + .config() + .data_directory() + .join(THUMBNAIL_CACHE_DIR_NAME) + .join(self.location_id.to_string()); - async fn run(&self, ctx: WorkerContext) -> Result<(), Box> { - let config = get_nodestate(); - - let location = sys::get_location(&ctx.core_ctx, self.location_id).await?; + let location = sys::get_location(&library_ctx, self.location_id).await?; info!( "Searching for images in location {} at path {:#?}", @@ -40,19 +44,11 @@ impl Job for ThumbnailJob { ); // create all necessary directories if they don't exist - fs::create_dir_all( - config - .data_path - .as_ref() - .unwrap() - .join(THUMBNAIL_CACHE_DIR_NAME) - .join(format!("{}", self.location_id)), - ) - .await?; + fs::create_dir_all(&thumbnail_dir).await?; let root_path = location.path.unwrap(); // query database for all files in this location that need thumbnails - let image_files = get_images(&ctx.core_ctx, self.location_id, &self.path).await?; + let image_files = get_images(&library_ctx, self.location_id, &self.path).await?; info!("Found {:?} files", image_files.len()); ctx.progress(vec![ @@ -86,14 +82,7 @@ impl Job for ThumbnailJob { }; // Define and write the WebP-encoded file to a given path - let output_path = config - .data_path - 
.as_ref() - .unwrap() - .join(THUMBNAIL_CACHE_DIR_NAME) - .join(format!("{}", location.id)) - .join(&cas_id) - .with_extension("webp"); + let output_path = thumbnail_dir.join(&cas_id).with_extension("webp"); // check if file exists at output path if !output_path.exists() { @@ -105,7 +94,9 @@ impl Job for ThumbnailJob { ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]); if !self.background { - ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }).await; + ctx.library_ctx() + .emit(CoreEvent::NewThumbnail { cas_id }) + .await; }; } else { info!("Thumb exists, skipping... {}", output_path.display()); @@ -145,7 +136,7 @@ pub async fn generate_thumbnail>( } pub async fn get_images( - ctx: &CoreContext, + ctx: &LibraryContext, location_id: i32, path: impl AsRef, ) -> Result, std::io::Error> { @@ -167,7 +158,7 @@ pub async fn get_images( } let image_files = ctx - .database + .db .file_path() .find_many(params) .with(file_path::file::fetch()) diff --git a/core/src/file/cas/identifier.rs b/core/src/file/cas/identifier.rs index 855352385..a05684eda 100644 --- a/core/src/file/cas/identifier.rs +++ b/core/src/file/cas/identifier.rs @@ -2,10 +2,10 @@ use super::checksum::generate_cas_id; use crate::{ file::FileError, job::JobReportUpdate, - job::{Job, WorkerContext}, + job::{Job, JobResult, WorkerContext}, + library::LibraryContext, prisma::{file, file_path}, sys::get_location, - CoreContext, }; use chrono::{DateTime, FixedOffset}; use futures::future::join_all; @@ -13,7 +13,6 @@ use log::{error, info}; use prisma_client_rust::{prisma_models::PrismaValue, raw, raw::Raw, Direction}; use serde::{Deserialize, Serialize}; use std::collections::{HashMap, HashSet}; -use std::error::Error; use std::path::{Path, PathBuf}; use tokio::{fs, io}; @@ -35,13 +34,13 @@ impl Job for FileIdentifierJob { "file_identifier" } - async fn run(&self, ctx: WorkerContext) -> Result<(), Box> { + async fn run(&self, ctx: WorkerContext) -> JobResult { info!("Identifying orphan file paths..."); - let location = get_location(&ctx.core_ctx, self.location_id).await?; + let location = get_location(&ctx.library_ctx(), self.location_id).await?; let location_path = location.path.unwrap_or_else(|| "".to_string()); - let total_count = count_orphan_file_paths(&ctx.core_ctx, location.id.into()).await?; + let total_count = count_orphan_file_paths(&ctx.library_ctx(), location.id.into()).await?; info!("Found {} orphan file paths", total_count); let task_count = (total_count as f64 / CHUNK_SIZE as f64).ceil() as usize; @@ -59,7 +58,7 @@ impl Job for FileIdentifierJob { let mut cas_lookup: HashMap = HashMap::new(); // get chunk of orphans to process - let file_paths = match get_orphan_file_paths(&ctx.core_ctx, cursor).await { + let file_paths = match get_orphan_file_paths(&ctx.library_ctx(), cursor).await { Ok(file_paths) => file_paths, Err(e) => { info!("Error getting orphan file paths: {:#?}", e); @@ -93,8 +92,8 @@ impl Job for FileIdentifierJob { // find all existing files by cas id let generated_cas_ids = chunk.values().map(|c| c.cas_id.clone()).collect(); let existing_files = ctx - .core_ctx - .database + .library_ctx() + .db .file() .find_many(vec![file::cas_id::in_vec(generated_cas_ids)]) .exec() @@ -104,7 +103,8 @@ impl Job for FileIdentifierJob { // link those existing files to their file paths // Had to put the file_path in a variable outside of the closure, to satisfy the borrow checker - let prisma_file_path = ctx.core_ctx.database.file_path(); + let library_ctx = ctx.library_ctx(); + let prisma_file_path = 
library_ctx.db.file_path(); for result in join_all(existing_files.iter().map(|file| { prisma_file_path .find_unique(file_path::id::equals( @@ -133,7 +133,7 @@ impl Job for FileIdentifierJob { .collect::>(); // assemble prisma values for new unique files - let mut values: Vec = Vec::new(); + let mut values = Vec::with_capacity(new_files.len() * 3); for file in &new_files { values.extend([ PrismaValue::String(file.cas_id.clone()), @@ -144,8 +144,8 @@ impl Job for FileIdentifierJob { // create new file records with assembled values let created_files: Vec = ctx - .core_ctx - .database + .library_ctx() + .db ._query_raw(Raw::new( &format!( "INSERT INTO files (cas_id, size_in_bytes, date_created) VALUES {} @@ -210,10 +210,10 @@ struct CountRes { } pub async fn count_orphan_file_paths( - ctx: &CoreContext, + ctx: &LibraryContext, location_id: i64, ) -> Result { - let files_count = ctx.database + let files_count = ctx.db ._query_raw::(raw!( "SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE AND location_id = {}", PrismaValue::Int(location_id) @@ -223,14 +223,14 @@ pub async fn count_orphan_file_paths( } pub async fn get_orphan_file_paths( - ctx: &CoreContext, + ctx: &LibraryContext, cursor: i32, ) -> Result, FileError> { info!( "discovering {} orphan file paths at cursor: {:?}", CHUNK_SIZE, cursor ); - ctx.database + ctx.db .file_path() .find_many(vec![ file_path::file_id::equals(None), diff --git a/core/src/file/explorer/open.rs b/core/src/file/explorer/open.rs index 901fa3589..2237e0deb 100644 --- a/core/src/file/explorer/open.rs +++ b/core/src/file/explorer/open.rs @@ -1,28 +1,25 @@ use crate::{ encode::THUMBNAIL_CACHE_DIR_NAME, file::{DirectoryWithContents, FileError, FilePath}, - node::get_nodestate, + library::LibraryContext, prisma::file_path, sys::get_location, - CoreContext, }; use log::info; use std::path::Path; pub async fn open_dir( - ctx: &CoreContext, + ctx: &LibraryContext, location_id: i32, path: impl AsRef, ) -> Result { - let config = get_nodestate(); - // get location let location = get_location(ctx, location_id).await?; let path_str = path.as_ref().to_string_lossy().to_string(); let directory = ctx - .database + .db .file_path() .find_first(vec![ file_path::location_id::equals(Some(location.id)), @@ -36,7 +33,7 @@ pub async fn open_dir( info!("DIRECTORY: {:?}", directory); let mut file_paths: Vec = ctx - .database + .db .file_path() .find_many(vec![ file_path::location_id::equals(Some(location.id)), @@ -49,17 +46,17 @@ pub async fn open_dir( .map(Into::into) .collect(); - if let Some(ref data_path) = config.data_path { - for file_path in &mut file_paths { - if let Some(file) = &mut file_path.file { - let thumb_path = data_path - .join(THUMBNAIL_CACHE_DIR_NAME) - .join(location.id.to_string()) - .join(file.cas_id.clone()) - .with_extension("webp"); + for file_path in &mut file_paths { + if let Some(file) = &mut file_path.file { + let thumb_path = ctx + .config() + .data_directory() + .join(THUMBNAIL_CACHE_DIR_NAME) + .join(location.id.to_string()) + .join(&file.cas_id) + .with_extension("webp"); - file.has_thumbnail = thumb_path.exists(); - } + file.has_thumbnail = thumb_path.exists(); } } diff --git a/core/src/file/indexer/mod.rs b/core/src/file/indexer/mod.rs index d4e428a4d..942a1e3b8 100644 --- a/core/src/file/indexer/mod.rs +++ b/core/src/file/indexer/mod.rs @@ -1,5 +1,4 @@ -use crate::job::{Job, JobReportUpdate, WorkerContext}; -use std::error::Error; +use crate::job::{Job, JobReportUpdate, JobResult, WorkerContext}; use std::path::PathBuf; 
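
(Editor's note: the jobs touched here all move from `Result<(), Box<dyn Error>>` to the new `JobResult` alias and reach the database through `WorkerContext::library_ctx()`. The following is a hedged, self-contained sketch of that shape using simplified stand-ins for the real `Job`/`WorkerContext`/`LibraryContext` types; `NoopJob` is hypothetical and the exact error bounds on the real alias may differ. It assumes the `tokio` and `async-trait` crates.)

```rust
// Simplified stand-ins for the refactored job types in core/src/job.
use std::error::Error;

pub type JobResult = Result<(), Box<dyn Error + Send + Sync>>;

#[derive(Clone)]
pub struct LibraryContext; // the real one carries db, config and node context

pub struct WorkerContext {
    library_ctx: LibraryContext,
}

impl WorkerContext {
    pub fn library_ctx(&self) -> LibraryContext {
        self.library_ctx.clone()
    }
}

#[async_trait::async_trait]
pub trait Job: Send + Sync {
    fn name(&self) -> &'static str;
    async fn run(&self, ctx: WorkerContext) -> JobResult;
}

pub struct NoopJob;

#[async_trait::async_trait]
impl Job for NoopJob {
    fn name(&self) -> &'static str {
        "noop"
    }

    async fn run(&self, ctx: WorkerContext) -> JobResult {
        // a real job would query the library database via ctx.library_ctx().db
        let _library = ctx.library_ctx();
        Ok(())
    }
}

#[tokio::main]
async fn main() -> JobResult {
    let job = NoopJob;
    println!("running job: {}", job.name());
    job.run(WorkerContext { library_ctx: LibraryContext }).await
}
```
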
use self::scan::ScanProgress; @@ -20,8 +19,8 @@ impl Job for IndexerJob { fn name(&self) -> &'static str { "indexer" } - async fn run(&self, ctx: WorkerContext) -> Result<(), Box> { - scan_path(&ctx.core_ctx.clone(), &self.path, move |p| { + async fn run(&self, ctx: WorkerContext) -> JobResult { + scan_path(&ctx.library_ctx(), &self.path, move |p| { ctx.progress( p.iter() .map(|p| match p.clone() { diff --git a/core/src/file/indexer/scan.rs b/core/src/file/indexer/scan.rs index 6dd7a5bb7..87c830f6a 100644 --- a/core/src/file/indexer/scan.rs +++ b/core/src/file/indexer/scan.rs @@ -1,8 +1,6 @@ -use crate::{ - sys::{create_location, LocationResource}, - CoreContext, -}; - +use crate::job::JobResult; +use crate::library::LibraryContext; +use crate::sys::{create_location, LocationResource}; use chrono::{DateTime, Utc}; use log::{error, info}; use prisma_client_rust::prisma_models::PrismaValue; @@ -30,10 +28,10 @@ static BATCH_SIZE: usize = 100; // creates a vector of valid path buffers from a directory pub async fn scan_path( - ctx: &CoreContext, + ctx: &LibraryContext, path: impl AsRef + Debug, on_progress: impl Fn(Vec) + Send + Sync + 'static, -) -> Result<(), Box> { +) -> JobResult { let location = create_location(ctx, &path).await?; // query db to highers id, so we can increment it for the new files indexed @@ -43,7 +41,7 @@ pub async fn scan_path( } // grab the next id so we can increment in memory for batch inserting let first_file_id = match ctx - .database + .db ._query_raw::(raw!("SELECT MAX(id) id FROM file_paths")) .await { @@ -168,7 +166,7 @@ pub async fn scan_path( files ); - let count = ctx.database._execute_raw(raw).await; + let count = ctx.db._execute_raw(raw).await; info!("Inserted {:?} records", count); } diff --git a/core/src/file/mod.rs b/core/src/file/mod.rs index 9959af3ed..1829e3fa7 100644 --- a/core/src/file/mod.rs +++ b/core/src/file/mod.rs @@ -1,13 +1,15 @@ -use std::path::PathBuf; +use chrono::{DateTime, Utc}; use int_enum::IntEnum; use serde::{Deserialize, Serialize}; +use std::path::PathBuf; use thiserror::Error; use ts_rs::TS; use crate::{ + library::LibraryContext, prisma::{self, file, file_path}, sys::SysError, - ClientQuery, CoreContext, CoreError, CoreEvent, CoreResponse, + ClientQuery, CoreError, CoreEvent, CoreResponse, LibraryQuery, }; pub mod cas; pub mod explorer; @@ -33,9 +35,9 @@ pub struct File { pub ipfs_id: Option, pub note: Option, - pub date_created: chrono::DateTime, - pub date_modified: chrono::DateTime, - pub date_indexed: chrono::DateTime, + pub date_created: DateTime, + pub date_modified: DateTime, + pub date_indexed: DateTime, pub paths: Vec, // pub media_data: Option, @@ -56,9 +58,9 @@ pub struct FilePath { pub file_id: Option, pub parent_id: Option, - pub date_created: chrono::DateTime, - pub date_modified: chrono::DateTime, - pub date_indexed: chrono::DateTime, + pub date_created: DateTime, + pub date_modified: DateTime, + pub date_indexed: DateTime, pub file: Option, } @@ -148,12 +150,12 @@ pub enum FileError { } pub async fn set_note( - ctx: CoreContext, + ctx: LibraryContext, id: i32, note: Option, ) -> Result { let _response = ctx - .database + .db .file() .find_unique(file::id::equals(id)) .update(vec![file::note::set(note.clone())]) @@ -161,10 +163,13 @@ pub async fn set_note( .await .unwrap(); - ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibGetExplorerDir { - limit: 0, - path: "".to_string(), - location_id: 0, + ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery { + library_id: ctx.id.to_string(), + query: 
LibraryQuery::LibGetExplorerDir { + limit: 0, + path: PathBuf::new(), + location_id: 0, + }, })) .await; diff --git a/core/src/job/jobs.rs b/core/src/job/jobs.rs index 3a0b56c5a..0cca9b8ad 100644 --- a/core/src/job/jobs.rs +++ b/core/src/job/jobs.rs @@ -3,48 +3,69 @@ use super::{ JobError, }; use crate::{ - node::get_nodestate, + library::LibraryContext, prisma::{job, node}, - CoreContext, }; use int_enum::IntEnum; -use log::info; +use log::{error, info}; use serde::{Deserialize, Serialize}; use std::{ collections::{HashMap, VecDeque}, + error::Error, fmt::Debug, sync::Arc, }; -use tokio::sync::Mutex; +use tokio::sync::{mpsc, Mutex, RwLock}; use ts_rs::TS; // db is single threaded, nerd const MAX_WORKERS: usize = 1; +pub type JobResult = Result<(), Box>; + #[async_trait::async_trait] pub trait Job: Send + Sync + Debug { fn name(&self) -> &'static str; - async fn run(&self, ctx: WorkerContext) -> Result<(), Box>; + async fn run(&self, ctx: WorkerContext) -> JobResult; +} + +pub enum JobManagerEvent { + IngestJob(LibraryContext, Box), } // jobs struct is maintained by the core -pub struct Jobs { - job_queue: VecDeque>, +pub struct JobManager { + job_queue: RwLock>>, // workers are spawned when jobs are picked off the queue - running_workers: HashMap>>, + running_workers: RwLock>>>, + internal_sender: mpsc::UnboundedSender, } -impl Jobs { - pub fn new() -> Self { - Self { - job_queue: VecDeque::new(), - running_workers: HashMap::new(), - } +impl JobManager { + pub fn new() -> Arc { + let (internal_sender, mut internal_receiver) = mpsc::unbounded_channel(); + let this = Arc::new(Self { + job_queue: RwLock::new(VecDeque::new()), + running_workers: RwLock::new(HashMap::new()), + internal_sender, + }); + + let this2 = this.clone(); + tokio::spawn(async move { + while let Some(event) = internal_receiver.recv().await { + match event { + JobManagerEvent::IngestJob(ctx, job) => this2.clone().ingest(&ctx, job).await, + } + } + }); + + this } - pub async fn ingest(&mut self, ctx: &CoreContext, job: Box) { + pub async fn ingest(self: Arc, ctx: &LibraryContext, job: Box) { // create worker to process job - if self.running_workers.len() < MAX_WORKERS { + let mut running_workers = self.running_workers.write().await; + if running_workers.len() < MAX_WORKERS { info!("Running job: {:?}", job.name()); let worker = Worker::new(job); @@ -52,51 +73,57 @@ impl Jobs { let wrapped_worker = Arc::new(Mutex::new(worker)); - Worker::spawn(Arc::clone(&wrapped_worker), ctx).await; + Worker::spawn(Arc::clone(&self), Arc::clone(&wrapped_worker), ctx.clone()).await; - self.running_workers.insert(id, wrapped_worker); + running_workers.insert(id, wrapped_worker); } else { - self.job_queue.push_back(job); + self.job_queue.write().await.push_back(job); } } - pub fn ingest_queue(&mut self, _ctx: &CoreContext, job: Box) { - self.job_queue.push_back(job); + pub async fn ingest_queue(&self, _ctx: &LibraryContext, job: Box) { + self.job_queue.write().await.push_back(job); } - pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) { + + pub async fn complete(self: Arc, ctx: &LibraryContext, job_id: String) { // remove worker from running workers - self.running_workers.remove(&job_id); + self.running_workers.write().await.remove(&job_id); // continue queue - let job = self.job_queue.pop_front(); + let job = self.job_queue.write().await.pop_front(); if let Some(job) = job { - self.ingest(ctx, job).await; + // We can't directly execute `self.ingest` here because it would cause an async cycle. 
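
(Editor's note: the "async cycle" comment above is the crux of the new `JobManager` design. If `complete` awaited `ingest` directly, the two `async fn`s would be mutually recursive through the spawned worker future, so the next queued job is instead handed to a background task over an internal mpsc channel. A stripped-down, hedged sketch of that pattern follows; `String` stands in for `Box<dyn Job>` and the worker bookkeeping is omitted.)

```rust
// Internal-channel pattern: queued items are re-ingested by a background task
// instead of by `complete()` calling `ingest()` directly.
use std::{collections::VecDeque, sync::Arc, time::Duration};
use tokio::sync::{mpsc, RwLock};

struct Manager {
    queue: RwLock<VecDeque<String>>,
    internal_tx: mpsc::UnboundedSender<String>,
}

impl Manager {
    fn new() -> Arc<Self> {
        let (internal_tx, mut internal_rx) = mpsc::unbounded_channel();
        let this = Arc::new(Self {
            queue: RwLock::new(VecDeque::new()),
            internal_tx,
        });

        // The background task owns re-ingestion, breaking the complete -> ingest cycle.
        let manager = this.clone();
        tokio::spawn(async move {
            while let Some(job) = internal_rx.recv().await {
                manager.clone().ingest(job).await;
            }
        });

        this
    }

    async fn ingest(self: Arc<Self>, job: String) {
        // The real manager spawns a Worker while under MAX_WORKERS; here we just "run" it.
        println!("running job: {job}");
        self.complete().await;
    }

    async fn complete(self: Arc<Self>) {
        if let Some(next) = self.queue.write().await.pop_front() {
            // Sending instead of awaiting `self.ingest(next)` avoids the recursive future.
            let _ = self.internal_tx.send(next);
        }
    }
}

#[tokio::main]
async fn main() {
    let manager = Manager::new();
    manager.queue.write().await.push_back("thumbnailer".into());
    manager.clone().ingest("indexer".into()).await;
    // give the background task a moment to drain the queue in this toy example
    tokio::time::sleep(Duration::from_millis(50)).await;
}
```
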
+ self.internal_sender + .send(JobManagerEvent::IngestJob(ctx.clone(), job)) + .unwrap_or_else(|_| { + error!("Failed to ingest job!"); + }); } } pub async fn get_running(&self) -> Vec { let mut ret = vec![]; - for worker in self.running_workers.values() { + for worker in self.running_workers.read().await.values() { let worker = worker.lock().await; ret.push(worker.job_report.clone()); } ret } - pub async fn queue_pending_job(ctx: &CoreContext) -> Result<(), JobError> { - let _next_job = ctx - .database - .job() - .find_first(vec![job::status::equals(JobStatus::Queued.int_value())]) - .exec() - .await?; + // pub async fn queue_pending_job(ctx: &LibraryContext) -> Result<(), JobError> { + // let _next_job = ctx + // .db + // .job() + // .find_first(vec![job::status::equals(JobStatus::Queued.int_value())]) + // .exec() + // .await?; - Ok(()) - } + // Ok(()) + // } - pub async fn get_history(ctx: &CoreContext) -> Result, JobError> { + pub async fn get_history(ctx: &LibraryContext) -> Result, JobError> { let jobs = ctx - .database + .db .job() .find_many(vec![job::status::not(JobStatus::Running.int_value())]) .exec() @@ -171,30 +198,29 @@ impl JobReport { seconds_elapsed: 0, } } - pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> { - let config = get_nodestate(); + pub async fn create(&self, ctx: &LibraryContext) -> Result<(), JobError> { let mut params = Vec::new(); if self.data.is_some() { params.push(job::data::set(self.data.clone())) } - ctx.database + ctx.db .job() .create( job::id::set(self.id.clone()), job::name::set(self.name.clone()), job::action::set(1), - job::nodes::link(node::id::equals(config.node_id)), + job::nodes::link(node::id::equals(ctx.node_local_id)), params, ) .exec() .await?; Ok(()) } - pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> { - ctx.database + pub async fn update(&self, ctx: &LibraryContext) -> Result<(), JobError> { + ctx.db .job() .find_unique(job::id::equals(self.id.clone())) .update(vec![ diff --git a/core/src/job/worker.rs b/core/src/job/worker.rs index 1acf4f250..ddfd17ca2 100644 --- a/core/src/job/worker.rs +++ b/core/src/job/worker.rs @@ -1,8 +1,8 @@ use super::{ jobs::{JobReport, JobReportUpdate, JobStatus}, - Job, + Job, JobManager, }; -use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent}; +use crate::{library::LibraryContext, ClientQuery, CoreEvent, LibraryQuery}; use log::error; use std::{sync::Arc, time::Duration}; use tokio::{ @@ -29,8 +29,8 @@ enum WorkerState { #[derive(Clone)] pub struct WorkerContext { pub uuid: String, - pub core_ctx: CoreContext, - pub sender: UnboundedSender, + library_ctx: LibraryContext, + sender: UnboundedSender, } impl WorkerContext { @@ -39,9 +39,13 @@ impl WorkerContext { .send(WorkerEvent::Progressed(updates)) .unwrap_or(()); } + + pub fn library_ctx(&self) -> LibraryContext { + self.library_ctx.clone() + } + // save the job data to // pub fn save_data () { - // } } @@ -66,7 +70,11 @@ impl Worker { } } // spawns a thread and extracts channel sender to communicate with it - pub async fn spawn(worker: Arc>, ctx: &CoreContext) { + pub async fn spawn( + job_manager: Arc, + worker: Arc>, + ctx: LibraryContext, + ) { // we capture the worker receiver channel so state can be updated from inside the worker let mut worker_mut = worker.lock().await; // extract owned job and receiver from Self @@ -79,25 +87,26 @@ impl Worker { WorkerState::Running => unreachable!(), }; let worker_sender = worker_mut.worker_sender.clone(); - let core_ctx = ctx.clone(); 
worker_mut.job_report.status = JobStatus::Running; - worker_mut.job_report.create(ctx).await.unwrap_or(()); + worker_mut.job_report.create(&ctx).await.unwrap_or(()); // spawn task to handle receiving events from the worker + let library_ctx = ctx.clone(); tokio::spawn(Worker::track_progress( worker.clone(), worker_receiver, - ctx.clone(), + library_ctx.clone(), )); let uuid = worker_mut.job_report.id.clone(); // spawn task to handle running the job + tokio::spawn(async move { let worker_ctx = WorkerContext { uuid, - core_ctx, + library_ctx, sender: worker_sender, }; let job_start = Instant::now(); @@ -116,20 +125,15 @@ impl Worker { } }); - let result = job.run(worker_ctx.clone()).await; - - if let Err(e) = result { - error!("job failed {:?}", e); + if let Err(e) = job.run(worker_ctx.clone()).await { + error!("job '{}' failed with error: {}", worker_ctx.uuid, e); worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(()); } else { // handle completion worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(()); } - worker_ctx - .core_ctx - .internal_sender - .send(InternalEvent::JobComplete(worker_ctx.uuid.clone())) - .unwrap_or(()); + + job_manager.complete(&ctx, worker_ctx.uuid).await; }); } @@ -140,7 +144,7 @@ impl Worker { async fn track_progress( worker: Arc>, mut channel: UnboundedReceiver, - ctx: CoreContext, + ctx: LibraryContext, ) { while let Some(command) = channel.recv().await { let mut worker = worker.lock().await; @@ -179,16 +183,23 @@ impl Worker { ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning)) .await; - ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory)) - .await; + + ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery { + library_id: ctx.id.to_string(), + query: LibraryQuery::JobGetHistory, + })) + .await; break; } WorkerEvent::Failed => { worker.job_report.status = JobStatus::Failed; worker.job_report.update(&ctx).await.unwrap_or(()); - ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory)) - .await; + ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery { + library_id: ctx.id.to_string(), + query: LibraryQuery::JobGetHistory, + })) + .await; break; } } diff --git a/core/src/lib.rs b/core/src/lib.rs index f0b0833d2..887d1526a 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -1,13 +1,13 @@ -use crate::{ - file::cas::FileIdentifierJob, library::get_library_path, node::NodeState, - prisma::file as prisma_file, prisma::location, util::db::create_connection, -}; -use job::{Job, JobReport, Jobs}; -use log::{error, info}; -use prisma::PrismaClient; +use crate::{file::cas::FileIdentifierJob, prisma::file as prisma_file, prisma::location}; +use job::{JobManager, JobReport}; +use library::{LibraryConfig, LibraryConfigWrapped, LibraryManager}; +use log::error; +use node::{NodeConfig, NodeConfigManager}; use serde::{Deserialize, Serialize}; -use std::path::PathBuf; -use std::sync::Arc; +use std::{ + path::{Path, PathBuf}, + sync::Arc, +}; use thiserror::Error; use tokio::{ fs, @@ -37,12 +37,12 @@ pub struct ReturnableMessage> { } // core controller is passed to the client to communicate with the core which runs in a dedicated thread -pub struct CoreController { +pub struct NodeController { query_sender: UnboundedSender>, command_sender: UnboundedSender>, } -impl CoreController { +impl NodeController { pub async fn query(&self, query: ClientQuery) -> Result { // a one time use channel to send and await a response let (sender, recv) = oneshot::channel(); @@ -69,48 +69,25 @@ impl CoreController { } } -#[derive(Debug)] -pub 
enum InternalEvent { - JobIngest(Box), - JobQueue(Box), - JobComplete(String), -} - #[derive(Clone)] -pub struct CoreContext { - pub database: Arc, +pub struct NodeContext { pub event_sender: mpsc::Sender, - pub internal_sender: UnboundedSender, + pub config: Arc, + pub jobs: Arc, } -impl CoreContext { - pub fn spawn_job(&self, job: Box) { - self.internal_sender - .send(InternalEvent::JobIngest(job)) - .unwrap_or_else(|e| { - error!("Failed to spawn job. {:?}", e); - }); - } - pub fn queue_job(&self, job: Box) { - self.internal_sender - .send(InternalEvent::JobQueue(job)) - .unwrap_or_else(|e| { - error!("Failed to queue job. {:?}", e); - }); - } +impl NodeContext { pub async fn emit(&self, event: CoreEvent) { self.event_sender.send(event).await.unwrap_or_else(|e| { - error!("Failed to emit event. {:?}", e); + error!("Failed to emit event. {:#?}", e); }); } } pub struct Node { - state: NodeState, - jobs: job::Jobs, - database: Arc, - // filetype_registry: library::TypeRegistry, - // extension_registry: library::ExtensionRegistry, + config: Arc, + library_manager: Arc, + jobs: Arc, // global messaging channels query_channel: ( @@ -122,73 +99,56 @@ pub struct Node { UnboundedReceiver>, ), event_sender: mpsc::Sender, - - // a channel for child threads to send events back to the core - internal_channel: ( - UnboundedSender, - UnboundedReceiver, - ), } impl Node { // create new instance of node, run startup tasks - pub async fn new(mut data_dir: PathBuf) -> (Node, mpsc::Receiver) { - let (event_sender, event_recv) = mpsc::channel(100); - - data_dir.push("spacedrive"); - // create data directory if it doesn't exist + pub async fn new( + data_dir: impl AsRef, + ) -> (NodeController, mpsc::Receiver, Node) { fs::create_dir_all(&data_dir).await.unwrap(); - // prepare basic client state - let mut state = NodeState::new(data_dir.clone(), "diamond-mastering-space-dragon").unwrap(); - // load from disk - state - .read_disk() + + let (event_sender, event_recv) = mpsc::channel(100); + let config = NodeConfigManager::new(data_dir.as_ref().to_owned()) .await - .unwrap_or_else(|_| error!("Error: No node state found, creating new one...")); - - state.save().await; - - info!("Node State: {:?}", state); - - // connect to default library - let database = Arc::new( - create_connection(&get_library_path(&data_dir)) - .await - .unwrap(), - ); - - let internal_channel = unbounded_channel::(); - - let node = Node { - state, - query_channel: unbounded_channel(), - command_channel: unbounded_channel(), - jobs: Jobs::new(), - event_sender, - database, - internal_channel, + .unwrap(); + let jobs = JobManager::new(); + let node_ctx = NodeContext { + event_sender: event_sender.clone(), + config: config.clone(), + jobs: jobs.clone(), }; - (node, event_recv) + let node = Node { + config, + library_manager: LibraryManager::new(data_dir.as_ref().join("libraries"), node_ctx) + .await + .unwrap(), + query_channel: unbounded_channel(), + command_channel: unbounded_channel(), + jobs, + event_sender, + }; + + ( + NodeController { + query_sender: node.query_channel.0.clone(), + command_sender: node.command_channel.0.clone(), + }, + event_recv, + node, + ) } - pub fn get_context(&self) -> CoreContext { - CoreContext { - database: self.database.clone(), + pub fn get_context(&self) -> NodeContext { + NodeContext { event_sender: self.event_sender.clone(), - internal_sender: self.internal_channel.0.clone(), + config: Arc::clone(&self.config), + jobs: Arc::clone(&self.jobs), } } - pub fn get_controller(&self) -> CoreController { - 
CoreController { - query_sender: self.query_channel.0.clone(), - command_sender: self.command_channel.0.clone(), - } - } - - pub async fn start(&mut self) { - let ctx = self.get_context(); + pub async fn start(mut self) { loop { // listen on global messaging channels for incoming messages tokio::select! { @@ -200,174 +160,200 @@ impl Node { let res = self.exec_command(msg.data).await; msg.return_sender.send(res).unwrap_or(()); } - Some(event) = self.internal_channel.1.recv() => { - match event { - InternalEvent::JobIngest(job) => { - self.jobs.ingest(&ctx, job).await; - }, - InternalEvent::JobQueue(job) => { - self.jobs.ingest_queue(&ctx, job); - }, - InternalEvent::JobComplete(id) => { - self.jobs.complete(&ctx, id).await; - }, - } - } } } } - // load library database + initialize client with db - pub async fn initializer(&self) { - info!("Initializing..."); - let ctx = self.get_context(); - - if self.state.libraries.is_empty() { - match library::create(&ctx, None).await { - Ok(library) => info!("Created new library: {:?}", library), - Err(e) => error!("Error creating library: {:?}", e), - } - } else { - for library in self.state.libraries.iter() { - // init database for library - match library::load(&ctx, &library.library_path, &library.library_uuid).await { - Ok(library) => info!("Loaded library: {:?}", library), - Err(e) => error!("Error loading library: {:?}", e), - } - } - } - // init node data within library - match node::LibraryNode::create(self).await { - Ok(_) => info!("Spacedrive online"), - Err(e) => error!("Error initializing node: {:?}", e), - }; - } async fn exec_command(&mut self, cmd: ClientCommand) -> Result { - info!("Core command: {:?}", cmd); - let ctx = self.get_context(); Ok(match cmd { - // CRUD for locations - ClientCommand::LocCreate { path } => { - let loc = sys::new_location_and_scan(&ctx, &path).await?; - // ctx.queue_job(Box::new(FileIdentifierJob)); - CoreResponse::LocCreate(loc) + ClientCommand::CreateLibrary { name } => { + self.library_manager + .create(LibraryConfig { + name: name.to_string(), + ..Default::default() + }) + .await + .unwrap(); + CoreResponse::Success(()) } - ClientCommand::LocUpdate { id, name } => { - ctx.database - .location() - .find_unique(location::id::equals(id)) - .update(vec![location::name::set(name)]) - .exec() - .await?; + ClientCommand::EditLibrary { + id, + name, + description, + } => { + self.library_manager + .edit_library(id, name, description) + .await + .unwrap(); + CoreResponse::Success(()) + } + ClientCommand::DeleteLibrary { id } => { + self.library_manager.delete_library(id).await.unwrap(); + CoreResponse::Success(()) + } + ClientCommand::LibraryCommand { + library_id, + command, + } => { + let ctx = self.library_manager.get_ctx(library_id).await.unwrap(); + match command { + // CRUD for locations + LibraryCommand::LocCreate { path } => { + let loc = sys::new_location_and_scan(&ctx, &path).await?; + // ctx.queue_job(Box::new(FileIdentifierJob)); + CoreResponse::LocCreate(loc) + } + LibraryCommand::LocUpdate { id, name } => { + ctx.db + .location() + .find_unique(location::id::equals(id)) + .update(vec![location::name::set(name)]) + .exec() + .await?; - CoreResponse::Success(()) - } - ClientCommand::LocDelete { id } => { - sys::delete_location(&ctx, id).await?; - CoreResponse::Success(()) - } - ClientCommand::LocRescan { id } => { - sys::scan_location(&ctx, id, String::new()); - CoreResponse::Success(()) - } - // CRUD for files - ClientCommand::FileReadMetaData { id: _ } => todo!(), - ClientCommand::FileSetNote { id, 
note } => file::set_note(ctx, id, note).await?, - // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(), - ClientCommand::FileDelete { id } => { - ctx.database - .file() - .find_unique(prisma_file::id::equals(id)) - .delete() - .exec() - .await?; + CoreResponse::Success(()) + } + LibraryCommand::LocDelete { id } => { + sys::delete_location(&ctx, id).await?; + CoreResponse::Success(()) + } + LibraryCommand::LocRescan { id } => { + sys::scan_location(&ctx, id, String::new()).await; + CoreResponse::Success(()) + } + // CRUD for files + LibraryCommand::FileReadMetaData { id: _ } => todo!(), + LibraryCommand::FileSetNote { id, note } => { + file::set_note(ctx, id, note).await? + } + // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(), + LibraryCommand::FileDelete { id } => { + ctx.db + .file() + .find_unique(prisma_file::id::equals(id)) + .delete() + .exec() + .await?; - CoreResponse::Success(()) - } - // CRUD for tags - ClientCommand::TagCreate { name: _, color: _ } => todo!(), - ClientCommand::TagAssign { - file_id: _, - tag_id: _, - } => todo!(), - ClientCommand::TagDelete { id: _ } => todo!(), - // CRUD for libraries - ClientCommand::SysVolumeUnmount { id: _ } => todo!(), - ClientCommand::LibDelete { id: _ } => todo!(), - ClientCommand::TagUpdate { name: _, color: _ } => todo!(), - ClientCommand::GenerateThumbsForLocation { id, path } => { - ctx.spawn_job(Box::new(ThumbnailJob { - location_id: id, - path, - background: false, // fix - })); - CoreResponse::Success(()) - } - // ClientCommand::PurgeDatabase => { - // info!("Purging database..."); - // fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap(); - // CoreResponse::Success(()) - // } - ClientCommand::IdentifyUniqueFiles { id, path } => { - ctx.spawn_job(Box::new(FileIdentifierJob { - location_id: id, - path, - })); - CoreResponse::Success(()) + CoreResponse::Success(()) + } + // CRUD for tags + LibraryCommand::TagCreate { name: _, color: _ } => todo!(), + LibraryCommand::TagAssign { + file_id: _, + tag_id: _, + } => todo!(), + LibraryCommand::TagUpdate { name: _, color: _ } => todo!(), + LibraryCommand::TagDelete { id: _ } => todo!(), + // CRUD for libraries + LibraryCommand::SysVolumeUnmount { id: _ } => todo!(), + LibraryCommand::GenerateThumbsForLocation { id, path } => { + ctx.spawn_job(Box::new(ThumbnailJob { + location_id: id, + path, + background: false, // fix + })) + .await; + CoreResponse::Success(()) + } + LibraryCommand::IdentifyUniqueFiles { id, path } => { + ctx.spawn_job(Box::new(FileIdentifierJob { + location_id: id, + path, + })) + .await; + CoreResponse::Success(()) + } + } } }) } // query sources of data async fn exec_query(&self, query: ClientQuery) -> Result { - let ctx = self.get_context(); Ok(match query { - // return the client state from memory - ClientQuery::NodeGetState => CoreResponse::NodeGetState(self.state.clone()), - // get system volumes without saving to library - ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?), - ClientQuery::SysGetLocations => { - CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?) - } - // get location from library - ClientQuery::SysGetLocation { id } => { - CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?) 
- } - // return contents of a directory for the explorer - ClientQuery::LibGetExplorerDir { - path, - location_id, - limit: _, - } => CoreResponse::LibGetExplorerDir( - file::explorer::open_dir(&ctx, location_id, &path).await?, + ClientQuery::NodeGetLibraries => CoreResponse::NodeGetLibraries( + self.library_manager.get_all_libraries_config().await, ), - ClientQuery::LibGetTags => todo!(), + ClientQuery::NodeGetState => CoreResponse::NodeGetState(NodeState { + config: self.config.get().await, + data_path: self.config.data_directory().to_str().unwrap().to_string(), + }), + ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?), ClientQuery::JobGetRunning => { CoreResponse::JobGetRunning(self.jobs.get_running().await) } - ClientQuery::JobGetHistory => { - CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?) - } - ClientQuery::GetLibraryStatistics => { - CoreResponse::GetLibraryStatistics(library::Statistics::calculate(&ctx).await?) - } ClientQuery::GetNodes => todo!(), + ClientQuery::LibraryQuery { library_id, query } => { + let ctx = match self.library_manager.get_ctx(library_id.clone()).await { + Some(ctx) => ctx, + None => { + println!("Library '{}' not found!", library_id); + return Ok(CoreResponse::Error("Library not found".into())); + } + }; + match query { + LibraryQuery::SysGetLocations => { + CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?) + } + // get location from library + LibraryQuery::SysGetLocation { id } => { + CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?) + } + // return contents of a directory for the explorer + LibraryQuery::LibGetExplorerDir { + location_id, + path, + limit: _, + } => CoreResponse::LibGetExplorerDir(Box::new( + file::explorer::open_dir(&ctx, location_id, path).await?, + )), + LibraryQuery::LibGetTags => todo!(), + LibraryQuery::JobGetHistory => { + CoreResponse::JobGetHistory(JobManager::get_history(&ctx).await?) + } + LibraryQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics( + library::Statistics::calculate(&ctx).await?, + ), + } + } }) } } -// represents an event this library can emit +/// is a command destined for the core #[derive(Serialize, Deserialize, Debug, TS)] #[serde(tag = "key", content = "params")] #[ts(export)] pub enum ClientCommand { + // Libraries + CreateLibrary { + name: String, + }, + EditLibrary { + id: String, + name: Option, + description: Option, + }, + DeleteLibrary { + id: String, + }, + LibraryCommand { + library_id: String, + command: LibraryCommand, + }, +} + +/// is a command destined for a specific library which is loaded into the core. 
+#[derive(Serialize, Deserialize, Debug, TS)] +#[serde(tag = "key", content = "params")] +#[ts(export)] +pub enum LibraryCommand { // Files FileReadMetaData { id: i32 }, FileSetNote { id: i32, note: Option }, // FileEncrypt { id: i32, algorithm: EncryptionAlgorithm }, FileDelete { id: i32 }, - // Library - LibDelete { id: i32 }, // Tags TagCreate { name: String, color: String }, TagUpdate { name: String, color: String }, @@ -385,15 +371,28 @@ pub enum ClientCommand { IdentifyUniqueFiles { id: i32, path: PathBuf }, } -// represents an event this library can emit +/// is a query destined for the core #[derive(Serialize, Deserialize, Debug, TS)] #[serde(tag = "key", content = "params")] #[ts(export)] pub enum ClientQuery { + NodeGetLibraries, NodeGetState, SysGetVolumes, - LibGetTags, JobGetRunning, + GetNodes, + LibraryQuery { + library_id: String, + query: LibraryQuery, + }, +} + +/// is a query destined for a specific library which is loaded into the core. +#[derive(Serialize, Deserialize, Debug, TS)] +#[serde(tag = "key", content = "params")] +#[ts(export)] +pub enum LibraryQuery { + LibGetTags, JobGetHistory, SysGetLocations, SysGetLocation { @@ -401,11 +400,10 @@ pub enum ClientQuery { }, LibGetExplorerDir { location_id: i32, - path: String, + path: PathBuf, limit: i32, }, GetLibraryStatistics, - GetNodes, } // represents an event this library can emit @@ -422,15 +420,25 @@ pub enum CoreEvent { DatabaseDisconnected { reason: Option }, } +#[derive(Serialize, Deserialize, Debug, TS)] +#[ts(export)] +pub struct NodeState { + #[serde(flatten)] + pub config: NodeConfig, + pub data_path: String, +} + #[derive(Serialize, Deserialize, Debug, TS)] #[serde(tag = "key", content = "data")] #[ts(export)] pub enum CoreResponse { Success(()), + Error(String), + NodeGetLibraries(Vec), SysGetVolumes(Vec), SysGetLocation(sys::LocationResource), SysGetLocations(Vec), - LibGetExplorerDir(file::DirectoryWithContents), + LibGetExplorerDir(Box), NodeGetState(NodeState), LocCreate(sys::LocationResource), JobGetRunning(Vec), diff --git a/core/src/library/library_config.rs b/core/src/library/library_config.rs new file mode 100644 index 000000000..f3ab140f1 --- /dev/null +++ b/core/src/library/library_config.rs @@ -0,0 +1,69 @@ +use std::{ + fs::File, + io::{BufReader, Seek, SeekFrom}, + path::PathBuf, +}; + +use serde::{Deserialize, Serialize}; +use std::io::Write; +use ts_rs::TS; + +use crate::node::ConfigMetadata; + +use super::LibraryManagerError; + +/// LibraryConfig holds the configuration for a specific library. This is stored as a '{uuid}.sdlibrary' file. +#[derive(Debug, Serialize, Deserialize, Clone, TS, Default)] +#[ts(export)] +pub struct LibraryConfig { + #[serde(flatten)] + pub metadata: ConfigMetadata, + /// name is the display name of the library. This is used in the UI and is set by the user. + pub name: String, + /// description is a user set description of the library. This is used in the UI and is set by the user. + pub description: String, +} + +impl LibraryConfig { + /// read will read the configuration from disk and return it. + pub(super) async fn read(file_dir: PathBuf) -> Result { + let mut file = File::open(&file_dir)?; + let base_config: ConfigMetadata = serde_json::from_reader(BufReader::new(&mut file))?; + + Self::migrate_config(base_config.version, file_dir)?; + + file.seek(SeekFrom::Start(0))?; + Ok(serde_json::from_reader(BufReader::new(&mut file))?) 
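
(Editor's note: `LibraryConfig::read` above follows a two-pass, version-gated pattern: peek at the shared `ConfigMetadata` first, apply migrations based on the version, then rewind and deserialize the full config. A hedged, self-contained sketch of that pattern with simplified stand-in types — not the real Spacedrive structs — is below.)

```rust
// Versioned-config read: first pass reads only the metadata, second pass reads
// the full (possibly migrated) config after seeking back to the start.
use std::{
    fs::File,
    io::{BufReader, Seek, SeekFrom},
    path::Path,
};

use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Metadata {
    version: Option<String>,
}

#[derive(Debug, Serialize, Deserialize)]
struct Config {
    #[serde(flatten)]
    metadata: Metadata,
    name: String,
}

fn read_config(path: &Path) -> Result<Config, Box<dyn std::error::Error>> {
    let mut file = File::open(path)?;

    // First pass: only the metadata, so we know which migrations to apply.
    let metadata: Metadata = serde_json::from_reader(BufReader::new(&mut file))?;
    migrate(metadata.version.as_deref())?;

    // Second pass: rewind and deserialize the whole config.
    file.seek(SeekFrom::Start(0))?;
    Ok(serde_json::from_reader(BufReader::new(&mut file))?)
}

fn migrate(version: Option<&str>) -> Result<(), Box<dyn std::error::Error>> {
    match version {
        None => Err("config file is missing the `version` field".into()),
        _ => Ok(()), // future breaking changes would branch on the version here
    }
}

fn main() {
    // Usage sketch only; expects a JSON file like {"version":"0.1.0","name":"My Library"}.
    if let Ok(config) = read_config(Path::new("example.sdlibrary")) {
        println!("loaded {config:?}");
    }
}
```
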
+ } + + /// save will write the configuration back to disk + pub(super) async fn save( + file_dir: PathBuf, + config: &LibraryConfig, + ) -> Result<(), LibraryManagerError> { + File::create(file_dir)?.write_all(serde_json::to_string(config)?.as_bytes())?; + Ok(()) + } + + /// migrate_config is a function used to apply breaking changes to the library config file. + fn migrate_config( + current_version: Option, + config_path: PathBuf, + ) -> Result<(), LibraryManagerError> { + match current_version { + None => Err(LibraryManagerError::Migration(format!( + "Your Spacedrive library at '{}' is missing the `version` field", + config_path.display() + ))), + _ => Ok(()), + } + } +} + +// used to return to the frontend with uuid context +#[derive(Serialize, Deserialize, Debug, TS)] +#[ts(export)] +pub struct LibraryConfigWrapped { + pub uuid: String, + pub config: LibraryConfig, +} diff --git a/core/src/library/library_ctx.rs b/core/src/library/library_ctx.rs new file mode 100644 index 000000000..50bc5ea94 --- /dev/null +++ b/core/src/library/library_ctx.rs @@ -0,0 +1,46 @@ +use std::sync::Arc; + +use uuid::Uuid; + +use crate::{job::Job, node::NodeConfigManager, prisma::PrismaClient, CoreEvent, NodeContext}; + +use super::LibraryConfig; + +/// LibraryContext holds context for a library which can be passed around the application. +#[derive(Clone)] +pub struct LibraryContext { + /// id holds the ID of the current library. + pub id: Uuid, + /// config holds the configuration of the current library. + pub config: LibraryConfig, + /// db holds the database client for the current library. + pub db: Arc, + /// node_local_id holds the local ID of the node which is running the library. + pub node_local_id: i32, + /// node_context holds the node context for the node which this library is running on. + pub(super) node_context: NodeContext, +} + +impl LibraryContext { + pub(crate) async fn spawn_job(&self, job: Box) { + self.node_context.jobs.clone().ingest(self, job).await; + } + + pub(crate) async fn queue_job(&self, job: Box) { + self.node_context.jobs.ingest_queue(self, job).await; + } + + pub(crate) async fn emit(&self, event: CoreEvent) { + self.node_context + .event_sender + .send(event) + .await + .unwrap_or_else(|e| { + println!("Failed to emit event. {:?}", e); + }); + } + + pub(crate) fn config(&self) -> Arc { + self.node_context.config.clone() + } +} diff --git a/core/src/library/library_manager.rs b/core/src/library/library_manager.rs new file mode 100644 index 000000000..ffabb814a --- /dev/null +++ b/core/src/library/library_manager.rs @@ -0,0 +1,264 @@ +use std::{ + env, fs, io, + path::{Path, PathBuf}, + str::FromStr, + sync::Arc, +}; + +use thiserror::Error; +use tokio::sync::RwLock; +use uuid::Uuid; + +use crate::{ + node::Platform, + prisma::{self, node}, + util::db::load_and_migrate, + ClientQuery, CoreEvent, NodeContext, +}; + +use super::{LibraryConfig, LibraryConfigWrapped, LibraryContext}; + +/// LibraryManager is a singleton that manages all libraries for a node. +pub struct LibraryManager { + /// libraries_dir holds the path to the directory where libraries are stored. + libraries_dir: PathBuf, + /// libraries holds the list of libraries which are currently loaded into the node. + libraries: RwLock>, + /// node_context holds the context for the node which this library manager is running on. 
+ node_context: NodeContext, +} + +#[derive(Error, Debug)] +pub enum LibraryManagerError { + #[error("error saving or loading the config from the filesystem")] + IO(#[from] io::Error), + #[error("error serializing or deserializing the JSON in the config file")] + Json(#[from] serde_json::Error), + #[error("Database error")] + Database(#[from] prisma::QueryError), + #[error("Library not found error")] + LibraryNotFound, + #[error("error migrating the config file")] + Migration(String), + #[error("failed to parse uuid")] + Uuid(#[from] uuid::Error), +} + +impl LibraryManager { + pub(crate) async fn new( + libraries_dir: PathBuf, + node_context: NodeContext, + ) -> Result, LibraryManagerError> { + fs::create_dir_all(&libraries_dir)?; + + let mut libraries = Vec::new(); + for entry in fs::read_dir(&libraries_dir)? + .into_iter() + .filter_map(|entry| entry.ok()) + .filter(|entry| { + entry.path().is_file() + && entry + .path() + .extension() + .map(|v| &*v == "sdlibrary") + .unwrap_or(false) + }) { + let config_path = entry.path(); + let library_id = match Path::new(&config_path) + .file_stem() + .map(|v| v.to_str().map(Uuid::from_str)) + { + Some(Some(Ok(id))) => id, + _ => { + println!("Attempted to load library from path '{}' but it has an invalid filename. Skipping...", config_path.display()); + continue; + } + }; + + let db_path = config_path.clone().with_extension("db"); + if !db_path.exists() { + println!( + "Found library '{}' but no matching database file was found. Skipping...", + config_path.display() + ); + continue; + } + + let config = LibraryConfig::read(config_path).await?; + libraries.push( + Self::load( + library_id, + db_path.to_str().unwrap(), + config, + node_context.clone(), + ) + .await?, + ); + } + + let this = Arc::new(Self { + libraries: RwLock::new(libraries), + libraries_dir, + node_context, + }); + + // TODO: Remove this before merging PR -> Currently it exists to make the app usable + if this.libraries.read().await.len() == 0 { + this.create(LibraryConfig { + name: "My Default Library".into(), + ..Default::default() + }) + .await + .unwrap(); + } + + Ok(this) + } + + /// create creates a new library with the given config and mounts it into the running [LibraryManager]. 
+ pub(crate) async fn create(&self, config: LibraryConfig) -> Result<(), LibraryManagerError> { + let id = Uuid::new_v4(); + LibraryConfig::save( + Path::new(&self.libraries_dir).join(format!("{id}.sdlibrary")), + &config, + ) + .await?; + + let library = Self::load( + id, + self.libraries_dir.join(format!("{id}.db")), + config, + self.node_context.clone(), + ) + .await?; + + self.libraries.write().await.push(library); + + self.node_context + .emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries)) + .await; + + Ok(()) + } + + pub(crate) async fn get_all_libraries_config(&self) -> Vec { + self.libraries + .read() + .await + .iter() + .map(|lib| LibraryConfigWrapped { + config: lib.config.clone(), + uuid: lib.id.to_string(), + }) + .collect() + } + + pub(crate) async fn edit_library( + &self, + id: String, + name: Option, + description: Option, + ) -> Result<(), LibraryManagerError> { + // check library is valid + let mut libraries = self.libraries.write().await; + let library = libraries + .iter_mut() + .find(|lib| lib.id == Uuid::from_str(&id).unwrap()) + .ok_or(LibraryManagerError::LibraryNotFound)?; + + // update the library + if let Some(name) = name { + library.config.name = name; + } + if let Some(description) = description { + library.config.description = description; + } + + LibraryConfig::save( + Path::new(&self.libraries_dir).join(format!("{id}.sdlibrary")), + &library.config, + ) + .await?; + + self.node_context + .emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries)) + .await; + Ok(()) + } + + pub async fn delete_library(&self, id: String) -> Result<(), LibraryManagerError> { + let mut libraries = self.libraries.write().await; + + let id = Uuid::parse_str(&id)?; + + let library = libraries + .iter() + .find(|l| l.id == id) + .ok_or(LibraryManagerError::LibraryNotFound)?; + + fs::remove_file(Path::new(&self.libraries_dir).join(format!("{}.db", library.id)))?; + fs::remove_file(Path::new(&self.libraries_dir).join(format!("{}.sdlibrary", library.id)))?; + + libraries.retain(|l| l.id != id); + + self.node_context + .emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries)) + .await; + Ok(()) + } + + // get_ctx will return the library context for the given library id. 
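As a rough usage sketch, not part of the patch, resolving a `LibraryContext` by id through `get_ctx` (defined just below) and then querying through its Prisma client could look like this. The `library_manager` handle, the `library_id: String` value, and the enclosing `Result<_, LibraryManagerError>` return type are assumptions; `LibraryNotFound`, `ctx.db`, and `ctx.config` come from this file and `library_ctx.rs`.

    // Hypothetical sketch: look up the library, then query through its database handle.
    let ctx = library_manager
        .get_ctx(library_id)
        .await
        .ok_or(LibraryManagerError::LibraryNotFound)?;
    let locations = ctx.db.location().find_many(vec![]).exec().await?;
    println!("library '{}' has {} locations", ctx.config.name, locations.len());
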
+ pub(crate) async fn get_ctx(&self, library_id: String) -> Option { + self.libraries + .read() + .await + .iter() + .find(|lib| lib.id.to_string() == library_id) + .map(Clone::clone) + } + + /// load the library from a given path + pub(crate) async fn load( + id: Uuid, + db_path: impl AsRef, + config: LibraryConfig, + node_context: NodeContext, + ) -> Result { + let db = Arc::new( + load_and_migrate(&format!("file:{}", db_path.as_ref().to_string_lossy())) + .await + .unwrap(), + ); + + let node_config = node_context.config.get().await; + + let platform = match env::consts::OS { + "windows" => Platform::Windows, + "macos" => Platform::MacOS, + "linux" => Platform::Linux, + _ => Platform::Unknown, + }; + + let node_data = db + .node() + .upsert( + node::pub_id::equals(id.to_string()), + ( + node::pub_id::set(id.to_string()), + node::name::set(node_config.name.clone()), + vec![node::platform::set(platform as i32)], + ), + vec![node::name::set(node_config.name.clone())], + ) + .exec() + .await?; + + Ok(LibraryContext { + id, + config, + db, + node_local_id: node_data.id, + node_context, + }) + } +} diff --git a/core/src/library/loader.rs b/core/src/library/loader.rs index da4826f7b..8b1378917 100644 --- a/core/src/library/loader.rs +++ b/core/src/library/loader.rs @@ -1,102 +1 @@ -use log::info; -use std::fmt::Debug; -use std::path::{Path, PathBuf}; -use uuid::Uuid; -use crate::{ - node::{get_nodestate, LibraryState}, - prisma::library, - util::db::{run_migrations, DatabaseError}, - CoreContext, -}; - -pub static LIBRARY_DB_NAME: &str = "library.db"; -pub static DEFAULT_NAME: &str = "My Library"; - -pub fn get_library_path(data_path: impl AsRef) -> PathBuf { - data_path.as_ref().join(LIBRARY_DB_NAME) -} - -// pub async fn get(core: &Node) -> Result { -// let config = get_nodestate(); -// let db = &core.database; - -// let library_state = config.get_current_library(); - -// info!("{:?}", library_state); - -// // get library from db -// let library = match db -// .library() -// .find_unique(library::pub_id::equals(library_state.library_uuid.clone())) -// .exec() -// .await? 
-// { -// Some(library) => Ok(library), -// None => { -// // update config library state to offline -// // config.libraries - -// Err(anyhow::anyhow!("library_not_found")) -// } -// }; - -// Ok(library.unwrap()) -// } - -pub async fn load( - ctx: &CoreContext, - library_path: impl AsRef + Debug, - library_id: &str, -) -> Result<(), DatabaseError> { - let mut config = get_nodestate(); - - info!("Initializing library: {} {:#?}", &library_id, library_path); - - if config.current_library_uuid != library_id { - config.current_library_uuid = library_id.to_string(); - config.save().await; - } - // create connection with library database & run migrations - run_migrations(ctx).await?; - // if doesn't exist, mark as offline - Ok(()) -} - -pub async fn create(ctx: &CoreContext, name: Option) -> Result<(), ()> { - let mut config = get_nodestate(); - - let uuid = Uuid::new_v4().to_string(); - - info!("Creating library {:?}, UUID: {:?}", name, uuid); - - let library_state = LibraryState { - library_uuid: uuid.clone(), - library_path: get_library_path(config.data_path.as_ref().unwrap()), - ..LibraryState::default() - }; - - run_migrations(ctx).await.unwrap(); - - config.libraries.push(library_state); - - config.current_library_uuid = uuid; - - config.save().await; - - let library = ctx - .database - .library() - .create( - library::pub_id::set(config.current_library_uuid), - library::name::set(name.unwrap_or_else(|| DEFAULT_NAME.into())), - vec![], - ) - .exec() - .await - .unwrap(); - - info!("library created in database: {:?}", library); - - Ok(()) -} diff --git a/core/src/library/mod.rs b/core/src/library/mod.rs index 17dc6db10..23aed8efa 100644 --- a/core/src/library/mod.rs +++ b/core/src/library/mod.rs @@ -1,7 +1,11 @@ -mod loader; +mod library_config; +mod library_ctx; +mod library_manager; mod statistics; -pub use loader::*; +pub use library_config::*; +pub use library_ctx::*; +pub use library_manager::*; pub use statistics::*; use thiserror::Error; diff --git a/core/src/library/statistics.rs b/core/src/library/statistics.rs index 91157286c..5c078e30b 100644 --- a/core/src/library/statistics.rs +++ b/core/src/library/statistics.rs @@ -1,16 +1,10 @@ -use crate::{ - node::get_nodestate, - prisma::{library, library_statistics::*}, - sys::Volume, - CoreContext, -}; +use crate::{prisma::statistics::*, sys::Volume}; use fs_extra::dir::get_size; -use log::info; use serde::{Deserialize, Serialize}; use tokio::fs; use ts_rs::TS; -use super::LibraryError; +use super::{LibraryContext, LibraryError}; #[derive(Debug, Serialize, Deserialize, TS, Clone, Default)] #[ts(export)] @@ -39,46 +33,22 @@ impl From for Statistics { } impl Statistics { - pub async fn retrieve(ctx: &CoreContext) -> Result { - let config = get_nodestate(); - let library_data = config.get_current_library(); - + pub async fn retrieve(ctx: &LibraryContext) -> Result { let library_statistics_db = ctx - .database - .library_statistics() - .find_unique(id::equals(library_data.library_id)) + .db + .statistics() + .find_unique(id::equals(ctx.node_local_id)) .exec() .await? 
.map_or_else(Default::default, Into::into); Ok(library_statistics_db) } - pub async fn calculate(ctx: &CoreContext) -> Result { - let config = get_nodestate(); - // get library from client state - let library_data = config.get_current_library(); - info!( - "Calculating library statistics {:?}", - library_data.library_uuid - ); - // get library from db - let library = ctx - .database - .library() - .find_unique(library::pub_id::equals( - library_data.library_uuid.to_string(), - )) - .exec() - .await?; - - if library.is_none() { - return Err(LibraryError::LibraryNotFound); - } - - let library_statistics = ctx - .database - .library_statistics() - .find_unique(id::equals(library_data.library_id)) + pub async fn calculate(ctx: &LibraryContext) -> Result { + let _statistics = ctx + .db + .statistics() + .find_unique(id::equals(ctx.node_local_id)) .exec() .await?; @@ -97,14 +67,12 @@ impl Statistics { } } - let library_db_size = match fs::metadata(library_data.library_path).await { + let library_db_size = match fs::metadata(ctx.config().data_directory()).await { Ok(metadata) => metadata.len(), Err(_) => 0, }; - info!("{:?}", library_statistics); - - let thumbnail_folder_size = get_size(config.data_path.unwrap().join("thumbnails")); + let thumbnail_folder_size = get_size(ctx.config().data_directory().join("thumbnails")); let statistics = Statistics { library_db_size: library_db_size.to_string(), @@ -114,19 +82,11 @@ impl Statistics { ..Statistics::default() }; - let library_local_id = match library { - Some(library) => library.id, - None => library_data.library_id, - }; - - ctx.database - .library_statistics() + ctx.db + .statistics() .upsert( - library_id::equals(library_local_id), - ( - library_id::set(library_local_id), - vec![library_db_size::set(statistics.library_db_size.clone())], - ), + id::equals(1), + vec![library_db_size::set(statistics.library_db_size.clone())], vec![ total_file_count::set(statistics.total_file_count), total_bytes_used::set(statistics.total_bytes_used.clone()), diff --git a/core/src/node/config.rs b/core/src/node/config.rs new file mode 100644 index 000000000..ea1a09f1a --- /dev/null +++ b/core/src/node/config.rs @@ -0,0 +1,149 @@ +use serde::{Deserialize, Serialize}; +use std::fs::File; +use std::io::{self, BufReader, Seek, SeekFrom, Write}; +use std::path::{Path, PathBuf}; +use std::sync::Arc; +use thiserror::Error; +use tokio::sync::{RwLock, RwLockWriteGuard}; +use ts_rs::TS; +use uuid::Uuid; + +/// NODE_STATE_CONFIG_NAME is the name of the file which stores the NodeState +pub const NODE_STATE_CONFIG_NAME: &str = "node_state.sdconfig"; + +/// ConfigMetadata is a part of node configuration that is loaded before the main configuration and contains information about the schema of the config. +/// This allows us to migrate breaking changes to the config format between Spacedrive releases. +#[derive(Debug, Serialize, Deserialize, Clone, TS)] +#[ts(export)] +pub struct ConfigMetadata { + /// version of Spacedrive. Determined from `CARGO_PKG_VERSION` environment variable. + pub version: Option, +} + +impl Default for ConfigMetadata { + fn default() -> Self { + Self { + version: Some(env!("CARGO_PKG_VERSION").into()), + } + } +} + +/// NodeConfig is the configuration for a node. This is shared between all libraries and is stored in a JSON file on disk. +#[derive(Debug, Serialize, Deserialize, Clone, TS)] +#[ts(export)] +pub struct NodeConfig { + #[serde(flatten)] + pub metadata: ConfigMetadata, + /// id is a unique identifier for the current node. 
Each node has a public identifier (this one) and is given a local id for each library (done within the library code). + pub id: Uuid, + /// name is the display name of the current node. This is set by the user and is shown in the UI. // TODO: Length validation so it can fit in DNS record + pub name: String, + // the port this node uses for peer to peer communication. By default a random free port will be chosen each time the application is started. + pub p2p_port: Option, +} + +#[derive(Error, Debug)] +pub enum NodeConfigError { + #[error("error saving or loading the config from the filesystem")] + IO(#[from] io::Error), + #[error("error serializing or deserializing the JSON in the config file")] + Json(#[from] serde_json::Error), + #[error("error migrating the config file")] + Migration(String), +} + +impl NodeConfig { + fn default() -> Self { + NodeConfig { + id: Uuid::new_v4(), + name: match hostname::get() { + Ok(hostname) => hostname.to_string_lossy().into_owned(), + Err(err) => { + eprintln!("Falling back to default node name as an error occurred getting your systems hostname: '{}'", err); + "my-spacedrive".into() + } + }, + p2p_port: None, + metadata: ConfigMetadata { + version: Some(env!("CARGO_PKG_VERSION").into()), + }, + } + } +} + +pub struct NodeConfigManager(RwLock, PathBuf); + +impl NodeConfigManager { + /// new will create a new NodeConfigManager with the given path to the config file. + pub(crate) async fn new(data_path: PathBuf) -> Result, NodeConfigError> { + Ok(Arc::new(Self( + RwLock::new(Self::read(&data_path).await?), + data_path, + ))) + } + + /// get will return the current NodeConfig in a read only state. + pub(crate) async fn get(&self) -> NodeConfig { + self.0.read().await.clone() + } + + /// data_directory returns the path to the directory storing the configuration data. + pub(crate) fn data_directory(&self) -> PathBuf { + self.1.clone() + } + + /// write allows the user to update the configuration. This is done in a closure while a Mutex lock is held so that the user can't cause a race condition if the config were to be updated in multiple parts of the app at the same time. + #[allow(unused)] + pub(crate) async fn write)>( + &self, + mutation_fn: F, + ) -> Result { + mutation_fn(self.0.write().await); + let config = self.0.read().await; + Self::save(&self.1, &config).await?; + Ok(config.clone()) + } + + /// read will read the configuration from disk and return it. + async fn read(base_path: &PathBuf) -> Result { + let path = Path::new(base_path).join(NODE_STATE_CONFIG_NAME); + + match path.exists() { + true => { + let mut file = File::open(&path)?; + let base_config: ConfigMetadata = + serde_json::from_reader(BufReader::new(&mut file))?; + + Self::migrate_config(base_config.version, path)?; + + file.seek(SeekFrom::Start(0))?; + Ok(serde_json::from_reader(BufReader::new(&mut file))?) + } + false => { + let config = NodeConfig::default(); + Self::save(base_path, &config).await?; + Ok(config) + } + } + } + + /// save will write the configuration back to disk + async fn save(base_path: &PathBuf, config: &NodeConfig) -> Result<(), NodeConfigError> { + let path = Path::new(base_path).join(NODE_STATE_CONFIG_NAME); + File::create(path)?.write_all(serde_json::to_string(config)?.as_bytes())?; + Ok(()) + } + + /// migrate_config is a function used to apply breaking changes to the config file. 
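A hypothetical illustration, not part of the patch, of what a future breaking-change step behind `migrate_config` below could look like: patch the raw JSON before it is re-deserialized into the current `NodeConfig` shape. The rename of `p2p_port` to `peer_port` and the version strings are invented purely for the example; `serde_json` is already a dependency of this file.

    // Hypothetical future migration step; the rename and version numbers are illustrative only.
    fn migrate_node_config_0_1_to_0_2(raw: &mut serde_json::Value) {
        if let Some(obj) = raw.as_object_mut() {
            if let Some(port) = obj.remove("p2p_port") {
                obj.insert("peer_port".to_string(), port);
            }
            obj.insert("version".to_string(), serde_json::json!("0.2.0"));
        }
    }
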
+ fn migrate_config( + current_version: Option, + config_path: PathBuf, + ) -> Result<(), NodeConfigError> { + match current_version { + None => { + Err(NodeConfigError::Migration(format!("Your Spacedrive config file stored at '{}' is missing the `version` field. If you just upgraded please delete the file and restart Spacedrive! Please note this upgrade will stop using your old 'library.db' as the folder structure has changed.", config_path.display()))) + } + _ => Ok(()), + } + } +} diff --git a/core/src/node/mod.rs b/core/src/node/mod.rs index 309815c97..5276ab015 100644 --- a/core/src/node/mod.rs +++ b/core/src/node/mod.rs @@ -1,18 +1,10 @@ -use crate::{ - prisma::{self, node}, - Node, -}; use chrono::{DateTime, Utc}; use int_enum::IntEnum; -use log::info; use serde::{Deserialize, Serialize}; -use std::env; -use thiserror::Error; use ts_rs::TS; - -mod state; - -pub use state::*; +mod config; +use crate::prisma::node; +pub use config::*; #[derive(Debug, Clone, Serialize, Deserialize, TS)] #[ts(export)] @@ -52,64 +44,3 @@ pub enum Platform { IOS = 4, Android = 5, } - -impl LibraryNode { - pub async fn create(node: &Node) -> Result<(), NodeError> { - info!("Creating node..."); - let mut config = state::get_nodestate(); - - let hostname = match hostname::get() { - Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(), - Err(_) => "unknown".to_owned(), - }; - - let platform = match env::consts::OS { - "windows" => Platform::Windows, - "macos" => Platform::MacOS, - "linux" => Platform::Linux, - _ => Platform::Unknown, - }; - - let node = if let Some(node) = node - .database - .node() - .find_unique(node::pub_id::equals(config.node_pub_id.clone())) - .exec() - .await? - { - node - } else { - node.database - .node() - .create( - node::pub_id::set(config.node_pub_id.clone()), - node::name::set(hostname.clone()), - vec![node::platform::set(platform as i32)], - ) - .exec() - .await? 
- }; - - config.node_name = hostname; - config.node_id = node.id; - config.save().await; - - info!("node: {:?}", node); - - Ok(()) - } - - // pub async fn get_nodes(ctx: &CoreContext) -> Result, NodeError> { - // let db = &ctx.database; - - // let _node = db.node().find_many(vec![]).exec().await?; - - // Ok(_node) - // } -} - -#[derive(Error, Debug)] -pub enum NodeError { - #[error("Database error")] - DatabaseError(#[from] prisma::QueryError), -} diff --git a/core/src/node/state.rs b/core/src/node/state.rs index 6892b0d37..8b1378917 100644 --- a/core/src/node/state.rs +++ b/core/src/node/state.rs @@ -1,109 +1 @@ -use lazy_static::lazy_static; -use serde::{Deserialize, Serialize}; -use std::path::PathBuf; -use std::sync::RwLock; -use tokio::io::AsyncReadExt; -use tokio::{ - fs, - io::{AsyncWriteExt, BufReader}, -}; -use ts_rs::TS; -use uuid::Uuid; -#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)] -#[ts(export)] -pub struct NodeState { - pub node_pub_id: String, - pub node_id: i32, - pub node_name: String, - // config path is stored as struct can exist only in memory during startup and be written to disk later without supplying path - pub data_path: Option, - // the port this node uses to listen for incoming connections - pub tcp_port: u32, - // all the libraries loaded by this node - pub libraries: Vec, - // used to quickly find the default library - pub current_library_uuid: String, -} - -pub static NODE_STATE_CONFIG_NAME: &str = "node_state.json"; - -#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)] -#[ts(export)] -pub struct LibraryState { - pub library_uuid: String, - pub library_id: i32, - pub library_path: PathBuf, - pub offline: bool, -} - -// global, thread-safe storage for node state -lazy_static! { - static ref CONFIG: RwLock> = RwLock::new(None); -} - -pub fn get_nodestate() -> NodeState { - if let Ok(guard) = CONFIG.read() { - guard.clone().unwrap_or_default() - } else { - NodeState::default() - } -} - -impl NodeState { - pub fn new(data_path: PathBuf, node_name: &str) -> Result { - let uuid = Uuid::new_v4().to_string(); - // create struct and assign defaults - let config = Self { - node_pub_id: uuid, - data_path: Some(data_path), - node_name: node_name.to_string(), - ..Default::default() - }; - Ok(config) - } - - pub async fn save(&self) { - self.write_memory(); - // only write to disk if config path is set - if let Some(ref data_path) = self.data_path { - let config_path = data_path.join(NODE_STATE_CONFIG_NAME); - let mut file = fs::File::create(config_path).await.unwrap(); - let json = serde_json::to_string(&self).unwrap(); - file.write_all(json.as_bytes()).await.unwrap(); - } - } - - pub async fn read_disk(&mut self) -> Result<(), ()> { - if let Some(ref data_path) = self.data_path { - let config_path = data_path.join(NODE_STATE_CONFIG_NAME); - // open the file and parse json - if let Ok(file) = fs::File::open(config_path).await { - let mut buf = vec![]; - let bytes = BufReader::new(file).read_to_end(&mut buf).await.unwrap(); - let data = serde_json::from_slice(&buf[..bytes]).unwrap(); - // assign to self - *self = data; - } - } - - Ok(()) - } - - fn write_memory(&self) { - let mut writeable = CONFIG.write().unwrap(); - *writeable = Some(self.clone()); - } - - pub fn get_current_library(&self) -> LibraryState { - self.libraries - .iter() - .find(|lib| lib.library_uuid == self.current_library_uuid) - .cloned() - .unwrap_or_default() - } - - pub fn get_current_library_db_path(&self) -> PathBuf { - 
self.get_current_library().library_path.join("library.db") - } -} diff --git a/core/src/sys/locations.rs b/core/src/sys/locations.rs index 162c80b91..6b99c20ee 100644 --- a/core/src/sys/locations.rs +++ b/core/src/sys/locations.rs @@ -1,8 +1,9 @@ use crate::{ file::{cas::FileIdentifierJob, indexer::IndexerJob}, - node::{get_nodestate, LibraryNode}, + library::LibraryContext, + node::LibraryNode, prisma::{file_path, location}, - ClientQuery, CoreContext, CoreEvent, + ClientQuery, CoreEvent, LibraryQuery, }; use log::info; @@ -10,10 +11,9 @@ use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::path::{Path, PathBuf}; use thiserror::Error; -use tokio::io::AsyncWriteExt; use tokio::{ fs::{metadata, File}, - io, + io::{self, AsyncWriteExt}, }; use ts_rs::TS; use uuid::Uuid; @@ -73,11 +73,11 @@ static DOTFILE_NAME: &str = ".spacedrive"; // } pub async fn get_location( - ctx: &CoreContext, + ctx: &LibraryContext, location_id: i32, ) -> Result { // get location by location_id from db and include location_paths - ctx.database + ctx.db .location() .find_unique(location::id::equals(location_id)) .exec() @@ -86,15 +86,17 @@ pub async fn get_location( .ok_or_else(|| LocationError::IdNotFound(location_id).into()) } -pub fn scan_location(ctx: &CoreContext, location_id: i32, path: impl AsRef) { +pub async fn scan_location(ctx: &LibraryContext, location_id: i32, path: impl AsRef) { let path_buf = path.as_ref().to_path_buf(); ctx.spawn_job(Box::new(IndexerJob { path: path_buf.clone(), - })); + })) + .await; ctx.queue_job(Box::new(FileIdentifierJob { location_id, path: path_buf, - })); + })) + .await; // TODO: make a way to stop jobs so this can be canceled without rebooting app // ctx.queue_job(Box::new(ThumbnailJob { // location_id, @@ -104,19 +106,19 @@ pub fn scan_location(ctx: &CoreContext, location_id: i32, path: impl AsRef } pub async fn new_location_and_scan( - ctx: &CoreContext, + ctx: &LibraryContext, path: impl AsRef + Debug, ) -> Result { let location = create_location(ctx, &path).await?; - scan_location(ctx, location.id, path); + scan_location(ctx, location.id, path).await; Ok(location) } -pub async fn get_locations(ctx: &CoreContext) -> Result, SysError> { +pub async fn get_locations(ctx: &LibraryContext) -> Result, SysError> { let locations = ctx - .database + .db .location() .find_many(vec![]) .with(location::node::fetch()) @@ -128,7 +130,7 @@ pub async fn get_locations(ctx: &CoreContext) -> Result, S } pub async fn create_location( - ctx: &CoreContext, + ctx: &LibraryContext, path: impl AsRef + Debug, ) -> Result { let path = path.as_ref(); @@ -151,7 +153,7 @@ pub async fn create_location( // check if location already exists let location_resource = if let Some(location) = ctx - .database + .db .location() .find_first(vec![location::local_path::equals(Some( path_string.clone(), @@ -162,15 +164,13 @@ pub async fn create_location( location.into() } else { info!( - "Location does not exist, creating new location for '{:#?}'", - path + "Location does not exist, creating new location for '{}'", + path_string ); let uuid = Uuid::new_v4(); - let config = get_nodestate(); - let location = ctx - .database + .db .location() .create( location::pub_id::set(uuid.to_string()), @@ -180,7 +180,7 @@ pub async fn create_location( )), location::is_online::set(true), location::local_path::set(Some(path_string)), - location::node_id::set(Some(config.node_id)), + location::node_id::set(Some(ctx.node_local_id)), ], ) .exec() @@ -195,7 +195,7 @@ pub async fn create_location( let data = DotSpacedrive { 
location_uuid: uuid, - library_uuid: config.current_library_uuid, + library_uuid: ctx.id.to_string(), }; let json_bytes = serde_json::to_vec(&data) @@ -206,8 +206,8 @@ pub async fn create_location( .await .map_err(|e| LocationError::DotfileWriteFailure(e, path.to_owned()))?; - ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::SysGetLocations)) - .await; + // ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::SysGetLocations)) + // .await; location.into() }; @@ -215,23 +215,26 @@ pub async fn create_location( Ok(location_resource) } -pub async fn delete_location(ctx: &CoreContext, location_id: i32) -> Result<(), SysError> { - ctx.database +pub async fn delete_location(ctx: &LibraryContext, location_id: i32) -> Result<(), SysError> { + ctx.db .file_path() .find_many(vec![file_path::location_id::equals(Some(location_id))]) .delete() .exec() .await?; - ctx.database + ctx.db .location() .find_unique(location::id::equals(location_id)) .delete() .exec() .await?; - ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::SysGetLocations)) - .await; + ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery { + library_id: ctx.id.to_string(), + query: LibraryQuery::SysGetLocations, + })) + .await; info!("Location {} deleted", location_id); diff --git a/core/src/sys/volumes.rs b/core/src/sys/volumes.rs index ffb235c05..ca9666e5c 100644 --- a/core/src/sys/volumes.rs +++ b/core/src/sys/volumes.rs @@ -1,5 +1,5 @@ // use crate::native; -use crate::{node::get_nodestate, prisma::volume::*, CoreContext}; +use crate::{library::LibraryContext, prisma::volume::*}; use serde::{Deserialize, Serialize}; use ts_rs::TS; // #[cfg(not(target_os = "macos"))] @@ -24,23 +24,21 @@ pub struct Volume { } impl Volume { - pub async fn save(ctx: &CoreContext) -> Result<(), SysError> { - let config = get_nodestate(); - + pub async fn save(ctx: &LibraryContext) -> Result<(), SysError> { let volumes = Self::get_volumes()?; // enter all volumes associate with this client add to db for volume in volumes { - ctx.database + ctx.db .volume() .upsert( node_id_mount_point_name( - config.node_id, + ctx.node_local_id, volume.mount_point.to_string(), volume.name.to_string(), ), ( - node_id::set(config.node_id), + node_id::set(ctx.node_local_id), name::set(volume.name), mount_point::set(volume.mount_point), vec![ diff --git a/core/src/util/db.rs b/core/src/util/db.rs index e299c7b94..39e1a9a55 100644 --- a/core/src/util/db.rs +++ b/core/src/util/db.rs @@ -1,165 +1,121 @@ use crate::prisma::{self, migration, PrismaClient}; -use crate::CoreContext; use data_encoding::HEXLOWER; use include_dir::{include_dir, Dir}; -use log::{error, info}; -use prisma_client_rust::raw; -use ring::digest::{Context, Digest, SHA256}; -use std::ffi::OsStr; -use std::fmt::Debug; -use std::io::{self, BufReader, Read}; -use std::path::Path; +use prisma_client_rust::{raw, NewClientError}; +use ring::digest::{Context, SHA256}; use thiserror::Error; const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_table/migration.sql"); static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations"); +/// MigrationError represents an error that occurring while opening a initialising and running migrations on the database. 
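One note on the rewritten migration runner that follows: each embedded migration is identified by a SHA-256 checksum of its raw SQL, hex-encoded with `data_encoding::HEXLOWER`, and that value keys the lookup in the `_migrations` table. A standalone sketch of the same scheme, using the crates this file already imports (the helper name is invented for illustration):

    use data_encoding::HEXLOWER;
    use ring::digest::{Context, SHA256};

    // Mirrors the per-migration checksum computed inside load_and_migrate below.
    fn migration_checksum(sql: &str) -> String {
        let mut context = Context::new(&SHA256);
        context.update(sql.as_bytes());
        HEXLOWER.encode(context.finish().as_ref())
    }
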
#[derive(Error, Debug)] -pub enum DatabaseError { - #[error("Unable to initialize the Prisma client")] - ClientError(#[from] prisma::NewClientError), +pub enum MigrationError { + #[error("An error occurred while initialising a new database connection")] + DatabaseInitialization(#[from] NewClientError), + #[error("An error occurred with the database while applying migrations")] + DatabaseError(#[from] prisma_client_rust::queries::Error), + #[error("An error occured reading the embedded migration files. {0}. Please report to Spacedrive developers!")] + InvalidEmbeddedMigration(&'static str), } -pub async fn create_connection( - path: impl AsRef + Debug, -) -> Result { - info!("Creating database connection: {:?}", path); - let client = - prisma::new_client_with_url(&format!("file:{}", path.as_ref().to_string_lossy())).await?; +/// load_and_migrate will load the database from the given path and migrate it to the latest version of the schema. +pub async fn load_and_migrate(db_url: &str) -> Result { + let client = prisma::new_client_with_url(db_url).await?; - Ok(client) -} - -pub fn sha256_digest(mut reader: R) -> Result { - let mut context = Context::new(&SHA256); - let mut buffer = [0; 1024]; - loop { - let count = reader.read(&mut buffer)?; - if count == 0 { - break; - } - context.update(&buffer[..count]); - } - Ok(context.finish()) -} - -pub async fn run_migrations(ctx: &CoreContext) -> Result<(), DatabaseError> { - let client = &ctx.database; - - match client + let migrations_table_missing = client ._query_raw::(raw!( "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'" )) - .await - { - Ok(data) => { - if data.is_empty() { - // execute migration - match client._execute_raw(raw!(INIT_MIGRATION)).await { - Ok(_) => {} - Err(e) => { - info!("Failed to create migration table: {}", e); - } - }; + .await? 
+ .is_empty(); - let value: Vec = client - ._query_raw(raw!( - "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'" - )) - .await - .unwrap(); + if migrations_table_missing { + client._execute_raw(raw!(INIT_MIGRATION)).await?; + } - #[cfg(debug_assertions)] - info!("Migration table created: {:?}", value); - } - - let mut migration_subdirs = MIGRATIONS_DIR - .dirs() - .filter(|subdir| { - subdir - .path() - .file_name() - .map(|name| name != OsStr::new("migration_table")) - .unwrap_or(false) + let mut migration_directories = MIGRATIONS_DIR + .dirs() + .map(|dir| { + dir.path() + .file_name() + .ok_or(MigrationError::InvalidEmbeddedMigration( + "File has malformed name", + )) + .and_then(|name| { + name.to_str() + .ok_or(MigrationError::InvalidEmbeddedMigration( + "File name contains malformed characters", + )) + .map(|name| (name, dir)) }) - .collect::>(); + }) + .filter_map(|v| match v { + Ok((name, _)) if name == "migration_table" => None, + Ok((name, dir)) => match name[..14].parse::() { + Ok(timestamp) => Some(Ok((name, timestamp, dir))), + Err(_) => Some(Err(MigrationError::InvalidEmbeddedMigration( + "File name is incorrectly formatted", + ))), + }, + Err(v) => Some(Err(v)), + }) + .collect::, _>>()?; - migration_subdirs.sort_by(|a, b| { - let a_name = a.path().file_name().unwrap().to_str().unwrap(); - let b_name = b.path().file_name().unwrap().to_str().unwrap(); + // We sort the migrations so they are always applied in the correct order + migration_directories.sort_by(|(_, a_time, _), (_, b_time, _)| a_time.cmp(b_time)); - let a_time = a_name[..14].parse::().unwrap(); - let b_time = b_name[..14].parse::().unwrap(); + for (name, _, dir) in migration_directories { + let migration_file_raw = dir + .get_file(dir.path().join("./migration.sql")) + .ok_or(MigrationError::InvalidEmbeddedMigration( + "Failed to find 'migration.sql' file in '{}' migration subdirectory", + ))? + .contents_utf8() + .ok_or( + MigrationError::InvalidEmbeddedMigration( + "Failed to open the contents of 'migration.sql' file in '{}' migration subdirectory", + ) + )?; - a_time.cmp(&b_time) - }); + // Generate SHA256 checksum of migration + let mut checksum = Context::new(&SHA256); + checksum.update(migration_file_raw.as_bytes()); + let checksum = HEXLOWER.encode(checksum.finish().as_ref()); - for subdir in migration_subdirs { - info!("{:?}", subdir.path()); - let migration_file = subdir - .get_file(subdir.path().join("./migration.sql")) - .unwrap(); - let migration_sql = migration_file.contents_utf8().unwrap(); + // get existing migration by checksum, if it doesn't exist run the migration + if client + .migration() + .find_unique(migration::checksum::equals(checksum.clone())) + .exec() + .await? 
+ .is_none() + { + // Create migration record + client + .migration() + .create( + migration::name::set(name.to_string()), + migration::checksum::set(checksum.clone()), + vec![], + ) + .exec() + .await?; - let digest = sha256_digest(BufReader::new(migration_file.contents())).unwrap(); - // create a lowercase hash from - let checksum = HEXLOWER.encode(digest.as_ref()); - let name = subdir.path().file_name().unwrap().to_str().unwrap(); - - // get existing migration by checksum, if it doesn't exist run the migration - let existing_migration = client + // Split the migrations file up into each individual step and apply them all + let steps = migration_file_raw.split(';').collect::>(); + let steps = &steps[0..steps.len() - 1]; + for (i, step) in steps.iter().enumerate() { + client._execute_raw(raw!(*step)).await?; + client .migration() .find_unique(migration::checksum::equals(checksum.clone())) + .update(vec![migration::steps_applied::set(i as i32 + 1)]) .exec() - .await - .unwrap(); - - if existing_migration.is_none() { - #[cfg(debug_assertions)] - info!("Running migration: {}", name); - - let steps = migration_sql.split(';').collect::>(); - let steps = &steps[0..steps.len() - 1]; - - client - .migration() - .create( - migration::name::set(name.to_string()), - migration::checksum::set(checksum.clone()), - vec![], - ) - .exec() - .await - .unwrap(); - - for (i, step) in steps.iter().enumerate() { - match client._execute_raw(raw!(*step)).await { - Ok(_) => { - client - .migration() - .find_unique(migration::checksum::equals(checksum.clone())) - .update(vec![migration::steps_applied::set(i as i32 + 1)]) - .exec() - .await - .unwrap(); - } - Err(e) => { - error!("Error running migration: {}", name); - error!("{:?}", e); - break; - } - } - } - - #[cfg(debug_assertions)] - info!("Migration {} recorded successfully", name); - } + .await?; } } - Err(err) => { - panic!("Failed to check migration table existence: {:?}", err); - } } - Ok(()) + Ok(client) } diff --git a/packages/client/package.json b/packages/client/package.json index 03ee06fc3..db68bbf13 100644 --- a/packages/client/package.json +++ b/packages/client/package.json @@ -13,25 +13,27 @@ "lint": "TIMING=1 eslint src --fix", "clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist" }, - "devDependencies": { - "@types/react": "^18.0.9", - "scripts": "*", - "tsconfig": "*", - "typescript": "^4.7.2" - }, "jest": { "preset": "scripts/jest/node" }, "dependencies": { "@sd/config": "workspace:*", "@sd/core": "workspace:*", + "@sd/interface": "workspace:*", "eventemitter3": "^4.0.7", "immer": "^9.0.14", - "react-query": "^3.39.1", + "lodash": "^4.17.21", + "react-query": "^3.34.19", "zustand": "4.0.0-rc.1" }, + "devDependencies": { + "@types/react": "^18.0.9", + "scripts": "*", + "tsconfig": "*", + "typescript": "^4.7.2", + "@types/lodash": "^4.14.182" + }, "peerDependencies": { - "react": "^18.0.0", - "react-query": "^3.34.19" + "react": "^18.0.0" } } diff --git a/packages/client/src/bridge.ts b/packages/client/src/bridge.ts index 712cbddde..41e430c66 100644 --- a/packages/client/src/bridge.ts +++ b/packages/client/src/bridge.ts @@ -1,12 +1,8 @@ -import { ClientCommand, ClientQuery, CoreResponse } from '@sd/core'; +import { ClientCommand, ClientQuery, CoreResponse, LibraryCommand, LibraryQuery } from '@sd/core'; import { EventEmitter } from 'eventemitter3'; -import { - UseMutationOptions, - UseQueryOptions, - UseQueryResult, - useMutation, - useQuery -} from 'react-query'; +import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } 
from 'react-query'; + +import { useLibraryStore } from './stores'; // global var to store the transport TODO: not global :D export let transport: BaseTransport | null = null; @@ -23,11 +19,15 @@ export function setTransport(_transport: BaseTransport) { // extract keys from generated Rust query/command types type QueryKeyType = ClientQuery['key']; +type LibraryQueryKeyType = LibraryQuery['key']; type CommandKeyType = ClientCommand['key']; +type LibraryCommandKeyType = LibraryCommand['key']; // extract the type from the union type CQType = Extract; +type LQType = Extract; type CCType = Extract; +type LCType = Extract; type CRType = Extract; // extract payload type @@ -35,20 +35,18 @@ type ExtractParams
<P>
= P extends { params: any } ? P['params'] : never; type ExtractData = D extends { data: any } ? D['data'] : never; // vanilla method to call the transport -export async function queryBridge< - K extends QueryKeyType, - CQ extends CQType, - CR extends CRType ->(key: K, params?: ExtractParams): Promise> { +async function queryBridge, CR extends CRType>( + key: K, + params?: ExtractParams +): Promise> { const result = (await transport?.query({ key, params } as any)) as any; return result?.data; } -export async function commandBridge< - K extends CommandKeyType, - CC extends CCType, - CR extends CRType ->(key: K, params?: ExtractParams): Promise> { +async function commandBridge, CR extends CRType>( + key: K, + params?: ExtractParams +): Promise> { const result = (await transport?.command({ key, params } as any)) as any; return result?.data; } @@ -66,6 +64,21 @@ export function useBridgeQuery, CR ); } +export function useLibraryQuery< + K extends LibraryQueryKeyType, + CQ extends LQType, + CR extends CRType +>(key: K, params?: ExtractParams, options: UseQueryOptions> = {}) { + const library_id = useLibraryStore((state) => state.currentLibraryUuid); + if (!library_id) throw new Error(`Attempted to do library query '${key}' with no library set!`); + + return useQuery>( + [library_id, key, params], + async () => await queryBridge('LibraryQuery', { library_id, query: { key, params } as any }), + options + ); +} + export function useBridgeCommand< K extends CommandKeyType, CC extends CCType, @@ -78,9 +91,35 @@ export function useBridgeCommand< ); } +export function useLibraryCommand< + K extends LibraryCommandKeyType, + LC extends LCType, + CR extends CRType +>(key: K, options: UseMutationOptions> = {}) { + const library_id = useLibraryStore((state) => state.currentLibraryUuid); + if (!library_id) throw new Error(`Attempted to do library command '${key}' with no library set!`); + + return useMutation, unknown, ExtractParams>( + [library_id, key], + async (vars?: ExtractParams) => + await commandBridge('LibraryCommand', { library_id, command: { key, params: vars } as any }), + options + ); +} + export function command, CR extends CRType>( key: K, vars: ExtractParams ): Promise> { return commandBridge(key, vars); } + +export function libraryCommand< + K extends LibraryCommandKeyType, + LC extends LCType, + CR extends CRType +>(key: K, vars: ExtractParams): Promise> { + const library_id = useLibraryStore((state) => state.currentLibraryUuid); + if (!library_id) throw new Error(`Attempted to do library command '${key}' with no library set!`); + return commandBridge('LibraryCommand', { library_id, command: { key, params: vars } as any }); +} diff --git a/packages/interface/src/AppPropsContext.tsx b/packages/client/src/context/AppPropsContext.tsx similarity index 100% rename from packages/interface/src/AppPropsContext.tsx rename to packages/client/src/context/AppPropsContext.tsx diff --git a/packages/client/src/context/index.ts b/packages/client/src/context/index.ts new file mode 100644 index 000000000..70d70d64e --- /dev/null +++ b/packages/client/src/context/index.ts @@ -0,0 +1 @@ +export * from './AppPropsContext'; diff --git a/packages/client/src/files/index.ts b/packages/client/src/files/index.ts deleted file mode 100644 index 1b09d522d..000000000 --- a/packages/client/src/files/index.ts +++ /dev/null @@ -1,2 +0,0 @@ -export * from './query'; -export * from './state'; diff --git a/packages/client/src/files/query.ts b/packages/client/src/files/query.ts deleted file mode 100644 index 
a8ce691f0..000000000 --- a/packages/client/src/files/query.ts +++ /dev/null @@ -1,21 +0,0 @@ -import { useState } from 'react'; -import { useQuery } from 'react-query'; - -import { useBridgeCommand, useBridgeQuery } from '../bridge'; -import { useFileExplorerState } from './state'; - -// this hook initializes the explorer state and queries the core -export function useFileExplorer(initialPath = '/', initialLocation: number | null = null) { - const fileState = useFileExplorerState(); - // file explorer hooks maintain their own local state relative to exploration - const [path, setPath] = useState(initialPath); - const [locationId, setLocationId] = useState(initialPath); - - // const { data: volumes } = useQuery(['sys_get_volumes'], () => bridge('sys_get_volumes')); - - return { setPath, setLocationId }; -} - -// export function useVolumes() { -// return useQuery(['SysGetVolumes'], () => bridge('SysGetVolumes')); -// } diff --git a/packages/client/src/files/state.ts b/packages/client/src/files/state.ts deleted file mode 100644 index d817daee4..000000000 --- a/packages/client/src/files/state.ts +++ /dev/null @@ -1,23 +0,0 @@ -import produce from 'immer'; -import create from 'zustand'; - -export interface FileExplorerState { - current_location_id: number | null; - row_limit: number; -} - -interface FileExplorerStore extends FileExplorerState { - update_row_limit: (new_limit: number) => void; -} - -export const useFileExplorerState = create((set, get) => ({ - current_location_id: null, - row_limit: 10, - update_row_limit: (new_limit: number) => { - set((store) => - produce(store, (draft) => { - draft.row_limit = new_limit; - }) - ); - } -})); diff --git a/packages/client/src/hooks/index.ts b/packages/client/src/hooks/index.ts new file mode 100644 index 000000000..25c15a805 --- /dev/null +++ b/packages/client/src/hooks/index.ts @@ -0,0 +1 @@ +export * from './useCoreEvents'; diff --git a/packages/client/src/hooks/useCoreEvents.tsx b/packages/client/src/hooks/useCoreEvents.tsx new file mode 100644 index 000000000..178153c2c --- /dev/null +++ b/packages/client/src/hooks/useCoreEvents.tsx @@ -0,0 +1,59 @@ +import { CoreEvent } from '@sd/core'; +import { useContext, useEffect } from 'react'; +import { useQueryClient } from 'react-query'; + +import { transport, useExplorerStore } from '..'; + +export function useCoreEvents() { + const client = useQueryClient(); + + const { addNewThumbnail } = useExplorerStore(); + useEffect(() => { + function handleCoreEvent(e: CoreEvent) { + switch (e?.key) { + case 'NewThumbnail': + addNewThumbnail(e.data.cas_id); + break; + case 'InvalidateQuery': + case 'InvalidateQueryDebounced': + let query = []; + if (e.data.key === 'LibraryQuery') { + query = [e.data.params.library_id, e.data.params.query.key]; + + // TODO: find a way to make params accessible in TS + // also this method will only work for queries that use the whole params obj as the second key + // @ts-expect-error + if (e.data.params.query.params) { + // @ts-expect-error + query.push(e.data.params.query.params); + } + } else { + query = [e.data.key]; + + // TODO: find a way to make params accessible in TS + // also this method will only work for queries that use the whole params obj as the second key + // @ts-expect-error + if (e.data.params) { + // @ts-expect-error + query.push(e.data.params); + } + } + + client.invalidateQueries(query); + break; + + default: + break; + } + } + // check Tauri Event type + transport?.on('core_event', handleCoreEvent); + + return () => { + transport?.off('core_event', 
handleCoreEvent); + }; + + // listen('core_event', (e: { payload: CoreEvent }) => { + // }); + }, [transport]); +} diff --git a/packages/client/src/index.ts b/packages/client/src/index.ts index 75d5861c4..673524d79 100644 --- a/packages/client/src/index.ts +++ b/packages/client/src/index.ts @@ -1,3 +1,5 @@ export * from './bridge'; -export * from './files'; export * from './ClientProvider'; +export * from './stores'; +export * from './hooks'; +export * from './context'; diff --git a/packages/client/src/stores/index.ts b/packages/client/src/stores/index.ts new file mode 100644 index 000000000..c38ebef11 --- /dev/null +++ b/packages/client/src/stores/index.ts @@ -0,0 +1,4 @@ +export * from './useLibraryStore'; +export * from './useExplorerStore'; +export * from './useInspectorStore'; +export * from './useInspectorStore'; diff --git a/packages/interface/src/hooks/useExplorerState.ts b/packages/client/src/stores/useExplorerStore.ts similarity index 81% rename from packages/interface/src/hooks/useExplorerState.ts rename to packages/client/src/stores/useExplorerStore.ts index ef2a5ce17..185d88dd1 100644 --- a/packages/interface/src/hooks/useExplorerState.ts +++ b/packages/client/src/stores/useExplorerStore.ts @@ -1,15 +1,16 @@ import create from 'zustand'; -type ExplorerState = { +type ExplorerStore = { selectedRowIndex: number; setSelectedRowIndex: (index: number) => void; locationId: number; setLocationId: (index: number) => void; newThumbnails: Record; addNewThumbnail: (cas_id: string) => void; + reset: () => void; }; -export const useExplorerState = create((set) => ({ +export const useExplorerStore = create((set) => ({ selectedRowIndex: 1, setSelectedRowIndex: (index) => set((state) => ({ ...state, selectedRowIndex: index })), locationId: -1, @@ -19,5 +20,6 @@ export const useExplorerState = create((set) => ({ set((state) => ({ ...state, newThumbnails: { ...state.newThumbnails, [cas_id]: true } - })) + })), + reset: () => set(() => ({})) })); diff --git a/packages/interface/src/hooks/useInspectorState.tsx b/packages/client/src/stores/useInspectorStore.ts similarity index 82% rename from packages/interface/src/hooks/useInspectorState.tsx rename to packages/client/src/stores/useInspectorStore.ts index 7a7450645..97e9f29b8 100644 --- a/packages/interface/src/hooks/useInspectorState.tsx +++ b/packages/client/src/stores/useInspectorStore.ts @@ -1,17 +1,18 @@ -import { command } from '@sd/client'; import produce from 'immer'; import { debounce } from 'lodash'; import create from 'zustand'; +import { libraryCommand } from '../bridge'; + export type UpdateNoteFN = (vars: { id: number; note: string }) => void; -interface UseInspectorState { +interface InspectorStore { notes: Record; setNote: (file_id: number, note: string) => void; unCacheNote: (file_id: number) => void; } -export const useInspectorState = create((set) => ({ +export const useInspectorStore = create((set) => ({ notes: {}, // set the note locally setNote: (file_id, note) => { @@ -35,7 +36,7 @@ export const useInspectorState = create((set) => ({ // direct command call to update note export const updateNote = debounce(async (file_id: number, note: string) => { - return await command('FileSetNote', { + return await libraryCommand('FileSetNote', { id: file_id, note }); diff --git a/packages/client/src/stores/useLibraryStore.ts b/packages/client/src/stores/useLibraryStore.ts new file mode 100644 index 000000000..53a8a4e55 --- /dev/null +++ b/packages/client/src/stores/useLibraryStore.ts @@ -0,0 +1,67 @@ +import { LibraryConfigWrapped } from 
'@sd/core'; +import produce from 'immer'; +import { useMemo } from 'react'; +import { useQueryClient } from 'react-query'; +import create from 'zustand'; +import { devtools, persist } from 'zustand/middleware'; + +import { useBridgeQuery } from '../bridge'; +import { useExplorerStore } from './useExplorerStore'; + +type LibraryStore = { + // the uuid of the currently active library + currentLibraryUuid: string | null; + // for full functionality this should be triggered along-side query invalidation + switchLibrary: (uuid: string) => void; + // a function + init: (libraries: LibraryConfigWrapped[]) => Promise; +}; + +export const useLibraryStore = create()( + devtools( + persist( + (set) => ({ + currentLibraryUuid: null, + switchLibrary: (uuid) => { + set((state) => + produce(state, (draft) => { + draft.currentLibraryUuid = uuid; + }) + ); + // reset other stores + useExplorerStore().reset(); + }, + init: async (libraries) => { + set((state) => + produce(state, (draft) => { + // use first library default if none set + if (!state.currentLibraryUuid) { + draft.currentLibraryUuid = libraries[0].uuid; + } + }) + ); + } + }), + { name: 'sd-library-store' } + ) + ) +); + +// this must be used at least once in the app to correct the initial state +// is memorized and can be used safely in any component +export const useCurrentLibrary = () => { + const { currentLibraryUuid, switchLibrary } = useLibraryStore(); + const { data: libraries } = useBridgeQuery('NodeGetLibraries', undefined, {}); + + // memorize library to avoid re-running find function + const currentLibrary = useMemo(() => { + const current = libraries?.find((l) => l.uuid === currentLibraryUuid); + // switch to first library if none set + if (Array.isArray(libraries) && !current && libraries[0]?.uuid) { + switchLibrary(libraries[0]?.uuid); + } + return current; + }, [libraries, currentLibraryUuid]); + + return { currentLibrary, libraries, currentLibraryUuid }; +}; diff --git a/packages/interface/package.json b/packages/interface/package.json index c532d24d4..8577584cd 100644 --- a/packages/interface/package.json +++ b/packages/interface/package.json @@ -46,7 +46,7 @@ "react-loading-icons": "^1.1.0", "react-loading-skeleton": "^3.1.0", "react-portal": "^4.2.2", - "react-query": "^3.39.1", + "react-query": "^3.34.19", "react-router": "6.3.0", "react-router-dom": "6.3.0", "react-scrollbars-custom": "^4.0.27", @@ -55,6 +55,7 @@ "react-virtuoso": "^2.12.1", "rooks": "^5.11.2", "tailwindcss": "^3.0.24", + "use-debounce": "^8.0.1", "zustand": "4.0.0-rc.1" }, "devDependencies": { diff --git a/packages/interface/src/App.tsx b/packages/interface/src/App.tsx index 339ff96fd..514d60a36 100644 --- a/packages/interface/src/App.tsx +++ b/packages/interface/src/App.tsx @@ -1,14 +1,14 @@ import '@fontsource/inter/variable.css'; import { BaseTransport, ClientProvider, setTransport } from '@sd/client'; +import { useCoreEvents } from '@sd/client'; +import { AppProps, AppPropsContext } from '@sd/client'; import React from 'react'; import { ErrorBoundary } from 'react-error-boundary'; import { QueryClient, QueryClientProvider } from 'react-query'; import { MemoryRouter } from 'react-router-dom'; -import { AppProps, AppPropsContext } from './AppPropsContext'; import { AppRouter } from './AppRouter'; import { ErrorFallback } from './ErrorFallback'; -import { useCoreEvents } from './hooks/useCoreEvents'; import './style.scss'; const queryClient = new QueryClient(); diff --git a/packages/interface/src/AppLayout.tsx b/packages/interface/src/AppLayout.tsx index 
7b1b2dfc5..0c3ecbb11 100644 --- a/packages/interface/src/AppLayout.tsx +++ b/packages/interface/src/AppLayout.tsx @@ -1,8 +1,8 @@ +import { AppPropsContext } from '@sd/client'; import clsx from 'clsx'; import React, { useContext } from 'react'; import { Outlet } from 'react-router-dom'; -import { AppPropsContext } from './AppPropsContext'; import { Sidebar } from './components/file/Sidebar'; export function AppLayout() { diff --git a/packages/interface/src/AppRouter.tsx b/packages/interface/src/AppRouter.tsx index 2c3bbfde1..db0ffdacd 100644 --- a/packages/interface/src/AppRouter.tsx +++ b/packages/interface/src/AppRouter.tsx @@ -1,3 +1,5 @@ +import { useBridgeQuery } from '@sd/client'; +import { useLibraryStore } from '@sd/client'; import React, { useEffect } from 'react'; import { Route, Routes, useLocation } from 'react-router-dom'; @@ -9,56 +11,81 @@ import { ExplorerScreen } from './screens/Explorer'; import { OverviewScreen } from './screens/Overview'; import { PhotosScreen } from './screens/Photos'; import { RedirectPage } from './screens/Redirect'; -import { SettingsScreen } from './screens/Settings'; import { TagScreen } from './screens/Tag'; -import AppearanceSettings from './screens/settings/AppearanceSettings'; -import ContactsSettings from './screens/settings/ContactsSettings'; -import ExperimentalSettings from './screens/settings/ExperimentalSettings'; -import GeneralSettings from './screens/settings/GeneralSettings'; -import KeysSettings from './screens/settings/KeysSetting'; -import LibrarySettings from './screens/settings/LibrarySettings'; -import LocationSettings from './screens/settings/LocationSettings'; -import SecuritySettings from './screens/settings/SecuritySettings'; -import SharingSettings from './screens/settings/SharingSettings'; -import SyncSettings from './screens/settings/SyncSettings'; -import TagsSettings from './screens/settings/TagsSettings'; +import { CurrentLibrarySettings } from './screens/settings/CurrentLibrarySettings'; +import { SettingsScreen } from './screens/settings/Settings'; +import AppearanceSettings from './screens/settings/client/AppearanceSettings'; +import GeneralSettings from './screens/settings/client/GeneralSettings'; +import ContactsSettings from './screens/settings/library/ContactsSettings'; +import KeysSettings from './screens/settings/library/KeysSetting'; +import LibraryGeneralSettings from './screens/settings/library/LibraryGeneralSettings'; +import LocationSettings from './screens/settings/library/LocationSettings'; +import SecuritySettings from './screens/settings/library/SecuritySettings'; +import SharingSettings from './screens/settings/library/SharingSettings'; +import SyncSettings from './screens/settings/library/SyncSettings'; +import TagsSettings from './screens/settings/library/TagsSettings'; +import ExperimentalSettings from './screens/settings/node/ExperimentalSettings'; +import LibrarySettings from './screens/settings/node/LibrariesSettings'; +import NodesSettings from './screens/settings/node/NodesSettings'; +import P2PSettings from './screens/settings/node/P2PSettings'; export function AppRouter() { let location = useLocation(); let state = location.state as { backgroundLocation?: Location }; + const libraryState = useLibraryStore(); + const { data: libraries } = useBridgeQuery('NodeGetLibraries'); + // TODO: This can be removed once we add a setup flow to the app useEffect(() => { - console.log({ url: location.pathname }); - }, [state]); + if (libraryState.currentLibraryUuid === null && libraries && 
libraries.length > 0) { + libraryState.switchLibrary(libraries[0].uuid); + } + }, [libraryState.currentLibraryUuid, libraries]); return ( <> - - }> - } /> - } /> - } /> - } /> - } /> - }> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> - } /> + {libraryState.currentLibraryUuid === null ? ( + <> + {/* TODO: Remove this when adding app setup flow */} +

No Library Loaded...

+ + ) : ( + + }> + } /> + } /> + } /> + } /> + } /> + }> + } /> + } /> + } /> + } /> + } /> + + }> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + } /> + + } /> + } /> + } /> - } /> - } /> - } /> - - + + )} ); } diff --git a/packages/interface/src/NotFound.tsx b/packages/interface/src/NotFound.tsx index c48e8141e..ae4acef1b 100644 --- a/packages/interface/src/NotFound.tsx +++ b/packages/interface/src/NotFound.tsx @@ -10,7 +10,7 @@ export function NotFound() { role="alert" className="flex flex-col items-center justify-center w-full h-full p-4 rounded-lg dark:text-white" > -

Error: 404

+

Error: 404

You chose nothingness.

+ {(locations?.length || 0) < 1 && ( + + )}
Tags diff --git a/packages/interface/src/components/layout/Card.tsx b/packages/interface/src/components/layout/Card.tsx new file mode 100644 index 000000000..6059bf0c0 --- /dev/null +++ b/packages/interface/src/components/layout/Card.tsx @@ -0,0 +1,15 @@ +import clsx from 'clsx'; +import React, { ReactNode } from 'react'; + +export default function Card(props: { children: ReactNode; className?: string }) { + return ( +
+ {props.children} +
+ ); +} diff --git a/packages/interface/src/components/layout/Dialog.tsx b/packages/interface/src/components/layout/Dialog.tsx index 3a30992ab..1c32ba432 100644 --- a/packages/interface/src/components/layout/Dialog.tsx +++ b/packages/interface/src/components/layout/Dialog.tsx @@ -5,7 +5,7 @@ import React, { ReactNode } from 'react'; import Loader from '../primitive/Loader'; -export interface DialogProps { +export interface DialogProps extends DialogPrimitive.DialogProps { trigger: ReactNode; ctaLabel?: string; ctaDanger?: boolean; @@ -18,13 +18,15 @@ export interface DialogProps { export default function Dialog(props: DialogProps) { return ( - + {props.trigger}
- {props.title} + + {props.title} + {props.description} diff --git a/packages/interface/src/components/layout/TopBar.tsx b/packages/interface/src/components/layout/TopBar.tsx index a451dc7f1..828666a01 100644 --- a/packages/interface/src/components/layout/TopBar.tsx +++ b/packages/interface/src/components/layout/TopBar.tsx @@ -1,5 +1,6 @@ import { ChevronLeftIcon, ChevronRightIcon } from '@heroicons/react/outline'; -import { useBridgeCommand } from '@sd/client'; +import { useLibraryCommand } from '@sd/client'; +import { useExplorerStore } from '@sd/client'; import { Dropdown } from '@sd/ui'; import clsx from 'clsx'; import { @@ -15,7 +16,6 @@ import { import React, { DetailedHTMLProps, HTMLAttributes } from 'react'; import { useNavigate } from 'react-router-dom'; -import { useExplorerState } from '../../hooks/useExplorerState'; import { Shortcut } from '../primitive/Shortcut'; import { DefaultProps } from '../primitive/types'; @@ -50,14 +50,14 @@ const TopBarButton: React.FC = ({ icon: Icon, ...props }) => }; export const TopBar: React.FC = (props) => { - const { locationId } = useExplorerState(); - const { mutate: generateThumbsForLocation } = useBridgeCommand('GenerateThumbsForLocation', { + const { locationId } = useExplorerStore(); + const { mutate: generateThumbsForLocation } = useLibraryCommand('GenerateThumbsForLocation', { onMutate: (data) => { console.log('GenerateThumbsForLocation', data); } }); - const { mutate: identifyUniqueFiles } = useBridgeCommand('IdentifyUniqueFiles', { + const { mutate: identifyUniqueFiles } = useLibraryCommand('IdentifyUniqueFiles', { onMutate: (data) => { console.log('IdentifyUniqueFiles', data); }, diff --git a/packages/interface/src/components/location/LocationListItem.tsx b/packages/interface/src/components/location/LocationListItem.tsx index 7b20e759c..3ccff9eb4 100644 --- a/packages/interface/src/components/location/LocationListItem.tsx +++ b/packages/interface/src/components/location/LocationListItem.tsx @@ -1,6 +1,6 @@ import { DotsVerticalIcon, RefreshIcon } from '@heroicons/react/outline'; -import { CogIcon, TrashIcon } from '@heroicons/react/solid'; -import { command, useBridgeCommand } from '@sd/client'; +import { TrashIcon } from '@heroicons/react/solid'; +import { useLibraryCommand } from '@sd/client'; import { LocationResource } from '@sd/core'; import { Button } from '@sd/ui'; import clsx from 'clsx'; @@ -16,9 +16,9 @@ interface LocationListItemProps { export default function LocationListItem({ location }: LocationListItemProps) { const [hide, setHide] = useState(false); - const { mutate: locRescan } = useBridgeCommand('LocRescan'); + const { mutate: locRescan } = useLibraryCommand('LocRescan'); - const { mutate: deleteLoc, isLoading: locDeletePending } = useBridgeCommand('LocDelete', { + const { mutate: deleteLoc, isLoading: locDeletePending } = useLibraryCommand('LocDelete', { onSuccess: () => { setHide(true); } diff --git a/packages/interface/src/components/settings/SettingsContainer.tsx b/packages/interface/src/components/settings/SettingsContainer.tsx index bcd448ab3..a5e313680 100644 --- a/packages/interface/src/components/settings/SettingsContainer.tsx +++ b/packages/interface/src/components/settings/SettingsContainer.tsx @@ -5,5 +5,5 @@ interface SettingsContainerProps { } export const SettingsContainer: React.FC = (props) => { - return
{props.children}
; + return
{props.children}
; }; diff --git a/packages/interface/src/components/settings/SettingsHeader.tsx b/packages/interface/src/components/settings/SettingsHeader.tsx index 633fa0328..f64df584b 100644 --- a/packages/interface/src/components/settings/SettingsHeader.tsx +++ b/packages/interface/src/components/settings/SettingsHeader.tsx @@ -1,15 +1,19 @@ -import React from 'react'; +import React, { ReactNode } from 'react'; interface SettingsHeaderProps { title: string; description: string; + rightArea?: ReactNode; } export const SettingsHeader: React.FC = (props) => { return ( -
-

{props.title}

-

{props.description}

+
+
+

{props.title}

+

{props.description}

+
+ {props.rightArea}
); diff --git a/packages/interface/src/components/settings/SettingsScreenContainer.tsx b/packages/interface/src/components/settings/SettingsScreenContainer.tsx new file mode 100644 index 000000000..fd0363a59 --- /dev/null +++ b/packages/interface/src/components/settings/SettingsScreenContainer.tsx @@ -0,0 +1,40 @@ +import clsx from 'clsx'; +import React from 'react'; +import { Outlet } from 'react-router'; + +interface SettingsScreenContainerProps { + children: React.ReactNode; +} + +export const SettingsIcon = ({ component: Icon, ...props }: any) => ( + +); + +export const SettingsHeading: React.FC<{ className?: string; children: string }> = ({ + children, + className +}) => ( +
+ {children} +
+); + +export const SettingsScreenContainer: React.FC = (props) => { + return ( +
+
+
+
{props.children}
+
+
+
+
+
+
+
+
+
+
+
+ ); +}; diff --git a/packages/interface/src/hooks/useCoreEvents.tsx b/packages/interface/src/hooks/useCoreEvents.tsx deleted file mode 100644 index 729068f73..000000000 --- a/packages/interface/src/hooks/useCoreEvents.tsx +++ /dev/null @@ -1,46 +0,0 @@ -import { transport } from '@sd/client'; -import { CoreEvent } from '@sd/core'; -import { useContext, useEffect } from 'react'; -import { useQueryClient } from 'react-query'; - -import { AppPropsContext } from '../AppPropsContext'; -import { useExplorerState } from './useExplorerState'; - -export function useCoreEvents() { - const client = useQueryClient(); - - const { addNewThumbnail } = useExplorerState(); - useEffect(() => { - function handleCoreEvent(e: CoreEvent) { - switch (e?.key) { - case 'NewThumbnail': - addNewThumbnail(e.data.cas_id); - break; - case 'InvalidateQuery': - case 'InvalidateQueryDebounced': - let query = [e.data.key]; - // TODO: find a way to make params accessible in TS - // also this method will only work for queries that use the whole params obj as the second key - // @ts-expect-error - if (e.data.params) { - // @ts-expect-error - query.push(e.data.params); - } - client.invalidateQueries(e.data.key); - break; - - default: - break; - } - } - // check Tauri Event type - transport?.on('core_event', handleCoreEvent); - - return () => { - transport?.off('core_event', handleCoreEvent); - }; - - // listen('core_event', (e: { payload: CoreEvent }) => { - // }); - }, [transport]); -} diff --git a/packages/interface/src/index.ts b/packages/interface/src/index.ts index eefb2853f..896dd051e 100644 --- a/packages/interface/src/index.ts +++ b/packages/interface/src/index.ts @@ -1,5 +1,6 @@ +import { AppProps, Platform } from '@sd/client'; + import App from './App'; -import { AppProps, Platform } from './AppPropsContext'; export type { AppProps, Platform }; diff --git a/packages/interface/src/screens/Debug.tsx b/packages/interface/src/screens/Debug.tsx index b5788c203..d0758facb 100644 --- a/packages/interface/src/screens/Debug.tsx +++ b/packages/interface/src/screens/Debug.tsx @@ -1,21 +1,22 @@ -import { useBridgeCommand, useBridgeQuery } from '@sd/client'; +import { useBridgeQuery, useLibraryCommand, useLibraryQuery } from '@sd/client'; +import { AppPropsContext } from '@sd/client'; import { Button } from '@sd/ui'; import React, { useContext } from 'react'; -import { AppPropsContext } from '../AppPropsContext'; import CodeBlock from '../components/primitive/Codeblock'; export const DebugScreen: React.FC<{}> = (props) => { const appPropsContext = useContext(AppPropsContext); - const { data: client } = useBridgeQuery('NodeGetState'); + const { data: nodeState } = useBridgeQuery('NodeGetState'); + const { data: libraryState } = useBridgeQuery('NodeGetLibraries'); const { data: jobs } = useBridgeQuery('JobGetRunning'); - const { data: jobHistory } = useBridgeQuery('JobGetHistory'); + const { data: jobHistory } = useLibraryQuery('JobGetHistory'); // const { mutate: purgeDB } = useBridgeCommand('PurgeDatabase', { // onMutate: () => { // alert('Database purged'); // } // }); - const { mutate: identifyFiles } = useBridgeCommand('IdentifyUniqueFiles'); + const { mutate: identifyFiles } = useLibraryCommand('IdentifyUniqueFiles'); return (
@@ -27,8 +28,8 @@ export const DebugScreen: React.FC<{}> = (props) => { variant="gray" size="sm" onClick={() => { - if (client && appPropsContext?.onOpen) { - appPropsContext.onOpen(client.data_path); + if (nodeState && appPropsContext?.onOpen) { + appPropsContext.onOpen(nodeState.data_path); } }} > @@ -39,8 +40,10 @@ export const DebugScreen: React.FC<{}> = (props) => {

Job History

-

Client State

-
+

Node State

+
+

Libraries

+
); diff --git a/packages/interface/src/screens/Explorer.tsx b/packages/interface/src/screens/Explorer.tsx index 742339eab..4c1df35ec 100644 --- a/packages/interface/src/screens/Explorer.tsx +++ b/packages/interface/src/screens/Explorer.tsx @@ -1,11 +1,11 @@ -import { useBridgeQuery } from '@sd/client'; +import { useLibraryQuery } from '@sd/client'; +import { useExplorerStore } from '@sd/client'; import React from 'react'; import { useParams, useSearchParams } from 'react-router-dom'; import { FileList } from '../components/file/FileList'; import { Inspector } from '../components/file/Inspector'; import { TopBar } from '../components/layout/TopBar'; -import { useExplorerState } from '../hooks/useExplorerState'; export const ExplorerScreen: React.FC<{}> = () => { let [searchParams] = useSearchParams(); @@ -16,13 +16,13 @@ export const ExplorerScreen: React.FC<{}> = () => { const [limit, setLimit] = React.useState(100); - const { selectedRowIndex } = useExplorerState(); + const { selectedRowIndex } = useExplorerStore(); // Current Location - const { data: currentLocation } = useBridgeQuery('SysGetLocation', { id: location_id }); + const { data: currentLocation } = useLibraryQuery('SysGetLocation', { id: location_id }); // Current Directory - const { data: currentDir } = useBridgeQuery( + const { data: currentDir } = useLibraryQuery( 'LibGetExplorerDir', { location_id: location_id!, path, limit }, { enabled: !!location_id } diff --git a/packages/interface/src/screens/Overview.tsx b/packages/interface/src/screens/Overview.tsx index 38617eb0d..2b0cd4d13 100644 --- a/packages/interface/src/screens/Overview.tsx +++ b/packages/interface/src/screens/Overview.tsx @@ -1,5 +1,6 @@ -import { PlusIcon } from '@heroicons/react/solid'; -import { useBridgeQuery } from '@sd/client'; +import { DatabaseIcon, ExclamationCircleIcon, PlusIcon } from '@heroicons/react/solid'; +import { useBridgeQuery, useLibraryQuery } from '@sd/client'; +import { AppPropsContext } from '@sd/client'; import { Statistics } from '@sd/core'; import { Button, Input } from '@sd/ui'; import byteSize from 'byte-size'; @@ -10,7 +11,6 @@ import Skeleton from 'react-loading-skeleton'; import 'react-loading-skeleton/dist/skeleton.css'; import create from 'zustand'; -import { AppPropsContext } from '../AppPropsContext'; import { Device } from '../components/device/Device'; import Dialog from '../components/layout/Dialog'; @@ -102,7 +102,7 @@ const StatItem: React.FC = (props) => { export const OverviewScreen = () => { const { data: libraryStatistics, isLoading: isStatisticsLoading } = - useBridgeQuery('GetLibraryStatistics'); + useLibraryQuery('GetLibraryStatistics'); const { data: nodeState } = useBridgeQuery('NodeGetState'); const { overviewStats, setOverviewStats } = useOverviewState(); @@ -157,7 +157,17 @@ export const OverviewScreen = () => { {/* STAT HEADER */}
{/* STAT CONTAINER */} -
+
+ {!libraryStatistics && ( +
+
+ Missing library +
+ + Ensure the library you have loaded still exists on disk + +
+ )} {Object.entries(overviewStats).map(([key, value]) => { if (!displayableStatItems.includes(key)) return null; @@ -171,8 +181,9 @@ export const OverviewScreen = () => { ); })}
+
-
+
{
-
+
( - -); - -const Heading: React.FC<{ className?: string; children: string }> = ({ children, className }) => ( -
- {children} -
-); - -export const SettingsScreen: React.FC<{}> = () => { - return ( -
-
-
-
- Client - - - General - - - - Security - - - - Appearance - - - - Experimental - - - Library - - - Database - - - - Locations - - - - - Keys - - - - Tags - - - Cloud - - - Sync - - - - Contacts - -
-
-
-
-
-
-
-
-
-
-
-
- ); -}; diff --git a/packages/interface/src/screens/settings/CurrentLibrarySettings.tsx b/packages/interface/src/screens/settings/CurrentLibrarySettings.tsx new file mode 100644 index 000000000..ec8426952 --- /dev/null +++ b/packages/interface/src/screens/settings/CurrentLibrarySettings.tsx @@ -0,0 +1,42 @@ +import { CogIcon, DatabaseIcon, KeyIcon, TagIcon } from '@heroicons/react/outline'; +import { HardDrive, ShareNetwork } from 'phosphor-react'; +import React from 'react'; + +import { SidebarLink } from '../../components/file/Sidebar'; +import { + SettingsHeading, + SettingsIcon, + SettingsScreenContainer +} from '../../components/settings/SettingsScreenContainer'; + +export const CurrentLibrarySettings: React.FC = () => { + return ( + + Library Settings + + + General + + + + Locations + + + + Tags + + + + Keys + + + + Backups + + + + Sync + + + ); +}; diff --git a/packages/interface/src/screens/settings/GeneralSettings.tsx b/packages/interface/src/screens/settings/GeneralSettings.tsx deleted file mode 100644 index e908b3789..000000000 --- a/packages/interface/src/screens/settings/GeneralSettings.tsx +++ /dev/null @@ -1,40 +0,0 @@ -import { useBridgeQuery } from '@sd/client'; -import React from 'react'; - -import { InputContainer } from '../../components/primitive/InputContainer'; -import Listbox from '../../components/primitive/Listbox'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; - -export default function GeneralSettings() { - const { data: volumes } = useBridgeQuery('SysGetVolumes'); - - return ( - - - -
-
- { - const name = volume.name && volume.name.length ? volume.name : volume.mount_point; - return { - key: name, - option: name, - description: volume.mount_point - }; - }) ?? [] - } - /> -
-
-
- - {/*
{JSON.stringify({ config })}
*/} -
- ); -} diff --git a/packages/interface/src/screens/settings/LibrarySettings.tsx b/packages/interface/src/screens/settings/LibrarySettings.tsx deleted file mode 100644 index 9b54f8725..000000000 --- a/packages/interface/src/screens/settings/LibrarySettings.tsx +++ /dev/null @@ -1,32 +0,0 @@ -import React from 'react'; - -import { Toggle } from '../../components/primitive'; -import { InputContainer } from '../../components/primitive/InputContainer'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; - -// type LibrarySecurity = 'public' | 'password' | 'vault'; - -export default function LibrarySettings() { - // const locations = useBridgeQuery("SysGetLocation") - const [encryptOnCloud, setEncryptOnCloud] = React.useState(false); - - return ( - - {/* */} - - -
-
-
-
-
- ); -} diff --git a/packages/interface/src/screens/settings/SecuritySettings.tsx b/packages/interface/src/screens/settings/SecuritySettings.tsx deleted file mode 100644 index e8b39dec3..000000000 --- a/packages/interface/src/screens/settings/SecuritySettings.tsx +++ /dev/null @@ -1,23 +0,0 @@ -import { Button } from '@sd/ui'; -import React from 'react'; - -import { InputContainer } from '../../components/primitive/InputContainer'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; - -export default function SecuritySettings() { - return ( - - - -
- - {/**/} -
-
-
- ); -} diff --git a/packages/interface/src/screens/settings/Settings.tsx b/packages/interface/src/screens/settings/Settings.tsx new file mode 100644 index 000000000..fc903d4aa --- /dev/null +++ b/packages/interface/src/screens/settings/Settings.tsx @@ -0,0 +1,83 @@ +import { + CogIcon, + CollectionIcon, + GlobeAltIcon, + KeyIcon, + TerminalIcon +} from '@heroicons/react/outline'; +import { HardDrive, PaintBrush, ShareNetwork } from 'phosphor-react'; +import React from 'react'; + +import { SidebarLink } from '../../components/file/Sidebar'; +import { + SettingsHeading, + SettingsIcon, + SettingsScreenContainer +} from '../../components/settings/SettingsScreenContainer'; + +export const SettingsScreen: React.FC = () => { + return ( + + Client + + + General + + + + Appearance + + + Node + + + Nodes + + + + P2P + + + + Libraries + + + + Security + + Developer + + + Experimental + + {/* Library + + + My Libraries + + + + Locations + + + + + Keys + + + + Tags + */} + + {/* Cloud + + + Sync + + + + Contacts + */} + + ); +}; diff --git a/packages/interface/src/screens/settings/AppearanceSettings.tsx b/packages/interface/src/screens/settings/client/AppearanceSettings.tsx similarity index 58% rename from packages/interface/src/screens/settings/AppearanceSettings.tsx rename to packages/interface/src/screens/settings/client/AppearanceSettings.tsx index 746d3d273..177b5b1a4 100644 --- a/packages/interface/src/screens/settings/AppearanceSettings.tsx +++ b/packages/interface/src/screens/settings/client/AppearanceSettings.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function AppearanceSettings() { return ( diff --git a/packages/interface/src/screens/settings/client/GeneralSettings.tsx b/packages/interface/src/screens/settings/client/GeneralSettings.tsx new file mode 100644 index 000000000..66550f7c8 --- /dev/null +++ b/packages/interface/src/screens/settings/client/GeneralSettings.tsx @@ -0,0 +1,35 @@ +import React from 'react'; + +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +export default function GeneralSettings() { + // const { data: volumes } = useBridgeQuery('SysGetVolumes'); + + return ( + + + {/* +
+
+ { + const name = volume.name && volume.name.length ? volume.name : volume.mount_point; + return { + key: name, + option: name, + description: volume.mount_point + }; + }) ?? [] + } + /> +
+
+
*/} +
+ ); +} diff --git a/packages/interface/src/screens/settings/ContactsSettings.tsx b/packages/interface/src/screens/settings/library/ContactsSettings.tsx similarity index 58% rename from packages/interface/src/screens/settings/ContactsSettings.tsx rename to packages/interface/src/screens/settings/library/ContactsSettings.tsx index 581c1df18..014a7316d 100644 --- a/packages/interface/src/screens/settings/ContactsSettings.tsx +++ b/packages/interface/src/screens/settings/library/ContactsSettings.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function ContactsSettings() { return ( diff --git a/packages/interface/src/screens/settings/KeysSetting.tsx b/packages/interface/src/screens/settings/library/KeysSetting.tsx similarity index 55% rename from packages/interface/src/screens/settings/KeysSetting.tsx rename to packages/interface/src/screens/settings/library/KeysSetting.tsx index 5e9087fce..388d3fc44 100644 --- a/packages/interface/src/screens/settings/KeysSetting.tsx +++ b/packages/interface/src/screens/settings/library/KeysSetting.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function KeysSettings() { return ( diff --git a/packages/interface/src/screens/settings/library/LibraryGeneralSettings.tsx b/packages/interface/src/screens/settings/library/LibraryGeneralSettings.tsx new file mode 100644 index 000000000..87e70e4e0 --- /dev/null +++ b/packages/interface/src/screens/settings/library/LibraryGeneralSettings.tsx @@ -0,0 +1,91 @@ +import { useBridgeCommand, useBridgeQuery } from '@sd/client'; +import { useCurrentLibrary } from '@sd/client'; +import { Button, Input } from '@sd/ui'; +import React, { useCallback, useEffect, useState } from 'react'; +import { useDebounce } from 'use-debounce'; + +import { Toggle } from '../../../components/primitive'; +import { InputContainer } from '../../../components/primitive/InputContainer'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +export default function LibraryGeneralSettings() { + const { currentLibrary, libraries, currentLibraryUuid } = useCurrentLibrary(); + + const { mutate: editLibrary } = useBridgeCommand('EditLibrary'); + + const [name, setName] = useState(''); + const [description, setDescription] = useState(''); + const [encryptLibrary, setEncryptLibrary] = useState(false); + + const [nameDebounced] = useDebounce(name, 500); + const [descriptionDebounced] = useDebounce(description, 500); + + useEffect(() => { + if (currentLibrary) { + const { name, description } = currentLibrary.config; + // currentLibrary must be loaded, name must not be empty, and must be different from the current + if (nameDebounced && (nameDebounced !== name || descriptionDebounced !== description)) { + editLibrary({ + id: currentLibraryUuid!, + name: nameDebounced, + description: 
descriptionDebounced + }); + } + } + }, [nameDebounced, descriptionDebounced]); + + useEffect(() => { + if (currentLibrary) { + setName(currentLibrary.config.name); + setDescription(currentLibrary.config.description); + } + }, [libraries]); + + return ( + + +
+
+ Name + setName(e.target.value)} + defaultValue="My Default Library" + /> +
+
+ Description + setDescription(e.target.value)} + placeholder="" + /> +
+
+ + +
+ +
+
+ +
+ +
+
+
+ ); +} diff --git a/packages/interface/src/screens/settings/library/LocationSettings.tsx b/packages/interface/src/screens/settings/library/LocationSettings.tsx new file mode 100644 index 000000000..3b6c67c26 --- /dev/null +++ b/packages/interface/src/screens/settings/library/LocationSettings.tsx @@ -0,0 +1,55 @@ +import { PlusIcon } from '@heroicons/react/solid'; +import { useBridgeQuery, useLibraryCommand, useLibraryQuery } from '@sd/client'; +import { AppPropsContext } from '@sd/client'; +import { Button } from '@sd/ui'; +import React, { useContext } from 'react'; + +import LocationListItem from '../../../components/location/LocationListItem'; +import { InputContainer } from '../../../components/primitive/InputContainer'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +// const exampleLocations = [ +// { option: 'Macintosh HD', key: 'macintosh_hd' }, +// { option: 'LaCie External', key: 'lacie_external' }, +// { option: 'Seagate 8TB', key: 'seagate_8tb' } +// ]; + +export default function LocationSettings() { + const { data: locations } = useLibraryQuery('SysGetLocations'); + + const appProps = useContext(AppPropsContext); + + const { mutate: createLocation } = useLibraryCommand('LocCreate'); + + return ( + + {/**/} + + +
+ } + /> + +
+ {locations?.map((location) => ( + + ))} +
+ + ); +} diff --git a/packages/interface/src/screens/settings/library/SecuritySettings.tsx b/packages/interface/src/screens/settings/library/SecuritySettings.tsx new file mode 100644 index 000000000..ac3e7a87d --- /dev/null +++ b/packages/interface/src/screens/settings/library/SecuritySettings.tsx @@ -0,0 +1,14 @@ +import { Button } from '@sd/ui'; +import React from 'react'; + +import { InputContainer } from '../../../components/primitive/InputContainer'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +export default function SecuritySettings() { + return ( + + + + ); +} diff --git a/packages/interface/src/screens/settings/SharingSettings.tsx b/packages/interface/src/screens/settings/library/SharingSettings.tsx similarity index 58% rename from packages/interface/src/screens/settings/SharingSettings.tsx rename to packages/interface/src/screens/settings/library/SharingSettings.tsx index 4403271c1..23ef94e67 100644 --- a/packages/interface/src/screens/settings/SharingSettings.tsx +++ b/packages/interface/src/screens/settings/library/SharingSettings.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function SharingSettings() { return ( diff --git a/packages/interface/src/screens/settings/SyncSettings.tsx b/packages/interface/src/screens/settings/library/SyncSettings.tsx similarity index 56% rename from packages/interface/src/screens/settings/SyncSettings.tsx rename to packages/interface/src/screens/settings/library/SyncSettings.tsx index 73842468d..9cdb85193 100644 --- a/packages/interface/src/screens/settings/SyncSettings.tsx +++ b/packages/interface/src/screens/settings/library/SyncSettings.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function SyncSettings() { return ( diff --git a/packages/interface/src/screens/settings/TagsSettings.tsx b/packages/interface/src/screens/settings/library/TagsSettings.tsx similarity index 55% rename from packages/interface/src/screens/settings/TagsSettings.tsx rename to packages/interface/src/screens/settings/library/TagsSettings.tsx index d1aac3e81..19bb977f6 100644 --- a/packages/interface/src/screens/settings/TagsSettings.tsx +++ b/packages/interface/src/screens/settings/library/TagsSettings.tsx @@ -1,7 +1,7 @@ import React from 'react'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function TagsSettings() { return ( diff --git a/packages/interface/src/screens/settings/ExperimentalSettings.tsx b/packages/interface/src/screens/settings/node/ExperimentalSettings.tsx similarity index 64% rename 
from packages/interface/src/screens/settings/ExperimentalSettings.tsx rename to packages/interface/src/screens/settings/node/ExperimentalSettings.tsx index 62a253f8a..c274da7dd 100644 --- a/packages/interface/src/screens/settings/ExperimentalSettings.tsx +++ b/packages/interface/src/screens/settings/node/ExperimentalSettings.tsx @@ -1,14 +1,12 @@ import React from 'react'; -import { useNodeStore } from '../../components/device/Stores'; -import { Toggle } from '../../components/primitive'; -import { InputContainer } from '../../components/primitive/InputContainer'; -import { SettingsContainer } from '../../components/settings/SettingsContainer'; -import { SettingsHeader } from '../../components/settings/SettingsHeader'; +import { useNodeStore } from '../../../components/device/Stores'; +import { Toggle } from '../../../components/primitive'; +import { InputContainer } from '../../../components/primitive/InputContainer'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; export default function ExperimentalSettings() { - // const locations = useBridgeQuery("SysGetLocation") - const { isExperimental, setIsExperimental } = useNodeStore(); return ( diff --git a/packages/interface/src/screens/settings/node/LibrariesSettings.tsx b/packages/interface/src/screens/settings/node/LibrariesSettings.tsx new file mode 100644 index 000000000..3f1b9c3a0 --- /dev/null +++ b/packages/interface/src/screens/settings/node/LibrariesSettings.tsx @@ -0,0 +1,113 @@ +import { CollectionIcon, TrashIcon } from '@heroicons/react/outline'; +import { PlusIcon } from '@heroicons/react/solid'; +import { useBridgeCommand, useBridgeQuery } from '@sd/client'; +import { AppPropsContext } from '@sd/client'; +import { LibraryConfig, LibraryConfigWrapped } from '@sd/core'; +import { Button, Input } from '@sd/ui'; +import React, { useContext, useState } from 'react'; + +import Card from '../../../components/layout/Card'; +import Dialog from '../../../components/layout/Dialog'; +import { Toggle } from '../../../components/primitive'; +import { InputContainer } from '../../../components/primitive/InputContainer'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +// type LibrarySecurity = 'public' | 'password' | 'vault'; + +function LibraryListItem(props: { library: LibraryConfigWrapped }) { + const [openDeleteModal, setOpenDeleteModal] = useState(false); + + const { mutate: deleteLib, isLoading: libDeletePending } = useBridgeCommand('DeleteLibrary', { + onSuccess: () => { + setOpenDeleteModal(false); + } + }); + + return ( + +
+

{props.library.config.name}

+

{props.library.uuid}

+
+
+ { + deleteLib({ id: props.library.uuid }); + }} + loading={libDeletePending} + ctaDanger + ctaLabel="Delete" + trigger={ + + } + /> +
+
+ ); +} + +export default function LibrarySettings() { + const [openCreateModal, setOpenCreateModal] = useState(false); + const [newLibName, setNewLibName] = useState(''); + + const { mutate: createLibrary, isLoading: createLibLoading } = useBridgeCommand('CreateLibrary', { + onSuccess: () => { + setOpenCreateModal(false); + } + }); + + const { data: libraries } = useBridgeQuery('NodeGetLibraries'); + + function createNewLib() { + if (newLibName) { + createLibrary({ name: newLibName }); + } + } + + return ( + + + + Add Library + + } + > + setNewLibName(e.target.value)} + /> + +
+ } + /> + +
+ {libraries?.map((library) => ( + + ))} +
+ + ); +} diff --git a/packages/interface/src/screens/settings/node/NodesSettings.tsx b/packages/interface/src/screens/settings/node/NodesSettings.tsx new file mode 100644 index 000000000..75595f42f --- /dev/null +++ b/packages/interface/src/screens/settings/node/NodesSettings.tsx @@ -0,0 +1,12 @@ +import React from 'react'; + +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +export default function NodesSettings() { + return ( + + + + ); +} diff --git a/packages/interface/src/screens/settings/node/P2PSettings.tsx b/packages/interface/src/screens/settings/node/P2PSettings.tsx new file mode 100644 index 000000000..aee248624 --- /dev/null +++ b/packages/interface/src/screens/settings/node/P2PSettings.tsx @@ -0,0 +1,40 @@ +import { useBridgeQuery } from '@sd/client'; +import { Button, Input } from '@sd/ui'; +import React from 'react'; + +import { Toggle } from '../../../components/primitive'; +import { InputContainer } from '../../../components/primitive/InputContainer'; +import Listbox from '../../../components/primitive/Listbox'; +import { SettingsContainer } from '../../../components/settings/SettingsContainer'; +import { SettingsHeader } from '../../../components/settings/SettingsHeader'; + +export default function P2PSettings() { + return ( + + + + + + + + +
+
+
+ Change +
+
+
+
+ ); +} diff --git a/packages/ui/package.json b/packages/ui/package.json index ad1cc798d..888cf519d 100644 --- a/packages/ui/package.json +++ b/packages/ui/package.json @@ -17,7 +17,7 @@ "storybook:build": "build-storybook" }, "dependencies": { - "@headlessui/react": "^1.6.4", + "@headlessui/react": "^1.6.6", "@heroicons/react": "^1.0.6", "@radix-ui/react-context-menu": "^0.1.6", "clsx": "^1.1.1", diff --git a/packages/ui/src/Input.tsx b/packages/ui/src/Input.tsx index 5c32fc513..5120351fc 100644 --- a/packages/ui/src/Input.tsx +++ b/packages/ui/src/Input.tsx @@ -39,7 +39,7 @@ export const Input = React.forwardRef(({ ...props ref={ref} {...props} className={clsx( - `px-3 py-1 rounded-md border leading-7 outline-none shadow-xs focus:ring-2 transition-all`, + `px-3 py-1 text-sm rounded-md border leading-7 outline-none shadow-xs focus:ring-2 transition-all`, variants[props.variant || 'default'], props.className )} diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 14a3699c6..b4f6a891d 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -50,7 +50,7 @@ importers: react-dom: 18.1.0_react@18.1.0 devDependencies: '@tauri-apps/cli': 1.0.0 - '@tauri-apps/tauricon': github.com/tauri-apps/tauricon/f104e2af7a19e1cdf9ee8212d2d3f6456d3aa00f + '@tauri-apps/tauricon': github.com/tauri-apps/tauricon/5eea916a4a8e13aa41a943beaa7b4b71977e190d '@types/babel-core': 6.25.7 '@types/byte-size': 8.1.0 '@types/react': 18.0.9 @@ -229,10 +229,13 @@ importers: specifiers: '@sd/config': workspace:* '@sd/core': workspace:* + '@sd/interface': workspace:* + '@types/lodash': ^4.14.182 '@types/react': ^18.0.9 eventemitter3: ^4.0.7 immer: ^9.0.14 - react-query: ^3.39.1 + lodash: ^4.17.21 + react-query: ^3.34.19 scripts: '*' tsconfig: '*' typescript: ^4.7.2 @@ -240,11 +243,14 @@ importers: dependencies: '@sd/config': link:../config '@sd/core': link:../../core + '@sd/interface': link:../interface eventemitter3: 4.0.7 immer: 9.0.15 + lodash: 4.17.21 react-query: 3.39.1 zustand: 4.0.0-rc.1_immer@9.0.15 devDependencies: + '@types/lodash': 4.14.182 '@types/react': 18.0.9 scripts: 0.1.0 tsconfig: 7.0.0 @@ -300,7 +306,7 @@ importers: react-loading-icons: ^1.1.0 react-loading-skeleton: ^3.1.0 react-portal: ^4.2.2 - react-query: ^3.39.1 + react-query: ^3.34.19 react-router: 6.3.0 react-router-dom: 6.3.0 react-scrollbars-custom: ^4.0.27 @@ -310,6 +316,7 @@ importers: rooks: ^5.11.2 tailwindcss: ^3.0.24 typescript: ^4.7.2 + use-debounce: ^8.0.1 vite: ^2.9.9 vite-plugin-svgr: ^2.1.0 zustand: 4.0.0-rc.1 @@ -355,6 +362,7 @@ importers: react-virtuoso: 2.13.2_ef5jwxihqo6n7gxfmzogljlgcm rooks: 5.11.2_ef5jwxihqo6n7gxfmzogljlgcm tailwindcss: 3.1.3 + use-debounce: 8.0.1_react@18.1.0 zustand: 4.0.0-rc.1_immer@9.0.15+react@18.1.0 devDependencies: '@types/babel-core': 6.25.7 @@ -377,7 +385,7 @@ importers: packages/ui: specifiers: '@babel/core': ^7.18.2 - '@headlessui/react': ^1.6.4 + '@headlessui/react': ^1.6.6 '@heroicons/react': ^1.0.6 '@radix-ui/react-context-menu': ^0.1.6 '@sd/config': workspace:* @@ -408,7 +416,7 @@ importers: tailwindcss: ^3.0.24 typescript: ^4.7.2 dependencies: - '@headlessui/react': 1.6.4_ef5jwxihqo6n7gxfmzogljlgcm + '@headlessui/react': 1.6.6_ef5jwxihqo6n7gxfmzogljlgcm '@heroicons/react': 1.0.6_react@18.1.0 '@radix-ui/react-context-menu': 0.1.6_ohobp6rpsmerwlq5ipwfh5yigy clsx: 1.1.1 @@ -2212,6 +2220,17 @@ packages: react-dom: 18.1.0_react@18.1.0 dev: false + /@headlessui/react/1.6.6_ef5jwxihqo6n7gxfmzogljlgcm: + resolution: {integrity: 
sha512-MFJtmj9Xh/hhBMhLccGbBoSk+sk61BlP6sJe4uQcVMtXZhCgGqd2GyIQzzmsdPdTEWGSF434CBi8mnhR6um46Q==} + engines: {node: '>=10'} + peerDependencies: + react: ^16 || ^17 || ^18 + react-dom: ^16 || ^17 || ^18 + dependencies: + react: 18.1.0 + react-dom: 18.1.0_react@18.1.0 + dev: false + /@heroicons/react/1.0.6_react@18.1.0: resolution: {integrity: sha512-JJCXydOFWMDpCP4q13iEplA503MQO3xLoZiKum+955ZCtHINWnx26CUxVxxFQu/uLb4LW3ge15ZpzIkXKkJ8oQ==} peerDependencies: @@ -3784,6 +3803,7 @@ packages: webpack-hot-middleware: 2.25.1 webpack-virtual-modules: 0.2.2 transitivePeerDependencies: + - bluebird - eslint - supports-color - vue-template-compiler @@ -4159,6 +4179,7 @@ packages: x-default-browser: 0.4.0 transitivePeerDependencies: - '@storybook/mdx2-csf' + - bluebird - bufferutil - encoding - eslint @@ -4196,6 +4217,7 @@ packages: webpack: 5.73.0 transitivePeerDependencies: - '@storybook/mdx2-csf' + - bluebird - bufferutil - encoding - eslint @@ -4316,6 +4338,7 @@ packages: webpack-dev-middleware: 3.7.3_webpack@4.46.0 webpack-virtual-modules: 0.2.2 transitivePeerDependencies: + - bluebird - encoding - eslint - supports-color @@ -4546,6 +4569,7 @@ packages: - '@storybook/mdx2-csf' - '@swc/core' - '@types/webpack' + - bluebird - bufferutil - encoding - esbuild @@ -5970,6 +5994,8 @@ packages: dependencies: micromatch: 3.1.10 normalize-path: 2.1.1 + transitivePeerDependencies: + - supports-color dev: true /anymatch/3.1.2: @@ -6603,6 +6629,8 @@ packages: qs: 6.5.2 raw-body: 2.3.3 type-is: 1.6.18 + transitivePeerDependencies: + - supports-color dev: true /body-parser/1.20.0: @@ -6621,6 +6649,8 @@ packages: raw-body: 2.5.1 type-is: 1.6.18 unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color /boolbase/1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} @@ -6677,6 +6707,8 @@ packages: snapdragon-node: 2.1.1 split-string: 3.1.0 to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color dev: true /braces/3.0.2: @@ -6798,7 +6830,7 @@ packages: dev: true /buffer-equal/0.0.1: - resolution: {integrity: sha512-RgSV6InVQ9ODPdLWJ5UAqBqJBOg370Nz6ZQtRzpt6nUjc8v0St97uJ4PYC6NztqIScrAXafKM3mZPMygSe1ggA==} + resolution: {integrity: sha1-kbx0sR6kBbyRa8aqkI+q+ltKrEs=} engines: {node: '>=0.4.0'} dev: true @@ -6887,7 +6919,7 @@ packages: mississippi: 3.0.0 mkdirp: 0.5.6 move-concurrently: 1.0.1 - promise-inflight: 1.0.1 + promise-inflight: 1.0.1_bluebird@3.7.2 rimraf: 2.7.1 ssri: 6.0.2 unique-filename: 1.1.1 @@ -6916,6 +6948,8 @@ packages: ssri: 8.0.1 tar: 6.1.11 unique-filename: 1.1.1 + transitivePeerDependencies: + - bluebird dev: true /cache-base/1.0.1: @@ -7137,6 +7171,8 @@ packages: upath: 1.2.0 optionalDependencies: fsevents: 1.2.13 + transitivePeerDependencies: + - supports-color dev: true optional: true @@ -7417,6 +7453,8 @@ packages: on-headers: 1.0.2 safe-buffer: 5.1.2 vary: 1.1.2 + transitivePeerDependencies: + - supports-color dev: true /compression/1.7.4: @@ -7430,6 +7468,8 @@ packages: on-headers: 1.0.2 safe-buffer: 5.1.2 vary: 1.1.2 + transitivePeerDependencies: + - supports-color /concat-map/0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} @@ -7627,6 +7667,8 @@ packages: p-all: 2.1.0 p-filter: 2.1.0 p-map: 3.0.0 + transitivePeerDependencies: + - supports-color dev: true /create-ecdh/4.0.4: @@ -7862,15 +7904,37 @@ packages: /debug/2.6.9: resolution: {integrity: 
sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true dependencies: ms: 2.0.0 /debug/3.2.7: resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true dependencies: ms: 2.1.3 dev: true + /debug/3.2.7_supports-color@5.5.0: + resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + dependencies: + ms: 2.1.3 + supports-color: 5.5.0 + dev: true + /debug/4.3.4: resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} engines: {node: '>=6.0'} @@ -8107,6 +8171,8 @@ packages: dependencies: address: 1.2.0 debug: 2.6.9 + transitivePeerDependencies: + - supports-color dev: true /detective/5.2.1: @@ -8874,6 +8940,8 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color dev: true /expand-template/2.0.3: @@ -8915,6 +8983,8 @@ packages: type-is: 1.6.18 utils-merge: 1.0.1 vary: 1.1.2 + transitivePeerDependencies: + - supports-color dev: true /express/4.18.1: @@ -8952,6 +9022,8 @@ packages: type-is: 1.6.18 utils-merge: 1.0.1 vary: 1.1.2 + transitivePeerDependencies: + - supports-color /ext-list/2.2.2: resolution: {integrity: sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==} @@ -9008,6 +9080,8 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color dev: true /fast-deep-equal/2.0.1: @@ -9027,6 +9101,8 @@ packages: is-glob: 4.0.3 merge2: 1.4.1 micromatch: 3.1.10 + transitivePeerDependencies: + - supports-color dev: true /fast-glob/3.2.11: @@ -9233,6 +9309,8 @@ packages: parseurl: 1.3.3 statuses: 1.4.0 unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color dev: true /finalhandler/1.2.0: @@ -9246,6 +9324,8 @@ packages: parseurl: 1.3.3 statuses: 2.0.1 unpipe: 1.0.0 + transitivePeerDependencies: + - supports-color /find-cache-dir/2.1.0: resolution: {integrity: sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ==} @@ -9419,6 +9499,8 @@ packages: typescript: 4.7.2 webpack: 4.46.0 worker-rpc: 0.1.1 + transitivePeerDependencies: + - supports-color dev: true /fork-ts-checker-webpack-plugin/6.5.2_2uut6pkjgoy643sdkylfmypqbm: @@ -9737,7 +9819,7 @@ packages: dev: true /github-from-package/0.0.0: - resolution: {integrity: sha512-SyHy3T1v2NUXn29OsWdxmK6RwHD+vkj3v8en8AOBZ1wBQ/hCAQ5bAQTD02kW4W9tUp/3Qh6J8r9EvntiyCmOOw==} + resolution: {integrity: sha1-l/tdlr/eiXMxPyDoKI75oWf6ZM4=} dev: true /github-slugger/1.4.0: @@ -9876,10 +9958,12 @@ packages: ignore: 4.0.6 pify: 4.0.1 slash: 2.0.0 + transitivePeerDependencies: + - supports-color dev: true - /got/12.0.4: - resolution: {integrity: sha512-2Eyz4iU/ktq7wtMFXxzK7g5p35uNYLLdiZarZ5/Yn3IJlNEpBd5+dCgcAyxN8/8guZLszffwe3wVyw+DEVrpBg==} + /got/12.1.0: + resolution: {integrity: sha512-hBv2ty9QN2RdbJJMK3hesmSkFTjVIHyIDDbssCKnSmq62edGgImJWD10Eb1k77TiV1bxloxqcFAVK8+9pkhOig==} engines: {node: '>=14.16'} dependencies: '@sindresorhus/is': 4.6.0 @@ -11099,6 +11183,8 @@ packages: walker: 1.0.8 optionalDependencies: fsevents: 2.3.2 + 
transitivePeerDependencies: + - supports-color dev: true /jest-mock/27.5.1: @@ -11917,6 +12003,8 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color dev: true /micromatch/4.0.5: @@ -12112,6 +12200,8 @@ packages: depd: 1.1.2 on-finished: 2.3.0 on-headers: 1.0.2 + transitivePeerDependencies: + - supports-color dev: true /move-concurrently/1.0.1: @@ -12173,6 +12263,8 @@ packages: regex-not: 1.0.2 snapdragon: 0.8.2 to-regex: 3.0.2 + transitivePeerDependencies: + - supports-color dev: true /napi-build-utils/1.0.2: @@ -12186,6 +12278,9 @@ packages: rimraf: 2.7.1 tracer: 0.8.15 ws: 2.3.1 + transitivePeerDependencies: + - bufferutil + - utf-8-validate dev: true /negotiator/0.6.3: @@ -12297,7 +12392,7 @@ packages: requiresBuild: true dependencies: chokidar: 3.5.3 - debug: 3.2.7 + debug: 3.2.7_supports-color@5.5.0 ignore-by-default: 1.0.1 minimatch: 3.1.2 pstree.remy: 1.1.8 @@ -13437,6 +13532,22 @@ packages: /promise-inflight/1.0.1: resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + dev: true + + /promise-inflight/1.0.1_bluebird@3.7.2: + resolution: {integrity: sha512-6zWPyEOFaQBJYcGMHBKTKJ3u6TBsnMFOIZSa6ce1e/ZrrsOlnHRHbabMjLiBYKp+n44X9eUI6VUPaukCXHuG4g==} + peerDependencies: + bluebird: '*' + peerDependenciesMeta: + bluebird: + optional: true + dependencies: + bluebird: 3.7.2 dev: true /promise.allsettled/1.0.5: @@ -14216,6 +14327,8 @@ packages: graceful-fs: 4.2.10 micromatch: 3.1.10 readable-stream: 2.3.7 + transitivePeerDependencies: + - supports-color dev: true optional: true @@ -14626,6 +14739,8 @@ packages: micromatch: 3.1.10 minimist: 1.2.6 walker: 1.0.8 + transitivePeerDependencies: + - supports-color dev: true /sass-loader/13.0.0_sass@1.52.1: @@ -14789,6 +14904,8 @@ packages: on-finished: 2.3.0 range-parser: 1.2.1 statuses: 1.4.0 + transitivePeerDependencies: + - supports-color dev: true /send/0.18.0: @@ -14808,6 +14925,8 @@ packages: on-finished: 2.4.1 range-parser: 1.2.1 statuses: 2.0.1 + transitivePeerDependencies: + - supports-color /serialize-error/7.0.1: resolution: {integrity: sha512-8I8TjW5KMOKsZQTvoxjuSIa7foAwPWGOts+6o7sgjz41/qMD9VQHEDxi6PBvK2l0MXUmqZyNpUK+T2tQaaElvw==} @@ -14853,6 +14972,8 @@ packages: escape-html: 1.0.3 parseurl: 1.3.3 send: 0.16.2 + transitivePeerDependencies: + - supports-color dev: true /serve-static/1.15.0: @@ -14863,6 +14984,8 @@ packages: escape-html: 1.0.3 parseurl: 1.3.3 send: 0.18.0 + transitivePeerDependencies: + - supports-color /set-blocking/2.0.0: resolution: {integrity: sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==} @@ -15029,6 +15152,8 @@ packages: source-map: 0.5.7 source-map-resolve: 0.5.3 use: 3.1.1 + transitivePeerDependencies: + - supports-color dev: true /sort-keys-length/1.0.1: @@ -15562,6 +15687,10 @@ packages: timer2: 1.0.0 uuidv4: 3.0.1 ws: 6.2.0 + transitivePeerDependencies: + - bufferutil + - supports-color + - utf-8-validate dev: true /tailwindcss/3.1.3: @@ -15710,6 +15839,8 @@ packages: terser: 5.14.1 webpack: 4.46.0 webpack-sources: 1.4.3 + transitivePeerDependencies: + - bluebird dev: true /terser-webpack-plugin/5.3.3_webpack@5.73.0: @@ -16796,6 +16927,15 @@ packages: react: 18.1.0 dev: false + /use-debounce/8.0.1_react@18.1.0: + resolution: {integrity: sha512-6tGAFJKJ0qCalecaV7/gm/M6n238nmitNppvR89ff1yfwSFjwFKR7IQZzIZf1KZRQhqNireBzytzU6jgb29oVg==} + engines: {node: 
'>= 10.0.0'} + peerDependencies: + react: '>=16.8.0' + dependencies: + react: 18.1.0 + dev: false + /use-isomorphic-layout-effect/1.1.2_7cpxmzzodpxnolj5zcc5cr63ji: resolution: {integrity: sha512-49L8yCO3iGT/ZF9QttjwLF/ZD9Iwto5LnH5LmEdk/6cFmXddqi2ulF0edxTwjj+7mqvpVVGQWvbXZdn32wRSHA==} peerDependencies: @@ -17102,6 +17242,8 @@ packages: requiresBuild: true dependencies: chokidar: 2.1.8 + transitivePeerDependencies: + - supports-color dev: true optional: true @@ -17113,6 +17255,8 @@ packages: optionalDependencies: chokidar: 3.5.3 watchpack-chokidar2: 2.0.1 + transitivePeerDependencies: + - supports-color dev: true /watchpack/2.4.0: @@ -17207,6 +17351,8 @@ packages: resolution: {integrity: sha512-kDUmfm3BZrei0y+1NTHJInejzxfhtU8eDj2M7OKb2IWrPFAeO1SOH2KuQ68MSZu9IGEHcxbkKKR1v18FrUSOmA==} dependencies: debug: 3.2.7 + transitivePeerDependencies: + - supports-color dev: true /webpack-virtual-modules/0.4.3: @@ -17249,6 +17395,8 @@ packages: terser-webpack-plugin: 1.4.5_webpack@4.46.0 watchpack: 1.7.5 webpack-sources: 1.4.3 + transitivePeerDependencies: + - supports-color dev: true /webpack/5.73.0: @@ -17378,6 +17526,14 @@ packages: /ws/2.3.1: resolution: {integrity: sha512-61a+9LgtYZxTq1hAonhX8Xwpo2riK4IOR/BIVxioFbCfc3QFKmpE4x9dLExfLHKtUfVZigYa36tThVhO57erEw==} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true dependencies: safe-buffer: 5.0.1 ultron: 1.1.1 @@ -17385,6 +17541,14 @@ packages: /ws/6.2.0: resolution: {integrity: sha512-deZYUNlt2O4buFCa3t5bKLf8A7FPP/TVjwOeVNpw818Ma5nk4MLXls2eoEGS39o8119QIYxTrTDoPQ5B/gTD6w==} + peerDependencies: + bufferutil: ^4.0.1 + utf-8-validate: ^5.0.2 + peerDependenciesMeta: + bufferutil: + optional: true + utf-8-validate: + optional: true dependencies: async-limiter: 1.0.1 dev: true @@ -17588,10 +17752,10 @@ packages: resolution: {integrity: sha512-V50KMwwzqJV0NpZIZFwfOD5/lyny3WlSzRiXgA0G7VUnRlqttta1L6UQIHzd6EuBY/cHGfwTIck7w1yH6Q5zUw==} dev: true - github.com/tauri-apps/tauricon/f104e2af7a19e1cdf9ee8212d2d3f6456d3aa00f: - resolution: {tarball: https://codeload.github.com/tauri-apps/tauricon/tar.gz/f104e2af7a19e1cdf9ee8212d2d3f6456d3aa00f} + github.com/tauri-apps/tauricon/5eea916a4a8e13aa41a943beaa7b4b71977e190d: + resolution: {tarball: https://codeload.github.com/tauri-apps/tauricon/tar.gz/5eea916a4a8e13aa41a943beaa7b4b71977e190d} name: '@tauri-apps/tauricon' - version: 1.0.2 + version: 1.0.3 engines: {node: '>= 12.13.0', npm: '>= 6.6.0', yarn: '>= 1.19.1'} hasBin: true dependencies: @@ -17603,7 +17767,7 @@ packages: fs-extra: 10.1.0 glob: 8.0.3 global-agent: 3.0.0 - got: 12.0.4 + got: 12.1.0 imagemin: 8.0.1 imagemin-optipng: 8.0.0 imagemin-zopfli: 7.0.0