diff --git a/.github/scripts/setup-system.ps1 b/.github/scripts/setup-system.ps1
index 9a8a206b5..29efe7a36 100644
--- a/.github/scripts/setup-system.ps1
+++ b/.github/scripts/setup-system.ps1
@@ -1,10 +1,181 @@
-Write-Host "This script is currently being used by CI and will need some more work before anyone can use it like the 'setup-system.sh' script for macOS and Linux!"
+# Parse the -ci switch so the script knows whether it is running in CI
+param(
+ [Parameter()]
+ [Switch]$ci
+)
-$VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
-Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
-Invoke-WebRequest "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-full-shared.7z" -OutFile ffmpeg-release-full-shared.7z
-7z x ffmpeg-release-full-shared.7z
-mkdir ffmpeg
-mv ffmpeg-*/* ffmpeg/
-Add-Content $env:GITHUB_ENV "FFMPEG_DIR=${pwd}\ffmpeg`n"
-Add-Content $env:GITHUB_PATH "${pwd}\ffmpeg\bin`n"
\ No newline at end of file
+# Get temp folder
+$temp = [System.IO.Path]::GetTempPath()
+
+# Get current running dir
+$currentLocation = $((Get-Location).path)
+
+# Check to see if a command exists (eg if an app is installed)
+Function CheckCommand {
+
+ Param ($command)
+
+ $oldPreference = $ErrorActionPreference
+
+ $ErrorActionPreference = 'stop'
+
+ try { if (Get-Command $command) { RETURN $true } }
+
+ Catch { RETURN $false }
+
+ Finally { $ErrorActionPreference = $oldPreference }
+
+}
+
+Write-Host "Spacedrive Development Environment Setup" -ForegroundColor Magenta
+Write-Host @"
+
+To set up your machine for Spacedrive development, this script will do the following:
+
+1) Check for Rust and Cargo
+
+2) Install pnpm (if not installed)
+
+3) Install the latest version of Node.js using pnpm
+
+4) Install LLVM (required to build ffmpeg-rust)
+
+5) Download ffmpeg and set it as an environment variable
+
+"@
+
+Write-Host "Checking for Rust and Cargo..." -ForegroundColor Yellow
+Start-Sleep -Milliseconds 150
+
+$cargoCheck = CheckCommand cargo
+
+if ($cargoCheck -eq $false) {
+ Write-Host @"
+Cargo is not installed.
+
+To use Spacedrive on Windows, Cargo needs to be installed.
+The Visual Studio C++ Build tools are also required.
+Instructions can be found here:
+
+https://tauri.app/v1/guides/getting-started/prerequisites/#setting-up-windows
+
+Once you have installed Cargo, re-run this script.
+
+"@
+ Exit
+}
+else {
+ Write-Host "Cargo is installed."
+}
+
+Write-Host
+Write-Host "Checking for pnpm..." -ForegroundColor Yellow
+Start-Sleep -Milliseconds 150
+
+$pnpmCheck = CheckCommand pnpm
+if ($pnpmCheck -eq $false) {
+
+ Write-Host "pnpm is not installed. Installing now."
+ Write-Host "Running the pnpm installer..."
+
+ #pnpm installer taken from https://pnpm.io
+ Invoke-WebRequest https://get.pnpm.io/install.ps1 -useb | Invoke-Expression
+
+ # Reset the PATH env variables to make sure pnpm is accessible
+ $env:PNPM_HOME = [System.Environment]::GetEnvironmentVariable("PNPM_HOME", "User")
+ $env:Path = [System.Environment]::ExpandEnvironmentVariables([System.Environment]::GetEnvironmentVariable("Path", "User"))
+
+}
+else {
+ Write-Host "pnpm is installed."
+}
+
+# A GitHub Action takes care of installing node, so this isn't necessary if running in the ci.
+if ($ci -eq $True) {
+ Write-Host
+ Write-Host "Running with Ci, skipping Node install." -ForegroundColor Yellow
+}
+else {
+ Write-Host
+ Write-Host "Using pnpm to install the latest version of Node..." -ForegroundColor Yellow
+ Write-Host "This will set your global Node version to the latest!"
+ Start-Sleep -Milliseconds 150
+
+ # Runs the pnpm command to use the latest version of node, which also installs it
+ Start-Process -Wait -FilePath "pnpm" -ArgumentList "env use --global latest" -PassThru -Verb runAs
+}
+
+
+
+# The CI runner already has LLVM installed, so we just set the env variables.
+if ($ci -eq $True) {
+ Write-Host
+ Write-Host "Running with Ci, skipping LLVM install." -ForegroundColor Yellow
+
+ $VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
+ Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
+
+} else {
+ Write-Host
+ Write-Host "Downloading the LLVM installer..." -ForegroundColor Yellow
+ # Downloads latest installer for LLVM
+ $filenamePattern = "*-win64.exe"
+ $releasesUri = "https://api.github.com/repos/llvm/llvm-project/releases/latest"
+ $downloadUri = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).browser_download_url
+
+ Start-BitsTransfer -Source $downloadUri -Destination "$temp\llvm.exe"
+
+ Write-Host
+ Write-Host "Running the LLVM installer..." -ForegroundColor Yellow
+ Write-Host "Please follow the instructions to install LLVM."
+ Write-Host "Ensure you add LLVM to your PATH."
+
+ Start-Process "$temp\llvm.exe" -Wait
+}
+
+
+
+Write-Host
+Write-Host "Downloading the latest ffmpeg build..." -ForegroundColor Yellow
+
+# Downloads the latest shared build of ffmpeg from GitHub
+$filenamePattern = "*-full_build-shared.zip"
+$releasesUri = "https://api.github.com/repos/GyanD/codexffmpeg/releases/latest"
+$downloadUri = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).browser_download_url
+$filename = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).name
+$remove = ".zip"
+$foldername = $filename.Substring(0, ($filename.Length - $remove.Length))
+
+Start-BitsTransfer -Source $downloadUri -Destination "$temp\ffmpeg.zip"
+
+Write-Host
+Write-Host "Expanding ffmpeg zip..." -ForegroundColor Yellow
+
+Expand-Archive "$temp\ffmpeg.zip" $HOME -ErrorAction SilentlyContinue
+
+Remove-Item "$temp\ffmpeg.zip"
+
+Write-Host
+Write-Host "Setting environment variables..." -ForegroundColor Yellow
+
+if ($ci -eq $True) {
+ # If running in ci, we need to use GITHUB_ENV and GITHUB_PATH instead of the normal PATH env variables, so we set them here
+ Add-Content $env:GITHUB_ENV "FFMPEG_DIR=$HOME\$foldername`n"
+ Add-Content $env:GITHUB_PATH "$HOME\$foldername\bin`n"
+}
+else {
+ # Sets environment variable for ffmpeg
+ [System.Environment]::SetEnvironmentVariable('FFMPEG_DIR', "$HOME\$foldername", [System.EnvironmentVariableTarget]::User)
+}
+
+Write-Host
+Write-Host "Copying Required .dll files..." -ForegroundColor Yellow
+
+# Create target\debug folder, continue if already exists
+New-Item -Path $currentLocation\target\debug -ItemType Directory -ErrorAction SilentlyContinue
+
+# Copies all .dll required for rust-ffmpeg to target\debug folder
+Get-ChildItem "$HOME\$foldername\bin" -recurse -filter *.dll | Copy-Item -Destination "$currentLocation\target\debug"
+
+Write-Host
+Write-Host "Your machine has been setup for Spacedrive development!"
diff --git a/.github/scripts/setup-system.sh b/.github/scripts/setup-system.sh
index 68d0df0ab..a3e557318 100755
--- a/.github/scripts/setup-system.sh
+++ b/.github/scripts/setup-system.sh
@@ -23,8 +23,13 @@ fi
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
if which apt-get &> /dev/null; then
echo "Detected 'apt' based distro!"
+
+ if [[ "$(lsb_release -si)" == "Pop" ]]; then
+ DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
+ else
+ DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
+ fi
DEBIAN_TAURI_DEPS="libwebkit2gtk-4.0-dev build-essential curl wget libssl-dev libgtk-3-dev libappindicator3-dev librsvg2-dev" # Tauri dependencies
- DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
DEBIAN_BINDGEN_DEPS="pkg-config clang" # Bindgen dependencies - it's used by a dependency of Spacedrive
sudo apt-get -y update
diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
index 013354f45..d78d7b382 100644
--- a/.github/workflows/ci.yml
+++ b/.github/workflows/ci.yml
@@ -36,7 +36,7 @@ jobs:
id: pnpm-cache
run: |
echo "::set-output name=pnpm_cache_dir::$(pnpm store path)"
-
+
- uses: actions/cache@v3
name: Setup pnpm cache
with:
@@ -44,7 +44,7 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
-
+
- name: Install pnpm dependencies
run: pnpm --frozen-lockfile i
@@ -81,7 +81,7 @@ jobs:
with:
version: 7
run_install: false
-
+
- name: Install Rust stable
uses: actions-rs/toolchain@v1
with:
@@ -89,7 +89,7 @@ jobs:
profile: minimal
override: true
components: rustfmt, rust-src
-
+
- name: Cache Rust Dependencies
uses: Swatinem/rust-cache@v1
with:
@@ -98,10 +98,10 @@ jobs:
- name: Run 'setup-system.sh' script
if: matrix.platform == 'ubuntu-latest' || matrix.platform == 'macos-latest'
run: ./.github/scripts/setup-system.sh
-
+
- name: Run 'setup-system.ps1' script
if: matrix.platform == 'windows-latest'
- run: ./.github/scripts/setup-system.ps1
+ run: ./.github/scripts/setup-system.ps1 -ci
- name: Get pnpm store directory
id: pnpm-cache
@@ -116,7 +116,7 @@ jobs:
${{ runner.os }}-pnpm-store-
- name: Install pnpm dependencies
run: pnpm --frozen-lockfile i
-
+
- name: Cache Prisma codegen
id: cache-prisma
uses: actions/cache@v3
@@ -127,13 +127,13 @@ jobs:
- name: Generate Prisma client
working-directory: core
if: steps.cache-prisma.outputs.cache-hit != 'true'
- run: cargo run --frozen -p prisma-cli --release -- generate
+ run: cargo run -p prisma-cli --release -- generate
- name: Cargo fetch
run: cargo fetch
- name: Check Core
- run: cargo check --frozen -p sdcore --release
+ run: cargo check -p sdcore --release
- name: Bundle Desktop
run: pnpm desktop tauri build
@@ -141,7 +141,7 @@ jobs:
- name: Build Server
if: matrix.platform == 'ubuntu-latest'
run: |
- cargo build --frozen -p server --release
+ cargo build -p server --release
cp ./target/release/server ./apps/server/server
- name: Determine image name & tag
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 942c66d63..7c7f383f2 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -41,6 +41,8 @@ This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/instal
- `$ cd spacedrive`
- For Linux or MacOS users run: `./.github/scripts/setup-system.sh`
- This will install FFMPEG and any other required dependencies for Spacedrive to build.
+- For Windows users run using PowerShell: `.\.github\scripts\setup-system.ps1`
+ - This will install pnpm, LLVM, FFMPEG and any other required dependencies for Spacedrive to build.
- `$ pnpm i`
- `$ pnpm prep` - Runs all necessary codegen & builds required dependencies.
diff --git a/Cargo.lock b/Cargo.lock
index d0af4fd42..2df75e10d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -51,9 +51,9 @@ dependencies = [
[[package]]
name = "actix-http"
-version = "3.1.0"
+version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd2e9f6794b5826aff6df65e3a0d0127b271d1c03629c774238f3582e903d4e4"
+checksum = "6f9ffb6db08c1c3a1f4aef540f1a63193adc73c4fbd40b75a95fc8c5258f6e51"
dependencies = [
"actix-codec",
"actix-rt",
@@ -195,7 +195,7 @@ dependencies = [
"serde_urlencoded",
"smallvec",
"socket2",
- "time 0.3.9",
+ "time 0.3.11",
"url",
]
@@ -306,9 +306,9 @@ dependencies = [
[[package]]
name = "anyhow"
-version = "1.0.57"
+version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc"
+checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704"
[[package]]
name = "arrayvec"
@@ -419,9 +419,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
-version = "0.3.65"
+version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
dependencies = [
"addr2line",
"cc",
@@ -588,7 +588,7 @@ dependencies = [
"serde",
"serde_bytes",
"serde_json",
- "time 0.3.9",
+ "time 0.3.11",
"uuid 0.8.2",
]
@@ -615,9 +615,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
[[package]]
name = "bytemuck"
-version = "1.9.1"
+version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cdead85bdec19c194affaeeb670c0e41fe23de31459efd1c174d049269cf02cc"
+checksum = "c53dfa917ec274df8ed3c572698f381a24eef2efba9492d797301b72b6db408a"
[[package]]
name = "byteorder"
@@ -642,9 +642,9 @@ dependencies = [
[[package]]
name = "cairo-rs"
-version = "0.15.11"
+version = "0.15.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "62be3562254e90c1c6050a72aa638f6315593e98c5cdaba9017cedbabf0a5dee"
+checksum = "c76ee391b03d35510d9fa917357c7f1855bd9a6659c95a1b392e33f49b3369bc"
dependencies = [
"bitflags",
"cairo-sys-rs",
@@ -882,7 +882,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d4706de1b0fa5b132270cddffa8585166037822e260a944fe161acd137ca05"
dependencies = [
"percent-encoding",
- "time 0.3.9",
+ "time 0.3.11",
"version_check",
]
@@ -964,17 +964,17 @@ dependencies = [
"crossbeam-deque",
"crossbeam-epoch",
"crossbeam-queue 0.3.5",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
name = "crossbeam-channel"
-version = "0.5.4"
+version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53"
+checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c"
dependencies = [
"cfg-if 1.0.0",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
@@ -985,20 +985,20 @@ checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-epoch",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
name = "crossbeam-epoch"
-version = "0.9.8"
+version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c"
+checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d"
dependencies = [
"autocfg",
"cfg-if 1.0.0",
- "crossbeam-utils 0.8.8",
- "lazy_static",
+ "crossbeam-utils 0.8.10",
"memoffset",
+ "once_cell",
"scopeguard",
]
@@ -1020,7 +1020,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
dependencies = [
"cfg-if 1.0.0",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
]
[[package]]
@@ -1036,19 +1036,19 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
-version = "0.8.8"
+version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
+checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83"
dependencies = [
"cfg-if 1.0.0",
- "lazy_static",
+ "once_cell",
]
[[package]]
name = "crypto-common"
-version = "0.1.3"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
+checksum = "2ccfd8c0ee4cce11e45b3fd6f9d5e69e0cc62912aa6a0cb1bf4617b0eba5a12f"
dependencies = [
"generic-array 0.14.5",
"typenum",
@@ -1186,9 +1186,9 @@ dependencies = [
[[package]]
name = "dbus"
-version = "0.9.5"
+version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "de0a745c25b32caa56b82a3950f5fec7893a960f4c10ca3b02060b0c38d8c2ce"
+checksum = "6f8bcdd56d2e5c4ed26a529c5a9029f5db8290d433497506f958eae3be148eb6"
dependencies = [
"libc",
"libdbus-sys",
@@ -1377,9 +1377,9 @@ dependencies = [
[[package]]
name = "either"
-version = "1.6.1"
+version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
+checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
[[package]]
name = "embed-resource"
@@ -1598,14 +1598,14 @@ dependencies = [
[[package]]
name = "filetime"
-version = "0.2.16"
+version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c"
+checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c"
dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.2.13",
- "winapi",
+ "windows-sys",
]
[[package]]
@@ -2010,9 +2010,9 @@ dependencies = [
[[package]]
name = "gif"
-version = "0.11.3"
+version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b"
+checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06"
dependencies = [
"color_quant",
"weezl",
@@ -2026,9 +2026,9 @@ checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
[[package]]
name = "gio"
-version = "0.15.11"
+version = "0.15.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0f132be35e05d9662b9fa0fee3f349c6621f7782e0105917f4cc73c1bf47eceb"
+checksum = "68fdbc90312d462781a395f7a16d96a2b379bb6ef8cd6310a2df272771c4283b"
dependencies = [
"bitflags",
"futures-channel",
@@ -2056,9 +2056,9 @@ dependencies = [
[[package]]
name = "glib"
-version = "0.15.11"
+version = "0.15.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd124026a2fa8c33a3d17a3fe59c103f2d9fa5bd92c19e029e037736729abeab"
+checksum = "edb0306fbad0ab5428b0ca674a23893db909a98582969c9b537be4ced78c505d"
dependencies = [
"bitflags",
"futures-channel",
@@ -2228,13 +2228,19 @@ dependencies = [
"ahash",
]
+[[package]]
+name = "hashbrown"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022"
+
[[package]]
name = "hashlink"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf"
dependencies = [
- "hashbrown",
+ "hashbrown 0.11.2",
]
[[package]]
@@ -2359,9 +2365,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
-version = "0.14.19"
+version = "0.14.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "42dc3c131584288d375f2d07f822b0cb012d8c6fb899a5b9fdb3cb7eb9b6004f"
+checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac"
dependencies = [
"bytes",
"futures-channel",
@@ -2427,7 +2433,7 @@ version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d"
dependencies = [
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
"globset",
"lazy_static",
"log",
@@ -2492,12 +2498,12 @@ dependencies = [
[[package]]
name = "indexmap"
-version = "1.8.2"
+version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a"
+checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
dependencies = [
"autocfg",
- "hashbrown",
+ "hashbrown 0.12.2",
"serde",
]
@@ -2866,10 +2872,19 @@ dependencies = [
]
[[package]]
-name = "linked-hash-map"
-version = "0.5.4"
+name = "line-wrap"
+version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
+checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9"
+dependencies = [
+ "safemem",
+]
+
+[[package]]
+name = "linked-hash-map"
+version = "0.5.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "local-channel"
@@ -2934,11 +2949,11 @@ dependencies = [
[[package]]
name = "lru"
-version = "0.7.6"
+version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8015d95cb7b2ddd3c0d32ca38283ceb1eea09b4713ee380bceb942d85a244228"
+checksum = "c84e6fe5655adc6ce00787cf7dcaf8dc4f998a0565d23eafc207a8b08ca3349a"
dependencies = [
- "hashbrown",
+ "hashbrown 0.11.2",
]
[[package]]
@@ -2958,15 +2973,15 @@ checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "mac-notification-sys"
-version = "0.5.2"
+version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "042f74a606175d72ca483e14e0873fe0f6c003f7af45865b17b16fdaface7203"
+checksum = "47a4acb83c904844ca12aafeac6fff6f781cf9e220a985c1db94fd94123993aa"
dependencies = [
"cc",
"dirs-next",
"objc-foundation",
"objc_id",
- "time 0.3.9",
+ "time 0.3.11",
]
[[package]]
@@ -3093,9 +3108,9 @@ dependencies = [
[[package]]
name = "mio"
-version = "0.8.3"
+version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799"
+checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
dependencies = [
"libc",
"log",
@@ -3284,7 +3299,7 @@ dependencies = [
"smallvec",
"subprocess",
"thiserror",
- "time 0.3.9",
+ "time 0.3.11",
"uuid 0.8.2",
]
@@ -3439,9 +3454,9 @@ dependencies = [
[[package]]
name = "num-rational"
-version = "0.4.0"
+version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a"
+checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0"
dependencies = [
"autocfg",
"num-integer",
@@ -3538,18 +3553,18 @@ dependencies = [
[[package]]
name = "object"
-version = "0.28.4"
+version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
+checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
-version = "1.12.0"
+version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
+checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
[[package]]
name = "opaque-debug"
@@ -3569,9 +3584,9 @@ dependencies = [
[[package]]
name = "openssl"
-version = "0.10.40"
+version = "0.10.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e"
+checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0"
dependencies = [
"bitflags",
"cfg-if 1.0.0",
@@ -3601,9 +3616,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
-version = "0.9.74"
+version = "0.9.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "835363342df5fba8354c5b453325b110ffd54044e588c539cf2f20a8014e4cb1"
+checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f"
dependencies = [
"autocfg",
"cc",
@@ -3980,18 +3995,18 @@ dependencies = [
[[package]]
name = "pin-project"
-version = "1.0.10"
+version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e"
+checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
-version = "1.0.10"
+version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb"
+checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74"
dependencies = [
"proc-macro2",
"quote",
@@ -4016,6 +4031,20 @@ version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
+[[package]]
+name = "plist"
+version = "1.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225"
+dependencies = [
+ "base64 0.13.0",
+ "indexmap",
+ "line-wrap",
+ "serde",
+ "time 0.3.11",
+ "xml-rs",
+]
+
[[package]]
name = "png"
version = "0.11.0"
@@ -4247,9 +4276,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
-version = "1.0.39"
+version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
+checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
dependencies = [
"unicode-ident",
]
@@ -4361,9 +4390,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
-version = "1.0.18"
+version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
+checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
dependencies = [
"proc-macro2",
]
@@ -4484,7 +4513,7 @@ checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.10",
"num_cpus",
]
@@ -4516,9 +4545,9 @@ dependencies = [
[[package]]
name = "regex"
-version = "1.5.6"
+version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1"
+checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
dependencies = [
"aho-corasick",
"memchr",
@@ -4536,9 +4565,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
-version = "0.6.26"
+version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
+checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
[[package]]
name = "remove_dir_all"
@@ -4736,7 +4765,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
- "semver 1.0.10",
+ "semver 1.0.12",
]
[[package]]
@@ -4772,9 +4801,9 @@ dependencies = [
[[package]]
name = "rustversion"
-version = "1.0.6"
+version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
+checksum = "a0a5f7c728f5d284929a1cccb5bc19884422bfe6ef4d6c409da2c41838983fcf"
[[package]]
name = "ryu"
@@ -4782,6 +4811,12 @@ version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
+[[package]]
+name = "safemem"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
+
[[package]]
name = "same-file"
version = "1.0.6"
@@ -4862,7 +4897,6 @@ dependencies = [
"image",
"include_dir",
"int-enum",
- "lazy_static",
"log",
"prisma-client-rust",
"ring 0.17.0-alpha.11",
@@ -4872,7 +4906,7 @@ dependencies = [
"thiserror",
"tokio",
"ts-rs",
- "uuid 1.1.2",
+ "uuid 0.8.2",
"walkdir",
"webp",
]
@@ -4940,9 +4974,9 @@ dependencies = [
[[package]]
name = "semver"
-version = "1.0.10"
+version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c"
+checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
dependencies = [
"serde",
]
@@ -4964,9 +4998,9 @@ dependencies = [
[[package]]
name = "serde"
-version = "1.0.137"
+version = "1.0.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
+checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6"
dependencies = [
"serde_derive",
]
@@ -4982,9 +5016,9 @@ dependencies = [
[[package]]
name = "serde_derive"
-version = "1.0.137"
+version = "1.0.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
+checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb"
dependencies = [
"proc-macro2",
"quote",
@@ -4993,9 +5027,9 @@ dependencies = [
[[package]]
name = "serde_json"
-version = "1.0.81"
+version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
+checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
dependencies = [
"indexmap",
"itoa 1.0.2",
@@ -5204,9 +5238,9 @@ checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32"
[[package]]
name = "smallvec"
-version = "1.8.0"
+version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
+checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "socket2"
@@ -5442,9 +5476,9 @@ dependencies = [
[[package]]
name = "syn"
-version = "1.0.96"
+version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
+checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
dependencies = [
"proc-macro2",
"quote",
@@ -5500,9 +5534,9 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
[[package]]
name = "tao"
-version = "0.11.2"
+version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3bfe4c782f0543f667ee3b732d026b2f1c64af39cd52e726dec1ea1f2d8f6b80"
+checksum = "a71c32c2fa7bba46b01becf9cf470f6a781573af7e376c5e317a313ecce27545"
dependencies = [
"bitflags",
"cairo-rs",
@@ -5537,7 +5571,6 @@ dependencies = [
"raw-window-handle",
"scopeguard",
"serde",
- "tao-core-video-sys",
"unicode-segmentation",
"uuid 0.8.2",
"windows 0.37.0",
@@ -5545,18 +5578,6 @@ dependencies = [
"x11-dl",
]
-[[package]]
-name = "tao-core-video-sys"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "271450eb289cb4d8d0720c6ce70c72c8c858c93dd61fc625881616752e6b98f6"
-dependencies = [
- "cfg-if 1.0.0",
- "core-foundation-sys",
- "libc",
- "objc",
-]
-
[[package]]
name = "tap"
version = "1.0.1"
@@ -5576,9 +5597,9 @@ dependencies = [
[[package]]
name = "tauri"
-version = "1.0.0"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8e1ebb60bb8f246d5351ff9b7728fdfa7a6eba72baa722ab6021d553981caba1"
+checksum = "d61fc211e0bd2c04c0aecd202d2cd72dd797a89da02989a39e1b9691462386d6"
dependencies = [
"anyhow",
"attohttpc",
@@ -5605,7 +5626,7 @@ dependencies = [
"raw-window-handle",
"regex",
"rfd",
- "semver 1.0.10",
+ "semver 1.0.12",
"serde",
"serde_json",
"serde_repr",
@@ -5629,14 +5650,15 @@ dependencies = [
[[package]]
name = "tauri-build"
-version = "1.0.0"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e7b26eb3523e962b90012fedbfb744ca153d9be85e7981e00737e106d5323941"
+checksum = "2f2b32e551ec810ba4ab2ad735de5e3576e54bf0322ab0f4b7ce41244bc65ecf"
dependencies = [
"anyhow",
"cargo_toml",
"heck 0.4.0",
- "semver 1.0.10",
+ "json-patch",
+ "semver 1.0.12",
"serde_json",
"tauri-utils",
"winres",
@@ -5644,32 +5666,35 @@ dependencies = [
[[package]]
name = "tauri-codegen"
-version = "1.0.0"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9468c5189188c820ef605dfe4937c768cb2918e9460c8093dc4ee2cbd717b262"
+checksum = "f6f1f7928dd040fc03c94207adfad506c0cf5b152982fd1dc0a621f7fd777e22"
dependencies = [
"base64 0.13.0",
"brotli",
"ico",
+ "json-patch",
+ "plist",
"png 0.17.5",
"proc-macro2",
"quote",
"regex",
- "semver 1.0.10",
+ "semver 1.0.12",
"serde",
"serde_json",
"sha2",
"tauri-utils",
"thiserror",
+ "time 0.3.11",
"uuid 1.1.2",
"walkdir",
]
[[package]]
name = "tauri-macros"
-version = "1.0.0"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "40e3ffddd7a274fc7baaa260888c971a0d95d2ef403aa16600c878b8b1c00ffe"
+checksum = "e50b9f52871c088857360319a37472d59f4644f1ed004489599d62831a1b6996"
dependencies = [
"heck 0.4.0",
"proc-macro2",
@@ -5681,14 +5706,15 @@ dependencies = [
[[package]]
name = "tauri-runtime"
-version = "0.9.0"
+version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fb7dc4db360bb40584187b6cb7834da736ce4ef2ab0914e2be98014444fa9920"
+checksum = "4e4cff3b4d9469727fa2107c4b3d2eda110df1ba45103fb420178e536362fae4"
dependencies = [
"gtk",
"http",
"http-range",
"infer",
+ "raw-window-handle",
"serde",
"serde_json",
"tauri-utils",
@@ -5700,14 +5726,15 @@ dependencies = [
[[package]]
name = "tauri-runtime-wry"
-version = "0.9.0"
+version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c876fb3a6e7c6fe2ac466b2a6ecd83658528844b4df0914558a9bc1501b31cf3"
+checksum = "3fa8c4edaf01d8b556e7172c844b1b4dd3399adcd1a606bd520fc3e65f698546"
dependencies = [
"cocoa",
"gtk",
"percent-encoding",
"rand 0.8.5",
+ "raw-window-handle",
"tauri-runtime",
"tauri-utils",
"uuid 1.1.2",
@@ -5719,9 +5746,9 @@ dependencies = [
[[package]]
name = "tauri-utils"
-version = "1.0.0"
+version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "727145cb55b8897fa9f2bcea4fad31dc39394703d037c9669b40f2d1c0c2d7f3"
+checksum = "12ff4b68d9faeb57c9c727bf58c9c9768d2b67d8e84e62ce6146e7859a2e9c6b"
dependencies = [
"brotli",
"ctor",
@@ -5734,13 +5761,14 @@ dependencies = [
"phf 0.10.1",
"proc-macro2",
"quote",
- "semver 1.0.10",
+ "semver 1.0.12",
"serde",
"serde_json",
"serde_with",
"thiserror",
"url",
"walkdir",
+ "windows 0.37.0",
]
[[package]]
@@ -5877,9 +5905,9 @@ dependencies = [
[[package]]
name = "time"
-version = "0.3.9"
+version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
+checksum = "72c91f41dcb2f096c05f0873d667dceec1087ce5bcf984ec8ffb19acddbb3217"
dependencies = [
"itoa 1.0.2",
"libc",
@@ -6031,9 +6059,9 @@ dependencies = [
[[package]]
name = "tower-service"
-version = "0.3.1"
+version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6"
+checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
[[package]]
name = "tracing"
@@ -6050,9 +6078,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
-version = "0.1.21"
+version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c"
+checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
dependencies = [
"proc-macro2",
"quote",
@@ -6061,9 +6089,9 @@ dependencies = [
[[package]]
name = "tracing-core"
-version = "0.1.27"
+version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921"
+checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7"
dependencies = [
"once_cell",
"valuable",
@@ -6105,13 +6133,13 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
-version = "0.3.11"
+version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596"
+checksum = "3a713421342a5a666b7577783721d3117f1b69a393df803ee17bb73b1e122a59"
dependencies = [
"ansi_term",
- "lazy_static",
"matchers",
+ "once_cell",
"regex",
"sharded-slab",
"smallvec",
@@ -6190,6 +6218,7 @@ dependencies = [
"chrono",
"thiserror",
"ts-rs-macros",
+ "uuid 0.8.2",
]
[[package]]
@@ -6235,9 +6264,9 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
[[package]]
name = "ucd-trie"
-version = "0.1.3"
+version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
+checksum = "89570599c4fe5585de2b388aab47e99f7fa4e9238a1399f707a02e356058141c"
[[package]]
name = "unicode-bidi"
@@ -6253,9 +6282,9 @@ checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
[[package]]
name = "unicode-normalization"
-version = "0.1.19"
+version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"
+checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6"
dependencies = [
"tinyvec",
]
@@ -6341,19 +6370,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f"
dependencies = [
"getrandom 0.2.7",
- "serde",
- "uuid-macro-internal",
-]
-
-[[package]]
-name = "uuid-macro-internal"
-version = "1.1.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "548f7181a5990efa50237abb7ebca410828b57a8955993334679f8b50b35c97d"
-dependencies = [
- "proc-macro2",
- "quote",
- "syn",
]
[[package]]
@@ -6602,9 +6618,9 @@ dependencies = [
[[package]]
name = "webpki-roots"
-version = "0.22.3"
+version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf"
+checksum = "f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf"
dependencies = [
"webpki",
]
@@ -6648,9 +6664,9 @@ dependencies = [
[[package]]
name = "weezl"
-version = "0.1.6"
+version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c97e489d8f836838d497091de568cf16b117486d529ec5579233521065bd5e4"
+checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb"
[[package]]
name = "widestring"
@@ -6660,9 +6676,9 @@ checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983"
[[package]]
name = "wildmatch"
-version = "2.1.0"
+version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d6c48bd20df7e4ced539c12f570f937c6b4884928a87fee70a479d72f031d4e0"
+checksum = "ee583bdc5ff1cf9db20e9db5bb3ff4c3089a8f6b8b31aff265c9aba85812db86"
[[package]]
name = "winapi"
@@ -6924,9 +6940,9 @@ dependencies = [
[[package]]
name = "wry"
-version = "0.18.3"
+version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "26b1ba327c7dd4292f46bf8e6ba8e6ec2db4443b2973c9d304a359d95e0aa856"
+checksum = "ce19dddbd3ce01dc8f14eb6d4c8f914123bf8379aaa838f6da4f981ff7104a3f"
dependencies = [
"block",
"cocoa",
diff --git a/LICENSE b/LICENSE
index a6509367e..b3ce5ea17 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,25 +1,23 @@
Copyright (c) 2021-present Spacedrive Technology Inc.
+
+ GNU AFFERO GENERAL PUBLIC LICENSE
+ Version 3, 19 November 2007
- GNU GENERAL PUBLIC LICENSE
- Version 3, 29 June 2007
-
- Copyright (C) 2007 Free Software Foundation, Inc.
+ Copyright (C) 2007 Free Software Foundation, Inc.
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
- The GNU General Public License is a free, copyleft license for
-software and other kinds of works.
+ The GNU Affero General Public License is a free, copyleft license for
+software and other kinds of works, specifically designed to ensure
+cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
-the GNU General Public License is intended to guarantee your freedom to
+our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
-software for all its users. We, the Free Software Foundation, use the
-GNU General Public License for most of our software; it applies also to
-any other work released this way by its authors. You can apply it to
-your programs, too.
+software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
@@ -28,44 +26,34 @@ them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
- To protect your rights, we need to prevent others from denying you
-these rights or asking you to surrender the rights. Therefore, you have
-certain responsibilities if you distribute copies of the software, or if
-you modify it: responsibilities to respect the freedom of others.
+ Developers that use our General Public Licenses protect your rights
+with two steps: (1) assert copyright on the software, and (2) offer
+you this License which gives you legal permission to copy, distribute
+and/or modify the software.
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must pass on to the recipients the same
-freedoms that you received. You must make sure that they, too, receive
-or can get the source code. And you must show them these terms so they
-know their rights.
+ A secondary benefit of defending all users' freedom is that
+improvements made in alternate versions of the program, if they
+receive widespread use, become available for other developers to
+incorporate. Many developers of free software are heartened and
+encouraged by the resulting cooperation. However, in the case of
+software used on network servers, this result may fail to come about.
+The GNU General Public License permits making a modified version and
+letting the public access it on a server without ever releasing its
+source code to the public.
- Developers that use the GNU GPL protect your rights with two steps:
-(1) assert copyright on the software, and (2) offer you this License
-giving you legal permission to copy, distribute and/or modify it.
+ The GNU Affero General Public License is designed specifically to
+ensure that, in such cases, the modified source code becomes available
+to the community. It requires the operator of a network server to
+provide the source code of the modified version running there to the
+users of that server. Therefore, public use of a modified version, on
+a publicly accessible server, gives the public access to the source
+code of the modified version.
- For the developers' and authors' protection, the GPL clearly explains
-that there is no warranty for this free software. For both users' and
-authors' sake, the GPL requires that modified versions be marked as
-changed, so that their problems will not be attributed erroneously to
-authors of previous versions.
-
- Some devices are designed to deny users access to install or run
-modified versions of the software inside them, although the manufacturer
-can do so. This is fundamentally incompatible with the aim of
-protecting users' freedom to change the software. The systematic
-pattern of such abuse occurs in the area of products for individuals to
-use, which is precisely where it is most unacceptable. Therefore, we
-have designed this version of the GPL to prohibit the practice for those
-products. If such problems arise substantially in other domains, we
-stand ready to extend this provision to those domains in future versions
-of the GPL, as needed to protect the freedom of users.
-
- Finally, every program is threatened constantly by software patents.
-States should not allow patents to restrict development and use of
-software on general-purpose computers, but in those that do, we wish to
-avoid the special danger that patents applied to a free program could
-make it effectively proprietary. To prevent this, the GPL assures that
-patents cannot be used to render the program non-free.
+ An older license, called the Affero General Public License and
+published by Affero, was designed to accomplish similar goals. This is
+a different license, not a version of the Affero GPL, but Affero has
+released a new version of the Affero GPL which permits relicensing under
+this license.
The precise terms and conditions for copying, distribution and
modification follow.
@@ -74,7 +62,7 @@ modification follow.
0. Definitions.
- "This License" refers to version 3 of the GNU General Public License.
+ "This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
@@ -551,35 +539,45 @@ to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
- 13. Use with the GNU Affero General Public License.
+ 13. Remote Network Interaction; Use with the GNU General Public License.
+
+ Notwithstanding any other provision of this License, if you modify the
+Program, your modified version must prominently offer all users
+interacting with it remotely through a computer network (if your version
+supports such interaction) an opportunity to receive the Corresponding
+Source of your version by providing access to the Corresponding Source
+from a network server at no charge, through some standard or customary
+means of facilitating copying of software. This Corresponding Source
+shall include the Corresponding Source for any work covered by version 3
+of the GNU General Public License that is incorporated pursuant to the
+following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
-under version 3 of the GNU Affero General Public License into a single
+under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
-but the special requirements of the GNU Affero General Public License,
-section 13, concerning interaction through a network will apply to the
-combination as such.
+but the work with which it is combined will remain governed by version
+3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
-the GNU General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
+the GNU Affero General Public License from time to time. Such new versions
+will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
-Program specifies that a certain numbered version of the GNU General
+Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
-GNU General Public License, you may choose any version ever published
+GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
-versions of the GNU General Public License can be used, that proxy's
+versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
@@ -637,40 +635,29 @@ the "copyright" line and a pointer to where the full notice is found.
Copyright (C)
This program is free software: you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation, either version 3 of the License, or
+ it under the terms of the GNU Affero General Public License as published
+ by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
+ GNU Affero General Public License for more details.
- You should have received a copy of the GNU General Public License
- along with this program. If not, see .
+ You should have received a copy of the GNU Affero General Public License
+ along with this program. If not, see .
Also add information on how to contact you by electronic and paper mail.
- If the program does terminal interaction, make it output a short
-notice like this when it starts in an interactive mode:
-
- Copyright (C)
- This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, your program's commands
-might be different; for a GUI interface, you would use an "about box".
+ If your software can interact with users remotely through a computer
+network, you should also make sure that it provides a way for users to
+get its source. For example, if your program is a web application, its
+interface could display a "Source" link that leads users to an archive
+of the code. There are many ways you could offer source, and different
+solutions will be better for different programs; see section 13 for the
+specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
-For more information on this, and how to apply and follow the GNU GPL, see
-.
-
- The GNU General Public License does not permit incorporating your program
-into proprietary programs. If your program is a subroutine library, you
-may consider it more useful to permit linking proprietary applications with
-the library. If this is what you want to do, use the GNU Lesser General
-Public License instead of this License. But first, please read
-.
+For more information on this, and how to apply and follow the GNU AGPL, see
+.
\ No newline at end of file
diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs
index 3dcad488c..3c6210e46 100644
--- a/apps/desktop/src-tauri/src/main.rs
+++ b/apps/desktop/src-tauri/src/main.rs
@@ -1,16 +1,15 @@
use std::time::{Duration, Instant};
use dotenvy::dotenv;
-use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node};
-use tauri::api::path;
-use tauri::Manager;
+use sdcore::{ClientCommand, ClientQuery, CoreEvent, CoreResponse, Node, NodeController};
+use tauri::{api::path, Manager};
#[cfg(target_os = "macos")]
mod macos;
mod menu;
#[tauri::command(async)]
async fn client_query_transport(
- core: tauri::State<'_, CoreController>,
+ core: tauri::State<'_, NodeController>,
data: ClientQuery,
) -> Result {
match core.query(data).await {
@@ -24,7 +23,7 @@ async fn client_query_transport(
#[tauri::command(async)]
async fn client_command_transport(
- core: tauri::State<'_, CoreController>,
+ core: tauri::State<'_, NodeController>,
data: ClientCommand,
) -> Result {
match core.command(data).await {
@@ -48,17 +47,11 @@ async fn main() {
dotenv().ok();
env_logger::init();
- let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
+ let mut data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
+ data_dir = data_dir.join("spacedrive");
// create an instance of the core
- let (mut node, mut event_receiver) = Node::new(data_dir).await;
- // run startup tasks
- node.initializer().await;
- // extract the node controller
- let controller = node.get_controller();
- // throw the node into a dedicated thread
- tokio::spawn(async move {
- node.start().await;
- });
+ let (controller, mut event_receiver, node) = Node::new(data_dir).await;
+ tokio::spawn(node.start());
// create tauri app
tauri::Builder::default()
// pass controller to the tauri state manager
diff --git a/apps/desktop/src-tauri/tauri.linux.conf.json b/apps/desktop/src-tauri/tauri.linux.conf.json
index 51b5a339d..5fc781e7f 100644
--- a/apps/desktop/src-tauri/tauri.linux.conf.json
+++ b/apps/desktop/src-tauri/tauri.linux.conf.json
@@ -15,7 +15,13 @@
"active": true,
"targets": "all",
"identifier": "com.spacedrive.desktop",
- "icon": ["icons/icon.icns"],
+ "icon": [
+ "icons/32x32.png",
+ "icons/128x128.png",
+ "icons/128x128@2x.png",
+ "icons/icon.icns",
+ "icons/icon.ico"
+ ],
"resources": [],
"externalBin": [],
"copyright": "Spacedrive Technology Inc.",
diff --git a/apps/landing/src/pages/team.page.tsx b/apps/landing/src/pages/team.page.tsx
index 08294e26e..301816da0 100644
--- a/apps/landing/src/pages/team.page.tsx
+++ b/apps/landing/src/pages/team.page.tsx
@@ -197,7 +197,7 @@ function Page() {
style={{ transform: 'scale(2)' }}
/>
-
+
We believe file management should be universal.
diff --git a/apps/server/k8s/infrastructure.yaml b/apps/server/k8s/infrastructure.yaml
deleted file mode 100644
index a5e44b4ee..000000000
--- a/apps/server/k8s/infrastructure.yaml
+++ /dev/null
@@ -1,42 +0,0 @@
-# Infrastructure setups up the Kubernetes cluster for Spacedrive!
-#
-# To get the service account token use the following:
-# ```bash
-# TOKENNAME=`kubectl -n spacedrive get sa/spacedrive-ci -o jsonpath='{.secrets[0].name}'`
-# kubectl -n spacedrive get secret $TOKENNAME -o jsonpath='{.data.token}' | base64 -d
-# ```
-
-apiVersion: v1
-kind: Namespace
-metadata:
- name: spacedrive
----
-apiVersion: v1
-kind: ServiceAccount
-metadata:
- name: spacedrive-ci
- namespace: spacedrive
----
-apiVersion: rbac.authorization.k8s.io/v1
-kind: Role
-metadata:
- name: spacedrive-ns-full
- namespace: spacedrive
-rules:
- - apiGroups: ['apps']
- resources: ['deployments']
- verbs: ['get', 'patch']
----
-apiVersion: rbac.authorization.k8s.io/v1
-kind: RoleBinding
-metadata:
- name: spacedrive-ci-rb
- namespace: spacedrive
-subjects:
- - kind: ServiceAccount
- name: spacedrive-ci
- namespace: spacedrive
-roleRef:
- apiGroup: rbac.authorization.k8s.io
- kind: Role
- name: spacedrive-ns-full
diff --git a/apps/server/k8s/sdserver.yaml b/apps/server/k8s/sdserver.yaml
deleted file mode 100644
index 00f02c1c1..000000000
--- a/apps/server/k8s/sdserver.yaml
+++ /dev/null
@@ -1,118 +0,0 @@
-# This will deploy the Spacedrive Server container to the `spacedrive`` namespace on Kubernetes.
-
-apiVersion: networking.k8s.io/v1
-kind: Ingress
-metadata:
- name: sdserver-ingress
- namespace: spacedrive
- labels:
- app.kubernetes.io/name: sdserver
- app.kubernetes.io/component: webserver
- annotations:
- traefik.ingress.kubernetes.io/router.tls.certresolver: le
- traefik.ingress.kubernetes.io/router.middlewares: kube-system-antiseo@kubernetescrd
-spec:
- rules:
- - host: spacedrive.otbeaumont.me
- http:
- paths:
- - path: /
- pathType: Prefix
- backend:
- service:
- name: sdserver-service
- port:
- number: 8080
----
-apiVersion: v1
-kind: Service
-metadata:
- name: sdserver-service
- namespace: spacedrive
- labels:
- app.kubernetes.io/name: sdserver
- app.kubernetes.io/component: webserver
-spec:
- ports:
- - port: 8080
- targetPort: 8080
- protocol: TCP
- selector:
- app.kubernetes.io/name: sdserver
- app.kubernetes.io/component: webserver
----
-apiVersion: v1
-kind: PersistentVolumeClaim
-metadata:
- name: sdserver-pvc
- namespace: spacedrive
-spec:
- accessModes:
- - ReadWriteOnce
- storageClassName: local-path
- resources:
- requests:
- storage: 512M
----
-apiVersion: apps/v1
-kind: Deployment
-metadata:
- name: sdserver-deployment
- namespace: spacedrive
- labels:
- app.kubernetes.io/name: sdserver
- app.kubernetes.io/component: webserver
-spec:
- replicas: 1
- selector:
- matchLabels:
- app.kubernetes.io/name: sdserver
- app.kubernetes.io/component: webserver
- template:
- metadata:
- labels:
- app.kubernetes.io/name: sdserver
- app.kubernetes.io/component: webserver
- spec:
- restartPolicy: Always
- # refer to Dockerfile to find securityContext values
- securityContext:
- runAsUser: 101
- runAsGroup: 101
- fsGroup: 101
- containers:
- - name: sdserver
- image: ghcr.io/oscartbeaumont/spacedrive/server:staging
- imagePullPolicy: Always
- ports:
- - containerPort: 8080
- volumeMounts:
- - name: data-volume
- mountPath: /data
- securityContext:
- allowPrivilegeEscalation: false
- resources:
- limits:
- memory: 100Mi
- cpu: 100m
- requests:
- memory: 5Mi
- cpu: 10m
- readinessProbe:
- httpGet:
- path: /health
- port: 8080
- initialDelaySeconds: 10
- failureThreshold: 4
- periodSeconds: 5
- livenessProbe:
- httpGet:
- path: /health
- port: 8080
- initialDelaySeconds: 20
- failureThreshold: 3
- periodSeconds: 10
- volumes:
- - name: data-volume
- persistentVolumeClaim:
- claimName: sdserver-pvc
diff --git a/apps/server/src/main.rs b/apps/server/src/main.rs
index 7e9c4683e..5d7c85331 100644
--- a/apps/server/src/main.rs
+++ b/apps/server/src/main.rs
@@ -1,4 +1,4 @@
-use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node};
+use sdcore::{ClientCommand, ClientQuery, CoreEvent, CoreResponse, Node, NodeController};
use std::{env, path::Path};
use actix::{
@@ -19,7 +19,7 @@ const DATA_DIR_ENV_VAR: &'static str = "DATA_DIR";
/// Define HTTP actor
struct Socket {
_event_receiver: web::Data>,
- core: web::Data,
+ core: web::Data,
}
impl Actor for Socket {
@@ -52,7 +52,15 @@ impl StreamHandler> for Socket {
match msg {
Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
Ok(ws::Message::Text(text)) => {
- let msg: SocketMessage = serde_json::from_str(&text).unwrap();
+ let msg = serde_json::from_str::(&text);
+
+ let msg = match msg {
+ Ok(msg) => msg,
+ Err(err) => {
+ println!("Error parsing message: {}", err);
+ return;
+ },
+ };
let core = self.core.clone();
@@ -133,7 +141,7 @@ async fn ws_handler(
req: HttpRequest,
stream: web::Payload,
event_receiver: web::Data>,
- controller: web::Data,
+ controller: web::Data,
) -> Result {
let resp = ws::start(
Socket {
@@ -178,7 +186,7 @@ async fn main() -> std::io::Result<()> {
async fn setup() -> (
web::Data>,
- web::Data,
+ web::Data,
) {
let data_dir_path = match env::var(DATA_DIR_ENV_VAR) {
Ok(path) => Path::new(&path).to_path_buf(),
@@ -196,15 +204,8 @@ async fn setup() -> (
},
};
- let (mut node, event_receiver) = Node::new(data_dir_path).await;
-
- node.initializer().await;
-
- let controller = node.get_controller();
-
- tokio::spawn(async move {
- node.start().await;
- });
+ let (controller, event_receiver, node) = Node::new(data_dir_path).await;
+ tokio::spawn(node.start());
(web::Data::new(event_receiver), web::Data::new(controller))
}
diff --git a/apps/web/src/App.tsx b/apps/web/src/App.tsx
index 72560e3ff..f53220197 100644
--- a/apps/web/src/App.tsx
+++ b/apps/web/src/App.tsx
@@ -1,20 +1,47 @@
import { BaseTransport } from '@sd/client';
-import { ClientCommand, ClientQuery, CoreEvent } from '@sd/core';
+import { ClientCommand, ClientQuery } from '@sd/core';
import SpacedriveInterface from '@sd/interface';
import React, { useEffect } from 'react';
-const websocket = new WebSocket(import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws');
+const timeouts = [1000, 2000, 5000, 10000]; // In milliseconds
const randomId = () => Math.random().toString(36).slice(2);
// bind state to core via Tauri
class Transport extends BaseTransport {
+ websocket: WebSocket;
requestMap = new Map void>();
constructor() {
super();
+ this.websocket = new WebSocket(
+ import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws'
+ );
+ this.attachEventListeners();
+ }
- websocket.addEventListener('message', (event) => {
+ async reconnect(timeoutIndex = 0) {
+ let timeout =
+ (timeouts[timeoutIndex] ?? timeouts[timeouts.length - 1]) +
+ (Math.floor(Math.random() * 5000 /* 5 Seconds */) + 1);
+
+ setTimeout(() => {
+ let ws = new WebSocket(import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws');
+ new Promise(function (resolve, reject) {
+ ws.addEventListener('open', () => resolve(null));
+ ws.addEventListener('close', reject);
+ })
+ .then(() => {
+ this.websocket = ws;
+ this.attachEventListeners();
+ console.log('Reconnected!');
+ })
+ .catch((err) => this.reconnect(timeoutIndex++));
+ }, timeout);
+ }
+
+ attachEventListeners() {
+ this.websocket.addEventListener('message', (event) => {
if (!event.data) return;
const { id, payload } = JSON.parse(event.data);
@@ -29,8 +56,24 @@ class Transport extends BaseTransport {
}
}
});
+
+ this.websocket.addEventListener('close', () => {
+ console.log('GONE');
+ this.reconnect();
+ });
}
+
async query(query: ClientQuery) {
+ if (websocket.readyState == 0) {
+ let resolve: () => void;
+ const promise = new Promise((res) => {
+ resolve = () => res(undefined);
+ });
+ // @ts-ignore
+ websocket.addEventListener('open', resolve);
+ await promise;
+ }
+
const id = randomId();
let resolve: (data: any) => void;
@@ -41,7 +84,7 @@ class Transport extends BaseTransport {
// @ts-ignore
this.requestMap.set(id, resolve);
- websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
+ this.websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
return await promise;
}
@@ -56,12 +99,14 @@ class Transport extends BaseTransport {
// @ts-ignore
this.requestMap.set(id, resolve);
- websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
+ this.websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
return await promise;
}
}
+const transport = new Transport();
+
function App() {
useEffect(() => {
window.parent.postMessage('spacedrive-hello', '*');
@@ -72,7 +117,7 @@ function App() {
{/* */}
} | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "NodeGetState", data: NodeState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array } | { key: "JobGetHistory", data: Array } | { key: "GetLibraryStatistics", data: Statistics };
\ No newline at end of file
+export type CoreResponse = { key: "Success", data: null } | { key: "Error", data: string } | { key: "NodeGetLibraries", data: Array } | { key: "SysGetVolumes", data: Array } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "NodeGetState", data: NodeState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array } | { key: "JobGetHistory", data: Array } | { key: "GetLibraryStatistics", data: Statistics };
\ No newline at end of file
diff --git a/core/bindings/LibraryCommand.ts b/core/bindings/LibraryCommand.ts
new file mode 100644
index 000000000..713fc8989
--- /dev/null
+++ b/core/bindings/LibraryCommand.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type LibraryCommand = { key: "FileReadMetaData", params: { id: number, } } | { key: "FileSetNote", params: { id: number, note: string | null, } } | { key: "FileDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "LocRescan", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles", params: { id: number, path: string, } };
\ No newline at end of file
diff --git a/core/bindings/LibraryConfig.ts b/core/bindings/LibraryConfig.ts
new file mode 100644
index 000000000..8a371014b
--- /dev/null
+++ b/core/bindings/LibraryConfig.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export interface LibraryConfig { version: string | null, name: string, description: string, }
\ No newline at end of file
diff --git a/core/bindings/LibraryConfigWrapped.ts b/core/bindings/LibraryConfigWrapped.ts
new file mode 100644
index 000000000..ee5b5ccfe
--- /dev/null
+++ b/core/bindings/LibraryConfigWrapped.ts
@@ -0,0 +1,4 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+import type { LibraryConfig } from "./LibraryConfig";
+
+export interface LibraryConfigWrapped { uuid: string, config: LibraryConfig, }
\ No newline at end of file
diff --git a/core/bindings/LibraryQuery.ts b/core/bindings/LibraryQuery.ts
new file mode 100644
index 000000000..2aa14279c
--- /dev/null
+++ b/core/bindings/LibraryQuery.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export type LibraryQuery = { key: "LibGetTags" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" };
\ No newline at end of file
diff --git a/core/bindings/NodeConfig.ts b/core/bindings/NodeConfig.ts
new file mode 100644
index 000000000..512f0202c
--- /dev/null
+++ b/core/bindings/NodeConfig.ts
@@ -0,0 +1,3 @@
+// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
+
+export interface NodeConfig { version: string | null, id: string, name: string, p2p_port: number | null, }
\ No newline at end of file
diff --git a/core/bindings/NodeState.ts b/core/bindings/NodeState.ts
index 6fc2d5c22..978fb3103 100644
--- a/core/bindings/NodeState.ts
+++ b/core/bindings/NodeState.ts
@@ -1,4 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
-import type { LibraryState } from "./LibraryState";
-export interface NodeState { node_pub_id: string, node_id: number, node_name: string, data_path: string, tcp_port: number, libraries: Array, current_library_uuid: string, }
\ No newline at end of file
+export interface NodeState { version: string | null, id: string, name: string, p2p_port: number | null, data_path: string, }
\ No newline at end of file
diff --git a/core/index.ts b/core/index.ts
index 85eee6629..60cc1bc54 100644
--- a/core/index.ts
+++ b/core/index.ts
@@ -2,6 +2,7 @@ export * from './bindings/Client';
export * from './bindings/ClientCommand';
export * from './bindings/ClientQuery';
export * from './bindings/ClientState';
+export * from './bindings/ConfigMetadata';
export * from './bindings/CoreEvent';
export * from './bindings/CoreResource';
export * from './bindings/CoreResponse';
@@ -12,9 +13,14 @@ export * from './bindings/FileKind';
export * from './bindings/FilePath';
export * from './bindings/JobReport';
export * from './bindings/JobStatus';
+export * from './bindings/LibraryCommand';
+export * from './bindings/LibraryConfig';
+export * from './bindings/LibraryConfigWrapped';
export * from './bindings/LibraryNode';
+export * from './bindings/LibraryQuery';
export * from './bindings/LibraryState';
export * from './bindings/LocationResource';
+export * from './bindings/NodeConfig';
export * from './bindings/NodeState';
export * from './bindings/Platform';
export * from './bindings/Statistics';
diff --git a/core/prisma/migrations/20220625180107_remove_library/migration.sql b/core/prisma/migrations/20220625180107_remove_library/migration.sql
new file mode 100644
index 000000000..63e4f056f
--- /dev/null
+++ b/core/prisma/migrations/20220625180107_remove_library/migration.sql
@@ -0,0 +1,29 @@
+/*
+ Warnings:
+
+ - You are about to drop the `libraries` table. If the table is not empty, all the data it contains will be lost.
+ - You are about to drop the `library_statistics` table. If the table is not empty, all the data it contains will be lost.
+
+*/
+-- DropTable
+PRAGMA foreign_keys=off;
+DROP TABLE "libraries";
+PRAGMA foreign_keys=on;
+
+-- DropTable
+PRAGMA foreign_keys=off;
+DROP TABLE "library_statistics";
+PRAGMA foreign_keys=on;
+
+-- CreateTable
+CREATE TABLE "statistics" (
+ "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
+ "date_captured" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
+ "total_file_count" INTEGER NOT NULL DEFAULT 0,
+ "library_db_size" TEXT NOT NULL DEFAULT '0',
+ "total_bytes_used" TEXT NOT NULL DEFAULT '0',
+ "total_bytes_capacity" TEXT NOT NULL DEFAULT '0',
+ "total_unique_bytes" TEXT NOT NULL DEFAULT '0',
+ "total_bytes_free" TEXT NOT NULL DEFAULT '0',
+ "preview_media_bytes" TEXT NOT NULL DEFAULT '0'
+);
diff --git a/core/prisma/schema.prisma b/core/prisma/schema.prisma
index e8f911004..130151f62 100644
--- a/core/prisma/schema.prisma
+++ b/core/prisma/schema.prisma
@@ -35,21 +35,9 @@ model SyncEvent {
@@map("sync_events")
}
-model Library {
- id Int @id @default(autoincrement())
- pub_id String @unique
- name String
- is_primary Boolean @default(true)
- date_created DateTime @default(now())
- timezone String?
-
- @@map("libraries")
-}
-
-model LibraryStatistics {
+model Statistics {
id Int @id @default(autoincrement())
date_captured DateTime @default(now())
- library_id Int @unique
total_file_count Int @default(0)
library_db_size String @default("0")
total_bytes_used String @default("0")
@@ -58,7 +46,7 @@ model LibraryStatistics {
total_bytes_free String @default("0")
preview_media_bytes String @default("0")
- @@map("library_statistics")
+ @@map("statistics")
}
model Node {
diff --git a/core/src/encode/thumb.rs b/core/src/encode/thumb.rs
index 989859506..b20d8f728 100644
--- a/core/src/encode/thumb.rs
+++ b/core/src/encode/thumb.rs
@@ -1,8 +1,8 @@
+use crate::library::LibraryContext;
use crate::{
- job::{Job, JobReportUpdate, WorkerContext},
- node::get_nodestate,
+ job::{Job, JobReportUpdate, JobResult, WorkerContext},
prisma::file_path,
- sys, CoreContext, CoreEvent,
+ sys, CoreEvent,
};
use image::{self, imageops, DynamicImage, GenericImageView};
use log::{error, info};
@@ -28,11 +28,15 @@ impl Job for ThumbnailJob {
fn name(&self) -> &'static str {
"thumbnailer"
}
+ async fn run(&self, ctx: WorkerContext) -> JobResult {
+ let library_ctx = ctx.library_ctx();
+ let thumbnail_dir = library_ctx
+ .config()
+ .data_directory()
+ .join(THUMBNAIL_CACHE_DIR_NAME)
+ .join(self.location_id.to_string());
- async fn run(&self, ctx: WorkerContext) -> Result<(), Box> {
- let config = get_nodestate();
-
- let location = sys::get_location(&ctx.core_ctx, self.location_id).await?;
+ let location = sys::get_location(&library_ctx, self.location_id).await?;
info!(
"Searching for images in location {} at path {:#?}",
@@ -40,19 +44,11 @@ impl Job for ThumbnailJob {
);
// create all necessary directories if they don't exist
- fs::create_dir_all(
- config
- .data_path
- .as_ref()
- .unwrap()
- .join(THUMBNAIL_CACHE_DIR_NAME)
- .join(format!("{}", self.location_id)),
- )
- .await?;
+ fs::create_dir_all(&thumbnail_dir).await?;
let root_path = location.path.unwrap();
// query database for all files in this location that need thumbnails
- let image_files = get_images(&ctx.core_ctx, self.location_id, &self.path).await?;
+ let image_files = get_images(&library_ctx, self.location_id, &self.path).await?;
info!("Found {:?} files", image_files.len());
ctx.progress(vec![
@@ -86,14 +82,7 @@ impl Job for ThumbnailJob {
};
// Define and write the WebP-encoded file to a given path
- let output_path = config
- .data_path
- .as_ref()
- .unwrap()
- .join(THUMBNAIL_CACHE_DIR_NAME)
- .join(format!("{}", location.id))
- .join(&cas_id)
- .with_extension("webp");
+ let output_path = thumbnail_dir.join(&cas_id).with_extension("webp");
// check if file exists at output path
if !output_path.exists() {
@@ -105,7 +94,9 @@ impl Job for ThumbnailJob {
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
if !self.background {
- ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }).await;
+ ctx.library_ctx()
+ .emit(CoreEvent::NewThumbnail { cas_id })
+ .await;
};
} else {
info!("Thumb exists, skipping... {}", output_path.display());
@@ -145,7 +136,7 @@ pub async fn generate_thumbnail>(
}
pub async fn get_images(
- ctx: &CoreContext,
+ ctx: &LibraryContext,
location_id: i32,
path: impl AsRef,
) -> Result, std::io::Error> {
@@ -167,7 +158,7 @@ pub async fn get_images(
}
let image_files = ctx
- .database
+ .db
.file_path()
.find_many(params)
.with(file_path::file::fetch())
diff --git a/core/src/file/cas/identifier.rs b/core/src/file/cas/identifier.rs
index 855352385..a05684eda 100644
--- a/core/src/file/cas/identifier.rs
+++ b/core/src/file/cas/identifier.rs
@@ -2,10 +2,10 @@ use super::checksum::generate_cas_id;
use crate::{
file::FileError,
job::JobReportUpdate,
- job::{Job, WorkerContext},
+ job::{Job, JobResult, WorkerContext},
+ library::LibraryContext,
prisma::{file, file_path},
sys::get_location,
- CoreContext,
};
use chrono::{DateTime, FixedOffset};
use futures::future::join_all;
@@ -13,7 +13,6 @@ use log::{error, info};
use prisma_client_rust::{prisma_models::PrismaValue, raw, raw::Raw, Direction};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
-use std::error::Error;
use std::path::{Path, PathBuf};
use tokio::{fs, io};
@@ -35,13 +34,13 @@ impl Job for FileIdentifierJob {
"file_identifier"
}
- async fn run(&self, ctx: WorkerContext) -> Result<(), Box> {
+ async fn run(&self, ctx: WorkerContext) -> JobResult {
info!("Identifying orphan file paths...");
- let location = get_location(&ctx.core_ctx, self.location_id).await?;
+ let location = get_location(&ctx.library_ctx(), self.location_id).await?;
let location_path = location.path.unwrap_or_else(|| "".to_string());
- let total_count = count_orphan_file_paths(&ctx.core_ctx, location.id.into()).await?;
+ let total_count = count_orphan_file_paths(&ctx.library_ctx(), location.id.into()).await?;
info!("Found {} orphan file paths", total_count);
let task_count = (total_count as f64 / CHUNK_SIZE as f64).ceil() as usize;
@@ -59,7 +58,7 @@ impl Job for FileIdentifierJob {
let mut cas_lookup: HashMap = HashMap::new();
// get chunk of orphans to process
- let file_paths = match get_orphan_file_paths(&ctx.core_ctx, cursor).await {
+ let file_paths = match get_orphan_file_paths(&ctx.library_ctx(), cursor).await {
Ok(file_paths) => file_paths,
Err(e) => {
info!("Error getting orphan file paths: {:#?}", e);
@@ -93,8 +92,8 @@ impl Job for FileIdentifierJob {
// find all existing files by cas id
let generated_cas_ids = chunk.values().map(|c| c.cas_id.clone()).collect();
let existing_files = ctx
- .core_ctx
- .database
+ .library_ctx()
+ .db
.file()
.find_many(vec![file::cas_id::in_vec(generated_cas_ids)])
.exec()
@@ -104,7 +103,8 @@ impl Job for FileIdentifierJob {
// link those existing files to their file paths
// Had to put the file_path in a variable outside of the closure, to satisfy the borrow checker
- let prisma_file_path = ctx.core_ctx.database.file_path();
+ let library_ctx = ctx.library_ctx();
+ let prisma_file_path = library_ctx.db.file_path();
for result in join_all(existing_files.iter().map(|file| {
prisma_file_path
.find_unique(file_path::id::equals(
@@ -133,7 +133,7 @@ impl Job for FileIdentifierJob {
.collect::>();
// assemble prisma values for new unique files
- let mut values: Vec = Vec::new();
+ let mut values = Vec::with_capacity(new_files.len() * 3);
for file in &new_files {
values.extend([
PrismaValue::String(file.cas_id.clone()),
@@ -144,8 +144,8 @@ impl Job for FileIdentifierJob {
// create new file records with assembled values
let created_files: Vec = ctx
- .core_ctx
- .database
+ .library_ctx()
+ .db
._query_raw(Raw::new(
&format!(
"INSERT INTO files (cas_id, size_in_bytes, date_created) VALUES {}
@@ -210,10 +210,10 @@ struct CountRes {
}
pub async fn count_orphan_file_paths(
- ctx: &CoreContext,
+ ctx: &LibraryContext,
location_id: i64,
) -> Result {
- let files_count = ctx.database
+ let files_count = ctx.db
._query_raw::(raw!(
"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE AND location_id = {}",
PrismaValue::Int(location_id)
@@ -223,14 +223,14 @@ pub async fn count_orphan_file_paths(
}
pub async fn get_orphan_file_paths(
- ctx: &CoreContext,
+ ctx: &LibraryContext,
cursor: i32,
) -> Result, FileError> {
info!(
"discovering {} orphan file paths at cursor: {:?}",
CHUNK_SIZE, cursor
);
- ctx.database
+ ctx.db
.file_path()
.find_many(vec![
file_path::file_id::equals(None),
diff --git a/core/src/file/explorer/open.rs b/core/src/file/explorer/open.rs
index 901fa3589..2237e0deb 100644
--- a/core/src/file/explorer/open.rs
+++ b/core/src/file/explorer/open.rs
@@ -1,28 +1,25 @@
use crate::{
encode::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, FileError, FilePath},
- node::get_nodestate,
+ library::LibraryContext,
prisma::file_path,
sys::get_location,
- CoreContext,
};
use log::info;
use std::path::Path;
pub async fn open_dir(
- ctx: &CoreContext,
+ ctx: &LibraryContext,
location_id: i32,
path: impl AsRef,
) -> Result {
- let config = get_nodestate();
-
// get location
let location = get_location(ctx, location_id).await?;
let path_str = path.as_ref().to_string_lossy().to_string();
let directory = ctx
- .database
+ .db
.file_path()
.find_first(vec![
file_path::location_id::equals(Some(location.id)),
@@ -36,7 +33,7 @@ pub async fn open_dir(
info!("DIRECTORY: {:?}", directory);
let mut file_paths: Vec = ctx
- .database
+ .db
.file_path()
.find_many(vec![
file_path::location_id::equals(Some(location.id)),
@@ -49,17 +46,17 @@ pub async fn open_dir(
.map(Into::into)
.collect();
- if let Some(ref data_path) = config.data_path {
- for file_path in &mut file_paths {
- if let Some(file) = &mut file_path.file {
- let thumb_path = data_path
- .join(THUMBNAIL_CACHE_DIR_NAME)
- .join(location.id.to_string())
- .join(file.cas_id.clone())
- .with_extension("webp");
+ for file_path in &mut file_paths {
+ if let Some(file) = &mut file_path.file {
+ let thumb_path = ctx
+ .config()
+ .data_directory()
+ .join(THUMBNAIL_CACHE_DIR_NAME)
+ .join(location.id.to_string())
+ .join(&file.cas_id)
+ .with_extension("webp");
- file.has_thumbnail = thumb_path.exists();
- }
+ file.has_thumbnail = thumb_path.exists();
}
}
diff --git a/core/src/file/indexer/mod.rs b/core/src/file/indexer/mod.rs
index d4e428a4d..942a1e3b8 100644
--- a/core/src/file/indexer/mod.rs
+++ b/core/src/file/indexer/mod.rs
@@ -1,5 +1,4 @@
-use crate::job::{Job, JobReportUpdate, WorkerContext};
-use std::error::Error;
+use crate::job::{Job, JobReportUpdate, JobResult, WorkerContext};
use std::path::PathBuf;
use self::scan::ScanProgress;
@@ -20,8 +19,8 @@ impl Job for IndexerJob {
fn name(&self) -> &'static str {
"indexer"
}
- async fn run(&self, ctx: WorkerContext) -> Result<(), Box> {
- scan_path(&ctx.core_ctx.clone(), &self.path, move |p| {
+ async fn run(&self, ctx: WorkerContext) -> JobResult {
+ scan_path(&ctx.library_ctx(), &self.path, move |p| {
ctx.progress(
p.iter()
.map(|p| match p.clone() {
diff --git a/core/src/file/indexer/scan.rs b/core/src/file/indexer/scan.rs
index 6dd7a5bb7..87c830f6a 100644
--- a/core/src/file/indexer/scan.rs
+++ b/core/src/file/indexer/scan.rs
@@ -1,8 +1,6 @@
-use crate::{
- sys::{create_location, LocationResource},
- CoreContext,
-};
-
+use crate::job::JobResult;
+use crate::library::LibraryContext;
+use crate::sys::{create_location, LocationResource};
use chrono::{DateTime, Utc};
use log::{error, info};
use prisma_client_rust::prisma_models::PrismaValue;
@@ -30,10 +28,10 @@ static BATCH_SIZE: usize = 100;
// creates a vector of valid path buffers from a directory
pub async fn scan_path(
- ctx: &CoreContext,
+ ctx: &LibraryContext,
path: impl AsRef + Debug,
on_progress: impl Fn(Vec) + Send + Sync + 'static,
-) -> Result<(), Box> {
+) -> JobResult {
let location = create_location(ctx, &path).await?;
// query db to highers id, so we can increment it for the new files indexed
@@ -43,7 +41,7 @@ pub async fn scan_path(
}
// grab the next id so we can increment in memory for batch inserting
let first_file_id = match ctx
- .database
+ .db
._query_raw::(raw!("SELECT MAX(id) id FROM file_paths"))
.await
{
@@ -168,7 +166,7 @@ pub async fn scan_path(
files
);
- let count = ctx.database._execute_raw(raw).await;
+ let count = ctx.db._execute_raw(raw).await;
info!("Inserted {:?} records", count);
}
diff --git a/core/src/file/mod.rs b/core/src/file/mod.rs
index 9959af3ed..1829e3fa7 100644
--- a/core/src/file/mod.rs
+++ b/core/src/file/mod.rs
@@ -1,13 +1,15 @@
-use std::path::PathBuf;
+use chrono::{DateTime, Utc};
use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
+use std::path::PathBuf;
use thiserror::Error;
use ts_rs::TS;
use crate::{
+ library::LibraryContext,
prisma::{self, file, file_path},
sys::SysError,
- ClientQuery, CoreContext, CoreError, CoreEvent, CoreResponse,
+ ClientQuery, CoreError, CoreEvent, CoreResponse, LibraryQuery,
};
pub mod cas;
pub mod explorer;
@@ -33,9 +35,9 @@ pub struct File {
pub ipfs_id: Option,
pub note: Option,
- pub date_created: chrono::DateTime,
- pub date_modified: chrono::DateTime,
- pub date_indexed: chrono::DateTime,
+ pub date_created: DateTime,
+ pub date_modified: DateTime,
+ pub date_indexed: DateTime,
pub paths: Vec,
// pub media_data: Option,
@@ -56,9 +58,9 @@ pub struct FilePath {
pub file_id: Option,
pub parent_id: Option,
- pub date_created: chrono::DateTime,
- pub date_modified: chrono::DateTime,
- pub date_indexed: chrono::DateTime,
+ pub date_created: DateTime,
+ pub date_modified: DateTime,
+ pub date_indexed: DateTime,
pub file: Option,
}
@@ -148,12 +150,12 @@ pub enum FileError {
}
pub async fn set_note(
- ctx: CoreContext,
+ ctx: LibraryContext,
id: i32,
note: Option,
) -> Result {
let _response = ctx
- .database
+ .db
.file()
.find_unique(file::id::equals(id))
.update(vec![file::note::set(note.clone())])
@@ -161,10 +163,13 @@ pub async fn set_note(
.await
.unwrap();
- ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibGetExplorerDir {
- limit: 0,
- path: "".to_string(),
- location_id: 0,
+ ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
+ library_id: ctx.id.to_string(),
+ query: LibraryQuery::LibGetExplorerDir {
+ limit: 0,
+ path: PathBuf::new(),
+ location_id: 0,
+ },
}))
.await;
diff --git a/core/src/job/jobs.rs b/core/src/job/jobs.rs
index 3a0b56c5a..0cca9b8ad 100644
--- a/core/src/job/jobs.rs
+++ b/core/src/job/jobs.rs
@@ -3,48 +3,69 @@ use super::{
JobError,
};
use crate::{
- node::get_nodestate,
+ library::LibraryContext,
prisma::{job, node},
- CoreContext,
};
use int_enum::IntEnum;
-use log::info;
+use log::{error, info};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, VecDeque},
+ error::Error,
fmt::Debug,
sync::Arc,
};
-use tokio::sync::Mutex;
+use tokio::sync::{mpsc, Mutex, RwLock};
use ts_rs::TS;
// db is single threaded, nerd
const MAX_WORKERS: usize = 1;
+pub type JobResult = Result<(), Box>;
+
#[async_trait::async_trait]
pub trait Job: Send + Sync + Debug {
fn name(&self) -> &'static str;
- async fn run(&self, ctx: WorkerContext) -> Result<(), Box>;
+ async fn run(&self, ctx: WorkerContext) -> JobResult;
+}
+
+pub enum JobManagerEvent {
+ IngestJob(LibraryContext, Box),
}
// jobs struct is maintained by the core
-pub struct Jobs {
- job_queue: VecDeque>,
+pub struct JobManager {
+ job_queue: RwLock>>,
// workers are spawned when jobs are picked off the queue
- running_workers: HashMap>>,
+ running_workers: RwLock>>>,
+ internal_sender: mpsc::UnboundedSender,
}
-impl Jobs {
- pub fn new() -> Self {
- Self {
- job_queue: VecDeque::new(),
- running_workers: HashMap::new(),
- }
+impl JobManager {
+ pub fn new() -> Arc {
+ let (internal_sender, mut internal_receiver) = mpsc::unbounded_channel();
+ let this = Arc::new(Self {
+ job_queue: RwLock::new(VecDeque::new()),
+ running_workers: RwLock::new(HashMap::new()),
+ internal_sender,
+ });
+
+ let this2 = this.clone();
+ tokio::spawn(async move {
+ while let Some(event) = internal_receiver.recv().await {
+ match event {
+ JobManagerEvent::IngestJob(ctx, job) => this2.clone().ingest(&ctx, job).await,
+ }
+ }
+ });
+
+ this
}
- pub async fn ingest(&mut self, ctx: &CoreContext, job: Box) {
+ pub async fn ingest(self: Arc, ctx: &LibraryContext, job: Box) {
// create worker to process job
- if self.running_workers.len() < MAX_WORKERS {
+ let mut running_workers = self.running_workers.write().await;
+ if running_workers.len() < MAX_WORKERS {
info!("Running job: {:?}", job.name());
let worker = Worker::new(job);
@@ -52,51 +73,57 @@ impl Jobs {
let wrapped_worker = Arc::new(Mutex::new(worker));
- Worker::spawn(Arc::clone(&wrapped_worker), ctx).await;
+ Worker::spawn(Arc::clone(&self), Arc::clone(&wrapped_worker), ctx.clone()).await;
- self.running_workers.insert(id, wrapped_worker);
+ running_workers.insert(id, wrapped_worker);
} else {
- self.job_queue.push_back(job);
+ self.job_queue.write().await.push_back(job);
}
}
- pub fn ingest_queue(&mut self, _ctx: &CoreContext, job: Box) {
- self.job_queue.push_back(job);
+ pub async fn ingest_queue(&self, _ctx: &LibraryContext, job: Box) {
+ self.job_queue.write().await.push_back(job);
}
- pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
+
+ pub async fn complete(self: Arc, ctx: &LibraryContext, job_id: String) {
// remove worker from running workers
- self.running_workers.remove(&job_id);
+ self.running_workers.write().await.remove(&job_id);
// continue queue
- let job = self.job_queue.pop_front();
+ let job = self.job_queue.write().await.pop_front();
if let Some(job) = job {
- self.ingest(ctx, job).await;
+ // We can't directly execute `self.ingest` here because it would cause an async cycle.
+ self.internal_sender
+ .send(JobManagerEvent::IngestJob(ctx.clone(), job))
+ .unwrap_or_else(|_| {
+ error!("Failed to ingest job!");
+ });
}
}
pub async fn get_running(&self) -> Vec {
let mut ret = vec![];
- for worker in self.running_workers.values() {
+ for worker in self.running_workers.read().await.values() {
let worker = worker.lock().await;
ret.push(worker.job_report.clone());
}
ret
}
- pub async fn queue_pending_job(ctx: &CoreContext) -> Result<(), JobError> {
- let _next_job = ctx
- .database
- .job()
- .find_first(vec![job::status::equals(JobStatus::Queued.int_value())])
- .exec()
- .await?;
+ // pub async fn queue_pending_job(ctx: &LibraryContext) -> Result<(), JobError> {
+ // let _next_job = ctx
+ // .db
+ // .job()
+ // .find_first(vec![job::status::equals(JobStatus::Queued.int_value())])
+ // .exec()
+ // .await?;
- Ok(())
- }
+ // Ok(())
+ // }
- pub async fn get_history(ctx: &CoreContext) -> Result, JobError> {
+ pub async fn get_history(ctx: &LibraryContext) -> Result, JobError> {
let jobs = ctx
- .database
+ .db
.job()
.find_many(vec![job::status::not(JobStatus::Running.int_value())])
.exec()
@@ -171,30 +198,29 @@ impl JobReport {
seconds_elapsed: 0,
}
}
- pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
- let config = get_nodestate();
+ pub async fn create(&self, ctx: &LibraryContext) -> Result<(), JobError> {
let mut params = Vec::new();
if self.data.is_some() {
params.push(job::data::set(self.data.clone()))
}
- ctx.database
+ ctx.db
.job()
.create(
job::id::set(self.id.clone()),
job::name::set(self.name.clone()),
job::action::set(1),
- job::nodes::link(node::id::equals(config.node_id)),
+ job::nodes::link(node::id::equals(ctx.node_local_id)),
params,
)
.exec()
.await?;
Ok(())
}
- pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
- ctx.database
+ pub async fn update(&self, ctx: &LibraryContext) -> Result<(), JobError> {
+ ctx.db
.job()
.find_unique(job::id::equals(self.id.clone()))
.update(vec![
diff --git a/core/src/job/worker.rs b/core/src/job/worker.rs
index 1acf4f250..ddfd17ca2 100644
--- a/core/src/job/worker.rs
+++ b/core/src/job/worker.rs
@@ -1,8 +1,8 @@
use super::{
jobs::{JobReport, JobReportUpdate, JobStatus},
- Job,
+ Job, JobManager,
};
-use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent};
+use crate::{library::LibraryContext, ClientQuery, CoreEvent, LibraryQuery};
use log::error;
use std::{sync::Arc, time::Duration};
use tokio::{
@@ -29,8 +29,8 @@ enum WorkerState {
#[derive(Clone)]
pub struct WorkerContext {
pub uuid: String,
- pub core_ctx: CoreContext,
- pub sender: UnboundedSender,
+ library_ctx: LibraryContext,
+ sender: UnboundedSender,
}
impl WorkerContext {
@@ -39,9 +39,13 @@ impl WorkerContext {
.send(WorkerEvent::Progressed(updates))
.unwrap_or(());
}
+
+ pub fn library_ctx(&self) -> LibraryContext {
+ self.library_ctx.clone()
+ }
+
// save the job data to
// pub fn save_data () {
-
// }
}
@@ -66,7 +70,11 @@ impl Worker {
}
}
// spawns a thread and extracts channel sender to communicate with it
- pub async fn spawn(worker: Arc>, ctx: &CoreContext) {
+ pub async fn spawn(
+ job_manager: Arc,
+ worker: Arc>,
+ ctx: LibraryContext,
+ ) {
// we capture the worker receiver channel so state can be updated from inside the worker
let mut worker_mut = worker.lock().await;
// extract owned job and receiver from Self
@@ -79,25 +87,26 @@ impl Worker {
WorkerState::Running => unreachable!(),
};
let worker_sender = worker_mut.worker_sender.clone();
- let core_ctx = ctx.clone();
worker_mut.job_report.status = JobStatus::Running;
- worker_mut.job_report.create(ctx).await.unwrap_or(());
+ worker_mut.job_report.create(&ctx).await.unwrap_or(());
// spawn task to handle receiving events from the worker
+ let library_ctx = ctx.clone();
tokio::spawn(Worker::track_progress(
worker.clone(),
worker_receiver,
- ctx.clone(),
+ library_ctx.clone(),
));
let uuid = worker_mut.job_report.id.clone();
// spawn task to handle running the job
+
tokio::spawn(async move {
let worker_ctx = WorkerContext {
uuid,
- core_ctx,
+ library_ctx,
sender: worker_sender,
};
let job_start = Instant::now();
@@ -116,20 +125,15 @@ impl Worker {
}
});
- let result = job.run(worker_ctx.clone()).await;
-
- if let Err(e) = result {
- error!("job failed {:?}", e);
+ if let Err(e) = job.run(worker_ctx.clone()).await {
+ error!("job '{}' failed with error: {}", worker_ctx.uuid, e);
worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
} else {
// handle completion
worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
}
- worker_ctx
- .core_ctx
- .internal_sender
- .send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
- .unwrap_or(());
+
+ job_manager.complete(&ctx, worker_ctx.uuid).await;
});
}
@@ -140,7 +144,7 @@ impl Worker {
async fn track_progress(
worker: Arc>,
mut channel: UnboundedReceiver,
- ctx: CoreContext,
+ ctx: LibraryContext,
) {
while let Some(command) = channel.recv().await {
let mut worker = worker.lock().await;
@@ -179,16 +183,23 @@ impl Worker {
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
.await;
- ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
- .await;
+
+ ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
+ library_id: ctx.id.to_string(),
+ query: LibraryQuery::JobGetHistory,
+ }))
+ .await;
break;
}
WorkerEvent::Failed => {
worker.job_report.status = JobStatus::Failed;
worker.job_report.update(&ctx).await.unwrap_or(());
- ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
- .await;
+ ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
+ library_id: ctx.id.to_string(),
+ query: LibraryQuery::JobGetHistory,
+ }))
+ .await;
break;
}
}
diff --git a/core/src/lib.rs b/core/src/lib.rs
index f0b0833d2..887d1526a 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -1,13 +1,13 @@
-use crate::{
- file::cas::FileIdentifierJob, library::get_library_path, node::NodeState,
- prisma::file as prisma_file, prisma::location, util::db::create_connection,
-};
-use job::{Job, JobReport, Jobs};
-use log::{error, info};
-use prisma::PrismaClient;
+use crate::{file::cas::FileIdentifierJob, prisma::file as prisma_file, prisma::location};
+use job::{JobManager, JobReport};
+use library::{LibraryConfig, LibraryConfigWrapped, LibraryManager};
+use log::error;
+use node::{NodeConfig, NodeConfigManager};
use serde::{Deserialize, Serialize};
-use std::path::PathBuf;
-use std::sync::Arc;
+use std::{
+ path::{Path, PathBuf},
+ sync::Arc,
+};
use thiserror::Error;
use tokio::{
fs,
@@ -37,12 +37,12 @@ pub struct ReturnableMessage> {
}
// core controller is passed to the client to communicate with the core which runs in a dedicated thread
-pub struct CoreController {
+pub struct NodeController {
query_sender: UnboundedSender>,
command_sender: UnboundedSender>,
}
-impl CoreController {
+impl NodeController {
pub async fn query(&self, query: ClientQuery) -> Result {
// a one time use channel to send and await a response
let (sender, recv) = oneshot::channel();
@@ -69,48 +69,25 @@ impl CoreController {
}
}
-#[derive(Debug)]
-pub enum InternalEvent {
- JobIngest(Box),
- JobQueue(Box),
- JobComplete(String),
-}
-
#[derive(Clone)]
-pub struct CoreContext {
- pub database: Arc,
+pub struct NodeContext {
pub event_sender: mpsc::Sender,
- pub internal_sender: UnboundedSender,
+ pub config: Arc,
+ pub jobs: Arc,
}
-impl CoreContext {
- pub fn spawn_job(&self, job: Box) {
- self.internal_sender
- .send(InternalEvent::JobIngest(job))
- .unwrap_or_else(|e| {
- error!("Failed to spawn job. {:?}", e);
- });
- }
- pub fn queue_job(&self, job: Box) {
- self.internal_sender
- .send(InternalEvent::JobQueue(job))
- .unwrap_or_else(|e| {
- error!("Failed to queue job. {:?}", e);
- });
- }
+impl NodeContext {
pub async fn emit(&self, event: CoreEvent) {
self.event_sender.send(event).await.unwrap_or_else(|e| {
- error!("Failed to emit event. {:?}", e);
+ error!("Failed to emit event. {:#?}", e);
});
}
}
pub struct Node {
- state: NodeState,
- jobs: job::Jobs,
- database: Arc,
- // filetype_registry: library::TypeRegistry,
- // extension_registry: library::ExtensionRegistry,
+ config: Arc,
+ library_manager: Arc,
+ jobs: Arc,
// global messaging channels
query_channel: (
@@ -122,73 +99,56 @@ pub struct Node {
UnboundedReceiver>,
),
event_sender: mpsc::Sender,
-
- // a channel for child threads to send events back to the core
- internal_channel: (
- UnboundedSender,
- UnboundedReceiver,
- ),
}
impl Node {
// create new instance of node, run startup tasks
- pub async fn new(mut data_dir: PathBuf) -> (Node, mpsc::Receiver) {
- let (event_sender, event_recv) = mpsc::channel(100);
-
- data_dir.push("spacedrive");
- // create data directory if it doesn't exist
+ pub async fn new(
+ data_dir: impl AsRef,
+ ) -> (NodeController, mpsc::Receiver, Node) {
fs::create_dir_all(&data_dir).await.unwrap();
- // prepare basic client state
- let mut state = NodeState::new(data_dir.clone(), "diamond-mastering-space-dragon").unwrap();
- // load from disk
- state
- .read_disk()
+
+ let (event_sender, event_recv) = mpsc::channel(100);
+ let config = NodeConfigManager::new(data_dir.as_ref().to_owned())
.await
- .unwrap_or_else(|_| error!("Error: No node state found, creating new one..."));
-
- state.save().await;
-
- info!("Node State: {:?}", state);
-
- // connect to default library
- let database = Arc::new(
- create_connection(&get_library_path(&data_dir))
- .await
- .unwrap(),
- );
-
- let internal_channel = unbounded_channel::();
-
- let node = Node {
- state,
- query_channel: unbounded_channel(),
- command_channel: unbounded_channel(),
- jobs: Jobs::new(),
- event_sender,
- database,
- internal_channel,
+ .unwrap();
+ let jobs = JobManager::new();
+ let node_ctx = NodeContext {
+ event_sender: event_sender.clone(),
+ config: config.clone(),
+ jobs: jobs.clone(),
};
- (node, event_recv)
+ let node = Node {
+ config,
+ library_manager: LibraryManager::new(data_dir.as_ref().join("libraries"), node_ctx)
+ .await
+ .unwrap(),
+ query_channel: unbounded_channel(),
+ command_channel: unbounded_channel(),
+ jobs,
+ event_sender,
+ };
+
+ (
+ NodeController {
+ query_sender: node.query_channel.0.clone(),
+ command_sender: node.command_channel.0.clone(),
+ },
+ event_recv,
+ node,
+ )
}
- pub fn get_context(&self) -> CoreContext {
- CoreContext {
- database: self.database.clone(),
+ pub fn get_context(&self) -> NodeContext {
+ NodeContext {
event_sender: self.event_sender.clone(),
- internal_sender: self.internal_channel.0.clone(),
+ config: Arc::clone(&self.config),
+ jobs: Arc::clone(&self.jobs),
}
}
- pub fn get_controller(&self) -> CoreController {
- CoreController {
- query_sender: self.query_channel.0.clone(),
- command_sender: self.command_channel.0.clone(),
- }
- }
-
- pub async fn start(&mut self) {
- let ctx = self.get_context();
+ pub async fn start(mut self) {
loop {
// listen on global messaging channels for incoming messages
tokio::select! {
@@ -200,174 +160,200 @@ impl Node {
let res = self.exec_command(msg.data).await;
msg.return_sender.send(res).unwrap_or(());
}
- Some(event) = self.internal_channel.1.recv() => {
- match event {
- InternalEvent::JobIngest(job) => {
- self.jobs.ingest(&ctx, job).await;
- },
- InternalEvent::JobQueue(job) => {
- self.jobs.ingest_queue(&ctx, job);
- },
- InternalEvent::JobComplete(id) => {
- self.jobs.complete(&ctx, id).await;
- },
- }
- }
}
}
}
- // load library database + initialize client with db
- pub async fn initializer(&self) {
- info!("Initializing...");
- let ctx = self.get_context();
-
- if self.state.libraries.is_empty() {
- match library::create(&ctx, None).await {
- Ok(library) => info!("Created new library: {:?}", library),
- Err(e) => error!("Error creating library: {:?}", e),
- }
- } else {
- for library in self.state.libraries.iter() {
- // init database for library
- match library::load(&ctx, &library.library_path, &library.library_uuid).await {
- Ok(library) => info!("Loaded library: {:?}", library),
- Err(e) => error!("Error loading library: {:?}", e),
- }
- }
- }
- // init node data within library
- match node::LibraryNode::create(self).await {
- Ok(_) => info!("Spacedrive online"),
- Err(e) => error!("Error initializing node: {:?}", e),
- };
- }
async fn exec_command(&mut self, cmd: ClientCommand) -> Result {
- info!("Core command: {:?}", cmd);
- let ctx = self.get_context();
Ok(match cmd {
- // CRUD for locations
- ClientCommand::LocCreate { path } => {
- let loc = sys::new_location_and_scan(&ctx, &path).await?;
- // ctx.queue_job(Box::new(FileIdentifierJob));
- CoreResponse::LocCreate(loc)
+ ClientCommand::CreateLibrary { name } => {
+ self.library_manager
+ .create(LibraryConfig {
+ name: name.to_string(),
+ ..Default::default()
+ })
+ .await
+ .unwrap();
+ CoreResponse::Success(())
}
- ClientCommand::LocUpdate { id, name } => {
- ctx.database
- .location()
- .find_unique(location::id::equals(id))
- .update(vec![location::name::set(name)])
- .exec()
- .await?;
+ ClientCommand::EditLibrary {
+ id,
+ name,
+ description,
+ } => {
+ self.library_manager
+ .edit_library(id, name, description)
+ .await
+ .unwrap();
+ CoreResponse::Success(())
+ }
+ ClientCommand::DeleteLibrary { id } => {
+ self.library_manager.delete_library(id).await.unwrap();
+ CoreResponse::Success(())
+ }
+ ClientCommand::LibraryCommand {
+ library_id,
+ command,
+ } => {
+ let ctx = self.library_manager.get_ctx(library_id).await.unwrap();
+ match command {
+ // CRUD for locations
+ LibraryCommand::LocCreate { path } => {
+ let loc = sys::new_location_and_scan(&ctx, &path).await?;
+ // ctx.queue_job(Box::new(FileIdentifierJob));
+ CoreResponse::LocCreate(loc)
+ }
+ LibraryCommand::LocUpdate { id, name } => {
+ ctx.db
+ .location()
+ .find_unique(location::id::equals(id))
+ .update(vec![location::name::set(name)])
+ .exec()
+ .await?;
- CoreResponse::Success(())
- }
- ClientCommand::LocDelete { id } => {
- sys::delete_location(&ctx, id).await?;
- CoreResponse::Success(())
- }
- ClientCommand::LocRescan { id } => {
- sys::scan_location(&ctx, id, String::new());
- CoreResponse::Success(())
- }
- // CRUD for files
- ClientCommand::FileReadMetaData { id: _ } => todo!(),
- ClientCommand::FileSetNote { id, note } => file::set_note(ctx, id, note).await?,
- // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
- ClientCommand::FileDelete { id } => {
- ctx.database
- .file()
- .find_unique(prisma_file::id::equals(id))
- .delete()
- .exec()
- .await?;
+ CoreResponse::Success(())
+ }
+ LibraryCommand::LocDelete { id } => {
+ sys::delete_location(&ctx, id).await?;
+ CoreResponse::Success(())
+ }
+ LibraryCommand::LocRescan { id } => {
+ sys::scan_location(&ctx, id, String::new()).await;
+ CoreResponse::Success(())
+ }
+ // CRUD for files
+ LibraryCommand::FileReadMetaData { id: _ } => todo!(),
+ LibraryCommand::FileSetNote { id, note } => {
+ file::set_note(ctx, id, note).await?
+ }
+ // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
+ LibraryCommand::FileDelete { id } => {
+ ctx.db
+ .file()
+ .find_unique(prisma_file::id::equals(id))
+ .delete()
+ .exec()
+ .await?;
- CoreResponse::Success(())
- }
- // CRUD for tags
- ClientCommand::TagCreate { name: _, color: _ } => todo!(),
- ClientCommand::TagAssign {
- file_id: _,
- tag_id: _,
- } => todo!(),
- ClientCommand::TagDelete { id: _ } => todo!(),
- // CRUD for libraries
- ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
- ClientCommand::LibDelete { id: _ } => todo!(),
- ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
- ClientCommand::GenerateThumbsForLocation { id, path } => {
- ctx.spawn_job(Box::new(ThumbnailJob {
- location_id: id,
- path,
- background: false, // fix
- }));
- CoreResponse::Success(())
- }
- // ClientCommand::PurgeDatabase => {
- // info!("Purging database...");
- // fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
- // CoreResponse::Success(())
- // }
- ClientCommand::IdentifyUniqueFiles { id, path } => {
- ctx.spawn_job(Box::new(FileIdentifierJob {
- location_id: id,
- path,
- }));
- CoreResponse::Success(())
+ CoreResponse::Success(())
+ }
+ // CRUD for tags
+ LibraryCommand::TagCreate { name: _, color: _ } => todo!(),
+ LibraryCommand::TagAssign {
+ file_id: _,
+ tag_id: _,
+ } => todo!(),
+ LibraryCommand::TagUpdate { name: _, color: _ } => todo!(),
+ LibraryCommand::TagDelete { id: _ } => todo!(),
+ // CRUD for libraries
+ LibraryCommand::SysVolumeUnmount { id: _ } => todo!(),
+ LibraryCommand::GenerateThumbsForLocation { id, path } => {
+ ctx.spawn_job(Box::new(ThumbnailJob {
+ location_id: id,
+ path,
+ background: false, // fix
+ }))
+ .await;
+ CoreResponse::Success(())
+ }
+ LibraryCommand::IdentifyUniqueFiles { id, path } => {
+ ctx.spawn_job(Box::new(FileIdentifierJob {
+ location_id: id,
+ path,
+ }))
+ .await;
+ CoreResponse::Success(())
+ }
+ }
}
})
}
// query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result {
- let ctx = self.get_context();
Ok(match query {
- // return the client state from memory
- ClientQuery::NodeGetState => CoreResponse::NodeGetState(self.state.clone()),
- // get system volumes without saving to library
- ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?),
- ClientQuery::SysGetLocations => {
- CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?)
- }
- // get location from library
- ClientQuery::SysGetLocation { id } => {
- CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?)
- }
- // return contents of a directory for the explorer
- ClientQuery::LibGetExplorerDir {
- path,
- location_id,
- limit: _,
- } => CoreResponse::LibGetExplorerDir(
- file::explorer::open_dir(&ctx, location_id, &path).await?,
+ ClientQuery::NodeGetLibraries => CoreResponse::NodeGetLibraries(
+ self.library_manager.get_all_libraries_config().await,
),
- ClientQuery::LibGetTags => todo!(),
+ ClientQuery::NodeGetState => CoreResponse::NodeGetState(NodeState {
+ config: self.config.get().await,
+ data_path: self.config.data_directory().to_str().unwrap().to_string(),
+ }),
+ ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?),
ClientQuery::JobGetRunning => {
CoreResponse::JobGetRunning(self.jobs.get_running().await)
}
- ClientQuery::JobGetHistory => {
- CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?)
- }
- ClientQuery::GetLibraryStatistics => {
- CoreResponse::GetLibraryStatistics(library::Statistics::calculate(&ctx).await?)
- }
ClientQuery::GetNodes => todo!(),
+ ClientQuery::LibraryQuery { library_id, query } => {
+ let ctx = match self.library_manager.get_ctx(library_id.clone()).await {
+ Some(ctx) => ctx,
+ None => {
+ println!("Library '{}' not found!", library_id);
+ return Ok(CoreResponse::Error("Library not found".into()));
+ }
+ };
+ match query {
+ LibraryQuery::SysGetLocations => {
+ CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?)
+ }
+ // get location from library
+ LibraryQuery::SysGetLocation { id } => {
+ CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?)
+ }
+ // return contents of a directory for the explorer
+ LibraryQuery::LibGetExplorerDir {
+ location_id,
+ path,
+ limit: _,
+ } => CoreResponse::LibGetExplorerDir(Box::new(
+ file::explorer::open_dir(&ctx, location_id, path).await?,
+ )),
+ LibraryQuery::LibGetTags => todo!(),
+ LibraryQuery::JobGetHistory => {
+ CoreResponse::JobGetHistory(JobManager::get_history(&ctx).await?)
+ }
+ LibraryQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics(
+ library::Statistics::calculate(&ctx).await?,
+ ),
+ }
+ }
})
}
}
-// represents an event this library can emit
+/// is a command destined for the core
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientCommand {
+ // Libraries
+ CreateLibrary {
+ name: String,
+ },
+ EditLibrary {
+ id: String,
+ name: Option,
+ description: Option,
+ },
+ DeleteLibrary {
+ id: String,
+ },
+ LibraryCommand {
+ library_id: String,
+ command: LibraryCommand,
+ },
+}
+
+/// is a command destined for a specific library which is loaded into the core.
+#[derive(Serialize, Deserialize, Debug, TS)]
+#[serde(tag = "key", content = "params")]
+#[ts(export)]
+pub enum LibraryCommand {
// Files
FileReadMetaData { id: i32 },
FileSetNote { id: i32, note: Option },
// FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
FileDelete { id: i32 },
- // Library
- LibDelete { id: i32 },
// Tags
TagCreate { name: String, color: String },
TagUpdate { name: String, color: String },
@@ -385,15 +371,28 @@ pub enum ClientCommand {
IdentifyUniqueFiles { id: i32, path: PathBuf },
}
-// represents an event this library can emit
+/// is a query destined for the core
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientQuery {
+ NodeGetLibraries,
NodeGetState,
SysGetVolumes,
- LibGetTags,
JobGetRunning,
+ GetNodes,
+ LibraryQuery {
+ library_id: String,
+ query: LibraryQuery,
+ },
+}
+
+/// is a query destined for a specific library which is loaded into the core.
+#[derive(Serialize, Deserialize, Debug, TS)]
+#[serde(tag = "key", content = "params")]
+#[ts(export)]
+pub enum LibraryQuery {
+ LibGetTags,
JobGetHistory,
SysGetLocations,
SysGetLocation {
@@ -401,11 +400,10 @@ pub enum ClientQuery {
},
LibGetExplorerDir {
location_id: i32,
- path: String,
+ path: PathBuf,
limit: i32,
},
GetLibraryStatistics,
- GetNodes,
}
// represents an event this library can emit
@@ -422,15 +420,25 @@ pub enum CoreEvent {
DatabaseDisconnected { reason: Option },
}
+#[derive(Serialize, Deserialize, Debug, TS)]
+#[ts(export)]
+pub struct NodeState {
+ #[serde(flatten)]
+ pub config: NodeConfig,
+ pub data_path: String,
+}
+
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "data")]
#[ts(export)]
pub enum CoreResponse {
Success(()),
+ Error(String),
+ NodeGetLibraries(Vec),
SysGetVolumes(Vec),
SysGetLocation(sys::LocationResource),
SysGetLocations(Vec),
- LibGetExplorerDir(file::DirectoryWithContents),
+ LibGetExplorerDir(Box),
NodeGetState(NodeState),
LocCreate(sys::LocationResource),
JobGetRunning(Vec),
diff --git a/core/src/library/library_config.rs b/core/src/library/library_config.rs
new file mode 100644
index 000000000..f3ab140f1
--- /dev/null
+++ b/core/src/library/library_config.rs
@@ -0,0 +1,69 @@
+use std::{
+ fs::File,
+ io::{BufReader, Seek, SeekFrom},
+ path::PathBuf,
+};
+
+use serde::{Deserialize, Serialize};
+use std::io::Write;
+use ts_rs::TS;
+
+use crate::node::ConfigMetadata;
+
+use super::LibraryManagerError;
+
+/// LibraryConfig holds the configuration for a specific library. This is stored as a '{uuid}.sdlibrary' file.
+#[derive(Debug, Serialize, Deserialize, Clone, TS, Default)]
+#[ts(export)]
+pub struct LibraryConfig {
+ #[serde(flatten)]
+ pub metadata: ConfigMetadata,
+ /// name is the display name of the library. This is used in the UI and is set by the user.
+ pub name: String,
+ /// description is a user set description of the library. This is used in the UI and is set by the user.
+ pub description: String,
+}
+
+impl LibraryConfig {
+ /// read will read the configuration from disk and return it.
+ pub(super) async fn read(file_dir: PathBuf) -> Result {
+ let mut file = File::open(&file_dir)?;
+ let base_config: ConfigMetadata = serde_json::from_reader(BufReader::new(&mut file))?;
+
+ Self::migrate_config(base_config.version, file_dir)?;
+
+ file.seek(SeekFrom::Start(0))?;
+ Ok(serde_json::from_reader(BufReader::new(&mut file))?)
+ }
+
+ /// save will write the configuration back to disk
+ pub(super) async fn save(
+ file_dir: PathBuf,
+ config: &LibraryConfig,
+ ) -> Result<(), LibraryManagerError> {
+ File::create(file_dir)?.write_all(serde_json::to_string(config)?.as_bytes())?;
+ Ok(())
+ }
+
+ /// migrate_config is a function used to apply breaking changes to the library config file.
+ fn migrate_config(
+ current_version: Option,
+ config_path: PathBuf,
+ ) -> Result<(), LibraryManagerError> {
+ match current_version {
+ None => Err(LibraryManagerError::Migration(format!(
+ "Your Spacedrive library at '{}' is missing the `version` field",
+ config_path.display()
+ ))),
+ _ => Ok(()),
+ }
+ }
+}
+
+// used to return to the frontend with uuid context
+#[derive(Serialize, Deserialize, Debug, TS)]
+#[ts(export)]
+pub struct LibraryConfigWrapped {
+ pub uuid: String,
+ pub config: LibraryConfig,
+}
diff --git a/core/src/library/library_ctx.rs b/core/src/library/library_ctx.rs
new file mode 100644
index 000000000..50bc5ea94
--- /dev/null
+++ b/core/src/library/library_ctx.rs
@@ -0,0 +1,46 @@
+use std::sync::Arc;
+
+use uuid::Uuid;
+
+use crate::{job::Job, node::NodeConfigManager, prisma::PrismaClient, CoreEvent, NodeContext};
+
+use super::LibraryConfig;
+
+/// LibraryContext holds context for a library which can be passed around the application.
+#[derive(Clone)]
+pub struct LibraryContext {
+ /// id holds the ID of the current library.
+ pub id: Uuid,
+ /// config holds the configuration of the current library.
+ pub config: LibraryConfig,
+ /// db holds the database client for the current library.
+ pub db: Arc,
+ /// node_local_id holds the local ID of the node which is running the library.
+ pub node_local_id: i32,
+ /// node_context holds the node context for the node which this library is running on.
+ pub(super) node_context: NodeContext,
+}
+
+impl LibraryContext {
+ pub(crate) async fn spawn_job(&self, job: Box) {
+ self.node_context.jobs.clone().ingest(self, job).await;
+ }
+
+ pub(crate) async fn queue_job(&self, job: Box) {
+ self.node_context.jobs.ingest_queue(self, job).await;
+ }
+
+ pub(crate) async fn emit(&self, event: CoreEvent) {
+ self.node_context
+ .event_sender
+ .send(event)
+ .await
+ .unwrap_or_else(|e| {
+ println!("Failed to emit event. {:?}", e);
+ });
+ }
+
+ pub(crate) fn config(&self) -> Arc {
+ self.node_context.config.clone()
+ }
+}
diff --git a/core/src/library/library_manager.rs b/core/src/library/library_manager.rs
new file mode 100644
index 000000000..ffabb814a
--- /dev/null
+++ b/core/src/library/library_manager.rs
@@ -0,0 +1,264 @@
+use std::{
+ env, fs, io,
+ path::{Path, PathBuf},
+ str::FromStr,
+ sync::Arc,
+};
+
+use thiserror::Error;
+use tokio::sync::RwLock;
+use uuid::Uuid;
+
+use crate::{
+ node::Platform,
+ prisma::{self, node},
+ util::db::load_and_migrate,
+ ClientQuery, CoreEvent, NodeContext,
+};
+
+use super::{LibraryConfig, LibraryConfigWrapped, LibraryContext};
+
+/// LibraryManager is a singleton that manages all libraries for a node.
+pub struct LibraryManager {
+ /// libraries_dir holds the path to the directory where libraries are stored.
+ libraries_dir: PathBuf,
+ /// libraries holds the list of libraries which are currently loaded into the node.
+ libraries: RwLock>,
+ /// node_context holds the context for the node which this library manager is running on.
+ node_context: NodeContext,
+}
+
+#[derive(Error, Debug)]
+pub enum LibraryManagerError {
+ #[error("error saving or loading the config from the filesystem")]
+ IO(#[from] io::Error),
+ #[error("error serializing or deserializing the JSON in the config file")]
+ Json(#[from] serde_json::Error),
+ #[error("Database error")]
+ Database(#[from] prisma::QueryError),
+ #[error("Library not found error")]
+ LibraryNotFound,
+ #[error("error migrating the config file")]
+ Migration(String),
+ #[error("failed to parse uuid")]
+ Uuid(#[from] uuid::Error),
+}
+
+impl LibraryManager {
+ pub(crate) async fn new(
+ libraries_dir: PathBuf,
+ node_context: NodeContext,
+ ) -> Result, LibraryManagerError> {
+ fs::create_dir_all(&libraries_dir)?;
+
+ let mut libraries = Vec::new();
+ for entry in fs::read_dir(&libraries_dir)?
+ .into_iter()
+ .filter_map(|entry| entry.ok())
+ .filter(|entry| {
+ entry.path().is_file()
+ && entry
+ .path()
+ .extension()
+ .map(|v| &*v == "sdlibrary")
+ .unwrap_or(false)
+ }) {
+ let config_path = entry.path();
+ let library_id = match Path::new(&config_path)
+ .file_stem()
+ .map(|v| v.to_str().map(Uuid::from_str))
+ {
+ Some(Some(Ok(id))) => id,
+ _ => {
+ println!("Attempted to load library from path '{}' but it has an invalid filename. Skipping...", config_path.display());
+ continue;
+ }
+ };
+
+ let db_path = config_path.clone().with_extension("db");
+ if !db_path.exists() {
+ println!(
+ "Found library '{}' but no matching database file was found. Skipping...",
+ config_path.display()
+ );
+ continue;
+ }
+
+ let config = LibraryConfig::read(config_path).await?;
+ libraries.push(
+ Self::load(
+ library_id,
+ db_path.to_str().unwrap(),
+ config,
+ node_context.clone(),
+ )
+ .await?,
+ );
+ }
+
+ let this = Arc::new(Self {
+ libraries: RwLock::new(libraries),
+ libraries_dir,
+ node_context,
+ });
+
+ // TODO: Remove this before merging PR -> Currently it exists to make the app usable
+ if this.libraries.read().await.len() == 0 {
+ this.create(LibraryConfig {
+ name: "My Default Library".into(),
+ ..Default::default()
+ })
+ .await
+ .unwrap();
+ }
+
+ Ok(this)
+ }
+
+ /// create creates a new library with the given config and mounts it into the running [LibraryManager].
+ pub(crate) async fn create(&self, config: LibraryConfig) -> Result<(), LibraryManagerError> {
+ let id = Uuid::new_v4();
+ LibraryConfig::save(
+ Path::new(&self.libraries_dir).join(format!("{id}.sdlibrary")),
+ &config,
+ )
+ .await?;
+
+ let library = Self::load(
+ id,
+ self.libraries_dir.join(format!("{id}.db")),
+ config,
+ self.node_context.clone(),
+ )
+ .await?;
+
+ self.libraries.write().await.push(library);
+
+ self.node_context
+ .emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries))
+ .await;
+
+ Ok(())
+ }
+
+ pub(crate) async fn get_all_libraries_config(&self) -> Vec {
+ self.libraries
+ .read()
+ .await
+ .iter()
+ .map(|lib| LibraryConfigWrapped {
+ config: lib.config.clone(),
+ uuid: lib.id.to_string(),
+ })
+ .collect()
+ }
+
+ pub(crate) async fn edit_library(
+ &self,
+ id: String,
+ name: Option,
+ description: Option,
+ ) -> Result<(), LibraryManagerError> {
+ // check library is valid
+ let mut libraries = self.libraries.write().await;
+ let library = libraries
+ .iter_mut()
+ .find(|lib| lib.id == Uuid::from_str(&id).unwrap())
+ .ok_or(LibraryManagerError::LibraryNotFound)?;
+
+ // update the library
+ if let Some(name) = name {
+ library.config.name = name;
+ }
+ if let Some(description) = description {
+ library.config.description = description;
+ }
+
+ LibraryConfig::save(
+ Path::new(&self.libraries_dir).join(format!("{id}.sdlibrary")),
+ &library.config,
+ )
+ .await?;
+
+ self.node_context
+ .emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries))
+ .await;
+ Ok(())
+ }
+
+ pub async fn delete_library(&self, id: String) -> Result<(), LibraryManagerError> {
+ let mut libraries = self.libraries.write().await;
+
+ let id = Uuid::parse_str(&id)?;
+
+ let library = libraries
+ .iter()
+ .find(|l| l.id == id)
+ .ok_or(LibraryManagerError::LibraryNotFound)?;
+
+ fs::remove_file(Path::new(&self.libraries_dir).join(format!("{}.db", library.id)))?;
+ fs::remove_file(Path::new(&self.libraries_dir).join(format!("{}.sdlibrary", library.id)))?;
+
+ libraries.retain(|l| l.id != id);
+
+ self.node_context
+ .emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries))
+ .await;
+ Ok(())
+ }
+
+ // get_ctx will return the library context for the given library id.
+ pub(crate) async fn get_ctx(&self, library_id: String) -> Option {
+ self.libraries
+ .read()
+ .await
+ .iter()
+ .find(|lib| lib.id.to_string() == library_id)
+ .map(Clone::clone)
+ }
+
+ /// load the library from a given path
+ pub(crate) async fn load(
+ id: Uuid,
+ db_path: impl AsRef,
+ config: LibraryConfig,
+ node_context: NodeContext,
+ ) -> Result {
+ let db = Arc::new(
+ load_and_migrate(&format!("file:{}", db_path.as_ref().to_string_lossy()))
+ .await
+ .unwrap(),
+ );
+
+ let node_config = node_context.config.get().await;
+
+ let platform = match env::consts::OS {
+ "windows" => Platform::Windows,
+ "macos" => Platform::MacOS,
+ "linux" => Platform::Linux,
+ _ => Platform::Unknown,
+ };
+
+ let node_data = db
+ .node()
+ .upsert(
+ node::pub_id::equals(id.to_string()),
+ (
+ node::pub_id::set(id.to_string()),
+ node::name::set(node_config.name.clone()),
+ vec![node::platform::set(platform as i32)],
+ ),
+ vec![node::name::set(node_config.name.clone())],
+ )
+ .exec()
+ .await?;
+
+ Ok(LibraryContext {
+ id,
+ config,
+ db,
+ node_local_id: node_data.id,
+ node_context,
+ })
+ }
+}
diff --git a/core/src/library/loader.rs b/core/src/library/loader.rs
index da4826f7b..8b1378917 100644
--- a/core/src/library/loader.rs
+++ b/core/src/library/loader.rs
@@ -1,102 +1 @@
-use log::info;
-use std::fmt::Debug;
-use std::path::{Path, PathBuf};
-use uuid::Uuid;
-use crate::{
- node::{get_nodestate, LibraryState},
- prisma::library,
- util::db::{run_migrations, DatabaseError},
- CoreContext,
-};
-
-pub static LIBRARY_DB_NAME: &str = "library.db";
-pub static DEFAULT_NAME: &str = "My Library";
-
-pub fn get_library_path(data_path: impl AsRef) -> PathBuf {
- data_path.as_ref().join(LIBRARY_DB_NAME)
-}
-
-// pub async fn get(core: &Node) -> Result {
-// let config = get_nodestate();
-// let db = &core.database;
-
-// let library_state = config.get_current_library();
-
-// info!("{:?}", library_state);
-
-// // get library from db
-// let library = match db
-// .library()
-// .find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
-// .exec()
-// .await?
-// {
-// Some(library) => Ok(library),
-// None => {
-// // update config library state to offline
-// // config.libraries
-
-// Err(anyhow::anyhow!("library_not_found"))
-// }
-// };
-
-// Ok(library.unwrap())
-// }
-
-pub async fn load(
- ctx: &CoreContext,
- library_path: impl AsRef + Debug,
- library_id: &str,
-) -> Result<(), DatabaseError> {
- let mut config = get_nodestate();
-
- info!("Initializing library: {} {:#?}", &library_id, library_path);
-
- if config.current_library_uuid != library_id {
- config.current_library_uuid = library_id.to_string();
- config.save().await;
- }
- // create connection with library database & run migrations
- run_migrations(ctx).await?;
- // if doesn't exist, mark as offline
- Ok(())
-}
-
-pub async fn create(ctx: &CoreContext, name: Option) -> Result<(), ()> {
- let mut config = get_nodestate();
-
- let uuid = Uuid::new_v4().to_string();
-
- info!("Creating library {:?}, UUID: {:?}", name, uuid);
-
- let library_state = LibraryState {
- library_uuid: uuid.clone(),
- library_path: get_library_path(config.data_path.as_ref().unwrap()),
- ..LibraryState::default()
- };
-
- run_migrations(ctx).await.unwrap();
-
- config.libraries.push(library_state);
-
- config.current_library_uuid = uuid;
-
- config.save().await;
-
- let library = ctx
- .database
- .library()
- .create(
- library::pub_id::set(config.current_library_uuid),
- library::name::set(name.unwrap_or_else(|| DEFAULT_NAME.into())),
- vec![],
- )
- .exec()
- .await
- .unwrap();
-
- info!("library created in database: {:?}", library);
-
- Ok(())
-}
diff --git a/core/src/library/mod.rs b/core/src/library/mod.rs
index 17dc6db10..23aed8efa 100644
--- a/core/src/library/mod.rs
+++ b/core/src/library/mod.rs
@@ -1,7 +1,11 @@
-mod loader;
+mod library_config;
+mod library_ctx;
+mod library_manager;
mod statistics;
-pub use loader::*;
+pub use library_config::*;
+pub use library_ctx::*;
+pub use library_manager::*;
pub use statistics::*;
use thiserror::Error;
diff --git a/core/src/library/statistics.rs b/core/src/library/statistics.rs
index 91157286c..5c078e30b 100644
--- a/core/src/library/statistics.rs
+++ b/core/src/library/statistics.rs
@@ -1,16 +1,10 @@
-use crate::{
- node::get_nodestate,
- prisma::{library, library_statistics::*},
- sys::Volume,
- CoreContext,
-};
+use crate::{prisma::statistics::*, sys::Volume};
use fs_extra::dir::get_size;
-use log::info;
use serde::{Deserialize, Serialize};
use tokio::fs;
use ts_rs::TS;
-use super::LibraryError;
+use super::{LibraryContext, LibraryError};
#[derive(Debug, Serialize, Deserialize, TS, Clone, Default)]
#[ts(export)]
@@ -39,46 +33,22 @@ impl From for Statistics {
}
impl Statistics {
- pub async fn retrieve(ctx: &CoreContext) -> Result {
- let config = get_nodestate();
- let library_data = config.get_current_library();
-
+ pub async fn retrieve(ctx: &LibraryContext) -> Result {
let library_statistics_db = ctx
- .database
- .library_statistics()
- .find_unique(id::equals(library_data.library_id))
+ .db
+ .statistics()
+ .find_unique(id::equals(ctx.node_local_id))
.exec()
.await?
.map_or_else(Default::default, Into::into);
Ok(library_statistics_db)
}
- pub async fn calculate(ctx: &CoreContext) -> Result {
- let config = get_nodestate();
- // get library from client state
- let library_data = config.get_current_library();
- info!(
- "Calculating library statistics {:?}",
- library_data.library_uuid
- );
- // get library from db
- let library = ctx
- .database
- .library()
- .find_unique(library::pub_id::equals(
- library_data.library_uuid.to_string(),
- ))
- .exec()
- .await?;
-
- if library.is_none() {
- return Err(LibraryError::LibraryNotFound);
- }
-
- let library_statistics = ctx
- .database
- .library_statistics()
- .find_unique(id::equals(library_data.library_id))
+ pub async fn calculate(ctx: &LibraryContext) -> Result {
+ let _statistics = ctx
+ .db
+ .statistics()
+ .find_unique(id::equals(ctx.node_local_id))
.exec()
.await?;
@@ -97,14 +67,12 @@ impl Statistics {
}
}
- let library_db_size = match fs::metadata(library_data.library_path).await {
+ let library_db_size = match fs::metadata(ctx.config().data_directory()).await {
Ok(metadata) => metadata.len(),
Err(_) => 0,
};
- info!("{:?}", library_statistics);
-
- let thumbnail_folder_size = get_size(config.data_path.unwrap().join("thumbnails"));
+ let thumbnail_folder_size = get_size(ctx.config().data_directory().join("thumbnails"));
let statistics = Statistics {
library_db_size: library_db_size.to_string(),
@@ -114,19 +82,11 @@ impl Statistics {
..Statistics::default()
};
- let library_local_id = match library {
- Some(library) => library.id,
- None => library_data.library_id,
- };
-
- ctx.database
- .library_statistics()
+ ctx.db
+ .statistics()
.upsert(
- library_id::equals(library_local_id),
- (
- library_id::set(library_local_id),
- vec![library_db_size::set(statistics.library_db_size.clone())],
- ),
+ id::equals(1),
+ vec![library_db_size::set(statistics.library_db_size.clone())],
vec![
total_file_count::set(statistics.total_file_count),
total_bytes_used::set(statistics.total_bytes_used.clone()),
diff --git a/core/src/node/config.rs b/core/src/node/config.rs
new file mode 100644
index 000000000..ea1a09f1a
--- /dev/null
+++ b/core/src/node/config.rs
@@ -0,0 +1,149 @@
+use serde::{Deserialize, Serialize};
+use std::fs::File;
+use std::io::{self, BufReader, Seek, SeekFrom, Write};
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use thiserror::Error;
+use tokio::sync::{RwLock, RwLockWriteGuard};
+use ts_rs::TS;
+use uuid::Uuid;
+
+/// NODE_STATE_CONFIG_NAME is the name of the file which stores the NodeState
+pub const NODE_STATE_CONFIG_NAME: &str = "node_state.sdconfig";
+
+/// ConfigMetadata is a part of node configuration that is loaded before the main configuration and contains information about the schema of the config.
+/// This allows us to migrate breaking changes to the config format between Spacedrive releases.
+#[derive(Debug, Serialize, Deserialize, Clone, TS)]
+#[ts(export)]
+pub struct ConfigMetadata {
+ /// version of Spacedrive. Determined from `CARGO_PKG_VERSION` environment variable.
+ pub version: Option<String>,
+}
+
+impl Default for ConfigMetadata {
+ fn default() -> Self {
+ Self {
+ version: Some(env!("CARGO_PKG_VERSION").into()),
+ }
+ }
+}
+
+/// NodeConfig is the configuration for a node. This is shared between all libraries and is stored in a JSON file on disk.
+#[derive(Debug, Serialize, Deserialize, Clone, TS)]
+#[ts(export)]
+pub struct NodeConfig {
+ #[serde(flatten)]
+ pub metadata: ConfigMetadata,
+ /// id is a unique identifier for the current node. Each node has a public identifier (this one) and is given a local id for each library (done within the library code).
+ pub id: Uuid,
+ /// name is the display name of the current node. This is set by the user and is shown in the UI. // TODO: Length validation so it can fit in DNS record
+ pub name: String,
+ // the port this node uses for peer to peer communication. By default a random free port will be chosen each time the application is started.
+ pub p2p_port: Option<u32>,
+}
+
+#[derive(Error, Debug)]
+pub enum NodeConfigError {
+ #[error("error saving or loading the config from the filesystem")]
+ IO(#[from] io::Error),
+ #[error("error serializing or deserializing the JSON in the config file")]
+ Json(#[from] serde_json::Error),
+ #[error("error migrating the config file")]
+ Migration(String),
+}
+
+impl NodeConfig {
+ fn default() -> Self {
+ NodeConfig {
+ id: Uuid::new_v4(),
+ name: match hostname::get() {
+ Ok(hostname) => hostname.to_string_lossy().into_owned(),
+ Err(err) => {
+ eprintln!("Falling back to default node name as an error occurred getting your systems hostname: '{}'", err);
+ "my-spacedrive".into()
+ }
+ },
+ p2p_port: None,
+ metadata: ConfigMetadata {
+ version: Some(env!("CARGO_PKG_VERSION").into()),
+ },
+ }
+ }
+}
+
+pub struct NodeConfigManager(RwLock<NodeConfig>, PathBuf);
+
+impl NodeConfigManager {
+ /// new will create a new NodeConfigManager with the given path to the config file.
+ pub(crate) async fn new(data_path: PathBuf) -> Result<Arc<Self>, NodeConfigError> {
+ Ok(Arc::new(Self(
+ RwLock::new(Self::read(&data_path).await?),
+ data_path,
+ )))
+ }
+
+ /// get will return the current NodeConfig in a read only state.
+ pub(crate) async fn get(&self) -> NodeConfig {
+ self.0.read().await.clone()
+ }
+
+ /// data_directory returns the path to the directory storing the configuration data.
+ pub(crate) fn data_directory(&self) -> PathBuf {
+ self.1.clone()
+ }
+
+ /// write allows the user to update the configuration. This is done in a closure while a Mutex lock is held so that the user can't cause a race condition if the config were to be updated in multiple parts of the app at the same time.
+ #[allow(unused)]
+ pub(crate) async fn write<F: FnOnce(RwLockWriteGuard<'_, NodeConfig>)>(
+ &self,
+ mutation_fn: F,
+ ) -> Result<NodeConfig, NodeConfigError> {
+ mutation_fn(self.0.write().await);
+ let config = self.0.read().await;
+ Self::save(&self.1, &config).await?;
+ Ok(config.clone())
+ }
+
+ /// read will read the configuration from disk and return it.
+ async fn read(base_path: &PathBuf) -> Result<NodeConfig, NodeConfigError> {
+ let path = Path::new(base_path).join(NODE_STATE_CONFIG_NAME);
+
+ match path.exists() {
+ true => {
+ let mut file = File::open(&path)?;
+ let base_config: ConfigMetadata =
+ serde_json::from_reader(BufReader::new(&mut file))?;
+
+ Self::migrate_config(base_config.version, path)?;
+
+ file.seek(SeekFrom::Start(0))?;
+ Ok(serde_json::from_reader(BufReader::new(&mut file))?)
+ }
+ false => {
+ let config = NodeConfig::default();
+ Self::save(base_path, &config).await?;
+ Ok(config)
+ }
+ }
+ }
+
+ /// save will write the configuration back to disk
+ async fn save(base_path: &PathBuf, config: &NodeConfig) -> Result<(), NodeConfigError> {
+ let path = Path::new(base_path).join(NODE_STATE_CONFIG_NAME);
+ File::create(path)?.write_all(serde_json::to_string(config)?.as_bytes())?;
+ Ok(())
+ }
+
+ /// migrate_config is a function used to apply breaking changes to the config file.
+ fn migrate_config(
+ current_version: Option<String>,
+ config_path: PathBuf,
+ ) -> Result<(), NodeConfigError> {
+ match current_version {
+ None => {
+ Err(NodeConfigError::Migration(format!("Your Spacedrive config file stored at '{}' is missing the `version` field. If you just upgraded please delete the file and restart Spacedrive! Please note this upgrade will stop using your old 'library.db' as the folder structure has changed.", config_path.display())))
+ }
+ _ => Ok(()),
+ }
+ }
+}
diff --git a/core/src/node/mod.rs b/core/src/node/mod.rs
index 309815c97..5276ab015 100644
--- a/core/src/node/mod.rs
+++ b/core/src/node/mod.rs
@@ -1,18 +1,10 @@
-use crate::{
- prisma::{self, node},
- Node,
-};
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
-use log::info;
use serde::{Deserialize, Serialize};
-use std::env;
-use thiserror::Error;
use ts_rs::TS;
-
-mod state;
-
-pub use state::*;
+mod config;
+use crate::prisma::node;
+pub use config::*;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
@@ -52,64 +44,3 @@ pub enum Platform {
IOS = 4,
Android = 5,
}
-
-impl LibraryNode {
- pub async fn create(node: &Node) -> Result<(), NodeError> {
- info!("Creating node...");
- let mut config = state::get_nodestate();
-
- let hostname = match hostname::get() {
- Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(),
- Err(_) => "unknown".to_owned(),
- };
-
- let platform = match env::consts::OS {
- "windows" => Platform::Windows,
- "macos" => Platform::MacOS,
- "linux" => Platform::Linux,
- _ => Platform::Unknown,
- };
-
- let node = if let Some(node) = node
- .database
- .node()
- .find_unique(node::pub_id::equals(config.node_pub_id.clone()))
- .exec()
- .await?
- {
- node
- } else {
- node.database
- .node()
- .create(
- node::pub_id::set(config.node_pub_id.clone()),
- node::name::set(hostname.clone()),
- vec![node::platform::set(platform as i32)],
- )
- .exec()
- .await?
- };
-
- config.node_name = hostname;
- config.node_id = node.id;
- config.save().await;
-
- info!("node: {:?}", node);
-
- Ok(())
- }
-
- // pub async fn get_nodes(ctx: &CoreContext) -> Result<Vec<node::Data>, NodeError> {
- // let db = &ctx.database;
-
- // let _node = db.node().find_many(vec![]).exec().await?;
-
- // Ok(_node)
- // }
-}
-
-#[derive(Error, Debug)]
-pub enum NodeError {
- #[error("Database error")]
- DatabaseError(#[from] prisma::QueryError),
-}
diff --git a/core/src/node/state.rs b/core/src/node/state.rs
index 6892b0d37..8b1378917 100644
--- a/core/src/node/state.rs
+++ b/core/src/node/state.rs
@@ -1,109 +1 @@
-use lazy_static::lazy_static;
-use serde::{Deserialize, Serialize};
-use std::path::PathBuf;
-use std::sync::RwLock;
-use tokio::io::AsyncReadExt;
-use tokio::{
- fs,
- io::{AsyncWriteExt, BufReader},
-};
-use ts_rs::TS;
-use uuid::Uuid;
-#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
-#[ts(export)]
-pub struct NodeState {
- pub node_pub_id: String,
- pub node_id: i32,
- pub node_name: String,
- // config path is stored as struct can exist only in memory during startup and be written to disk later without supplying path
- pub data_path: Option<PathBuf>,
- // the port this node uses to listen for incoming connections
- pub tcp_port: u32,
- // all the libraries loaded by this node
- pub libraries: Vec,
- // used to quickly find the default library
- pub current_library_uuid: String,
-}
-
-pub static NODE_STATE_CONFIG_NAME: &str = "node_state.json";
-
-#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
-#[ts(export)]
-pub struct LibraryState {
- pub library_uuid: String,
- pub library_id: i32,
- pub library_path: PathBuf,
- pub offline: bool,
-}
-
-// global, thread-safe storage for node state
-lazy_static! {
- static ref CONFIG: RwLock