Merging with main

This commit is contained in:
Ericson Fogo Soares 2022-07-11 18:21:50 -03:00
commit 75a8b49661
106 changed files with 2856 additions and 1865 deletions

View file

@ -1,10 +1,181 @@
Write-Host "This script is currently being used by CI and will need some more work before anyone can use it like the 'setup-system.sh' script for macOS and Linux!"
# Get ci parameter to check if running with ci
param(
[Parameter()]
[Switch]$ci
)
$VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
Invoke-WebRequest "https://www.gyan.dev/ffmpeg/builds/ffmpeg-release-full-shared.7z" -OutFile ffmpeg-release-full-shared.7z
7z x ffmpeg-release-full-shared.7z
mkdir ffmpeg
mv ffmpeg-*/* ffmpeg/
Add-Content $env:GITHUB_ENV "FFMPEG_DIR=${pwd}\ffmpeg`n"
Add-Content $env:GITHUB_PATH "${pwd}\ffmpeg\bin`n"
# Get temp folder
$temp = [System.IO.Path]::GetTempPath()
# Get current running dir
$currentLocation = $((Get-Location).path)
# Check to see if a command exists (eg if an app is installed)
# Test whether a command (e.g. an installed application) can be resolved.
# Returns $true when Get-Command finds it, $false otherwise. The caller's
# $ErrorActionPreference is saved and restored so this probe has no lasting
# effect on error handling.
Function CheckCommand {
    Param ($command)
    # Force terminating errors so a missing command lands in the catch block.
    $previousPreference = $ErrorActionPreference
    $ErrorActionPreference = 'Stop'
    try {
        if (Get-Command $command) {
            return $true
        }
    }
    catch {
        return $false
    }
    finally {
        # Put the caller's error-handling preference back.
        $ErrorActionPreference = $previousPreference
    }
}
Write-Host "Spacedrive Development Environment Setup" -ForegroundColor Magenta
# Overview of what this script does. Fix: the last step was mislabeled with a
# duplicate "4)" — it is the fifth step.
Write-Host @"
To set up your machine for Spacedrive development, this script will do the following:
1) Check for Rust and Cargo
2) Install pnpm (if not installed)
3) Install the latest version of Node.js using pnpm
4) Install LLVM (compiler for ffmpeg-rust)
5) Download ffmpeg and set as an environment variable
"@
Write-Host "Checking for Rust and Cargo..." -ForegroundColor Yellow
Start-Sleep -Milliseconds 150
$cargoCheck = CheckCommand cargo
if ($cargoCheck -eq $false) {
Write-Host @"
Cargo is not installed.
To use Spacedrive on Windows, Cargo needs to be installed.
The Visual Studio C++ Build tools are also required.
Instructions can be found here:
https://tauri.app/v1/guides/getting-started/prerequisites/#setting-up-windows
Once you have installed Cargo, re-run this script.
"@
Exit
}
else {
Write-Host "Cargo is installed."
}
Write-Host
Write-Host "Checking for pnpm..." -ForegroundColor Yellow
Start-Sleep -Milliseconds 150
$pnpmCheck = CheckCommand pnpm
if ($pnpmCheck -eq $false) {
Write-Host "pnpm is not installed. Installing now."
Write-Host "Running the pnpm installer..."
#pnpm installer taken from https://pnpm.io
Invoke-WebRequest https://get.pnpm.io/install.ps1 -useb | Invoke-Expression
# Reset the PATH env variables to make sure pnpm is accessible
$env:PNPM_HOME = [System.Environment]::GetEnvironmentVariable("PNPM_HOME", "User")
$env:Path = [System.Environment]::ExpandEnvironmentVariables([System.Environment]::GetEnvironmentVariable("Path", "User"))
}
else {
Write-Host "pnpm is installed."
}
# A GitHub Action takes care of installing node, so this isn't necessary if running in the ci.
if ($ci -eq $True) {
Write-Host
Write-Host "Running with Ci, skipping Node install." -ForegroundColor Yellow
}
else {
Write-Host
Write-Host "Using pnpm to install the latest version of Node..." -ForegroundColor Yellow
Write-Host "This will set your global Node version to the latest!"
Start-Sleep -Milliseconds 150
# Runs the pnpm command to use the latest version of node, which also installs it
Start-Process -Wait -FilePath "pnpm" -ArgumentList "env use --global latest" -PassThru -Verb runAs
}
# The ci has LLVM installed already, so we instead just set the env variables.
if ($ci -eq $True) {
Write-Host
Write-Host "Running with Ci, skipping LLVM install." -ForegroundColor Yellow
$VCINSTALLDIR = $(& "${env:ProgramFiles(x86)}\Microsoft Visual Studio\Installer\vswhere.exe" -latest -property installationPath)
Add-Content $env:GITHUB_ENV "LIBCLANG_PATH=${VCINSTALLDIR}\VC\Tools\LLVM\x64\bin`n"
} else {
Write-Host
Write-Host "Downloading the LLVM installer..." -ForegroundColor Yellow
# Downloads latest installer for LLVM
$filenamePattern = "*-win64.exe"
$releasesUri = "https://api.github.com/repos/llvm/llvm-project/releases/latest"
$downloadUri = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).browser_download_url
Start-BitsTransfer -Source $downloadUri -Destination "$temp\llvm.exe"
Write-Host
Write-Host "Running the LLVM installer..." -ForegroundColor Yellow
Write-Host "Please follow the instructions to install LLVM."
Write-Host "Ensure you add LLVM to your PATH."
Start-Process "$temp\llvm.exe" -Wait
}
Write-Host
Write-Host "Downloading the latest ffmpeg build..." -ForegroundColor Yellow
# Downloads the latest shared build of ffmpeg from GitHub
$filenamePattern = "*-full_build-shared.zip"
$releasesUri = "https://api.github.com/repos/GyanD/codexffmpeg/releases/latest"
$downloadUri = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).browser_download_url
$filename = ((Invoke-RestMethod -Method GET -Uri $releasesUri).assets | Where-Object name -like $filenamePattern ).name
$remove = ".zip"
$foldername = $filename.Substring(0, ($filename.Length - $remove.Length))
Start-BitsTransfer -Source $downloadUri -Destination "$temp\ffmpeg.zip"
Write-Host
Write-Host "Expanding ffmpeg zip..." -ForegroundColor Yellow
Expand-Archive "$temp\ffmpeg.zip" $HOME -ErrorAction SilentlyContinue
Remove-Item "$temp\ffmpeg.zip"
Write-Host
Write-Host "Setting environment variables..." -ForegroundColor Yellow
if ($ci -eq $True) {
# If running in ci, we need to use GITHUB_ENV and GITHUB_PATH instead of the normal PATH env variables, so we set them here
Add-Content $env:GITHUB_ENV "FFMPEG_DIR=$HOME\$foldername`n"
Add-Content $env:GITHUB_PATH "$HOME\$foldername\bin`n"
}
else {
# Sets environment variable for ffmpeg
[System.Environment]::SetEnvironmentVariable('FFMPEG_DIR', "$HOME\$foldername", [System.EnvironmentVariableTarget]::User)
}
Write-Host
Write-Host "Copying Required .dll files..." -ForegroundColor Yellow
# Create target\debug folder, continue if already exists
New-Item -Path $currentLocation\target\debug -ItemType Directory -ErrorAction SilentlyContinue
# Copies all .dll required for rust-ffmpeg to target\debug folder
Get-ChildItem "$HOME\$foldername\bin" -recurse -filter *.dll | Copy-Item -Destination "$currentLocation\target\debug"
Write-Host
Write-Host "Your machine has been setup for Spacedrive development!"

View file

@ -23,8 +23,13 @@ fi
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
if which apt-get &> /dev/null; then
echo "Detected 'apt' based distro!"
if [[ "$(lsb_release -si)" == "Pop" ]]; then
DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
else
DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
fi
DEBIAN_TAURI_DEPS="libwebkit2gtk-4.0-dev build-essential curl wget libssl-dev libgtk-3-dev libappindicator3-dev librsvg2-dev" # Tauri dependencies
DEBIAN_FFMPEG_DEPS="libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavresample-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg" # FFMPEG dependencies
DEBIAN_BINDGEN_DEPS="pkg-config clang" # Bindgen dependencies - it's used by a dependency of Spacedrive
sudo apt-get -y update

View file

@ -36,7 +36,7 @@ jobs:
id: pnpm-cache
run: |
echo "::set-output name=pnpm_cache_dir::$(pnpm store path)"
- uses: actions/cache@v3
name: Setup pnpm cache
with:
@ -44,7 +44,7 @@ jobs:
key: ${{ runner.os }}-pnpm-store-${{ hashFiles('**/pnpm-lock.yaml') }}
restore-keys: |
${{ runner.os }}-pnpm-store-
- name: Install pnpm dependencies
run: pnpm --frozen-lockfile i
@ -81,7 +81,7 @@ jobs:
with:
version: 7
run_install: false
- name: Install Rust stable
uses: actions-rs/toolchain@v1
with:
@ -89,7 +89,7 @@ jobs:
profile: minimal
override: true
components: rustfmt, rust-src
- name: Cache Rust Dependencies
uses: Swatinem/rust-cache@v1
with:
@ -98,10 +98,10 @@ jobs:
- name: Run 'setup-system.sh' script
if: matrix.platform == 'ubuntu-latest' || matrix.platform == 'macos-latest'
run: ./.github/scripts/setup-system.sh
- name: Run 'setup-system.ps1' script
if: matrix.platform == 'windows-latest'
run: ./.github/scripts/setup-system.ps1
run: ./.github/scripts/setup-system.ps1 -ci
- name: Get pnpm store directory
id: pnpm-cache
@ -116,7 +116,7 @@ jobs:
${{ runner.os }}-pnpm-store-
- name: Install pnpm dependencies
run: pnpm --frozen-lockfile i
- name: Cache Prisma codegen
id: cache-prisma
uses: actions/cache@v3
@ -127,13 +127,13 @@ jobs:
- name: Generate Prisma client
working-directory: core
if: steps.cache-prisma.outputs.cache-hit != 'true'
run: cargo run --frozen -p prisma-cli --release -- generate
run: cargo run -p prisma-cli --release -- generate
- name: Cargo fetch
run: cargo fetch
- name: Check Core
run: cargo check --frozen -p sdcore --release
run: cargo check -p sdcore --release
- name: Bundle Desktop
run: pnpm desktop tauri build
@ -141,7 +141,7 @@ jobs:
- name: Build Server
if: matrix.platform == 'ubuntu-latest'
run: |
cargo build --frozen -p server --release
cargo build -p server --release
cp ./target/release/server ./apps/server/server
- name: Determine image name & tag

View file

@ -41,6 +41,8 @@ This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/instal
- `$ cd spacedrive`
- For Linux or MacOS users run: `./.github/scripts/setup-system.sh`
- This will install FFMPEG and any other required dependencies for Spacedrive to build.
- For Windows users run using PowerShell: `.\.github\scripts\setup-system.ps1`
- This will install pnpm, LLVM, FFMPEG and any other required dependencies for Spacedrive to build.
- `$ pnpm i`
- `$ pnpm prep` - Runs all necessary codegen & builds required dependencies.

354
Cargo.lock generated
View file

@ -51,9 +51,9 @@ dependencies = [
[[package]]
name = "actix-http"
version = "3.1.0"
version = "3.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd2e9f6794b5826aff6df65e3a0d0127b271d1c03629c774238f3582e903d4e4"
checksum = "6f9ffb6db08c1c3a1f4aef540f1a63193adc73c4fbd40b75a95fc8c5258f6e51"
dependencies = [
"actix-codec",
"actix-rt",
@ -195,7 +195,7 @@ dependencies = [
"serde_urlencoded",
"smallvec",
"socket2",
"time 0.3.9",
"time 0.3.11",
"url",
]
@ -306,9 +306,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.57"
version = "1.0.58"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "08f9b8508dccb7687a1d6c4ce66b2b0ecef467c94667de27d8d7fe1f8d2a9cdc"
checksum = "bb07d2053ccdbe10e2af2995a2f116c1330396493dc1269f6a91d0ae82e19704"
[[package]]
name = "arrayvec"
@ -419,9 +419,9 @@ checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
[[package]]
name = "backtrace"
version = "0.3.65"
version = "0.3.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
dependencies = [
"addr2line",
"cc",
@ -588,7 +588,7 @@ dependencies = [
"serde",
"serde_bytes",
"serde_json",
"time 0.3.9",
"time 0.3.11",
"uuid 0.8.2",
]
@ -615,9 +615,9 @@ checksum = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7"
[[package]]
name = "bytemuck"
version = "1.9.1"
version = "1.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cdead85bdec19c194affaeeb670c0e41fe23de31459efd1c174d049269cf02cc"
checksum = "c53dfa917ec274df8ed3c572698f381a24eef2efba9492d797301b72b6db408a"
[[package]]
name = "byteorder"
@ -642,9 +642,9 @@ dependencies = [
[[package]]
name = "cairo-rs"
version = "0.15.11"
version = "0.15.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "62be3562254e90c1c6050a72aa638f6315593e98c5cdaba9017cedbabf0a5dee"
checksum = "c76ee391b03d35510d9fa917357c7f1855bd9a6659c95a1b392e33f49b3369bc"
dependencies = [
"bitflags",
"cairo-sys-rs",
@ -882,7 +882,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "94d4706de1b0fa5b132270cddffa8585166037822e260a944fe161acd137ca05"
dependencies = [
"percent-encoding",
"time 0.3.9",
"time 0.3.11",
"version_check",
]
@ -964,17 +964,17 @@ dependencies = [
"crossbeam-deque",
"crossbeam-epoch",
"crossbeam-queue 0.3.5",
"crossbeam-utils 0.8.8",
"crossbeam-utils 0.8.10",
]
[[package]]
name = "crossbeam-channel"
version = "0.5.4"
version = "0.5.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53"
checksum = "4c02a4d71819009c192cf4872265391563fd6a84c81ff2c0f2a7026ca4c1d85c"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-utils 0.8.8",
"crossbeam-utils 0.8.10",
]
[[package]]
@ -985,20 +985,20 @@ checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-epoch",
"crossbeam-utils 0.8.8",
"crossbeam-utils 0.8.10",
]
[[package]]
name = "crossbeam-epoch"
version = "0.9.8"
version = "0.9.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c"
checksum = "07db9d94cbd326813772c968ccd25999e5f8ae22f4f8d1b11effa37ef6ce281d"
dependencies = [
"autocfg",
"cfg-if 1.0.0",
"crossbeam-utils 0.8.8",
"lazy_static",
"crossbeam-utils 0.8.10",
"memoffset",
"once_cell",
"scopeguard",
]
@ -1020,7 +1020,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f25d8400f4a7a5778f0e4e52384a48cbd9b5c495d110786187fc750075277a2"
dependencies = [
"cfg-if 1.0.0",
"crossbeam-utils 0.8.8",
"crossbeam-utils 0.8.10",
]
[[package]]
@ -1036,19 +1036,19 @@ dependencies = [
[[package]]
name = "crossbeam-utils"
version = "0.8.8"
version = "0.8.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38"
checksum = "7d82ee10ce34d7bc12c2122495e7593a9c41347ecdd64185af4ecf72cb1a7f83"
dependencies = [
"cfg-if 1.0.0",
"lazy_static",
"once_cell",
]
[[package]]
name = "crypto-common"
version = "0.1.3"
version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8"
checksum = "2ccfd8c0ee4cce11e45b3fd6f9d5e69e0cc62912aa6a0cb1bf4617b0eba5a12f"
dependencies = [
"generic-array 0.14.5",
"typenum",
@ -1186,9 +1186,9 @@ dependencies = [
[[package]]
name = "dbus"
version = "0.9.5"
version = "0.9.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "de0a745c25b32caa56b82a3950f5fec7893a960f4c10ca3b02060b0c38d8c2ce"
checksum = "6f8bcdd56d2e5c4ed26a529c5a9029f5db8290d433497506f958eae3be148eb6"
dependencies = [
"libc",
"libdbus-sys",
@ -1377,9 +1377,9 @@ dependencies = [
[[package]]
name = "either"
version = "1.6.1"
version = "1.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e78d4f1cc4ae33bbfc157ed5d5a5ef3bc29227303d595861deb238fcec4e9457"
checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be"
[[package]]
name = "embed-resource"
@ -1598,14 +1598,14 @@ dependencies = [
[[package]]
name = "filetime"
version = "0.2.16"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c"
checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c"
dependencies = [
"cfg-if 1.0.0",
"libc",
"redox_syscall 0.2.13",
"winapi",
"windows-sys",
]
[[package]]
@ -2010,9 +2010,9 @@ dependencies = [
[[package]]
name = "gif"
version = "0.11.3"
version = "0.11.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c3a7187e78088aead22ceedeee99779455b23fc231fe13ec443f99bb71694e5b"
checksum = "3edd93c6756b4dfaf2709eafcc345ba2636565295c198a9cfbf75fa5e3e00b06"
dependencies = [
"color_quant",
"weezl",
@ -2026,9 +2026,9 @@ checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4"
[[package]]
name = "gio"
version = "0.15.11"
version = "0.15.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0f132be35e05d9662b9fa0fee3f349c6621f7782e0105917f4cc73c1bf47eceb"
checksum = "68fdbc90312d462781a395f7a16d96a2b379bb6ef8cd6310a2df272771c4283b"
dependencies = [
"bitflags",
"futures-channel",
@ -2056,9 +2056,9 @@ dependencies = [
[[package]]
name = "glib"
version = "0.15.11"
version = "0.15.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd124026a2fa8c33a3d17a3fe59c103f2d9fa5bd92c19e029e037736729abeab"
checksum = "edb0306fbad0ab5428b0ca674a23893db909a98582969c9b537be4ced78c505d"
dependencies = [
"bitflags",
"futures-channel",
@ -2228,13 +2228,19 @@ dependencies = [
"ahash",
]
[[package]]
name = "hashbrown"
version = "0.12.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "607c8a29735385251a339424dd462993c0fed8fa09d378f259377df08c126022"
[[package]]
name = "hashlink"
version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7249a3129cbc1ffccd74857f81464a323a152173cdb134e0fd81bc803b29facf"
dependencies = [
"hashbrown",
"hashbrown 0.11.2",
]
[[package]]
@ -2359,9 +2365,9 @@ checksum = "9a3a5bfb195931eeb336b2a7b4d761daec841b97f947d34394601737a7bba5e4"
[[package]]
name = "hyper"
version = "0.14.19"
version = "0.14.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42dc3c131584288d375f2d07f822b0cb012d8c6fb899a5b9fdb3cb7eb9b6004f"
checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac"
dependencies = [
"bytes",
"futures-channel",
@ -2427,7 +2433,7 @@ version = "0.4.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "713f1b139373f96a2e0ce3ac931cd01ee973c3c5dd7c40c0c2efe96ad2b6751d"
dependencies = [
"crossbeam-utils 0.8.8",
"crossbeam-utils 0.8.10",
"globset",
"lazy_static",
"log",
@ -2492,12 +2498,12 @@ dependencies = [
[[package]]
name = "indexmap"
version = "1.8.2"
version = "1.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e6012d540c5baa3589337a98ce73408de9b5a25ec9fc2c6fd6be8f0d39e0ca5a"
checksum = "10a35a97730320ffe8e2d410b5d3b69279b98d2c14bdb8b70ea89ecf7888d41e"
dependencies = [
"autocfg",
"hashbrown",
"hashbrown 0.12.2",
"serde",
]
@ -2866,10 +2872,19 @@ dependencies = [
]
[[package]]
name = "linked-hash-map"
version = "0.5.4"
name = "line-wrap"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3"
checksum = "f30344350a2a51da54c1d53be93fade8a237e545dbcc4bdbe635413f2117cab9"
dependencies = [
"safemem",
]
[[package]]
name = "linked-hash-map"
version = "0.5.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f"
[[package]]
name = "local-channel"
@ -2934,11 +2949,11 @@ dependencies = [
[[package]]
name = "lru"
version = "0.7.6"
version = "0.7.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8015d95cb7b2ddd3c0d32ca38283ceb1eea09b4713ee380bceb942d85a244228"
checksum = "c84e6fe5655adc6ce00787cf7dcaf8dc4f998a0565d23eafc207a8b08ca3349a"
dependencies = [
"hashbrown",
"hashbrown 0.11.2",
]
[[package]]
@ -2958,15 +2973,15 @@ checksum = "c41e0c4fef86961ac6d6f8a82609f55f31b05e4fce149ac5710e439df7619ba4"
[[package]]
name = "mac-notification-sys"
version = "0.5.2"
version = "0.5.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "042f74a606175d72ca483e14e0873fe0f6c003f7af45865b17b16fdaface7203"
checksum = "47a4acb83c904844ca12aafeac6fff6f781cf9e220a985c1db94fd94123993aa"
dependencies = [
"cc",
"dirs-next",
"objc-foundation",
"objc_id",
"time 0.3.9",
"time 0.3.11",
]
[[package]]
@ -3093,9 +3108,9 @@ dependencies = [
[[package]]
name = "mio"
version = "0.8.3"
version = "0.8.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799"
checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf"
dependencies = [
"libc",
"log",
@ -3284,7 +3299,7 @@ dependencies = [
"smallvec",
"subprocess",
"thiserror",
"time 0.3.9",
"time 0.3.11",
"uuid 0.8.2",
]
@ -3439,9 +3454,9 @@ dependencies = [
[[package]]
name = "num-rational"
version = "0.4.0"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a"
checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0"
dependencies = [
"autocfg",
"num-integer",
@ -3538,18 +3553,18 @@ dependencies = [
[[package]]
name = "object"
version = "0.28.4"
version = "0.29.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424"
checksum = "21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53"
dependencies = [
"memchr",
]
[[package]]
name = "once_cell"
version = "1.12.0"
version = "1.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7709cef83f0c1f58f666e746a08b21e0085f7440fa6a29cc194d68aac97a4225"
checksum = "18a6dbe30758c9f83eb00cbea4ac95966305f5a7772f3f42ebfc7fc7eddbd8e1"
[[package]]
name = "opaque-debug"
@ -3569,9 +3584,9 @@ dependencies = [
[[package]]
name = "openssl"
version = "0.10.40"
version = "0.10.41"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e"
checksum = "618febf65336490dfcf20b73f885f5651a0c89c64c2d4a8c3662585a70bf5bd0"
dependencies = [
"bitflags",
"cfg-if 1.0.0",
@ -3601,9 +3616,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"
[[package]]
name = "openssl-sys"
version = "0.9.74"
version = "0.9.75"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "835363342df5fba8354c5b453325b110ffd54044e588c539cf2f20a8014e4cb1"
checksum = "e5f9bd0c2710541a3cda73d6f9ac4f1b240de4ae261065d309dbe73d9dceb42f"
dependencies = [
"autocfg",
"cc",
@ -3980,18 +3995,18 @@ dependencies = [
[[package]]
name = "pin-project"
version = "1.0.10"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e"
checksum = "78203e83c48cffbe01e4a2d35d566ca4de445d79a85372fc64e378bfc812a260"
dependencies = [
"pin-project-internal",
]
[[package]]
name = "pin-project-internal"
version = "1.0.10"
version = "1.0.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb"
checksum = "710faf75e1b33345361201d36d04e98ac1ed8909151a017ed384700836104c74"
dependencies = [
"proc-macro2",
"quote",
@ -4016,6 +4031,20 @@ version = "0.3.25"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae"
[[package]]
name = "plist"
version = "1.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd39bc6cdc9355ad1dc5eeedefee696bb35c34caf21768741e81826c0bbd7225"
dependencies = [
"base64 0.13.0",
"indexmap",
"line-wrap",
"serde",
"time 0.3.11",
"xml-rs",
]
[[package]]
name = "png"
version = "0.11.0"
@ -4247,9 +4276,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5"
[[package]]
name = "proc-macro2"
version = "1.0.39"
version = "1.0.40"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c54b25569025b7fc9651de43004ae593a75ad88543b17178aa5e1b9c4f15f56f"
checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7"
dependencies = [
"unicode-ident",
]
@ -4361,9 +4390,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quote"
version = "1.0.18"
version = "1.0.20"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1feb54ed693b93a84e14094943b84b7c4eae204c512b7ccb95ab0c66d278ad1"
checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804"
dependencies = [
"proc-macro2",
]
@ -4484,7 +4513,7 @@ checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f"
dependencies = [
"crossbeam-channel",
"crossbeam-deque",
"crossbeam-utils 0.8.8",
"crossbeam-utils 0.8.10",
"num_cpus",
]
@ -4516,9 +4545,9 @@ dependencies = [
[[package]]
name = "regex"
version = "1.5.6"
version = "1.6.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d83f127d94bdbcda4c8cc2e50f6f84f4b611f69c902699ca385a39c3a75f9ff1"
checksum = "4c4eb3267174b8c6c2f654116623910a0fef09c4753f8dd83db29c48a0df988b"
dependencies = [
"aho-corasick",
"memchr",
@ -4536,9 +4565,9 @@ dependencies = [
[[package]]
name = "regex-syntax"
version = "0.6.26"
version = "0.6.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "49b3de9ec5dc0a3417da371aab17d729997c15010e7fd24ff707773a33bddb64"
checksum = "a3f87b73ce11b1619a3c6332f45341e0047173771e8b8b73f87bfeefb7b56244"
[[package]]
name = "remove_dir_all"
@ -4736,7 +4765,7 @@ version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366"
dependencies = [
"semver 1.0.10",
"semver 1.0.12",
]
[[package]]
@ -4772,9 +4801,9 @@ dependencies = [
[[package]]
name = "rustversion"
version = "1.0.6"
version = "1.0.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f"
checksum = "a0a5f7c728f5d284929a1cccb5bc19884422bfe6ef4d6c409da2c41838983fcf"
[[package]]
name = "ryu"
@ -4782,6 +4811,12 @@ version = "1.0.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695"
[[package]]
name = "safemem"
version = "0.3.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef703b7cb59335eae2eb93ceb664c0eb7ea6bf567079d843e09420219668e072"
[[package]]
name = "same-file"
version = "1.0.6"
@ -4862,7 +4897,6 @@ dependencies = [
"image",
"include_dir",
"int-enum",
"lazy_static",
"log",
"prisma-client-rust",
"ring 0.17.0-alpha.11",
@ -4872,7 +4906,7 @@ dependencies = [
"thiserror",
"tokio",
"ts-rs",
"uuid 1.1.2",
"uuid 0.8.2",
"walkdir",
"webp",
]
@ -4940,9 +4974,9 @@ dependencies = [
[[package]]
name = "semver"
version = "1.0.10"
version = "1.0.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a41d061efea015927ac527063765e73601444cdc344ba855bc7bd44578b25e1c"
checksum = "a2333e6df6d6598f2b1974829f853c2b4c5f4a6e503c10af918081aa6f8564e1"
dependencies = [
"serde",
]
@ -4964,9 +4998,9 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.137"
version = "1.0.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "61ea8d54c77f8315140a05f4c7237403bf38b72704d031543aa1d16abbf517d1"
checksum = "0171ebb889e45aa68b44aee0859b3eede84c6f5f5c228e6f140c0b2a0a46cad6"
dependencies = [
"serde_derive",
]
@ -4982,9 +5016,9 @@ dependencies = [
[[package]]
name = "serde_derive"
version = "1.0.137"
version = "1.0.139"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1f26faba0c3959972377d3b2d306ee9f71faee9714294e41bb777f83f88578be"
checksum = "dc1d3230c1de7932af58ad8ffbe1d784bd55efd5a9d84ac24f69c72d83543dfb"
dependencies = [
"proc-macro2",
"quote",
@ -4993,9 +5027,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.81"
version = "1.0.82"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c"
checksum = "82c2c1fdcd807d1098552c5b9a36e425e42e9fbd7c6a37a8425f390f781f7fa7"
dependencies = [
"indexmap",
"itoa 1.0.2",
@ -5204,9 +5238,9 @@ checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32"
[[package]]
name = "smallvec"
version = "1.8.0"
version = "1.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83"
checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1"
[[package]]
name = "socket2"
@ -5442,9 +5476,9 @@ dependencies = [
[[package]]
name = "syn"
version = "1.0.96"
version = "1.0.98"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0748dd251e24453cb8717f0354206b91557e4ec8703673a4b30208f2abaf1ebf"
checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd"
dependencies = [
"proc-macro2",
"quote",
@ -5500,9 +5534,9 @@ checksum = "f764005d11ee5f36500a149ace24e00e3da98b0158b3e2d53a7495660d3f4d60"
[[package]]
name = "tao"
version = "0.11.2"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3bfe4c782f0543f667ee3b732d026b2f1c64af39cd52e726dec1ea1f2d8f6b80"
checksum = "a71c32c2fa7bba46b01becf9cf470f6a781573af7e376c5e317a313ecce27545"
dependencies = [
"bitflags",
"cairo-rs",
@ -5537,7 +5571,6 @@ dependencies = [
"raw-window-handle",
"scopeguard",
"serde",
"tao-core-video-sys",
"unicode-segmentation",
"uuid 0.8.2",
"windows 0.37.0",
@ -5545,18 +5578,6 @@ dependencies = [
"x11-dl",
]
[[package]]
name = "tao-core-video-sys"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "271450eb289cb4d8d0720c6ce70c72c8c858c93dd61fc625881616752e6b98f6"
dependencies = [
"cfg-if 1.0.0",
"core-foundation-sys",
"libc",
"objc",
]
[[package]]
name = "tap"
version = "1.0.1"
@ -5576,9 +5597,9 @@ dependencies = [
[[package]]
name = "tauri"
version = "1.0.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e1ebb60bb8f246d5351ff9b7728fdfa7a6eba72baa722ab6021d553981caba1"
checksum = "d61fc211e0bd2c04c0aecd202d2cd72dd797a89da02989a39e1b9691462386d6"
dependencies = [
"anyhow",
"attohttpc",
@ -5605,7 +5626,7 @@ dependencies = [
"raw-window-handle",
"regex",
"rfd",
"semver 1.0.10",
"semver 1.0.12",
"serde",
"serde_json",
"serde_repr",
@ -5629,14 +5650,15 @@ dependencies = [
[[package]]
name = "tauri-build"
version = "1.0.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e7b26eb3523e962b90012fedbfb744ca153d9be85e7981e00737e106d5323941"
checksum = "2f2b32e551ec810ba4ab2ad735de5e3576e54bf0322ab0f4b7ce41244bc65ecf"
dependencies = [
"anyhow",
"cargo_toml",
"heck 0.4.0",
"semver 1.0.10",
"json-patch",
"semver 1.0.12",
"serde_json",
"tauri-utils",
"winres",
@ -5644,32 +5666,35 @@ dependencies = [
[[package]]
name = "tauri-codegen"
version = "1.0.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9468c5189188c820ef605dfe4937c768cb2918e9460c8093dc4ee2cbd717b262"
checksum = "f6f1f7928dd040fc03c94207adfad506c0cf5b152982fd1dc0a621f7fd777e22"
dependencies = [
"base64 0.13.0",
"brotli",
"ico",
"json-patch",
"plist",
"png 0.17.5",
"proc-macro2",
"quote",
"regex",
"semver 1.0.10",
"semver 1.0.12",
"serde",
"serde_json",
"sha2",
"tauri-utils",
"thiserror",
"time 0.3.11",
"uuid 1.1.2",
"walkdir",
]
[[package]]
name = "tauri-macros"
version = "1.0.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "40e3ffddd7a274fc7baaa260888c971a0d95d2ef403aa16600c878b8b1c00ffe"
checksum = "e50b9f52871c088857360319a37472d59f4644f1ed004489599d62831a1b6996"
dependencies = [
"heck 0.4.0",
"proc-macro2",
@ -5681,14 +5706,15 @@ dependencies = [
[[package]]
name = "tauri-runtime"
version = "0.9.0"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fb7dc4db360bb40584187b6cb7834da736ce4ef2ab0914e2be98014444fa9920"
checksum = "4e4cff3b4d9469727fa2107c4b3d2eda110df1ba45103fb420178e536362fae4"
dependencies = [
"gtk",
"http",
"http-range",
"infer",
"raw-window-handle",
"serde",
"serde_json",
"tauri-utils",
@ -5700,14 +5726,15 @@ dependencies = [
[[package]]
name = "tauri-runtime-wry"
version = "0.9.0"
version = "0.10.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c876fb3a6e7c6fe2ac466b2a6ecd83658528844b4df0914558a9bc1501b31cf3"
checksum = "3fa8c4edaf01d8b556e7172c844b1b4dd3399adcd1a606bd520fc3e65f698546"
dependencies = [
"cocoa",
"gtk",
"percent-encoding",
"rand 0.8.5",
"raw-window-handle",
"tauri-runtime",
"tauri-utils",
"uuid 1.1.2",
@ -5719,9 +5746,9 @@ dependencies = [
[[package]]
name = "tauri-utils"
version = "1.0.0"
version = "1.0.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "727145cb55b8897fa9f2bcea4fad31dc39394703d037c9669b40f2d1c0c2d7f3"
checksum = "12ff4b68d9faeb57c9c727bf58c9c9768d2b67d8e84e62ce6146e7859a2e9c6b"
dependencies = [
"brotli",
"ctor",
@ -5734,13 +5761,14 @@ dependencies = [
"phf 0.10.1",
"proc-macro2",
"quote",
"semver 1.0.10",
"semver 1.0.12",
"serde",
"serde_json",
"serde_with",
"thiserror",
"url",
"walkdir",
"windows 0.37.0",
]
[[package]]
@ -5877,9 +5905,9 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.9"
version = "0.3.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd"
checksum = "72c91f41dcb2f096c05f0873d667dceec1087ce5bcf984ec8ffb19acddbb3217"
dependencies = [
"itoa 1.0.2",
"libc",
@ -6031,9 +6059,9 @@ dependencies = [
[[package]]
name = "tower-service"
version = "0.3.1"
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6"
checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52"
[[package]]
name = "tracing"
@ -6050,9 +6078,9 @@ dependencies = [
[[package]]
name = "tracing-attributes"
version = "0.1.21"
version = "0.1.22"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cc6b8ad3567499f98a1db7a752b07a7c8c7c7c34c332ec00effb2b0027974b7c"
checksum = "11c75893af559bc8e10716548bdef5cb2b983f8e637db9d0e15126b61b484ee2"
dependencies = [
"proc-macro2",
"quote",
@ -6061,9 +6089,9 @@ dependencies = [
[[package]]
name = "tracing-core"
version = "0.1.27"
version = "0.1.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7709595b8878a4965ce5e87ebf880a7d39c9afc6837721b21a5a816a8117d921"
checksum = "7b7358be39f2f274f322d2aaed611acc57f382e8eb1e5b48cb9ae30933495ce7"
dependencies = [
"once_cell",
"valuable",
@ -6105,13 +6133,13 @@ dependencies = [
[[package]]
name = "tracing-subscriber"
version = "0.3.11"
version = "0.3.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596"
checksum = "3a713421342a5a666b7577783721d3117f1b69a393df803ee17bb73b1e122a59"
dependencies = [
"ansi_term",
"lazy_static",
"matchers",
"once_cell",
"regex",
"sharded-slab",
"smallvec",
@ -6190,6 +6218,7 @@ dependencies = [
"chrono",
"thiserror",
"ts-rs-macros",
"uuid 0.8.2",
]
[[package]]
@ -6235,9 +6264,9 @@ checksum = "dcf81ac59edc17cc8697ff311e8f5ef2d99fcbd9817b34cec66f90b6c3dfd987"
[[package]]
name = "ucd-trie"
version = "0.1.3"
version = "0.1.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "56dee185309b50d1f11bfedef0fe6d036842e3fb77413abef29f8f8d1c5d4c1c"
checksum = "89570599c4fe5585de2b388aab47e99f7fa4e9238a1399f707a02e356058141c"
[[package]]
name = "unicode-bidi"
@ -6253,9 +6282,9 @@ checksum = "5bd2fe26506023ed7b5e1e315add59d6f584c621d037f9368fea9cfb988f368c"
[[package]]
name = "unicode-normalization"
version = "0.1.19"
version = "0.1.21"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9"
checksum = "854cbdc4f7bc6ae19c820d44abdc3277ac3e1b2b93db20a636825d9322fb60e6"
dependencies = [
"tinyvec",
]
@ -6341,19 +6370,6 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dd6469f4314d5f1ffec476e05f17cc9a78bc7a27a6a857842170bdf8d6f98d2f"
dependencies = [
"getrandom 0.2.7",
"serde",
"uuid-macro-internal",
]
[[package]]
name = "uuid-macro-internal"
version = "1.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "548f7181a5990efa50237abb7ebca410828b57a8955993334679f8b50b35c97d"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
@ -6602,9 +6618,9 @@ dependencies = [
[[package]]
name = "webpki-roots"
version = "0.22.3"
version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf"
checksum = "f1c760f0d366a6c24a02ed7816e23e691f5d92291f94d15e836006fd11b04daf"
dependencies = [
"webpki",
]
@ -6648,9 +6664,9 @@ dependencies = [
[[package]]
name = "weezl"
version = "0.1.6"
version = "0.1.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c97e489d8f836838d497091de568cf16b117486d529ec5579233521065bd5e4"
checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb"
[[package]]
name = "widestring"
@ -6660,9 +6676,9 @@ checksum = "17882f045410753661207383517a6f62ec3dbeb6a4ed2acce01f0728238d1983"
[[package]]
name = "wildmatch"
version = "2.1.0"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d6c48bd20df7e4ced539c12f570f937c6b4884928a87fee70a479d72f031d4e0"
checksum = "ee583bdc5ff1cf9db20e9db5bb3ff4c3089a8f6b8b31aff265c9aba85812db86"
[[package]]
name = "winapi"
@ -6924,9 +6940,9 @@ dependencies = [
[[package]]
name = "wry"
version = "0.18.3"
version = "0.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26b1ba327c7dd4292f46bf8e6ba8e6ec2db4443b2973c9d304a359d95e0aa856"
checksum = "ce19dddbd3ce01dc8f14eb6d4c8f914123bf8379aaa838f6da4f981ff7104a3f"
dependencies = [
"block",
"cocoa",

151
LICENSE
View file

@ -1,25 +1,23 @@
Copyright (c) 2021-present Spacedrive Technology Inc.
GNU AFFERO GENERAL PUBLIC LICENSE
Version 3, 19 November 2007
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <http://fsf.org/>
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The GNU Affero General Public License is a free, copyleft license for
software and other kinds of works, specifically designed to ensure
cooperation with the community in the case of network server software.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
our General Public Licenses are intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
software for all its users.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
@ -28,44 +26,34 @@ them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
Developers that use our General Public Licenses protect your rights
with two steps: (1) assert copyright on the software, and (2) offer
you this License which gives you legal permission to copy, distribute
and/or modify the software.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
A secondary benefit of defending all users' freedom is that
improvements made in alternate versions of the program, if they
receive widespread use, become available for other developers to
incorporate. Many developers of free software are heartened and
encouraged by the resulting cooperation. However, in the case of
software used on network servers, this result may fail to come about.
The GNU General Public License permits making a modified version and
letting the public access it on a server without ever releasing its
source code to the public.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
The GNU Affero General Public License is designed specifically to
ensure that, in such cases, the modified source code becomes available
to the community. It requires the operator of a network server to
provide the source code of the modified version running there to the
users of that server. Therefore, public use of a modified version, on
a publicly accessible server, gives the public access to the source
code of the modified version.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
An older license, called the Affero General Public License and
published by Affero, was designed to accomplish similar goals. This is
a different license, not a version of the Affero GPL, but Affero has
released a new version of the Affero GPL which permits relicensing under
this license.
The precise terms and conditions for copying, distribution and
modification follow.
@ -74,7 +62,7 @@ modification follow.
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"This License" refers to version 3 of the GNU Affero General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
@ -551,35 +539,45 @@ to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
13. Remote Network Interaction; Use with the GNU General Public License.
Notwithstanding any other provision of this License, if you modify the
Program, your modified version must prominently offer all users
interacting with it remotely through a computer network (if your version
supports such interaction) an opportunity to receive the Corresponding
Source of your version by providing access to the Corresponding Source
from a network server at no charge, through some standard or customary
means of facilitating copying of software. This Corresponding Source
shall include the Corresponding Source for any work covered by version 3
of the GNU General Public License that is incorporated pursuant to the
following paragraph.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
under version 3 of the GNU General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
but the work with which it is combined will remain governed by version
3 of the GNU General Public License.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
the GNU Affero General Public License from time to time. Such new versions
will be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Program specifies that a certain numbered version of the GNU Affero General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
GNU Affero General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
versions of the GNU Affero General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
@ -637,40 +635,29 @@ the "copyright" line and a pointer to where the full notice is found.
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
it under the terms of the GNU Affero General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
GNU Affero General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
If your software can interact with users remotely through a computer
network, you should also make sure that it provides a way for users to
get its source. For example, if your program is a web application, its
interface could display a "Source" link that leads users to an archive
of the code. There are many ways you could offer source, and different
solutions will be better for different programs; see section 13 for the
specific requirements.
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<http://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<http://www.gnu.org/philosophy/why-not-lgpl.html>.
For more information on this, and how to apply and follow the GNU AGPL, see
<https://www.gnu.org/licenses/>.

View file

@ -1,16 +1,15 @@
use std::time::{Duration, Instant};
use dotenvy::dotenv;
use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node};
use tauri::api::path;
use tauri::Manager;
use sdcore::{ClientCommand, ClientQuery, CoreEvent, CoreResponse, Node, NodeController};
use tauri::{api::path, Manager};
#[cfg(target_os = "macos")]
mod macos;
mod menu;
#[tauri::command(async)]
async fn client_query_transport(
core: tauri::State<'_, CoreController>,
core: tauri::State<'_, NodeController>,
data: ClientQuery,
) -> Result<CoreResponse, String> {
match core.query(data).await {
@ -24,7 +23,7 @@ async fn client_query_transport(
#[tauri::command(async)]
async fn client_command_transport(
core: tauri::State<'_, CoreController>,
core: tauri::State<'_, NodeController>,
data: ClientCommand,
) -> Result<CoreResponse, String> {
match core.command(data).await {
@ -48,17 +47,11 @@ async fn main() {
dotenv().ok();
env_logger::init();
let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
let mut data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
data_dir = data_dir.join("spacedrive");
// create an instance of the core
let (mut node, mut event_receiver) = Node::new(data_dir).await;
// run startup tasks
node.initializer().await;
// extract the node controller
let controller = node.get_controller();
// throw the node into a dedicated thread
tokio::spawn(async move {
node.start().await;
});
let (controller, mut event_receiver, node) = Node::new(data_dir).await;
tokio::spawn(node.start());
// create tauri app
tauri::Builder::default()
// pass controller to the tauri state manager

View file

@ -15,7 +15,13 @@
"active": true,
"targets": "all",
"identifier": "com.spacedrive.desktop",
"icon": ["icons/icon.icns"],
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
],
"resources": [],
"externalBin": [],
"copyright": "Spacedrive Technology Inc.",

View file

@ -197,7 +197,7 @@ function Page() {
style={{ transform: 'scale(2)' }}
/>
<div className="relative z-10">
<h1 className="text-5xl leading-snug fade-in-heading ">
<h1 className="text-5xl leading-tight sm:leading-snug fade-in-heading ">
We believe file management should be <span className="title-gradient">universal</span>.
</h1>
<p className="text-gray-400 animation-delay-2 fade-in-heading ">

View file

@ -1,42 +0,0 @@
# Infrastructure setups up the Kubernetes cluster for Spacedrive!
#
# To get the service account token use the following:
# ```bash
# TOKENNAME=`kubectl -n spacedrive get sa/spacedrive-ci -o jsonpath='{.secrets[0].name}'`
# kubectl -n spacedrive get secret $TOKENNAME -o jsonpath='{.data.token}' | base64 -d
# ```
apiVersion: v1
kind: Namespace
metadata:
name: spacedrive
---
apiVersion: v1
kind: ServiceAccount
metadata:
name: spacedrive-ci
namespace: spacedrive
---
apiVersion: rbac.authorization.k8s.io/v1
kind: Role
metadata:
name: spacedrive-ns-full
namespace: spacedrive
rules:
- apiGroups: ['apps']
resources: ['deployments']
verbs: ['get', 'patch']
---
apiVersion: rbac.authorization.k8s.io/v1
kind: RoleBinding
metadata:
name: spacedrive-ci-rb
namespace: spacedrive
subjects:
- kind: ServiceAccount
name: spacedrive-ci
namespace: spacedrive
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: Role
name: spacedrive-ns-full

View file

@ -1,118 +0,0 @@
# This will deploy the Spacedrive Server container to the `spacedrive`` namespace on Kubernetes.
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
name: sdserver-ingress
namespace: spacedrive
labels:
app.kubernetes.io/name: sdserver
app.kubernetes.io/component: webserver
annotations:
traefik.ingress.kubernetes.io/router.tls.certresolver: le
traefik.ingress.kubernetes.io/router.middlewares: kube-system-antiseo@kubernetescrd
spec:
rules:
- host: spacedrive.otbeaumont.me
http:
paths:
- path: /
pathType: Prefix
backend:
service:
name: sdserver-service
port:
number: 8080
---
apiVersion: v1
kind: Service
metadata:
name: sdserver-service
namespace: spacedrive
labels:
app.kubernetes.io/name: sdserver
app.kubernetes.io/component: webserver
spec:
ports:
- port: 8080
targetPort: 8080
protocol: TCP
selector:
app.kubernetes.io/name: sdserver
app.kubernetes.io/component: webserver
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: sdserver-pvc
namespace: spacedrive
spec:
accessModes:
- ReadWriteOnce
storageClassName: local-path
resources:
requests:
storage: 512M
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: sdserver-deployment
namespace: spacedrive
labels:
app.kubernetes.io/name: sdserver
app.kubernetes.io/component: webserver
spec:
replicas: 1
selector:
matchLabels:
app.kubernetes.io/name: sdserver
app.kubernetes.io/component: webserver
template:
metadata:
labels:
app.kubernetes.io/name: sdserver
app.kubernetes.io/component: webserver
spec:
restartPolicy: Always
# refer to Dockerfile to find securityContext values
securityContext:
runAsUser: 101
runAsGroup: 101
fsGroup: 101
containers:
- name: sdserver
image: ghcr.io/oscartbeaumont/spacedrive/server:staging
imagePullPolicy: Always
ports:
- containerPort: 8080
volumeMounts:
- name: data-volume
mountPath: /data
securityContext:
allowPrivilegeEscalation: false
resources:
limits:
memory: 100Mi
cpu: 100m
requests:
memory: 5Mi
cpu: 10m
readinessProbe:
httpGet:
path: /health
port: 8080
initialDelaySeconds: 10
failureThreshold: 4
periodSeconds: 5
livenessProbe:
httpGet:
path: /health
port: 8080
initialDelaySeconds: 20
failureThreshold: 3
periodSeconds: 10
volumes:
- name: data-volume
persistentVolumeClaim:
claimName: sdserver-pvc

View file

@ -1,4 +1,4 @@
use sdcore::{ClientCommand, ClientQuery, CoreController, CoreEvent, CoreResponse, Node};
use sdcore::{ClientCommand, ClientQuery, CoreEvent, CoreResponse, Node, NodeController};
use std::{env, path::Path};
use actix::{
@ -19,7 +19,7 @@ const DATA_DIR_ENV_VAR: &'static str = "DATA_DIR";
/// Define HTTP actor
struct Socket {
_event_receiver: web::Data<mpsc::Receiver<CoreEvent>>,
core: web::Data<CoreController>,
core: web::Data<NodeController>,
}
impl Actor for Socket {
@ -52,7 +52,15 @@ impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for Socket {
match msg {
Ok(ws::Message::Ping(msg)) => ctx.pong(&msg),
Ok(ws::Message::Text(text)) => {
let msg: SocketMessage = serde_json::from_str(&text).unwrap();
let msg = serde_json::from_str::<SocketMessage>(&text);
let msg = match msg {
Ok(msg) => msg,
Err(err) => {
println!("Error parsing message: {}", err);
return;
},
};
let core = self.core.clone();
@ -133,7 +141,7 @@ async fn ws_handler(
req: HttpRequest,
stream: web::Payload,
event_receiver: web::Data<mpsc::Receiver<CoreEvent>>,
controller: web::Data<CoreController>,
controller: web::Data<NodeController>,
) -> Result<HttpResponse, Error> {
let resp = ws::start(
Socket {
@ -178,7 +186,7 @@ async fn main() -> std::io::Result<()> {
async fn setup() -> (
web::Data<mpsc::Receiver<CoreEvent>>,
web::Data<CoreController>,
web::Data<NodeController>,
) {
let data_dir_path = match env::var(DATA_DIR_ENV_VAR) {
Ok(path) => Path::new(&path).to_path_buf(),
@ -196,15 +204,8 @@ async fn setup() -> (
},
};
let (mut node, event_receiver) = Node::new(data_dir_path).await;
node.initializer().await;
let controller = node.get_controller();
tokio::spawn(async move {
node.start().await;
});
let (controller, event_receiver, node) = Node::new(data_dir_path).await;
tokio::spawn(node.start());
(web::Data::new(event_receiver), web::Data::new(controller))
}

View file

@ -1,20 +1,47 @@
import { BaseTransport } from '@sd/client';
import { ClientCommand, ClientQuery, CoreEvent } from '@sd/core';
import { ClientCommand, ClientQuery } from '@sd/core';
import SpacedriveInterface from '@sd/interface';
import React, { useEffect } from 'react';
const websocket = new WebSocket(import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws');
const timeouts = [1000, 2000, 5000, 10000]; // In milliseconds
const randomId = () => Math.random().toString(36).slice(2);
// bind state to core via Tauri
class Transport extends BaseTransport {
websocket: WebSocket;
requestMap = new Map<string, (data: any) => void>();
constructor() {
super();
this.websocket = new WebSocket(
import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws'
);
this.attachEventListeners();
}
websocket.addEventListener('message', (event) => {
async reconnect(timeoutIndex = 0) {
let timeout =
(timeouts[timeoutIndex] ?? timeouts[timeouts.length - 1]) +
(Math.floor(Math.random() * 5000 /* 5 Seconds */) + 1);
setTimeout(() => {
let ws = new WebSocket(import.meta.env.VITE_SDSERVER_BASE_URL || 'ws://localhost:8080/ws');
new Promise(function (resolve, reject) {
ws.addEventListener('open', () => resolve(null));
ws.addEventListener('close', reject);
})
.then(() => {
this.websocket = ws;
this.attachEventListeners();
console.log('Reconnected!');
})
.catch((err) => this.reconnect(timeoutIndex++));
}, timeout);
}
attachEventListeners() {
this.websocket.addEventListener('message', (event) => {
if (!event.data) return;
const { id, payload } = JSON.parse(event.data);
@ -29,8 +56,24 @@ class Transport extends BaseTransport {
}
}
});
this.websocket.addEventListener('close', () => {
console.log('GONE');
this.reconnect();
});
}
async query(query: ClientQuery) {
if (websocket.readyState == 0) {
let resolve: () => void;
const promise = new Promise((res) => {
resolve = () => res(undefined);
});
// @ts-ignore
websocket.addEventListener('open', resolve);
await promise;
}
const id = randomId();
let resolve: (data: any) => void;
@ -41,7 +84,7 @@ class Transport extends BaseTransport {
// @ts-ignore
this.requestMap.set(id, resolve);
websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
this.websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
return await promise;
}
@ -56,12 +99,14 @@ class Transport extends BaseTransport {
// @ts-ignore
this.requestMap.set(id, resolve);
websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
this.websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
return await promise;
}
}
const transport = new Transport();
function App() {
useEffect(() => {
window.parent.postMessage('spacedrive-hello', '*');
@ -72,7 +117,7 @@ function App() {
{/* <header className="App-header"></header> */}
<SpacedriveInterface
demoMode
transport={new Transport()}
transport={transport}
platform={'browser'}
convertFileSrc={function (url: string): string {
return url;

View file

@ -24,11 +24,10 @@ ring = "0.17.0-alpha.10"
int-enum = "0.4.0"
# Project dependencies
ts-rs = { version = "6.1", features = ["chrono-impl"] }
ts-rs = { version = "6.2", features = ["chrono-impl", "uuid-impl", "serde-compat"] }
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust.git", tag = "0.5.0" }
walkdir = "^2.3.2"
lazy_static = "1.4.0"
uuid = { version = "1.1.2", features = ["v4", "macro-diagnostics", "serde"]}
uuid = { version = "^0.8.2", features = ["v4", "serde"]}
sysinfo = "0.23.9"
thiserror = "1.0.30"
core-derive = { path = "./derive" }

View file

@ -1,3 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LibraryCommand } from "./LibraryCommand";
export type ClientCommand = { key: "FileReadMetaData", params: { id: number, } } | { key: "FileSetNote", params: { id: number, note: string | null, } } | { key: "FileDelete", params: { id: number, } } | { key: "LibDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "LocRescan", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles", params: { id: number, path: string, } };
export type ClientCommand = { key: "CreateLibrary", params: { name: string, } } | { key: "EditLibrary", params: { id: string, name: string | null, description: string | null, } } | { key: "DeleteLibrary", params: { id: string, } } | { key: "LibraryCommand", params: { library_id: string, command: LibraryCommand, } };

View file

@ -1,3 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LibraryQuery } from "./LibraryQuery";
export type ClientQuery = { key: "NodeGetState" } | { key: "SysGetVolumes" } | { key: "LibGetTags" } | { key: "JobGetRunning" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" } | { key: "GetNodes" };
export type ClientQuery = { key: "NodeGetLibraries" } | { key: "NodeGetState" } | { key: "SysGetVolumes" } | { key: "JobGetRunning" } | { key: "GetNodes" } | { key: "LibraryQuery", params: { library_id: string, query: LibraryQuery, } };

View file

@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export interface ConfigMetadata { version: string | null, }

View file

@ -1,9 +1,10 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { DirectoryWithContents } from "./DirectoryWithContents";
import type { JobReport } from "./JobReport";
import type { LibraryConfigWrapped } from "./LibraryConfigWrapped";
import type { LocationResource } from "./LocationResource";
import type { NodeState } from "./NodeState";
import type { Statistics } from "./Statistics";
import type { Volume } from "./Volume";
export type CoreResponse = { key: "Success", data: null } | { key: "SysGetVolumes", data: Array<Volume> } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array<LocationResource> } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "NodeGetState", data: NodeState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array<JobReport> } | { key: "JobGetHistory", data: Array<JobReport> } | { key: "GetLibraryStatistics", data: Statistics };
export type CoreResponse = { key: "Success", data: null } | { key: "Error", data: string } | { key: "NodeGetLibraries", data: Array<LibraryConfigWrapped> } | { key: "SysGetVolumes", data: Array<Volume> } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array<LocationResource> } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "NodeGetState", data: NodeState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array<JobReport> } | { key: "JobGetHistory", data: Array<JobReport> } | { key: "GetLibraryStatistics", data: Statistics };

View file

@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type LibraryCommand = { key: "FileReadMetaData", params: { id: number, } } | { key: "FileSetNote", params: { id: number, note: string | null, } } | { key: "FileDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "LocRescan", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles", params: { id: number, path: string, } };

View file

@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export interface LibraryConfig { version: string | null, name: string, description: string, }

View file

@ -0,0 +1,4 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LibraryConfig } from "./LibraryConfig";
export interface LibraryConfigWrapped { uuid: string, config: LibraryConfig, }

View file

@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export type LibraryQuery = { key: "LibGetTags" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" };

View file

@ -0,0 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
export interface NodeConfig { version: string | null, id: string, name: string, p2p_port: number | null, }

View file

@ -1,4 +1,3 @@
// This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually.
import type { LibraryState } from "./LibraryState";
export interface NodeState { node_pub_id: string, node_id: number, node_name: string, data_path: string, tcp_port: number, libraries: Array<LibraryState>, current_library_uuid: string, }
export interface NodeState { version: string | null, id: string, name: string, p2p_port: number | null, data_path: string, }

View file

@ -2,6 +2,7 @@ export * from './bindings/Client';
export * from './bindings/ClientCommand';
export * from './bindings/ClientQuery';
export * from './bindings/ClientState';
export * from './bindings/ConfigMetadata';
export * from './bindings/CoreEvent';
export * from './bindings/CoreResource';
export * from './bindings/CoreResponse';
@ -12,9 +13,14 @@ export * from './bindings/FileKind';
export * from './bindings/FilePath';
export * from './bindings/JobReport';
export * from './bindings/JobStatus';
export * from './bindings/LibraryCommand';
export * from './bindings/LibraryConfig';
export * from './bindings/LibraryConfigWrapped';
export * from './bindings/LibraryNode';
export * from './bindings/LibraryQuery';
export * from './bindings/LibraryState';
export * from './bindings/LocationResource';
export * from './bindings/NodeConfig';
export * from './bindings/NodeState';
export * from './bindings/Platform';
export * from './bindings/Statistics';

View file

@ -0,0 +1,29 @@
/*
Warnings:
- You are about to drop the `libraries` table. If the table is not empty, all the data it contains will be lost.
- You are about to drop the `library_statistics` table. If the table is not empty, all the data it contains will be lost.
*/
-- DropTable
PRAGMA foreign_keys=off;
DROP TABLE "libraries";
PRAGMA foreign_keys=on;
-- DropTable
PRAGMA foreign_keys=off;
DROP TABLE "library_statistics";
PRAGMA foreign_keys=on;
-- CreateTable
CREATE TABLE "statistics" (
"id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT,
"date_captured" DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
"total_file_count" INTEGER NOT NULL DEFAULT 0,
"library_db_size" TEXT NOT NULL DEFAULT '0',
"total_bytes_used" TEXT NOT NULL DEFAULT '0',
"total_bytes_capacity" TEXT NOT NULL DEFAULT '0',
"total_unique_bytes" TEXT NOT NULL DEFAULT '0',
"total_bytes_free" TEXT NOT NULL DEFAULT '0',
"preview_media_bytes" TEXT NOT NULL DEFAULT '0'
);

View file

@ -35,21 +35,9 @@ model SyncEvent {
@@map("sync_events")
}
model Library {
id Int @id @default(autoincrement())
pub_id String @unique
name String
is_primary Boolean @default(true)
date_created DateTime @default(now())
timezone String?
@@map("libraries")
}
model LibraryStatistics {
model Statistics {
id Int @id @default(autoincrement())
date_captured DateTime @default(now())
library_id Int @unique
total_file_count Int @default(0)
library_db_size String @default("0")
total_bytes_used String @default("0")
@ -58,7 +46,7 @@ model LibraryStatistics {
total_bytes_free String @default("0")
preview_media_bytes String @default("0")
@@map("library_statistics")
@@map("statistics")
}
model Node {

View file

@ -1,8 +1,8 @@
use crate::library::LibraryContext;
use crate::{
job::{Job, JobReportUpdate, WorkerContext},
node::get_nodestate,
job::{Job, JobReportUpdate, JobResult, WorkerContext},
prisma::file_path,
sys, CoreContext, CoreEvent,
sys, CoreEvent,
};
use image::{self, imageops, DynamicImage, GenericImageView};
use log::{error, info};
@ -28,11 +28,15 @@ impl Job for ThumbnailJob {
fn name(&self) -> &'static str {
"thumbnailer"
}
async fn run(&self, ctx: WorkerContext) -> JobResult {
let library_ctx = ctx.library_ctx();
let thumbnail_dir = library_ctx
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(self.location_id.to_string());
async fn run(&self, ctx: WorkerContext) -> Result<(), Box<dyn Error>> {
let config = get_nodestate();
let location = sys::get_location(&ctx.core_ctx, self.location_id).await?;
let location = sys::get_location(&library_ctx, self.location_id).await?;
info!(
"Searching for images in location {} at path {:#?}",
@ -40,19 +44,11 @@ impl Job for ThumbnailJob {
);
// create all necessary directories if they don't exist
fs::create_dir_all(
config
.data_path
.as_ref()
.unwrap()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", self.location_id)),
)
.await?;
fs::create_dir_all(&thumbnail_dir).await?;
let root_path = location.path.unwrap();
// query database for all files in this location that need thumbnails
let image_files = get_images(&ctx.core_ctx, self.location_id, &self.path).await?;
let image_files = get_images(&library_ctx, self.location_id, &self.path).await?;
info!("Found {:?} files", image_files.len());
ctx.progress(vec![
@ -86,14 +82,7 @@ impl Job for ThumbnailJob {
};
// Define and write the WebP-encoded file to a given path
let output_path = config
.data_path
.as_ref()
.unwrap()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location.id))
.join(&cas_id)
.with_extension("webp");
let output_path = thumbnail_dir.join(&cas_id).with_extension("webp");
// check if file exists at output path
if !output_path.exists() {
@ -105,7 +94,9 @@ impl Job for ThumbnailJob {
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
if !self.background {
ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }).await;
ctx.library_ctx()
.emit(CoreEvent::NewThumbnail { cas_id })
.await;
};
} else {
info!("Thumb exists, skipping... {}", output_path.display());
@ -145,7 +136,7 @@ pub async fn generate_thumbnail<P: AsRef<Path>>(
}
pub async fn get_images(
ctx: &CoreContext,
ctx: &LibraryContext,
location_id: i32,
path: impl AsRef<Path>,
) -> Result<Vec<file_path::Data>, std::io::Error> {
@ -167,7 +158,7 @@ pub async fn get_images(
}
let image_files = ctx
.database
.db
.file_path()
.find_many(params)
.with(file_path::file::fetch())

View file

@ -2,10 +2,10 @@ use super::checksum::generate_cas_id;
use crate::{
file::FileError,
job::JobReportUpdate,
job::{Job, WorkerContext},
job::{Job, JobResult, WorkerContext},
library::LibraryContext,
prisma::{file, file_path},
sys::get_location,
CoreContext,
};
use chrono::{DateTime, FixedOffset};
use futures::future::join_all;
@ -13,7 +13,6 @@ use log::{error, info};
use prisma_client_rust::{prisma_models::PrismaValue, raw, raw::Raw, Direction};
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, HashSet};
use std::error::Error;
use std::path::{Path, PathBuf};
use tokio::{fs, io};
@ -35,13 +34,13 @@ impl Job for FileIdentifierJob {
"file_identifier"
}
async fn run(&self, ctx: WorkerContext) -> Result<(), Box<dyn Error>> {
async fn run(&self, ctx: WorkerContext) -> JobResult {
info!("Identifying orphan file paths...");
let location = get_location(&ctx.core_ctx, self.location_id).await?;
let location = get_location(&ctx.library_ctx(), self.location_id).await?;
let location_path = location.path.unwrap_or_else(|| "".to_string());
let total_count = count_orphan_file_paths(&ctx.core_ctx, location.id.into()).await?;
let total_count = count_orphan_file_paths(&ctx.library_ctx(), location.id.into()).await?;
info!("Found {} orphan file paths", total_count);
let task_count = (total_count as f64 / CHUNK_SIZE as f64).ceil() as usize;
@ -59,7 +58,7 @@ impl Job for FileIdentifierJob {
let mut cas_lookup: HashMap<String, i32> = HashMap::new();
// get chunk of orphans to process
let file_paths = match get_orphan_file_paths(&ctx.core_ctx, cursor).await {
let file_paths = match get_orphan_file_paths(&ctx.library_ctx(), cursor).await {
Ok(file_paths) => file_paths,
Err(e) => {
info!("Error getting orphan file paths: {:#?}", e);
@ -93,8 +92,8 @@ impl Job for FileIdentifierJob {
// find all existing files by cas id
let generated_cas_ids = chunk.values().map(|c| c.cas_id.clone()).collect();
let existing_files = ctx
.core_ctx
.database
.library_ctx()
.db
.file()
.find_many(vec![file::cas_id::in_vec(generated_cas_ids)])
.exec()
@ -104,7 +103,8 @@ impl Job for FileIdentifierJob {
// link those existing files to their file paths
// Had to put the file_path in a variable outside of the closure, to satisfy the borrow checker
let prisma_file_path = ctx.core_ctx.database.file_path();
let library_ctx = ctx.library_ctx();
let prisma_file_path = library_ctx.db.file_path();
for result in join_all(existing_files.iter().map(|file| {
prisma_file_path
.find_unique(file_path::id::equals(
@ -133,7 +133,7 @@ impl Job for FileIdentifierJob {
.collect::<Vec<_>>();
// assemble prisma values for new unique files
let mut values: Vec<PrismaValue> = Vec::new();
let mut values = Vec::with_capacity(new_files.len() * 3);
for file in &new_files {
values.extend([
PrismaValue::String(file.cas_id.clone()),
@ -144,8 +144,8 @@ impl Job for FileIdentifierJob {
// create new file records with assembled values
let created_files: Vec<FileCreated> = ctx
.core_ctx
.database
.library_ctx()
.db
._query_raw(Raw::new(
&format!(
"INSERT INTO files (cas_id, size_in_bytes, date_created) VALUES {}
@ -210,10 +210,10 @@ struct CountRes {
}
pub async fn count_orphan_file_paths(
ctx: &CoreContext,
ctx: &LibraryContext,
location_id: i64,
) -> Result<usize, FileError> {
let files_count = ctx.database
let files_count = ctx.db
._query_raw::<CountRes>(raw!(
"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE AND location_id = {}",
PrismaValue::Int(location_id)
@ -223,14 +223,14 @@ pub async fn count_orphan_file_paths(
}
pub async fn get_orphan_file_paths(
ctx: &CoreContext,
ctx: &LibraryContext,
cursor: i32,
) -> Result<Vec<file_path::Data>, FileError> {
info!(
"discovering {} orphan file paths at cursor: {:?}",
CHUNK_SIZE, cursor
);
ctx.database
ctx.db
.file_path()
.find_many(vec![
file_path::file_id::equals(None),

View file

@ -1,28 +1,25 @@
use crate::{
encode::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, FileError, FilePath},
node::get_nodestate,
library::LibraryContext,
prisma::file_path,
sys::get_location,
CoreContext,
};
use log::info;
use std::path::Path;
pub async fn open_dir(
ctx: &CoreContext,
ctx: &LibraryContext,
location_id: i32,
path: impl AsRef<Path>,
) -> Result<DirectoryWithContents, FileError> {
let config = get_nodestate();
// get location
let location = get_location(ctx, location_id).await?;
let path_str = path.as_ref().to_string_lossy().to_string();
let directory = ctx
.database
.db
.file_path()
.find_first(vec![
file_path::location_id::equals(Some(location.id)),
@ -36,7 +33,7 @@ pub async fn open_dir(
info!("DIRECTORY: {:?}", directory);
let mut file_paths: Vec<FilePath> = ctx
.database
.db
.file_path()
.find_many(vec![
file_path::location_id::equals(Some(location.id)),
@ -49,17 +46,17 @@ pub async fn open_dir(
.map(Into::into)
.collect();
if let Some(ref data_path) = config.data_path {
for file_path in &mut file_paths {
if let Some(file) = &mut file_path.file {
let thumb_path = data_path
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(location.id.to_string())
.join(file.cas_id.clone())
.with_extension("webp");
for file_path in &mut file_paths {
if let Some(file) = &mut file_path.file {
let thumb_path = ctx
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(location.id.to_string())
.join(&file.cas_id)
.with_extension("webp");
file.has_thumbnail = thumb_path.exists();
}
file.has_thumbnail = thumb_path.exists();
}
}

View file

@ -1,5 +1,4 @@
use crate::job::{Job, JobReportUpdate, WorkerContext};
use std::error::Error;
use crate::job::{Job, JobReportUpdate, JobResult, WorkerContext};
use std::path::PathBuf;
use self::scan::ScanProgress;
@ -20,8 +19,8 @@ impl Job for IndexerJob {
fn name(&self) -> &'static str {
"indexer"
}
async fn run(&self, ctx: WorkerContext) -> Result<(), Box<dyn Error>> {
scan_path(&ctx.core_ctx.clone(), &self.path, move |p| {
async fn run(&self, ctx: WorkerContext) -> JobResult {
scan_path(&ctx.library_ctx(), &self.path, move |p| {
ctx.progress(
p.iter()
.map(|p| match p.clone() {

View file

@ -1,8 +1,6 @@
use crate::{
sys::{create_location, LocationResource},
CoreContext,
};
use crate::job::JobResult;
use crate::library::LibraryContext;
use crate::sys::{create_location, LocationResource};
use chrono::{DateTime, Utc};
use log::{error, info};
use prisma_client_rust::prisma_models::PrismaValue;
@ -30,10 +28,10 @@ static BATCH_SIZE: usize = 100;
// creates a vector of valid path buffers from a directory
pub async fn scan_path(
ctx: &CoreContext,
ctx: &LibraryContext,
path: impl AsRef<Path> + Debug,
on_progress: impl Fn(Vec<ScanProgress>) + Send + Sync + 'static,
) -> Result<(), Box<dyn std::error::Error>> {
) -> JobResult {
let location = create_location(ctx, &path).await?;
// query db to highers id, so we can increment it for the new files indexed
@ -43,7 +41,7 @@ pub async fn scan_path(
}
// grab the next id so we can increment in memory for batch inserting
let first_file_id = match ctx
.database
.db
._query_raw::<QueryRes>(raw!("SELECT MAX(id) id FROM file_paths"))
.await
{
@ -168,7 +166,7 @@ pub async fn scan_path(
files
);
let count = ctx.database._execute_raw(raw).await;
let count = ctx.db._execute_raw(raw).await;
info!("Inserted {:?} records", count);
}

View file

@ -1,13 +1,15 @@
use std::path::PathBuf;
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use thiserror::Error;
use ts_rs::TS;
use crate::{
library::LibraryContext,
prisma::{self, file, file_path},
sys::SysError,
ClientQuery, CoreContext, CoreError, CoreEvent, CoreResponse,
ClientQuery, CoreError, CoreEvent, CoreResponse, LibraryQuery,
};
pub mod cas;
pub mod explorer;
@ -33,9 +35,9 @@ pub struct File {
pub ipfs_id: Option<String>,
pub note: Option<String>,
pub date_created: chrono::DateTime<chrono::Utc>,
pub date_modified: chrono::DateTime<chrono::Utc>,
pub date_indexed: chrono::DateTime<chrono::Utc>,
pub date_created: DateTime<Utc>,
pub date_modified: DateTime<Utc>,
pub date_indexed: DateTime<Utc>,
pub paths: Vec<FilePath>,
// pub media_data: Option<MediaData>,
@ -56,9 +58,9 @@ pub struct FilePath {
pub file_id: Option<i32>,
pub parent_id: Option<i32>,
pub date_created: chrono::DateTime<chrono::Utc>,
pub date_modified: chrono::DateTime<chrono::Utc>,
pub date_indexed: chrono::DateTime<chrono::Utc>,
pub date_created: DateTime<chrono::Utc>,
pub date_modified: DateTime<chrono::Utc>,
pub date_indexed: DateTime<chrono::Utc>,
pub file: Option<File>,
}
@ -148,12 +150,12 @@ pub enum FileError {
}
pub async fn set_note(
ctx: CoreContext,
ctx: LibraryContext,
id: i32,
note: Option<String>,
) -> Result<CoreResponse, CoreError> {
let _response = ctx
.database
.db
.file()
.find_unique(file::id::equals(id))
.update(vec![file::note::set(note.clone())])
@ -161,10 +163,13 @@ pub async fn set_note(
.await
.unwrap();
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibGetExplorerDir {
limit: 0,
path: "".to_string(),
location_id: 0,
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
library_id: ctx.id.to_string(),
query: LibraryQuery::LibGetExplorerDir {
limit: 0,
path: PathBuf::new(),
location_id: 0,
},
}))
.await;

View file

@ -3,48 +3,69 @@ use super::{
JobError,
};
use crate::{
node::get_nodestate,
library::LibraryContext,
prisma::{job, node},
CoreContext,
};
use int_enum::IntEnum;
use log::info;
use log::{error, info};
use serde::{Deserialize, Serialize};
use std::{
collections::{HashMap, VecDeque},
error::Error,
fmt::Debug,
sync::Arc,
};
use tokio::sync::Mutex;
use tokio::sync::{mpsc, Mutex, RwLock};
use ts_rs::TS;
// db is single threaded, nerd
const MAX_WORKERS: usize = 1;
pub type JobResult = Result<(), Box<dyn Error + Send + Sync>>;
#[async_trait::async_trait]
pub trait Job: Send + Sync + Debug {
fn name(&self) -> &'static str;
async fn run(&self, ctx: WorkerContext) -> Result<(), Box<dyn std::error::Error>>;
async fn run(&self, ctx: WorkerContext) -> JobResult;
}
pub enum JobManagerEvent {
IngestJob(LibraryContext, Box<dyn Job>),
}
// jobs struct is maintained by the core
pub struct Jobs {
job_queue: VecDeque<Box<dyn Job>>,
pub struct JobManager {
job_queue: RwLock<VecDeque<Box<dyn Job>>>,
// workers are spawned when jobs are picked off the queue
running_workers: HashMap<String, Arc<Mutex<Worker>>>,
running_workers: RwLock<HashMap<String, Arc<Mutex<Worker>>>>,
internal_sender: mpsc::UnboundedSender<JobManagerEvent>,
}
impl Jobs {
pub fn new() -> Self {
Self {
job_queue: VecDeque::new(),
running_workers: HashMap::new(),
}
impl JobManager {
pub fn new() -> Arc<Self> {
let (internal_sender, mut internal_receiver) = mpsc::unbounded_channel();
let this = Arc::new(Self {
job_queue: RwLock::new(VecDeque::new()),
running_workers: RwLock::new(HashMap::new()),
internal_sender,
});
let this2 = this.clone();
tokio::spawn(async move {
while let Some(event) = internal_receiver.recv().await {
match event {
JobManagerEvent::IngestJob(ctx, job) => this2.clone().ingest(&ctx, job).await,
}
}
});
this
}
pub async fn ingest(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
pub async fn ingest(self: Arc<Self>, ctx: &LibraryContext, job: Box<dyn Job>) {
// create worker to process job
if self.running_workers.len() < MAX_WORKERS {
let mut running_workers = self.running_workers.write().await;
if running_workers.len() < MAX_WORKERS {
info!("Running job: {:?}", job.name());
let worker = Worker::new(job);
@ -52,51 +73,57 @@ impl Jobs {
let wrapped_worker = Arc::new(Mutex::new(worker));
Worker::spawn(Arc::clone(&wrapped_worker), ctx).await;
Worker::spawn(Arc::clone(&self), Arc::clone(&wrapped_worker), ctx.clone()).await;
self.running_workers.insert(id, wrapped_worker);
running_workers.insert(id, wrapped_worker);
} else {
self.job_queue.push_back(job);
self.job_queue.write().await.push_back(job);
}
}
pub fn ingest_queue(&mut self, _ctx: &CoreContext, job: Box<dyn Job>) {
self.job_queue.push_back(job);
pub async fn ingest_queue(&self, _ctx: &LibraryContext, job: Box<dyn Job>) {
self.job_queue.write().await.push_back(job);
}
pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
pub async fn complete(self: Arc<Self>, ctx: &LibraryContext, job_id: String) {
// remove worker from running workers
self.running_workers.remove(&job_id);
self.running_workers.write().await.remove(&job_id);
// continue queue
let job = self.job_queue.pop_front();
let job = self.job_queue.write().await.pop_front();
if let Some(job) = job {
self.ingest(ctx, job).await;
// We can't directly execute `self.ingest` here because it would cause an async cycle.
self.internal_sender
.send(JobManagerEvent::IngestJob(ctx.clone(), job))
.unwrap_or_else(|_| {
error!("Failed to ingest job!");
});
}
}
pub async fn get_running(&self) -> Vec<JobReport> {
let mut ret = vec![];
for worker in self.running_workers.values() {
for worker in self.running_workers.read().await.values() {
let worker = worker.lock().await;
ret.push(worker.job_report.clone());
}
ret
}
pub async fn queue_pending_job(ctx: &CoreContext) -> Result<(), JobError> {
let _next_job = ctx
.database
.job()
.find_first(vec![job::status::equals(JobStatus::Queued.int_value())])
.exec()
.await?;
// pub async fn queue_pending_job(ctx: &LibraryContext) -> Result<(), JobError> {
// let _next_job = ctx
// .db
// .job()
// .find_first(vec![job::status::equals(JobStatus::Queued.int_value())])
// .exec()
// .await?;
Ok(())
}
// Ok(())
// }
pub async fn get_history(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> {
pub async fn get_history(ctx: &LibraryContext) -> Result<Vec<JobReport>, JobError> {
let jobs = ctx
.database
.db
.job()
.find_many(vec![job::status::not(JobStatus::Running.int_value())])
.exec()
@ -171,30 +198,29 @@ impl JobReport {
seconds_elapsed: 0,
}
}
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
let config = get_nodestate();
pub async fn create(&self, ctx: &LibraryContext) -> Result<(), JobError> {
let mut params = Vec::new();
if self.data.is_some() {
params.push(job::data::set(self.data.clone()))
}
ctx.database
ctx.db
.job()
.create(
job::id::set(self.id.clone()),
job::name::set(self.name.clone()),
job::action::set(1),
job::nodes::link(node::id::equals(config.node_id)),
job::nodes::link(node::id::equals(ctx.node_local_id)),
params,
)
.exec()
.await?;
Ok(())
}
pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
ctx.database
pub async fn update(&self, ctx: &LibraryContext) -> Result<(), JobError> {
ctx.db
.job()
.find_unique(job::id::equals(self.id.clone()))
.update(vec![

View file

@ -1,8 +1,8 @@
use super::{
jobs::{JobReport, JobReportUpdate, JobStatus},
Job,
Job, JobManager,
};
use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent};
use crate::{library::LibraryContext, ClientQuery, CoreEvent, LibraryQuery};
use log::error;
use std::{sync::Arc, time::Duration};
use tokio::{
@ -29,8 +29,8 @@ enum WorkerState {
#[derive(Clone)]
pub struct WorkerContext {
pub uuid: String,
pub core_ctx: CoreContext,
pub sender: UnboundedSender<WorkerEvent>,
library_ctx: LibraryContext,
sender: UnboundedSender<WorkerEvent>,
}
impl WorkerContext {
@ -39,9 +39,13 @@ impl WorkerContext {
.send(WorkerEvent::Progressed(updates))
.unwrap_or(());
}
pub fn library_ctx(&self) -> LibraryContext {
self.library_ctx.clone()
}
// save the job data to
// pub fn save_data () {
// }
}
@ -66,7 +70,11 @@ impl Worker {
}
}
// spawns a thread and extracts channel sender to communicate with it
pub async fn spawn(worker: Arc<Mutex<Self>>, ctx: &CoreContext) {
pub async fn spawn(
job_manager: Arc<JobManager>,
worker: Arc<Mutex<Self>>,
ctx: LibraryContext,
) {
// we capture the worker receiver channel so state can be updated from inside the worker
let mut worker_mut = worker.lock().await;
// extract owned job and receiver from Self
@ -79,25 +87,26 @@ impl Worker {
WorkerState::Running => unreachable!(),
};
let worker_sender = worker_mut.worker_sender.clone();
let core_ctx = ctx.clone();
worker_mut.job_report.status = JobStatus::Running;
worker_mut.job_report.create(ctx).await.unwrap_or(());
worker_mut.job_report.create(&ctx).await.unwrap_or(());
// spawn task to handle receiving events from the worker
let library_ctx = ctx.clone();
tokio::spawn(Worker::track_progress(
worker.clone(),
worker_receiver,
ctx.clone(),
library_ctx.clone(),
));
let uuid = worker_mut.job_report.id.clone();
// spawn task to handle running the job
tokio::spawn(async move {
let worker_ctx = WorkerContext {
uuid,
core_ctx,
library_ctx,
sender: worker_sender,
};
let job_start = Instant::now();
@ -116,20 +125,15 @@ impl Worker {
}
});
let result = job.run(worker_ctx.clone()).await;
if let Err(e) = result {
error!("job failed {:?}", e);
if let Err(e) = job.run(worker_ctx.clone()).await {
error!("job '{}' failed with error: {}", worker_ctx.uuid, e);
worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
} else {
// handle completion
worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
}
worker_ctx
.core_ctx
.internal_sender
.send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
.unwrap_or(());
job_manager.complete(&ctx, worker_ctx.uuid).await;
});
}
@ -140,7 +144,7 @@ impl Worker {
async fn track_progress(
worker: Arc<Mutex<Self>>,
mut channel: UnboundedReceiver<WorkerEvent>,
ctx: CoreContext,
ctx: LibraryContext,
) {
while let Some(command) = channel.recv().await {
let mut worker = worker.lock().await;
@ -179,16 +183,23 @@ impl Worker {
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
.await;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.await;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
library_id: ctx.id.to_string(),
query: LibraryQuery::JobGetHistory,
}))
.await;
break;
}
WorkerEvent::Failed => {
worker.job_report.status = JobStatus::Failed;
worker.job_report.update(&ctx).await.unwrap_or(());
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.await;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
library_id: ctx.id.to_string(),
query: LibraryQuery::JobGetHistory,
}))
.await;
break;
}
}

View file

@ -1,13 +1,13 @@
use crate::{
file::cas::FileIdentifierJob, library::get_library_path, node::NodeState,
prisma::file as prisma_file, prisma::location, util::db::create_connection,
};
use job::{Job, JobReport, Jobs};
use log::{error, info};
use prisma::PrismaClient;
use crate::{file::cas::FileIdentifierJob, prisma::file as prisma_file, prisma::location};
use job::{JobManager, JobReport};
use library::{LibraryConfig, LibraryConfigWrapped, LibraryManager};
use log::error;
use node::{NodeConfig, NodeConfigManager};
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::sync::Arc;
use std::{
path::{Path, PathBuf},
sync::Arc,
};
use thiserror::Error;
use tokio::{
fs,
@ -37,12 +37,12 @@ pub struct ReturnableMessage<D, R = Result<CoreResponse, CoreError>> {
}
// core controller is passed to the client to communicate with the core which runs in a dedicated thread
pub struct CoreController {
pub struct NodeController {
query_sender: UnboundedSender<ReturnableMessage<ClientQuery>>,
command_sender: UnboundedSender<ReturnableMessage<ClientCommand>>,
}
impl CoreController {
impl NodeController {
pub async fn query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
// a one time use channel to send and await a response
let (sender, recv) = oneshot::channel();
@ -69,48 +69,25 @@ impl CoreController {
}
}
#[derive(Debug)]
pub enum InternalEvent {
JobIngest(Box<dyn Job>),
JobQueue(Box<dyn Job>),
JobComplete(String),
}
#[derive(Clone)]
pub struct CoreContext {
pub database: Arc<PrismaClient>,
pub struct NodeContext {
pub event_sender: mpsc::Sender<CoreEvent>,
pub internal_sender: UnboundedSender<InternalEvent>,
pub config: Arc<NodeConfigManager>,
pub jobs: Arc<JobManager>,
}
impl CoreContext {
pub fn spawn_job(&self, job: Box<dyn Job>) {
self.internal_sender
.send(InternalEvent::JobIngest(job))
.unwrap_or_else(|e| {
error!("Failed to spawn job. {:?}", e);
});
}
pub fn queue_job(&self, job: Box<dyn Job>) {
self.internal_sender
.send(InternalEvent::JobQueue(job))
.unwrap_or_else(|e| {
error!("Failed to queue job. {:?}", e);
});
}
impl NodeContext {
pub async fn emit(&self, event: CoreEvent) {
self.event_sender.send(event).await.unwrap_or_else(|e| {
error!("Failed to emit event. {:?}", e);
error!("Failed to emit event. {:#?}", e);
});
}
}
pub struct Node {
state: NodeState,
jobs: job::Jobs,
database: Arc<PrismaClient>,
// filetype_registry: library::TypeRegistry,
// extension_registry: library::ExtensionRegistry,
config: Arc<NodeConfigManager>,
library_manager: Arc<LibraryManager>,
jobs: Arc<JobManager>,
// global messaging channels
query_channel: (
@ -122,73 +99,56 @@ pub struct Node {
UnboundedReceiver<ReturnableMessage<ClientCommand>>,
),
event_sender: mpsc::Sender<CoreEvent>,
// a channel for child threads to send events back to the core
internal_channel: (
UnboundedSender<InternalEvent>,
UnboundedReceiver<InternalEvent>,
),
}
impl Node {
// create new instance of node, run startup tasks
pub async fn new(mut data_dir: PathBuf) -> (Node, mpsc::Receiver<CoreEvent>) {
let (event_sender, event_recv) = mpsc::channel(100);
data_dir.push("spacedrive");
// create data directory if it doesn't exist
pub async fn new(
data_dir: impl AsRef<Path>,
) -> (NodeController, mpsc::Receiver<CoreEvent>, Node) {
fs::create_dir_all(&data_dir).await.unwrap();
// prepare basic client state
let mut state = NodeState::new(data_dir.clone(), "diamond-mastering-space-dragon").unwrap();
// load from disk
state
.read_disk()
let (event_sender, event_recv) = mpsc::channel(100);
let config = NodeConfigManager::new(data_dir.as_ref().to_owned())
.await
.unwrap_or_else(|_| error!("Error: No node state found, creating new one..."));
state.save().await;
info!("Node State: {:?}", state);
// connect to default library
let database = Arc::new(
create_connection(&get_library_path(&data_dir))
.await
.unwrap(),
);
let internal_channel = unbounded_channel::<InternalEvent>();
let node = Node {
state,
query_channel: unbounded_channel(),
command_channel: unbounded_channel(),
jobs: Jobs::new(),
event_sender,
database,
internal_channel,
.unwrap();
let jobs = JobManager::new();
let node_ctx = NodeContext {
event_sender: event_sender.clone(),
config: config.clone(),
jobs: jobs.clone(),
};
(node, event_recv)
let node = Node {
config,
library_manager: LibraryManager::new(data_dir.as_ref().join("libraries"), node_ctx)
.await
.unwrap(),
query_channel: unbounded_channel(),
command_channel: unbounded_channel(),
jobs,
event_sender,
};
(
NodeController {
query_sender: node.query_channel.0.clone(),
command_sender: node.command_channel.0.clone(),
},
event_recv,
node,
)
}
pub fn get_context(&self) -> CoreContext {
CoreContext {
database: self.database.clone(),
pub fn get_context(&self) -> NodeContext {
NodeContext {
event_sender: self.event_sender.clone(),
internal_sender: self.internal_channel.0.clone(),
config: Arc::clone(&self.config),
jobs: Arc::clone(&self.jobs),
}
}
pub fn get_controller(&self) -> CoreController {
CoreController {
query_sender: self.query_channel.0.clone(),
command_sender: self.command_channel.0.clone(),
}
}
pub async fn start(&mut self) {
let ctx = self.get_context();
pub async fn start(mut self) {
loop {
// listen on global messaging channels for incoming messages
tokio::select! {
@ -200,174 +160,200 @@ impl Node {
let res = self.exec_command(msg.data).await;
msg.return_sender.send(res).unwrap_or(());
}
Some(event) = self.internal_channel.1.recv() => {
match event {
InternalEvent::JobIngest(job) => {
self.jobs.ingest(&ctx, job).await;
},
InternalEvent::JobQueue(job) => {
self.jobs.ingest_queue(&ctx, job);
},
InternalEvent::JobComplete(id) => {
self.jobs.complete(&ctx, id).await;
},
}
}
}
}
}
// load library database + initialize client with db
pub async fn initializer(&self) {
info!("Initializing...");
let ctx = self.get_context();
if self.state.libraries.is_empty() {
match library::create(&ctx, None).await {
Ok(library) => info!("Created new library: {:?}", library),
Err(e) => error!("Error creating library: {:?}", e),
}
} else {
for library in self.state.libraries.iter() {
// init database for library
match library::load(&ctx, &library.library_path, &library.library_uuid).await {
Ok(library) => info!("Loaded library: {:?}", library),
Err(e) => error!("Error loading library: {:?}", e),
}
}
}
// init node data within library
match node::LibraryNode::create(self).await {
Ok(_) => info!("Spacedrive online"),
Err(e) => error!("Error initializing node: {:?}", e),
};
}
async fn exec_command(&mut self, cmd: ClientCommand) -> Result<CoreResponse, CoreError> {
info!("Core command: {:?}", cmd);
let ctx = self.get_context();
Ok(match cmd {
// CRUD for locations
ClientCommand::LocCreate { path } => {
let loc = sys::new_location_and_scan(&ctx, &path).await?;
// ctx.queue_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc)
ClientCommand::CreateLibrary { name } => {
self.library_manager
.create(LibraryConfig {
name: name.to_string(),
..Default::default()
})
.await
.unwrap();
CoreResponse::Success(())
}
ClientCommand::LocUpdate { id, name } => {
ctx.database
.location()
.find_unique(location::id::equals(id))
.update(vec![location::name::set(name)])
.exec()
.await?;
ClientCommand::EditLibrary {
id,
name,
description,
} => {
self.library_manager
.edit_library(id, name, description)
.await
.unwrap();
CoreResponse::Success(())
}
ClientCommand::DeleteLibrary { id } => {
self.library_manager.delete_library(id).await.unwrap();
CoreResponse::Success(())
}
ClientCommand::LibraryCommand {
library_id,
command,
} => {
let ctx = self.library_manager.get_ctx(library_id).await.unwrap();
match command {
// CRUD for locations
LibraryCommand::LocCreate { path } => {
let loc = sys::new_location_and_scan(&ctx, &path).await?;
// ctx.queue_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc)
}
LibraryCommand::LocUpdate { id, name } => {
ctx.db
.location()
.find_unique(location::id::equals(id))
.update(vec![location::name::set(name)])
.exec()
.await?;
CoreResponse::Success(())
}
ClientCommand::LocDelete { id } => {
sys::delete_location(&ctx, id).await?;
CoreResponse::Success(())
}
ClientCommand::LocRescan { id } => {
sys::scan_location(&ctx, id, String::new());
CoreResponse::Success(())
}
// CRUD for files
ClientCommand::FileReadMetaData { id: _ } => todo!(),
ClientCommand::FileSetNote { id, note } => file::set_note(ctx, id, note).await?,
// ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
ClientCommand::FileDelete { id } => {
ctx.database
.file()
.find_unique(prisma_file::id::equals(id))
.delete()
.exec()
.await?;
CoreResponse::Success(())
}
LibraryCommand::LocDelete { id } => {
sys::delete_location(&ctx, id).await?;
CoreResponse::Success(())
}
LibraryCommand::LocRescan { id } => {
sys::scan_location(&ctx, id, String::new()).await;
CoreResponse::Success(())
}
// CRUD for files
LibraryCommand::FileReadMetaData { id: _ } => todo!(),
LibraryCommand::FileSetNote { id, note } => {
file::set_note(ctx, id, note).await?
}
// ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
LibraryCommand::FileDelete { id } => {
ctx.db
.file()
.find_unique(prisma_file::id::equals(id))
.delete()
.exec()
.await?;
CoreResponse::Success(())
}
// CRUD for tags
ClientCommand::TagCreate { name: _, color: _ } => todo!(),
ClientCommand::TagAssign {
file_id: _,
tag_id: _,
} => todo!(),
ClientCommand::TagDelete { id: _ } => todo!(),
// CRUD for libraries
ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
ClientCommand::LibDelete { id: _ } => todo!(),
ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
ClientCommand::GenerateThumbsForLocation { id, path } => {
ctx.spawn_job(Box::new(ThumbnailJob {
location_id: id,
path,
background: false, // fix
}));
CoreResponse::Success(())
}
// ClientCommand::PurgeDatabase => {
// info!("Purging database...");
// fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
// CoreResponse::Success(())
// }
ClientCommand::IdentifyUniqueFiles { id, path } => {
ctx.spawn_job(Box::new(FileIdentifierJob {
location_id: id,
path,
}));
CoreResponse::Success(())
CoreResponse::Success(())
}
// CRUD for tags
LibraryCommand::TagCreate { name: _, color: _ } => todo!(),
LibraryCommand::TagAssign {
file_id: _,
tag_id: _,
} => todo!(),
LibraryCommand::TagUpdate { name: _, color: _ } => todo!(),
LibraryCommand::TagDelete { id: _ } => todo!(),
// CRUD for libraries
LibraryCommand::SysVolumeUnmount { id: _ } => todo!(),
LibraryCommand::GenerateThumbsForLocation { id, path } => {
ctx.spawn_job(Box::new(ThumbnailJob {
location_id: id,
path,
background: false, // fix
}))
.await;
CoreResponse::Success(())
}
LibraryCommand::IdentifyUniqueFiles { id, path } => {
ctx.spawn_job(Box::new(FileIdentifierJob {
location_id: id,
path,
}))
.await;
CoreResponse::Success(())
}
}
}
})
}
// query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
let ctx = self.get_context();
Ok(match query {
// return the client state from memory
ClientQuery::NodeGetState => CoreResponse::NodeGetState(self.state.clone()),
// get system volumes without saving to library
ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?),
ClientQuery::SysGetLocations => {
CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?)
}
// get location from library
ClientQuery::SysGetLocation { id } => {
CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?)
}
// return contents of a directory for the explorer
ClientQuery::LibGetExplorerDir {
path,
location_id,
limit: _,
} => CoreResponse::LibGetExplorerDir(
file::explorer::open_dir(&ctx, location_id, &path).await?,
ClientQuery::NodeGetLibraries => CoreResponse::NodeGetLibraries(
self.library_manager.get_all_libraries_config().await,
),
ClientQuery::LibGetTags => todo!(),
ClientQuery::NodeGetState => CoreResponse::NodeGetState(NodeState {
config: self.config.get().await,
data_path: self.config.data_directory().to_str().unwrap().to_string(),
}),
ClientQuery::SysGetVolumes => CoreResponse::SysGetVolumes(sys::Volume::get_volumes()?),
ClientQuery::JobGetRunning => {
CoreResponse::JobGetRunning(self.jobs.get_running().await)
}
ClientQuery::JobGetHistory => {
CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?)
}
ClientQuery::GetLibraryStatistics => {
CoreResponse::GetLibraryStatistics(library::Statistics::calculate(&ctx).await?)
}
ClientQuery::GetNodes => todo!(),
ClientQuery::LibraryQuery { library_id, query } => {
let ctx = match self.library_manager.get_ctx(library_id.clone()).await {
Some(ctx) => ctx,
None => {
println!("Library '{}' not found!", library_id);
return Ok(CoreResponse::Error("Library not found".into()));
}
};
match query {
LibraryQuery::SysGetLocations => {
CoreResponse::SysGetLocations(sys::get_locations(&ctx).await?)
}
// get location from library
LibraryQuery::SysGetLocation { id } => {
CoreResponse::SysGetLocation(sys::get_location(&ctx, id).await?)
}
// return contents of a directory for the explorer
LibraryQuery::LibGetExplorerDir {
location_id,
path,
limit: _,
} => CoreResponse::LibGetExplorerDir(Box::new(
file::explorer::open_dir(&ctx, location_id, path).await?,
)),
LibraryQuery::LibGetTags => todo!(),
LibraryQuery::JobGetHistory => {
CoreResponse::JobGetHistory(JobManager::get_history(&ctx).await?)
}
LibraryQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics(
library::Statistics::calculate(&ctx).await?,
),
}
}
})
}
}
// represents an event this library can emit
/// is a command destined for the core
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientCommand {
	// Libraries
	// create a new library with the given display name
	CreateLibrary {
		name: String,
	},
	// update a library's name and/or description; a `None` field is left unchanged
	EditLibrary {
		id: String,
		name: Option<String>,
		description: Option<String>,
	},
	// unmount a library and delete its on-disk config and database files
	DeleteLibrary {
		id: String,
	},
	// wrap a command that targets one specific loaded library
	LibraryCommand {
		library_id: String,
		command: LibraryCommand,
	},
}
/// is a command destined for a specific library which is loaded into the core.
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum LibraryCommand {
// Files
FileReadMetaData { id: i32 },
FileSetNote { id: i32, note: Option<String> },
// FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
FileDelete { id: i32 },
// Library
LibDelete { id: i32 },
// Tags
TagCreate { name: String, color: String },
TagUpdate { name: String, color: String },
@ -385,15 +371,28 @@ pub enum ClientCommand {
IdentifyUniqueFiles { id: i32, path: PathBuf },
}
// represents an event this library can emit
/// is a query destined for the core
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientQuery {
	// list the configs of all libraries known to this node
	NodeGetLibraries,
	// current node configuration plus its data directory path
	NodeGetState,
	// enumerate the system's storage volumes
	SysGetVolumes,
	// jobs currently being executed by the job manager
	JobGetRunning,
	// NOTE(review): handler is still todo!() — not implemented yet
	GetNodes,
	// wrap a query that targets one specific loaded library
	LibraryQuery {
		library_id: String,
		query: LibraryQuery,
	},
}
/// is a query destined for a specific library which is loaded into the core.
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum LibraryQuery {
LibGetTags,
JobGetHistory,
SysGetLocations,
SysGetLocation {
@ -401,11 +400,10 @@ pub enum ClientQuery {
},
LibGetExplorerDir {
location_id: i32,
path: String,
path: PathBuf,
limit: i32,
},
GetLibraryStatistics,
GetNodes,
}
// represents an event this library can emit
@ -422,15 +420,25 @@ pub enum CoreEvent {
DatabaseDisconnected { reason: Option<String> },
}
/// Snapshot of the node's configuration returned to the frontend.
#[derive(Serialize, Deserialize, Debug, TS)]
#[ts(export)]
pub struct NodeState {
	// full node configuration, inlined into the serialized form
	#[serde(flatten)]
	pub config: NodeConfig,
	// path of the node's data directory, stringified for serialization
	pub data_path: String,
}
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "data")]
#[ts(export)]
pub enum CoreResponse {
Success(()),
Error(String),
NodeGetLibraries(Vec<LibraryConfigWrapped>),
SysGetVolumes(Vec<sys::Volume>),
SysGetLocation(sys::LocationResource),
SysGetLocations(Vec<sys::LocationResource>),
LibGetExplorerDir(file::DirectoryWithContents),
LibGetExplorerDir(Box<file::DirectoryWithContents>),
NodeGetState(NodeState),
LocCreate(sys::LocationResource),
JobGetRunning(Vec<JobReport>),

View file

@ -0,0 +1,69 @@
use std::{
fs::File,
io::{BufReader, Seek, SeekFrom},
path::PathBuf,
};
use serde::{Deserialize, Serialize};
use std::io::Write;
use ts_rs::TS;
use crate::node::ConfigMetadata;
use super::LibraryManagerError;
/// LibraryConfig holds the configuration for a specific library. This is stored as a '{uuid}.sdlibrary' file.
#[derive(Debug, Serialize, Deserialize, Clone, TS, Default)]
#[ts(export)]
pub struct LibraryConfig {
	// schema version info, inlined so migrations can run before the rest is parsed
	#[serde(flatten)]
	pub metadata: ConfigMetadata,
	/// name is the display name of the library. This is used in the UI and is set by the user.
	pub name: String,
	/// description is a user set description of the library. This is used in the UI and is set by the user.
	pub description: String,
}
impl LibraryConfig {
	/// Load a library configuration from `file_dir`.
	///
	/// The metadata header is parsed first so migrations can run before the
	/// full structure is deserialized from the same (rewound) handle.
	pub(super) async fn read(file_dir: PathBuf) -> Result<LibraryConfig, LibraryManagerError> {
		let mut handle = File::open(&file_dir)?;
		let metadata: ConfigMetadata = serde_json::from_reader(BufReader::new(&mut handle))?;
		Self::migrate_config(metadata.version, file_dir)?;
		handle.seek(SeekFrom::Start(0))?;
		let config = serde_json::from_reader(BufReader::new(&mut handle))?;
		Ok(config)
	}

	/// Persist `config` as JSON at `file_dir`, replacing any existing file.
	pub(super) async fn save(
		file_dir: PathBuf,
		config: &LibraryConfig,
	) -> Result<(), LibraryManagerError> {
		let serialized = serde_json::to_string(config)?;
		File::create(file_dir)?.write_all(serialized.as_bytes())?;
		Ok(())
	}

	/// Apply breaking-change migrations to an on-disk library config.
	///
	/// A config without a `version` field predates versioning and cannot be
	/// migrated, so it is rejected with a descriptive error.
	fn migrate_config(
		current_version: Option<String>,
		config_path: PathBuf,
	) -> Result<(), LibraryManagerError> {
		if current_version.is_none() {
			return Err(LibraryManagerError::Migration(format!(
				"Your Spacedrive library at '{}' is missing the `version` field",
				config_path.display()
			)));
		}
		Ok(())
	}
}
// used to return to the frontend with uuid context
#[derive(Serialize, Deserialize, Debug, TS)]
#[ts(export)]
pub struct LibraryConfigWrapped {
	// string form of the library's Uuid
	pub uuid: String,
	// the library's configuration as read from its '{uuid}.sdlibrary' file
	pub config: LibraryConfig,
}

View file

@ -0,0 +1,46 @@
use std::sync::Arc;
use uuid::Uuid;
use crate::{job::Job, node::NodeConfigManager, prisma::PrismaClient, CoreEvent, NodeContext};
use super::LibraryConfig;
/// LibraryContext holds context for a library which can be passed around the application.
/// Cloning is cheap: the database client is reference-counted (`Arc`).
#[derive(Clone)]
pub struct LibraryContext {
	/// id holds the ID of the current library.
	pub id: Uuid,
	/// config holds the configuration of the current library.
	pub config: LibraryConfig,
	/// db holds the database client for the current library.
	pub db: Arc<PrismaClient>,
	/// node_local_id holds the local ID of the node which is running the library.
	pub node_local_id: i32,
	/// node_context holds the node context for the node which this library is running on.
	pub(super) node_context: NodeContext,
}
impl LibraryContext {
	/// Hand `job` to the node's job manager for ingestion within this library.
	pub(crate) async fn spawn_job(&self, job: Box<dyn Job>) {
		let manager = self.node_context.jobs.clone();
		manager.ingest(self, job).await;
	}

	/// Put `job` onto the job manager's queue for later execution.
	pub(crate) async fn queue_job(&self, job: Box<dyn Job>) {
		self.node_context.jobs.ingest_queue(self, job).await;
	}

	/// Forward `event` to the node-wide event channel; a send failure is
	/// logged rather than propagated.
	pub(crate) async fn emit(&self, event: CoreEvent) {
		if let Err(e) = self.node_context.event_sender.send(event).await {
			println!("Failed to emit event. {:?}", e);
		}
	}

	/// Shared handle to the node's configuration manager.
	pub(crate) fn config(&self) -> Arc<NodeConfigManager> {
		Arc::clone(&self.node_context.config)
	}
}

View file

@ -0,0 +1,264 @@
use std::{
env, fs, io,
path::{Path, PathBuf},
str::FromStr,
sync::Arc,
};
use thiserror::Error;
use tokio::sync::RwLock;
use uuid::Uuid;
use crate::{
node::Platform,
prisma::{self, node},
util::db::load_and_migrate,
ClientQuery, CoreEvent, NodeContext,
};
use super::{LibraryConfig, LibraryConfigWrapped, LibraryContext};
/// LibraryManager is a singleton that manages all libraries for a node.
pub struct LibraryManager {
	/// libraries_dir holds the path to the directory where libraries are stored.
	libraries_dir: PathBuf,
	/// libraries holds the list of libraries which are currently loaded into the node.
	/// Guarded by an async RwLock since it is read and mutated concurrently.
	libraries: RwLock<Vec<LibraryContext>>,
	/// node_context holds the context for the node which this library manager is running on.
	node_context: NodeContext,
}
/// Errors that can occur while loading, saving, or mutating libraries.
#[derive(Error, Debug)]
pub enum LibraryManagerError {
	#[error("error saving or loading the config from the filesystem")]
	IO(#[from] io::Error),
	#[error("error serializing or deserializing the JSON in the config file")]
	Json(#[from] serde_json::Error),
	#[error("Database error")]
	Database(#[from] prisma::QueryError),
	#[error("Library not found error")]
	LibraryNotFound,
	#[error("error migrating the config file")]
	Migration(String),
	#[error("failed to parse uuid")]
	Uuid(#[from] uuid::Error),
}
impl LibraryManager {
	/// Scan `libraries_dir` for '{uuid}.sdlibrary' config files, mount every
	/// library that also has a matching '{uuid}.db' database file, and return
	/// the populated manager. Entries with an invalid filename or a missing
	/// database are skipped with a message rather than aborting startup.
	pub(crate) async fn new(
		libraries_dir: PathBuf,
		node_context: NodeContext,
	) -> Result<Arc<Self>, LibraryManagerError> {
		fs::create_dir_all(&libraries_dir)?;
		let mut libraries = Vec::new();
		// `ReadDir` is already an iterator; the previous `.into_iter()` was redundant
		for entry in fs::read_dir(&libraries_dir)?
			.filter_map(|entry| entry.ok())
			.filter(|entry| {
				entry.path().is_file()
					&& entry
						.path()
						.extension()
						.map(|v| &*v == "sdlibrary")
						.unwrap_or(false)
			}) {
			let config_path = entry.path();
			// the library id is the config file's stem: '{uuid}.sdlibrary'
			let library_id = match Path::new(&config_path)
				.file_stem()
				.map(|v| v.to_str().map(Uuid::from_str))
			{
				Some(Some(Ok(id))) => id,
				_ => {
					println!("Attempted to load library from path '{}' but it has an invalid filename. Skipping...", config_path.display());
					continue;
				}
			};
			let db_path = config_path.clone().with_extension("db");
			if !db_path.exists() {
				println!(
					"Found library '{}' but no matching database file was found. Skipping...",
					config_path.display()
				);
				continue;
			}
			let config = LibraryConfig::read(config_path).await?;
			libraries.push(
				Self::load(
					library_id,
					db_path.to_str().unwrap(),
					config,
					node_context.clone(),
				)
				.await?,
			);
		}
		let this = Arc::new(Self {
			libraries: RwLock::new(libraries),
			libraries_dir,
			node_context,
		});
		// TODO: Remove this before merging PR -> Currently it exists to make the app usable
		if this.libraries.read().await.is_empty() {
			this.create(LibraryConfig {
				name: "My Default Library".into(),
				..Default::default()
			})
			.await
			.unwrap();
		}
		Ok(this)
	}
	/// create creates a new library with the given config and mounts it into the running [LibraryManager].
	pub(crate) async fn create(&self, config: LibraryConfig) -> Result<(), LibraryManagerError> {
		let id = Uuid::new_v4();
		// write the config file before mounting so a failure never leaves a
		// mounted library without on-disk configuration
		LibraryConfig::save(
			Path::new(&self.libraries_dir).join(format!("{id}.sdlibrary")),
			&config,
		)
		.await?;
		let library = Self::load(
			id,
			self.libraries_dir.join(format!("{id}.db")),
			config,
			self.node_context.clone(),
		)
		.await?;
		self.libraries.write().await.push(library);
		// tell the frontend the library list changed
		self.node_context
			.emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries))
			.await;
		Ok(())
	}
	/// Snapshot of every mounted library's config, wrapped with its uuid for the frontend.
	pub(crate) async fn get_all_libraries_config(&self) -> Vec<LibraryConfigWrapped> {
		self.libraries
			.read()
			.await
			.iter()
			.map(|lib| LibraryConfigWrapped {
				config: lib.config.clone(),
				uuid: lib.id.to_string(),
			})
			.collect()
	}
	/// Update the name and/or description of a mounted library (a `None` field
	/// is left untouched) and persist the result to its config file.
	pub(crate) async fn edit_library(
		&self,
		id: String,
		name: Option<String>,
		description: Option<String>,
	) -> Result<(), LibraryManagerError> {
		// check library is valid; an unparsable uuid is now reported as an
		// error (as delete_library does) instead of panicking via `.unwrap()`
		let uuid = Uuid::parse_str(&id)?;
		let mut libraries = self.libraries.write().await;
		let library = libraries
			.iter_mut()
			.find(|lib| lib.id == uuid)
			.ok_or(LibraryManagerError::LibraryNotFound)?;
		// update the library
		if let Some(name) = name {
			library.config.name = name;
		}
		if let Some(description) = description {
			library.config.description = description;
		}
		LibraryConfig::save(
			Path::new(&self.libraries_dir).join(format!("{id}.sdlibrary")),
			&library.config,
		)
		.await?;
		self.node_context
			.emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries))
			.await;
		Ok(())
	}
	/// Unmount the library and delete both its database and its config file from disk.
	pub async fn delete_library(&self, id: String) -> Result<(), LibraryManagerError> {
		let mut libraries = self.libraries.write().await;
		let id = Uuid::parse_str(&id)?;
		let library = libraries
			.iter()
			.find(|l| l.id == id)
			.ok_or(LibraryManagerError::LibraryNotFound)?;
		fs::remove_file(Path::new(&self.libraries_dir).join(format!("{}.db", library.id)))?;
		fs::remove_file(Path::new(&self.libraries_dir).join(format!("{}.sdlibrary", library.id)))?;
		// drop the in-memory handle only after the files are gone
		libraries.retain(|l| l.id != id);
		self.node_context
			.emit(CoreEvent::InvalidateQuery(ClientQuery::NodeGetLibraries))
			.await;
		Ok(())
	}
	// get_ctx will return the library context for the given library id.
	pub(crate) async fn get_ctx(&self, library_id: String) -> Option<LibraryContext> {
		self.libraries
			.read()
			.await
			.iter()
			.find(|lib| lib.id.to_string() == library_id)
			.map(Clone::clone)
	}
	/// load the library from a given path
	pub(crate) async fn load(
		id: Uuid,
		db_path: impl AsRef<Path>,
		config: LibraryConfig,
		node_context: NodeContext,
	) -> Result<LibraryContext, LibraryManagerError> {
		let db = Arc::new(
			load_and_migrate(&format!("file:{}", db_path.as_ref().to_string_lossy()))
				.await
				.unwrap(),
		);
		let node_config = node_context.config.get().await;
		let platform = match env::consts::OS {
			"windows" => Platform::Windows,
			"macos" => Platform::MacOS,
			"linux" => Platform::Linux,
			_ => Platform::Unknown,
		};
		// register (or refresh) this node inside the library's own database,
		// obtaining the node's library-local id
		let node_data = db
			.node()
			.upsert(
				node::pub_id::equals(id.to_string()),
				(
					node::pub_id::set(id.to_string()),
					node::name::set(node_config.name.clone()),
					vec![node::platform::set(platform as i32)],
				),
				vec![node::name::set(node_config.name.clone())],
			)
			.exec()
			.await?;
		Ok(LibraryContext {
			id,
			config,
			db,
			node_local_id: node_data.id,
			node_context,
		})
	}
}

View file

@ -1,102 +1 @@
use log::info;
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use uuid::Uuid;
use crate::{
node::{get_nodestate, LibraryState},
prisma::library,
util::db::{run_migrations, DatabaseError},
CoreContext,
};
/// file name of a library's database inside the data directory
pub static LIBRARY_DB_NAME: &str = "library.db";
/// display name used when a library is created without an explicit name
pub static DEFAULT_NAME: &str = "My Library";
/// Path of the library database file inside `data_path`.
pub fn get_library_path(data_path: impl AsRef<Path>) -> PathBuf {
	// the library database always lives directly inside the data directory
	let base = data_path.as_ref();
	base.join(LIBRARY_DB_NAME)
}
// pub async fn get(core: &Node) -> Result<library::Data, LibraryError> {
// let config = get_nodestate();
// let db = &core.database;
// let library_state = config.get_current_library();
// info!("{:?}", library_state);
// // get library from db
// let library = match db
// .library()
// .find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
// .exec()
// .await?
// {
// Some(library) => Ok(library),
// None => {
// // update config library state to offline
// // config.libraries
// Err(anyhow::anyhow!("library_not_found"))
// }
// };
// Ok(library.unwrap())
// }
/// Switch the node's current library to `library_id` and run database migrations.
pub async fn load(
	ctx: &CoreContext,
	library_path: impl AsRef<Path> + Debug,
	library_id: &str,
) -> Result<(), DatabaseError> {
	let mut config = get_nodestate();
	info!("Initializing library: {} {:#?}", &library_id, library_path);
	// persist the new current-library uuid only when it actually changed
	if config.current_library_uuid != library_id {
		config.current_library_uuid = library_id.to_string();
		config.save().await;
	}
	// create connection with library database & run migrations
	run_migrations(ctx).await?;
	// if doesn't exist, mark as offline
	Ok(())
}
/// Create a new library with a fresh UUID: register it in the node state,
/// run migrations, and insert a matching `library` row in the database.
pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<(), ()> {
	let mut config = get_nodestate();
	let uuid = Uuid::new_v4().to_string();
	info!("Creating library {:?}, UUID: {:?}", name, uuid);
	let library_state = LibraryState {
		library_uuid: uuid.clone(),
		library_path: get_library_path(config.data_path.as_ref().unwrap()),
		..LibraryState::default()
	};
	run_migrations(ctx).await.unwrap();
	config.libraries.push(library_state);
	// the newly created library becomes the current one
	config.current_library_uuid = uuid;
	config.save().await;
	let library = ctx
		.database
		.library()
		.create(
			library::pub_id::set(config.current_library_uuid),
			library::name::set(name.unwrap_or_else(|| DEFAULT_NAME.into())),
			vec![],
		)
		.exec()
		.await
		.unwrap();
	info!("library created in database: {:?}", library);
	Ok(())
}

View file

@ -1,7 +1,11 @@
mod loader;
mod library_config;
mod library_ctx;
mod library_manager;
mod statistics;
pub use loader::*;
pub use library_config::*;
pub use library_ctx::*;
pub use library_manager::*;
pub use statistics::*;
use thiserror::Error;

View file

@ -1,16 +1,10 @@
use crate::{
node::get_nodestate,
prisma::{library, library_statistics::*},
sys::Volume,
CoreContext,
};
use crate::{prisma::statistics::*, sys::Volume};
use fs_extra::dir::get_size;
use log::info;
use serde::{Deserialize, Serialize};
use tokio::fs;
use ts_rs::TS;
use super::LibraryError;
use super::{LibraryContext, LibraryError};
#[derive(Debug, Serialize, Deserialize, TS, Clone, Default)]
#[ts(export)]
@ -39,46 +33,22 @@ impl From<Data> for Statistics {
}
impl Statistics {
pub async fn retrieve(ctx: &CoreContext) -> Result<Statistics, LibraryError> {
let config = get_nodestate();
let library_data = config.get_current_library();
pub async fn retrieve(ctx: &LibraryContext) -> Result<Statistics, LibraryError> {
let library_statistics_db = ctx
.database
.library_statistics()
.find_unique(id::equals(library_data.library_id))
.db
.statistics()
.find_unique(id::equals(ctx.node_local_id))
.exec()
.await?
.map_or_else(Default::default, Into::into);
Ok(library_statistics_db)
}
pub async fn calculate(ctx: &CoreContext) -> Result<Statistics, LibraryError> {
let config = get_nodestate();
// get library from client state
let library_data = config.get_current_library();
info!(
"Calculating library statistics {:?}",
library_data.library_uuid
);
// get library from db
let library = ctx
.database
.library()
.find_unique(library::pub_id::equals(
library_data.library_uuid.to_string(),
))
.exec()
.await?;
if library.is_none() {
return Err(LibraryError::LibraryNotFound);
}
let library_statistics = ctx
.database
.library_statistics()
.find_unique(id::equals(library_data.library_id))
pub async fn calculate(ctx: &LibraryContext) -> Result<Statistics, LibraryError> {
let _statistics = ctx
.db
.statistics()
.find_unique(id::equals(ctx.node_local_id))
.exec()
.await?;
@ -97,14 +67,12 @@ impl Statistics {
}
}
let library_db_size = match fs::metadata(library_data.library_path).await {
let library_db_size = match fs::metadata(ctx.config().data_directory()).await {
Ok(metadata) => metadata.len(),
Err(_) => 0,
};
info!("{:?}", library_statistics);
let thumbnail_folder_size = get_size(config.data_path.unwrap().join("thumbnails"));
let thumbnail_folder_size = get_size(ctx.config().data_directory().join("thumbnails"));
let statistics = Statistics {
library_db_size: library_db_size.to_string(),
@ -114,19 +82,11 @@ impl Statistics {
..Statistics::default()
};
let library_local_id = match library {
Some(library) => library.id,
None => library_data.library_id,
};
ctx.database
.library_statistics()
ctx.db
.statistics()
.upsert(
library_id::equals(library_local_id),
(
library_id::set(library_local_id),
vec![library_db_size::set(statistics.library_db_size.clone())],
),
id::equals(1),
vec![library_db_size::set(statistics.library_db_size.clone())],
vec![
total_file_count::set(statistics.total_file_count),
total_bytes_used::set(statistics.total_bytes_used.clone()),

149
core/src/node/config.rs Normal file
View file

@ -0,0 +1,149 @@
use serde::{Deserialize, Serialize};
use std::fs::File;
use std::io::{self, BufReader, Seek, SeekFrom, Write};
use std::path::{Path, PathBuf};
use std::sync::Arc;
use thiserror::Error;
use tokio::sync::{RwLock, RwLockWriteGuard};
use ts_rs::TS;
use uuid::Uuid;
/// NODE_STATE_CONFIG_NAME is the name of the file which stores the NodeState (serialized as JSON).
pub const NODE_STATE_CONFIG_NAME: &str = "node_state.sdconfig";
/// ConfigMetadata is a part of node configuration that is loaded before the main configuration and contains information about the schema of the config.
/// This allows us to migrate breaking changes to the config format between Spacedrive releases.
#[derive(Debug, Serialize, Deserialize, Clone, TS)]
#[ts(export)]
pub struct ConfigMetadata {
	/// version of Spacedrive. Determined from `CARGO_PKG_VERSION` environment variable.
	/// A missing version is rejected by `migrate_config` as unmigratable.
	pub version: Option<String>,
}
impl Default for ConfigMetadata {
	/// Stamp new configs with the Spacedrive version they were created by.
	fn default() -> Self {
		let current_version = env!("CARGO_PKG_VERSION").to_string();
		Self {
			version: Some(current_version),
		}
	}
}
/// NodeConfig is the configuration for a node. This is shared between all libraries and is stored in a JSON file on disk.
#[derive(Debug, Serialize, Deserialize, Clone, TS)]
#[ts(export)]
pub struct NodeConfig {
	// schema version info, checked by `migrate_config` before the rest is parsed
	#[serde(flatten)]
	pub metadata: ConfigMetadata,
	/// id is a unique identifier for the current node. Each node has a public identifier (this one) and is given a local id for each library (done within the library code).
	pub id: Uuid,
	/// name is the display name of the current node. This is set by the user and is shown in the UI. // TODO: Length validation so it can fit in DNS record
	pub name: String,
	/// the port this node uses for peer to peer communication. By default a random free port will be chosen each time the application is started.
	pub p2p_port: Option<u32>,
}
/// Errors that can occur while reading, writing, or migrating the node config file.
#[derive(Error, Debug)]
pub enum NodeConfigError {
	#[error("error saving or loading the config from the filesystem")]
	IO(#[from] io::Error),
	#[error("error serializing or deserializing the JSON in the config file")]
	Json(#[from] serde_json::Error),
	#[error("error migrating the config file")]
	Migration(String),
}
impl NodeConfig {
fn default() -> Self {
NodeConfig {
id: Uuid::new_v4(),
name: match hostname::get() {
Ok(hostname) => hostname.to_string_lossy().into_owned(),
Err(err) => {
eprintln!("Falling back to default node name as an error occurred getting your systems hostname: '{}'", err);
"my-spacedrive".into()
}
},
p2p_port: None,
metadata: ConfigMetadata {
version: Some(env!("CARGO_PKG_VERSION").into()),
},
}
}
}
/// NodeConfigManager pairs the in-memory node config (behind an async RwLock,
/// field 0) with the path of the data directory holding the config file (field 1).
pub struct NodeConfigManager(RwLock<NodeConfig>, PathBuf);
impl NodeConfigManager {
	/// new will create a new NodeConfigManager with the given path to the config file.
	/// The config is read from disk (or created with defaults) before returning.
	pub(crate) async fn new(data_path: PathBuf) -> Result<Arc<Self>, NodeConfigError> {
		Ok(Arc::new(Self(
			RwLock::new(Self::read(&data_path).await?),
			data_path,
		)))
	}
	/// get will return the current NodeConfig in a read only state.
	pub(crate) async fn get(&self) -> NodeConfig {
		self.0.read().await.clone()
	}
	/// data_directory returns the path to the directory storing the configuration data.
	pub(crate) fn data_directory(&self) -> PathBuf {
		self.1.clone()
	}
	/// write allows the user to update the configuration. This is done in a closure while the RwLock write guard is held so that the user can't cause a race condition if the config were to be updated in multiple parts of the app at the same time.
	#[allow(unused)]
	pub(crate) async fn write<F: FnOnce(RwLockWriteGuard<NodeConfig>)>(
		&self,
		mutation_fn: F,
	) -> Result<NodeConfig, NodeConfigError> {
		// the write guard is consumed (and dropped) by `mutation_fn`, so the
		// read lock acquired below cannot deadlock against it
		mutation_fn(self.0.write().await);
		let config = self.0.read().await;
		Self::save(&self.1, &config).await?;
		Ok(config.clone())
	}
	/// read will read the configuration from disk and return it.
	/// If no config file exists yet, a default one is created, saved, and returned.
	async fn read(base_path: &PathBuf) -> Result<NodeConfig, NodeConfigError> {
		let path = Path::new(base_path).join(NODE_STATE_CONFIG_NAME);
		match path.exists() {
			true => {
				let mut file = File::open(&path)?;
				// parse only the metadata header first so migrations can run
				// before the full config is deserialized
				let base_config: ConfigMetadata =
					serde_json::from_reader(BufReader::new(&mut file))?;
				Self::migrate_config(base_config.version, path)?;
				// rewind and parse the whole file from the same handle
				file.seek(SeekFrom::Start(0))?;
				Ok(serde_json::from_reader(BufReader::new(&mut file))?)
			}
			false => {
				let config = NodeConfig::default();
				Self::save(base_path, &config).await?;
				Ok(config)
			}
		}
	}
	/// save will write the configuration back to disk
	async fn save(base_path: &PathBuf, config: &NodeConfig) -> Result<(), NodeConfigError> {
		let path = Path::new(base_path).join(NODE_STATE_CONFIG_NAME);
		File::create(path)?.write_all(serde_json::to_string(config)?.as_bytes())?;
		Ok(())
	}
	/// migrate_config is a function used to apply breaking changes to the config file.
	/// A config with no `version` field cannot be migrated and is rejected.
	fn migrate_config(
		current_version: Option<String>,
		config_path: PathBuf,
	) -> Result<(), NodeConfigError> {
		match current_version {
			None => {
				Err(NodeConfigError::Migration(format!("Your Spacedrive config file stored at '{}' is missing the `version` field. If you just upgraded please delete the file and restart Spacedrive! Please note this upgrade will stop using your old 'library.db' as the folder structure has changed.", config_path.display())))
			}
			_ => Ok(()),
		}
	}
}

View file

@ -1,18 +1,10 @@
use crate::{
prisma::{self, node},
Node,
};
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
use log::info;
use serde::{Deserialize, Serialize};
use std::env;
use thiserror::Error;
use ts_rs::TS;
mod state;
pub use state::*;
mod config;
use crate::prisma::node;
pub use config::*;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
@ -52,64 +44,3 @@ pub enum Platform {
IOS = 4,
Android = 5,
}
impl LibraryNode {
pub async fn create(node: &Node) -> Result<(), NodeError> {
info!("Creating node...");
let mut config = state::get_nodestate();
let hostname = match hostname::get() {
Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(),
Err(_) => "unknown".to_owned(),
};
let platform = match env::consts::OS {
"windows" => Platform::Windows,
"macos" => Platform::MacOS,
"linux" => Platform::Linux,
_ => Platform::Unknown,
};
let node = if let Some(node) = node
.database
.node()
.find_unique(node::pub_id::equals(config.node_pub_id.clone()))
.exec()
.await?
{
node
} else {
node.database
.node()
.create(
node::pub_id::set(config.node_pub_id.clone()),
node::name::set(hostname.clone()),
vec![node::platform::set(platform as i32)],
)
.exec()
.await?
};
config.node_name = hostname;
config.node_id = node.id;
config.save().await;
info!("node: {:?}", node);
Ok(())
}
// pub async fn get_nodes(ctx: &CoreContext) -> Result<Vec<node::Data>, NodeError> {
// let db = &ctx.database;
// let _node = db.node().find_many(vec![]).exec().await?;
// Ok(_node)
// }
}
#[derive(Error, Debug)]
pub enum NodeError {
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
}

View file

@ -1,109 +1 @@
use lazy_static::lazy_static;
use serde::{Deserialize, Serialize};
use std::path::PathBuf;
use std::sync::RwLock;
use tokio::io::AsyncReadExt;
use tokio::{
fs,
io::{AsyncWriteExt, BufReader},
};
use ts_rs::TS;
use uuid::Uuid;
#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
#[ts(export)]
pub struct NodeState {
pub node_pub_id: String,
pub node_id: i32,
pub node_name: String,
// config path is stored as struct can exist only in memory during startup and be written to disk later without supplying path
pub data_path: Option<PathBuf>,
// the port this node uses to listen for incoming connections
pub tcp_port: u32,
// all the libraries loaded by this node
pub libraries: Vec<LibraryState>,
// used to quickly find the default library
pub current_library_uuid: String,
}
pub static NODE_STATE_CONFIG_NAME: &str = "node_state.json";
#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
#[ts(export)]
pub struct LibraryState {
pub library_uuid: String,
pub library_id: i32,
pub library_path: PathBuf,
pub offline: bool,
}
// global, thread-safe storage for node state
lazy_static! {
static ref CONFIG: RwLock<Option<NodeState>> = RwLock::new(None);
}
pub fn get_nodestate() -> NodeState {
if let Ok(guard) = CONFIG.read() {
guard.clone().unwrap_or_default()
} else {
NodeState::default()
}
}
impl NodeState {
pub fn new(data_path: PathBuf, node_name: &str) -> Result<Self, ()> {
let uuid = Uuid::new_v4().to_string();
// create struct and assign defaults
let config = Self {
node_pub_id: uuid,
data_path: Some(data_path),
node_name: node_name.to_string(),
..Default::default()
};
Ok(config)
}
pub async fn save(&self) {
self.write_memory();
// only write to disk if config path is set
if let Some(ref data_path) = self.data_path {
let config_path = data_path.join(NODE_STATE_CONFIG_NAME);
let mut file = fs::File::create(config_path).await.unwrap();
let json = serde_json::to_string(&self).unwrap();
file.write_all(json.as_bytes()).await.unwrap();
}
}
pub async fn read_disk(&mut self) -> Result<(), ()> {
if let Some(ref data_path) = self.data_path {
let config_path = data_path.join(NODE_STATE_CONFIG_NAME);
// open the file and parse json
if let Ok(file) = fs::File::open(config_path).await {
let mut buf = vec![];
let bytes = BufReader::new(file).read_to_end(&mut buf).await.unwrap();
let data = serde_json::from_slice(&buf[..bytes]).unwrap();
// assign to self
*self = data;
}
}
Ok(())
}
fn write_memory(&self) {
let mut writeable = CONFIG.write().unwrap();
*writeable = Some(self.clone());
}
pub fn get_current_library(&self) -> LibraryState {
self.libraries
.iter()
.find(|lib| lib.library_uuid == self.current_library_uuid)
.cloned()
.unwrap_or_default()
}
pub fn get_current_library_db_path(&self) -> PathBuf {
self.get_current_library().library_path.join("library.db")
}
}

View file

@ -1,8 +1,9 @@
use crate::{
file::{cas::FileIdentifierJob, indexer::IndexerJob},
node::{get_nodestate, LibraryNode},
library::LibraryContext,
node::LibraryNode,
prisma::{file_path, location},
ClientQuery, CoreContext, CoreEvent,
ClientQuery, CoreEvent, LibraryQuery,
};
use log::info;
@ -10,10 +11,9 @@ use serde::{Deserialize, Serialize};
use std::fmt::Debug;
use std::path::{Path, PathBuf};
use thiserror::Error;
use tokio::io::AsyncWriteExt;
use tokio::{
fs::{metadata, File},
io,
io::{self, AsyncWriteExt},
};
use ts_rs::TS;
use uuid::Uuid;
@ -73,11 +73,11 @@ static DOTFILE_NAME: &str = ".spacedrive";
// }
pub async fn get_location(
ctx: &CoreContext,
ctx: &LibraryContext,
location_id: i32,
) -> Result<LocationResource, SysError> {
// get location by location_id from db and include location_paths
ctx.database
ctx.db
.location()
.find_unique(location::id::equals(location_id))
.exec()
@ -86,15 +86,17 @@ pub async fn get_location(
.ok_or_else(|| LocationError::IdNotFound(location_id).into())
}
pub fn scan_location(ctx: &CoreContext, location_id: i32, path: impl AsRef<Path>) {
pub async fn scan_location(ctx: &LibraryContext, location_id: i32, path: impl AsRef<Path>) {
let path_buf = path.as_ref().to_path_buf();
ctx.spawn_job(Box::new(IndexerJob {
path: path_buf.clone(),
}));
}))
.await;
ctx.queue_job(Box::new(FileIdentifierJob {
location_id,
path: path_buf,
}));
}))
.await;
// TODO: make a way to stop jobs so this can be canceled without rebooting app
// ctx.queue_job(Box::new(ThumbnailJob {
// location_id,
@ -104,19 +106,19 @@ pub fn scan_location(ctx: &CoreContext, location_id: i32, path: impl AsRef<Path>
}
pub async fn new_location_and_scan(
ctx: &CoreContext,
ctx: &LibraryContext,
path: impl AsRef<Path> + Debug,
) -> Result<LocationResource, SysError> {
let location = create_location(ctx, &path).await?;
scan_location(ctx, location.id, path);
scan_location(ctx, location.id, path).await;
Ok(location)
}
pub async fn get_locations(ctx: &CoreContext) -> Result<Vec<LocationResource>, SysError> {
pub async fn get_locations(ctx: &LibraryContext) -> Result<Vec<LocationResource>, SysError> {
let locations = ctx
.database
.db
.location()
.find_many(vec![])
.with(location::node::fetch())
@ -128,7 +130,7 @@ pub async fn get_locations(ctx: &CoreContext) -> Result<Vec<LocationResource>, S
}
pub async fn create_location(
ctx: &CoreContext,
ctx: &LibraryContext,
path: impl AsRef<Path> + Debug,
) -> Result<LocationResource, SysError> {
let path = path.as_ref();
@ -151,7 +153,7 @@ pub async fn create_location(
// check if location already exists
let location_resource = if let Some(location) = ctx
.database
.db
.location()
.find_first(vec![location::local_path::equals(Some(
path_string.clone(),
@ -162,15 +164,13 @@ pub async fn create_location(
location.into()
} else {
info!(
"Location does not exist, creating new location for '{:#?}'",
path
"Location does not exist, creating new location for '{}'",
path_string
);
let uuid = Uuid::new_v4();
let config = get_nodestate();
let location = ctx
.database
.db
.location()
.create(
location::pub_id::set(uuid.to_string()),
@ -180,7 +180,7 @@ pub async fn create_location(
)),
location::is_online::set(true),
location::local_path::set(Some(path_string)),
location::node_id::set(Some(config.node_id)),
location::node_id::set(Some(ctx.node_local_id)),
],
)
.exec()
@ -195,7 +195,7 @@ pub async fn create_location(
let data = DotSpacedrive {
location_uuid: uuid,
library_uuid: config.current_library_uuid,
library_uuid: ctx.id.to_string(),
};
let json_bytes = serde_json::to_vec(&data)
@ -206,8 +206,8 @@ pub async fn create_location(
.await
.map_err(|e| LocationError::DotfileWriteFailure(e, path.to_owned()))?;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::SysGetLocations))
.await;
// ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::SysGetLocations))
// .await;
location.into()
};
@ -215,23 +215,26 @@ pub async fn create_location(
Ok(location_resource)
}
pub async fn delete_location(ctx: &CoreContext, location_id: i32) -> Result<(), SysError> {
ctx.database
pub async fn delete_location(ctx: &LibraryContext, location_id: i32) -> Result<(), SysError> {
ctx.db
.file_path()
.find_many(vec![file_path::location_id::equals(Some(location_id))])
.delete()
.exec()
.await?;
ctx.database
ctx.db
.location()
.find_unique(location::id::equals(location_id))
.delete()
.exec()
.await?;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::SysGetLocations))
.await;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::LibraryQuery {
library_id: ctx.id.to_string(),
query: LibraryQuery::SysGetLocations,
}))
.await;
info!("Location {} deleted", location_id);

View file

@ -1,5 +1,5 @@
// use crate::native;
use crate::{node::get_nodestate, prisma::volume::*, CoreContext};
use crate::{library::LibraryContext, prisma::volume::*};
use serde::{Deserialize, Serialize};
use ts_rs::TS;
// #[cfg(not(target_os = "macos"))]
@ -24,23 +24,21 @@ pub struct Volume {
}
impl Volume {
pub async fn save(ctx: &CoreContext) -> Result<(), SysError> {
let config = get_nodestate();
pub async fn save(ctx: &LibraryContext) -> Result<(), SysError> {
let volumes = Self::get_volumes()?;
// enter all volumes associate with this client add to db
for volume in volumes {
ctx.database
ctx.db
.volume()
.upsert(
node_id_mount_point_name(
config.node_id,
ctx.node_local_id,
volume.mount_point.to_string(),
volume.name.to_string(),
),
(
node_id::set(config.node_id),
node_id::set(ctx.node_local_id),
name::set(volume.name),
mount_point::set(volume.mount_point),
vec![

View file

@ -1,165 +1,121 @@
use crate::prisma::{self, migration, PrismaClient};
use crate::CoreContext;
use data_encoding::HEXLOWER;
use include_dir::{include_dir, Dir};
use log::{error, info};
use prisma_client_rust::raw;
use ring::digest::{Context, Digest, SHA256};
use std::ffi::OsStr;
use std::fmt::Debug;
use std::io::{self, BufReader, Read};
use std::path::Path;
use prisma_client_rust::{raw, NewClientError};
use ring::digest::{Context, SHA256};
use thiserror::Error;
const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_table/migration.sql");
static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations");
/// MigrationError represents an error that occurring while opening a initialising and running migrations on the database.
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Unable to initialize the Prisma client")]
ClientError(#[from] prisma::NewClientError),
pub enum MigrationError {
#[error("An error occurred while initialising a new database connection")]
DatabaseInitialization(#[from] NewClientError),
#[error("An error occurred with the database while applying migrations")]
DatabaseError(#[from] prisma_client_rust::queries::Error),
#[error("An error occured reading the embedded migration files. {0}. Please report to Spacedrive developers!")]
InvalidEmbeddedMigration(&'static str),
}
pub async fn create_connection(
path: impl AsRef<Path> + Debug,
) -> Result<PrismaClient, DatabaseError> {
info!("Creating database connection: {:?}", path);
let client =
prisma::new_client_with_url(&format!("file:{}", path.as_ref().to_string_lossy())).await?;
/// load_and_migrate will load the database from the given path and migrate it to the latest version of the schema.
pub async fn load_and_migrate(db_url: &str) -> Result<PrismaClient, MigrationError> {
let client = prisma::new_client_with_url(db_url).await?;
Ok(client)
}
pub fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest, io::Error> {
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
context.update(&buffer[..count]);
}
Ok(context.finish())
}
pub async fn run_migrations(ctx: &CoreContext) -> Result<(), DatabaseError> {
let client = &ctx.database;
match client
let migrations_table_missing = client
._query_raw::<serde_json::Value>(raw!(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'"
))
.await
{
Ok(data) => {
if data.is_empty() {
// execute migration
match client._execute_raw(raw!(INIT_MIGRATION)).await {
Ok(_) => {}
Err(e) => {
info!("Failed to create migration table: {}", e);
}
};
.await?
.is_empty();
let value: Vec<serde_json::Value> = client
._query_raw(raw!(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'"
))
.await
.unwrap();
if migrations_table_missing {
client._execute_raw(raw!(INIT_MIGRATION)).await?;
}
#[cfg(debug_assertions)]
info!("Migration table created: {:?}", value);
}
let mut migration_subdirs = MIGRATIONS_DIR
.dirs()
.filter(|subdir| {
subdir
.path()
.file_name()
.map(|name| name != OsStr::new("migration_table"))
.unwrap_or(false)
let mut migration_directories = MIGRATIONS_DIR
.dirs()
.map(|dir| {
dir.path()
.file_name()
.ok_or(MigrationError::InvalidEmbeddedMigration(
"File has malformed name",
))
.and_then(|name| {
name.to_str()
.ok_or(MigrationError::InvalidEmbeddedMigration(
"File name contains malformed characters",
))
.map(|name| (name, dir))
})
.collect::<Vec<_>>();
})
.filter_map(|v| match v {
Ok((name, _)) if name == "migration_table" => None,
Ok((name, dir)) => match name[..14].parse::<i64>() {
Ok(timestamp) => Some(Ok((name, timestamp, dir))),
Err(_) => Some(Err(MigrationError::InvalidEmbeddedMigration(
"File name is incorrectly formatted",
))),
},
Err(v) => Some(Err(v)),
})
.collect::<Result<Vec<_>, _>>()?;
migration_subdirs.sort_by(|a, b| {
let a_name = a.path().file_name().unwrap().to_str().unwrap();
let b_name = b.path().file_name().unwrap().to_str().unwrap();
// We sort the migrations so they are always applied in the correct order
migration_directories.sort_by(|(_, a_time, _), (_, b_time, _)| a_time.cmp(b_time));
let a_time = a_name[..14].parse::<i64>().unwrap();
let b_time = b_name[..14].parse::<i64>().unwrap();
for (name, _, dir) in migration_directories {
let migration_file_raw = dir
.get_file(dir.path().join("./migration.sql"))
.ok_or(MigrationError::InvalidEmbeddedMigration(
"Failed to find 'migration.sql' file in '{}' migration subdirectory",
))?
.contents_utf8()
.ok_or(
MigrationError::InvalidEmbeddedMigration(
"Failed to open the contents of 'migration.sql' file in '{}' migration subdirectory",
)
)?;
a_time.cmp(&b_time)
});
// Generate SHA256 checksum of migration
let mut checksum = Context::new(&SHA256);
checksum.update(migration_file_raw.as_bytes());
let checksum = HEXLOWER.encode(checksum.finish().as_ref());
for subdir in migration_subdirs {
info!("{:?}", subdir.path());
let migration_file = subdir
.get_file(subdir.path().join("./migration.sql"))
.unwrap();
let migration_sql = migration_file.contents_utf8().unwrap();
// get existing migration by checksum, if it doesn't exist run the migration
if client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.exec()
.await?
.is_none()
{
// Create migration record
client
.migration()
.create(
migration::name::set(name.to_string()),
migration::checksum::set(checksum.clone()),
vec![],
)
.exec()
.await?;
let digest = sha256_digest(BufReader::new(migration_file.contents())).unwrap();
// create a lowercase hash from
let checksum = HEXLOWER.encode(digest.as_ref());
let name = subdir.path().file_name().unwrap().to_str().unwrap();
// get existing migration by checksum, if it doesn't exist run the migration
let existing_migration = client
// Split the migrations file up into each individual step and apply them all
let steps = migration_file_raw.split(';').collect::<Vec<&str>>();
let steps = &steps[0..steps.len() - 1];
for (i, step) in steps.iter().enumerate() {
client._execute_raw(raw!(*step)).await?;
client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.update(vec![migration::steps_applied::set(i as i32 + 1)])
.exec()
.await
.unwrap();
if existing_migration.is_none() {
#[cfg(debug_assertions)]
info!("Running migration: {}", name);
let steps = migration_sql.split(';').collect::<Vec<&str>>();
let steps = &steps[0..steps.len() - 1];
client
.migration()
.create(
migration::name::set(name.to_string()),
migration::checksum::set(checksum.clone()),
vec![],
)
.exec()
.await
.unwrap();
for (i, step) in steps.iter().enumerate() {
match client._execute_raw(raw!(*step)).await {
Ok(_) => {
client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.update(vec![migration::steps_applied::set(i as i32 + 1)])
.exec()
.await
.unwrap();
}
Err(e) => {
error!("Error running migration: {}", name);
error!("{:?}", e);
break;
}
}
}
#[cfg(debug_assertions)]
info!("Migration {} recorded successfully", name);
}
.await?;
}
}
Err(err) => {
panic!("Failed to check migration table existence: {:?}", err);
}
}
Ok(())
Ok(client)
}

View file

@ -13,25 +13,27 @@
"lint": "TIMING=1 eslint src --fix",
"clean": "rm -rf .turbo && rm -rf node_modules && rm -rf dist"
},
"devDependencies": {
"@types/react": "^18.0.9",
"scripts": "*",
"tsconfig": "*",
"typescript": "^4.7.2"
},
"jest": {
"preset": "scripts/jest/node"
},
"dependencies": {
"@sd/config": "workspace:*",
"@sd/core": "workspace:*",
"@sd/interface": "workspace:*",
"eventemitter3": "^4.0.7",
"immer": "^9.0.14",
"react-query": "^3.39.1",
"lodash": "^4.17.21",
"react-query": "^3.34.19",
"zustand": "4.0.0-rc.1"
},
"devDependencies": {
"@types/react": "^18.0.9",
"scripts": "*",
"tsconfig": "*",
"typescript": "^4.7.2",
"@types/lodash": "^4.14.182"
},
"peerDependencies": {
"react": "^18.0.0",
"react-query": "^3.34.19"
"react": "^18.0.0"
}
}

View file

@ -1,12 +1,8 @@
import { ClientCommand, ClientQuery, CoreResponse } from '@sd/core';
import { ClientCommand, ClientQuery, CoreResponse, LibraryCommand, LibraryQuery } from '@sd/core';
import { EventEmitter } from 'eventemitter3';
import {
UseMutationOptions,
UseQueryOptions,
UseQueryResult,
useMutation,
useQuery
} from 'react-query';
import { UseMutationOptions, UseQueryOptions, useMutation, useQuery } from 'react-query';
import { useLibraryStore } from './stores';
// global var to store the transport TODO: not global :D
export let transport: BaseTransport | null = null;
@ -23,11 +19,15 @@ export function setTransport(_transport: BaseTransport) {
// extract keys from generated Rust query/command types
type QueryKeyType = ClientQuery['key'];
type LibraryQueryKeyType = LibraryQuery['key'];
type CommandKeyType = ClientCommand['key'];
type LibraryCommandKeyType = LibraryCommand['key'];
// extract the type from the union
type CQType<K> = Extract<ClientQuery, { key: K }>;
type LQType<K> = Extract<LibraryQuery, { key: K }>;
type CCType<K> = Extract<ClientCommand, { key: K }>;
type LCType<K> = Extract<LibraryCommand, { key: K }>;
type CRType<K> = Extract<CoreResponse, { key: K }>;
// extract payload type
@ -35,20 +35,18 @@ type ExtractParams<P> = P extends { params: any } ? P['params'] : never;
type ExtractData<D> = D extends { data: any } ? D['data'] : never;
// vanilla method to call the transport
export async function queryBridge<
K extends QueryKeyType,
CQ extends CQType<K>,
CR extends CRType<K>
>(key: K, params?: ExtractParams<CQ>): Promise<ExtractData<CR>> {
async function queryBridge<K extends QueryKeyType, CQ extends CQType<K>, CR extends CRType<K>>(
key: K,
params?: ExtractParams<CQ>
): Promise<ExtractData<CR>> {
const result = (await transport?.query({ key, params } as any)) as any;
return result?.data;
}
export async function commandBridge<
K extends CommandKeyType,
CC extends CCType<K>,
CR extends CRType<K>
>(key: K, params?: ExtractParams<CC>): Promise<ExtractData<CR>> {
async function commandBridge<K extends CommandKeyType, CC extends CCType<K>, CR extends CRType<K>>(
key: K,
params?: ExtractParams<CC>
): Promise<ExtractData<CR>> {
const result = (await transport?.command({ key, params } as any)) as any;
return result?.data;
}
@ -66,6 +64,21 @@ export function useBridgeQuery<K extends QueryKeyType, CQ extends CQType<K>, CR
);
}
export function useLibraryQuery<
K extends LibraryQueryKeyType,
CQ extends LQType<K>,
CR extends CRType<K>
>(key: K, params?: ExtractParams<CQ>, options: UseQueryOptions<ExtractData<CR>> = {}) {
const library_id = useLibraryStore((state) => state.currentLibraryUuid);
if (!library_id) throw new Error(`Attempted to do library query '${key}' with no library set!`);
return useQuery<ExtractData<CR>>(
[library_id, key, params],
async () => await queryBridge('LibraryQuery', { library_id, query: { key, params } as any }),
options
);
}
export function useBridgeCommand<
K extends CommandKeyType,
CC extends CCType<K>,
@ -78,9 +91,35 @@ export function useBridgeCommand<
);
}
export function useLibraryCommand<
K extends LibraryCommandKeyType,
LC extends LCType<K>,
CR extends CRType<K>
>(key: K, options: UseMutationOptions<ExtractData<LC>> = {}) {
const library_id = useLibraryStore((state) => state.currentLibraryUuid);
if (!library_id) throw new Error(`Attempted to do library command '${key}' with no library set!`);
return useMutation<ExtractData<CR>, unknown, ExtractParams<LC>>(
[library_id, key],
async (vars?: ExtractParams<LC>) =>
await commandBridge('LibraryCommand', { library_id, command: { key, params: vars } as any }),
options
);
}
export function command<K extends CommandKeyType, CC extends CCType<K>, CR extends CRType<K>>(
key: K,
vars: ExtractParams<CC>
): Promise<ExtractData<CR>> {
return commandBridge(key, vars);
}
export function libraryCommand<
K extends LibraryCommandKeyType,
LC extends LCType<K>,
CR extends CRType<K>
>(key: K, vars: ExtractParams<LC>): Promise<ExtractData<CR>> {
const library_id = useLibraryStore((state) => state.currentLibraryUuid);
if (!library_id) throw new Error(`Attempted to do library command '${key}' with no library set!`);
return commandBridge('LibraryCommand', { library_id, command: { key, params: vars } as any });
}

View file

@ -0,0 +1 @@
export * from './AppPropsContext';

View file

@ -1,2 +0,0 @@
export * from './query';
export * from './state';

View file

@ -1,21 +0,0 @@
import { useState } from 'react';
import { useQuery } from 'react-query';
import { useBridgeCommand, useBridgeQuery } from '../bridge';
import { useFileExplorerState } from './state';
// this hook initializes the explorer state and queries the core
export function useFileExplorer(initialPath = '/', initialLocation: number | null = null) {
const fileState = useFileExplorerState();
// file explorer hooks maintain their own local state relative to exploration
const [path, setPath] = useState(initialPath);
const [locationId, setLocationId] = useState(initialPath);
// const { data: volumes } = useQuery(['sys_get_volumes'], () => bridge('sys_get_volumes'));
return { setPath, setLocationId };
}
// export function useVolumes() {
// return useQuery(['SysGetVolumes'], () => bridge('SysGetVolumes'));
// }

View file

@ -1,23 +0,0 @@
import produce from 'immer';
import create from 'zustand';
export interface FileExplorerState {
current_location_id: number | null;
row_limit: number;
}
interface FileExplorerStore extends FileExplorerState {
update_row_limit: (new_limit: number) => void;
}
export const useFileExplorerState = create<FileExplorerStore>((set, get) => ({
current_location_id: null,
row_limit: 10,
update_row_limit: (new_limit: number) => {
set((store) =>
produce(store, (draft) => {
draft.row_limit = new_limit;
})
);
}
}));

View file

@ -0,0 +1 @@
export * from './useCoreEvents';

View file

@ -0,0 +1,59 @@
import { CoreEvent } from '@sd/core';
import { useContext, useEffect } from 'react';
import { useQueryClient } from 'react-query';
import { transport, useExplorerStore } from '..';
export function useCoreEvents() {
const client = useQueryClient();
const { addNewThumbnail } = useExplorerStore();
useEffect(() => {
function handleCoreEvent(e: CoreEvent) {
switch (e?.key) {
case 'NewThumbnail':
addNewThumbnail(e.data.cas_id);
break;
case 'InvalidateQuery':
case 'InvalidateQueryDebounced':
let query = [];
if (e.data.key === 'LibraryQuery') {
query = [e.data.params.library_id, e.data.params.query.key];
// TODO: find a way to make params accessible in TS
// also this method will only work for queries that use the whole params obj as the second key
// @ts-expect-error
if (e.data.params.query.params) {
// @ts-expect-error
query.push(e.data.params.query.params);
}
} else {
query = [e.data.key];
// TODO: find a way to make params accessible in TS
// also this method will only work for queries that use the whole params obj as the second key
// @ts-expect-error
if (e.data.params) {
// @ts-expect-error
query.push(e.data.params);
}
}
client.invalidateQueries(query);
break;
default:
break;
}
}
// check Tauri Event type
transport?.on('core_event', handleCoreEvent);
return () => {
transport?.off('core_event', handleCoreEvent);
};
// listen('core_event', (e: { payload: CoreEvent }) => {
// });
}, [transport]);
}

View file

@ -1,3 +1,5 @@
export * from './bridge';
export * from './files';
export * from './ClientProvider';
export * from './stores';
export * from './hooks';
export * from './context';

View file

@ -0,0 +1,4 @@
export * from './useLibraryStore';
export * from './useExplorerStore';
export * from './useInspectorStore';
export * from './useInspectorStore';

View file

@ -1,15 +1,16 @@
import create from 'zustand';
type ExplorerState = {
type ExplorerStore = {
selectedRowIndex: number;
setSelectedRowIndex: (index: number) => void;
locationId: number;
setLocationId: (index: number) => void;
newThumbnails: Record<string, boolean>;
addNewThumbnail: (cas_id: string) => void;
reset: () => void;
};
export const useExplorerState = create<ExplorerState>((set) => ({
export const useExplorerStore = create<ExplorerStore>((set) => ({
selectedRowIndex: 1,
setSelectedRowIndex: (index) => set((state) => ({ ...state, selectedRowIndex: index })),
locationId: -1,
@ -19,5 +20,6 @@ export const useExplorerState = create<ExplorerState>((set) => ({
set((state) => ({
...state,
newThumbnails: { ...state.newThumbnails, [cas_id]: true }
}))
})),
reset: () => set(() => ({}))
}));

View file

@ -1,17 +1,18 @@
import { command } from '@sd/client';
import produce from 'immer';
import { debounce } from 'lodash';
import create from 'zustand';
import { libraryCommand } from '../bridge';
export type UpdateNoteFN = (vars: { id: number; note: string }) => void;
interface UseInspectorState {
interface InspectorStore {
notes: Record<number, string>;
setNote: (file_id: number, note: string) => void;
unCacheNote: (file_id: number) => void;
}
export const useInspectorState = create<UseInspectorState>((set) => ({
export const useInspectorStore = create<InspectorStore>((set) => ({
notes: {},
// set the note locally
setNote: (file_id, note) => {
@ -35,7 +36,7 @@ export const useInspectorState = create<UseInspectorState>((set) => ({
// direct command call to update note
export const updateNote = debounce(async (file_id: number, note: string) => {
return await command('FileSetNote', {
return await libraryCommand('FileSetNote', {
id: file_id,
note
});

View file

@ -0,0 +1,67 @@
import { LibraryConfigWrapped } from '@sd/core';
import produce from 'immer';
import { useMemo } from 'react';
import { useQueryClient } from 'react-query';
import create from 'zustand';
import { devtools, persist } from 'zustand/middleware';
import { useBridgeQuery } from '../bridge';
import { useExplorerStore } from './useExplorerStore';
type LibraryStore = {
// the uuid of the currently active library
currentLibraryUuid: string | null;
// for full functionality this should be triggered along-side query invalidation
switchLibrary: (uuid: string) => void;
// a function
init: (libraries: LibraryConfigWrapped[]) => Promise<void>;
};
export const useLibraryStore = create<LibraryStore>()(
devtools(
persist(
(set) => ({
currentLibraryUuid: null,
switchLibrary: (uuid) => {
set((state) =>
produce(state, (draft) => {
draft.currentLibraryUuid = uuid;
})
);
// reset other stores
useExplorerStore().reset();
},
init: async (libraries) => {
set((state) =>
produce(state, (draft) => {
// use first library default if none set
if (!state.currentLibraryUuid) {
draft.currentLibraryUuid = libraries[0].uuid;
}
})
);
}
}),
{ name: 'sd-library-store' }
)
)
);
// this must be used at least once in the app to correct the initial state
// is memorized and can be used safely in any component
export const useCurrentLibrary = () => {
const { currentLibraryUuid, switchLibrary } = useLibraryStore();
const { data: libraries } = useBridgeQuery('NodeGetLibraries', undefined, {});
// memorize library to avoid re-running find function
const currentLibrary = useMemo(() => {
const current = libraries?.find((l) => l.uuid === currentLibraryUuid);
// switch to first library if none set
if (Array.isArray(libraries) && !current && libraries[0]?.uuid) {
switchLibrary(libraries[0]?.uuid);
}
return current;
}, [libraries, currentLibraryUuid]);
return { currentLibrary, libraries, currentLibraryUuid };
};

View file

@ -46,7 +46,7 @@
"react-loading-icons": "^1.1.0",
"react-loading-skeleton": "^3.1.0",
"react-portal": "^4.2.2",
"react-query": "^3.39.1",
"react-query": "^3.34.19",
"react-router": "6.3.0",
"react-router-dom": "6.3.0",
"react-scrollbars-custom": "^4.0.27",
@ -55,6 +55,7 @@
"react-virtuoso": "^2.12.1",
"rooks": "^5.11.2",
"tailwindcss": "^3.0.24",
"use-debounce": "^8.0.1",
"zustand": "4.0.0-rc.1"
},
"devDependencies": {

View file

@ -1,14 +1,14 @@
import '@fontsource/inter/variable.css';
import { BaseTransport, ClientProvider, setTransport } from '@sd/client';
import { useCoreEvents } from '@sd/client';
import { AppProps, AppPropsContext } from '@sd/client';
import React from 'react';
import { ErrorBoundary } from 'react-error-boundary';
import { QueryClient, QueryClientProvider } from 'react-query';
import { MemoryRouter } from 'react-router-dom';
import { AppProps, AppPropsContext } from './AppPropsContext';
import { AppRouter } from './AppRouter';
import { ErrorFallback } from './ErrorFallback';
import { useCoreEvents } from './hooks/useCoreEvents';
import './style.scss';
const queryClient = new QueryClient();

View file

@ -1,8 +1,8 @@
import { AppPropsContext } from '@sd/client';
import clsx from 'clsx';
import React, { useContext } from 'react';
import { Outlet } from 'react-router-dom';
import { AppPropsContext } from './AppPropsContext';
import { Sidebar } from './components/file/Sidebar';
export function AppLayout() {

View file

@ -1,3 +1,5 @@
import { useBridgeQuery } from '@sd/client';
import { useLibraryStore } from '@sd/client';
import React, { useEffect } from 'react';
import { Route, Routes, useLocation } from 'react-router-dom';
@ -9,56 +11,81 @@ import { ExplorerScreen } from './screens/Explorer';
import { OverviewScreen } from './screens/Overview';
import { PhotosScreen } from './screens/Photos';
import { RedirectPage } from './screens/Redirect';
import { SettingsScreen } from './screens/Settings';
import { TagScreen } from './screens/Tag';
import AppearanceSettings from './screens/settings/AppearanceSettings';
import ContactsSettings from './screens/settings/ContactsSettings';
import ExperimentalSettings from './screens/settings/ExperimentalSettings';
import GeneralSettings from './screens/settings/GeneralSettings';
import KeysSettings from './screens/settings/KeysSetting';
import LibrarySettings from './screens/settings/LibrarySettings';
import LocationSettings from './screens/settings/LocationSettings';
import SecuritySettings from './screens/settings/SecuritySettings';
import SharingSettings from './screens/settings/SharingSettings';
import SyncSettings from './screens/settings/SyncSettings';
import TagsSettings from './screens/settings/TagsSettings';
import { CurrentLibrarySettings } from './screens/settings/CurrentLibrarySettings';
import { SettingsScreen } from './screens/settings/Settings';
import AppearanceSettings from './screens/settings/client/AppearanceSettings';
import GeneralSettings from './screens/settings/client/GeneralSettings';
import ContactsSettings from './screens/settings/library/ContactsSettings';
import KeysSettings from './screens/settings/library/KeysSetting';
import LibraryGeneralSettings from './screens/settings/library/LibraryGeneralSettings';
import LocationSettings from './screens/settings/library/LocationSettings';
import SecuritySettings from './screens/settings/library/SecuritySettings';
import SharingSettings from './screens/settings/library/SharingSettings';
import SyncSettings from './screens/settings/library/SyncSettings';
import TagsSettings from './screens/settings/library/TagsSettings';
import ExperimentalSettings from './screens/settings/node/ExperimentalSettings';
import LibrarySettings from './screens/settings/node/LibrariesSettings';
import NodesSettings from './screens/settings/node/NodesSettings';
import P2PSettings from './screens/settings/node/P2PSettings';
export function AppRouter() {
let location = useLocation();
let state = location.state as { backgroundLocation?: Location };
const libraryState = useLibraryStore();
const { data: libraries } = useBridgeQuery('NodeGetLibraries');
// TODO: This can be removed once we add a setup flow to the app
useEffect(() => {
console.log({ url: location.pathname });
}, [state]);
if (libraryState.currentLibraryUuid === null && libraries && libraries.length > 0) {
libraryState.switchLibrary(libraries[0].uuid);
}
}, [libraryState.currentLibraryUuid, libraries]);
return (
<>
<Routes location={state?.backgroundLocation || location}>
<Route path="/" element={<AppLayout />}>
<Route index element={<RedirectPage to="/overview" />} />
<Route path="overview" element={<OverviewScreen />} />
<Route path="content" element={<ContentScreen />} />
<Route path="photos" element={<PhotosScreen />} />
<Route path="debug" element={<DebugScreen />} />
<Route path={'settings'} element={<SettingsScreen />}>
<Route index element={<GeneralSettings />} />
<Route path="appearance" element={<AppearanceSettings />} />
<Route path="contacts" element={<ContactsSettings />} />
<Route path="experimental" element={<ExperimentalSettings />} />
<Route path="general" element={<GeneralSettings />} />
<Route path="keys" element={<KeysSettings />} />
<Route path="library" element={<LibrarySettings />} />
<Route path="security" element={<SecuritySettings />} />
<Route path="locations" element={<LocationSettings />} />
<Route path="sharing" element={<SharingSettings />} />
<Route path="sync" element={<SyncSettings />} />
<Route path="tags" element={<TagsSettings />} />
{libraryState.currentLibraryUuid === null ? (
<>
{/* TODO: Remove this when adding app setup flow */}
<h1>No Library Loaded...</h1>
</>
) : (
<Routes location={state?.backgroundLocation || location}>
<Route path="/" element={<AppLayout />}>
<Route index element={<RedirectPage to="/overview" />} />
<Route path="overview" element={<OverviewScreen />} />
<Route path="content" element={<ContentScreen />} />
<Route path="photos" element={<PhotosScreen />} />
<Route path="debug" element={<DebugScreen />} />
<Route path={'library-settings'} element={<CurrentLibrarySettings />}>
<Route index element={<LocationSettings />} />
<Route path="general" element={<LibraryGeneralSettings />} />
<Route path="locations" element={<LocationSettings />} />
<Route path="tags" element={<TagsSettings />} />
<Route path="keys" element={<KeysSettings />} />
</Route>
<Route path={'settings'} element={<SettingsScreen />}>
<Route index element={<GeneralSettings />} />
<Route path="general" element={<GeneralSettings />} />
<Route path="appearance" element={<AppearanceSettings />} />
<Route path="nodes" element={<NodesSettings />} />
<Route path="p2p" element={<P2PSettings />} />
<Route path="contacts" element={<ContactsSettings />} />
<Route path="experimental" element={<ExperimentalSettings />} />
<Route path="keys" element={<KeysSettings />} />
<Route path="library" element={<LibrarySettings />} />
<Route path="security" element={<SecuritySettings />} />
<Route path="locations" element={<LocationSettings />} />
<Route path="sharing" element={<SharingSettings />} />
<Route path="sync" element={<SyncSettings />} />
<Route path="tags" element={<TagsSettings />} />
</Route>
<Route path="explorer/:id" element={<ExplorerScreen />} />
<Route path="tag/:id" element={<TagScreen />} />
<Route path="*" element={<NotFound />} />
</Route>
<Route path="explorer/:id" element={<ExplorerScreen />} />
<Route path="tag/:id" element={<TagScreen />} />
<Route path="*" element={<NotFound />} />
</Route>
</Routes>
</Routes>
)}
</>
);
}

View file

@ -10,7 +10,7 @@ export function NotFound() {
role="alert"
className="flex flex-col items-center justify-center w-full h-full p-4 rounded-lg dark:text-white"
>
<p className="m-3 mt-20 text-sm font-semibold text-gray-500 uppercase">Error: 404</p>
<p className="m-3 text-sm font-semibold text-gray-500 uppercase">Error: 404</p>
<h1 className="text-4xl font-bold">You chose nothingness.</h1>
<div className="flex flex-row space-x-2">
<Button variant="primary" className="mt-4" onClick={() => navigate(-1)}>

View file

@ -1,5 +1,7 @@
import { DotsVerticalIcon } from '@heroicons/react/solid';
import { useBridgeQuery } from '@sd/client';
import { useBridgeQuery, useLibraryQuery } from '@sd/client';
import { useExplorerStore } from '@sd/client';
import { AppPropsContext } from '@sd/client';
import { FilePath } from '@sd/core';
import clsx from 'clsx';
import React, { useContext, useEffect, useMemo, useRef, useState } from 'react';
@ -7,8 +9,6 @@ import { useSearchParams } from 'react-router-dom';
import { Virtuoso, VirtuosoHandle } from 'react-virtuoso';
import { useKey, useWindowSize } from 'rooks';
import { AppPropsContext } from '../../AppPropsContext';
import { useExplorerState } from '../../hooks/useExplorerState';
import FileThumb from './FileThumb';
interface IColumn {
@ -51,10 +51,10 @@ export const FileList: React.FC<{ location_id: number; path: string; limit: numb
const path = props.path;
const { selectedRowIndex, setSelectedRowIndex, setLocationId } = useExplorerState();
const { selectedRowIndex, setSelectedRowIndex, setLocationId } = useExplorerStore();
const [goingUp, setGoingUp] = useState(false);
const { data: currentDir } = useBridgeQuery('LibGetExplorerDir', {
const { data: currentDir } = useLibraryQuery('LibGetExplorerDir', {
location_id: props.location_id,
path,
limit: props.limit
@ -148,7 +148,7 @@ const RenderRow: React.FC<{
rowIndex: number;
dirId: number;
}> = ({ row, rowIndex, dirId }) => {
const { selectedRowIndex, setSelectedRowIndex } = useExplorerState();
const { selectedRowIndex, setSelectedRowIndex } = useExplorerStore();
const isActive = selectedRowIndex === rowIndex;
let [_, setSearchParams] = useSearchParams();
@ -202,7 +202,7 @@ const RenderCell: React.FC<{
if (!value) return <></>;
const location = useContext(LocationContext);
const { newThumbnails } = useExplorerState();
const { newThumbnails } = useExplorerStore();
const hasNewThumbnail = !!newThumbnails[row?.file?.cas_id ?? ''];

View file

@ -1,9 +1,9 @@
import { useBridgeQuery } from '@sd/client';
import { AppPropsContext } from '@sd/client';
import { FilePath } from '@sd/core';
import clsx from 'clsx';
import React, { useContext } from 'react';
import { AppPropsContext } from '../../AppPropsContext';
import icons from '../../assets/icons';
import { Folder } from '../icons/Folder';

View file

@ -1,5 +1,6 @@
import { Transition } from '@headlessui/react';
import { ShareIcon } from '@heroicons/react/solid';
import { useInspectorStore } from '@sd/client';
import { FilePath, LocationResource } from '@sd/core';
import { Button, TextArea } from '@sd/ui';
import moment from 'moment';
@ -7,7 +8,6 @@ import { Heart, Link } from 'phosphor-react';
import React, { useEffect } from 'react';
import { default as types } from '../../constants/file-types.json';
import { useInspectorState } from '../../hooks/useInspectorState';
import FileThumb from './FileThumb';
interface MetaItemProps {
@ -42,7 +42,7 @@ export const Inspector = (props: {
// notes are cached in a store by their file id
// this is so we can ensure every note has been sent to Rust even
// when quickly navigating files, which cancels update function
const { notes, setNote, unCacheNote } = useInspectorState();
const { notes, setNote, unCacheNote } = useInspectorStore();
// show cached note over server note, important to check for undefined not falsey
const note =

View file

@ -1,13 +1,14 @@
import { LockClosedIcon, PhotographIcon } from '@heroicons/react/outline';
import { CogIcon, EyeOffIcon, PlusIcon } from '@heroicons/react/solid';
import { useBridgeCommand, useBridgeQuery } from '@sd/client';
import { useLibraryCommand, useLibraryQuery } from '@sd/client';
import { useCurrentLibrary, useLibraryStore } from '@sd/client';
import { AppPropsContext } from '@sd/client';
import { Button, Dropdown } from '@sd/ui';
import clsx from 'clsx';
import { CirclesFour, Code, Planet } from 'phosphor-react';
import React, { useContext } from 'react';
import { NavLink, NavLinkProps } from 'react-router-dom';
import React, { useContext, useEffect, useMemo } from 'react';
import { NavLink, NavLinkProps, useNavigate } from 'react-router-dom';
import { AppPropsContext } from '../../AppPropsContext';
import { useNodeStore } from '../device/Stores';
import { Folder } from '../icons/Folder';
import RunningJobsWidget from '../jobs/RunningJobsWidget';
@ -76,11 +77,30 @@ const macOnly = (platform: string | undefined, classnames: string) =>
export const Sidebar: React.FC<SidebarProps> = (props) => {
const { isExperimental } = useNodeStore();
const appProps = useContext(AppPropsContext);
const { data: locations } = useBridgeQuery('SysGetLocations');
const { data: clientState } = useBridgeQuery('NodeGetState');
const navigate = useNavigate();
const { mutate: createLocation } = useBridgeCommand('LocCreate');
const appProps = useContext(AppPropsContext);
const { data: locationsResponse, isError: isLocationsError } = useLibraryQuery('SysGetLocations');
let locations = Array.isArray(locationsResponse) ? locationsResponse : [];
// initialize libraries
const { init: initLibraries, switchLibrary: _switchLibrary } = useLibraryStore();
const switchLibrary = (uuid: string) => {
navigate('overview');
_switchLibrary(uuid);
};
const { currentLibrary, libraries, currentLibraryUuid } = useCurrentLibrary();
useEffect(() => {
if (libraries && !currentLibraryUuid) initLibraries(libraries);
}, [libraries, currentLibraryUuid]);
const { mutate: createLocation } = useLibraryCommand('LocCreate');
const tags = [
{ id: 1, name: 'Keepsafe', color: '#FF6788' },
@ -122,7 +142,6 @@ export const Sidebar: React.FC<SidebarProps> = (props) => {
appProps?.platform === 'macOS' &&
'dark:!bg-opacity-40 dark:hover:!bg-opacity-70 dark:!border-[#333949] dark:hover:!border-[#394052]'
),
variant: 'gray'
}}
// to support the transparent sidebar on macOS we use slightly adjusted styles
@ -133,17 +152,22 @@ export const Sidebar: React.FC<SidebarProps> = (props) => {
)}
// this shouldn't default to "My Library", it is only this way for landing demo
// TODO: implement demo mode for the sidebar and show loading indicator instead of "My Library"
buttonText={clientState?.node_name || 'My Library'}
buttonText={currentLibrary?.config.name || ' '}
items={[
libraries?.map((library) => ({
name: library.config.name,
selected: library.uuid === currentLibraryUuid,
onPress: () => switchLibrary(library.uuid)
})) || [],
[
{ name: clientState?.node_name || 'My Library', selected: true },
{ name: 'Private Library' }
],
[
{ name: 'Library Settings', icon: CogIcon },
{
name: 'Library Settings',
icon: CogIcon,
onPress: () => navigate('library-settings/general')
},
{ name: 'Add Library', icon: PlusIcon },
{ name: 'Lock', icon: LockClosedIcon },
{ name: 'Hide', icon: EyeOffIcon }
{ name: 'Lock', icon: LockClosedIcon }
// { name: 'Hide', icon: EyeOffIcon }
]
]}
/>
@ -204,21 +228,23 @@ export const Sidebar: React.FC<SidebarProps> = (props) => {
);
})}
<button
onClick={() => {
appProps?.openDialog({ directory: true }).then((result) => {
if (result) createLocation({ path: result as string });
});
}}
className={clsx(
'w-full px-2 py-1.5 mt-1 text-xs font-bold text-center text-gray-400 border border-dashed rounded border-transparent cursor-normal border-gray-350 transition',
appProps?.platform === 'macOS'
? 'dark:text-gray-450 dark:border-gray-450 hover:dark:border-gray-400 dark:border-opacity-60'
: 'dark:text-gray-450 dark:border-gray-550 hover:dark:border-gray-500'
)}
>
Add Location
</button>
{(locations?.length || 0) < 1 && (
<button
onClick={() => {
appProps?.openDialog({ directory: true }).then((result) => {
if (result) createLocation({ path: result as string });
});
}}
className={clsx(
'w-full px-2 py-1.5 mt-1 text-xs font-bold text-center text-gray-400 border border-dashed rounded border-transparent cursor-normal border-gray-350 transition',
appProps?.platform === 'macOS'
? 'dark:text-gray-450 dark:border-gray-450 hover:dark:border-gray-400 dark:border-opacity-60'
: 'dark:text-gray-450 dark:border-gray-550 hover:dark:border-gray-500'
)}
>
Add Location
</button>
)}
</div>
<div>
<Heading>Tags</Heading>

View file

@ -0,0 +1,15 @@
import clsx from 'clsx';
import React, { ReactNode } from 'react';
export default function Card(props: { children: ReactNode; className?: string }) {
return (
<div
className={clsx(
'flex w-full px-4 py-2 border border-gray-500 rounded-lg bg-gray-550',
props.className
)}
>
{props.children}
</div>
);
}

View file

@ -5,7 +5,7 @@ import React, { ReactNode } from 'react';
import Loader from '../primitive/Loader';
export interface DialogProps {
export interface DialogProps extends DialogPrimitive.DialogProps {
trigger: ReactNode;
ctaLabel?: string;
ctaDanger?: boolean;
@ -18,13 +18,15 @@ export interface DialogProps {
export default function Dialog(props: DialogProps) {
return (
<DialogPrimitive.Root>
<DialogPrimitive.Root open={props.open} onOpenChange={props.onOpenChange}>
<DialogPrimitive.Trigger asChild>{props.trigger}</DialogPrimitive.Trigger>
<DialogPrimitive.Portal>
<DialogPrimitive.Overlay className="fixed top-0 dialog-overlay bottom-0 left-0 right-0 z-50 grid overflow-y-auto bg-black bg-opacity-50 rounded-xl place-items-center m-[1px]">
<DialogPrimitive.Content className="min-w-[300px] max-w-[400px] dialog-content rounded-md bg-gray-650 text-white border border-gray-550 shadow-deep">
<div className="p-5">
<DialogPrimitive.Title className="font-bold ">{props.title}</DialogPrimitive.Title>
<DialogPrimitive.Title className="mb-2 font-bold">
{props.title}
</DialogPrimitive.Title>
<DialogPrimitive.Description className="text-sm text-gray-300">
{props.description}
</DialogPrimitive.Description>

View file

@ -1,5 +1,6 @@
import { ChevronLeftIcon, ChevronRightIcon } from '@heroicons/react/outline';
import { useBridgeCommand } from '@sd/client';
import { useLibraryCommand } from '@sd/client';
import { useExplorerStore } from '@sd/client';
import { Dropdown } from '@sd/ui';
import clsx from 'clsx';
import {
@ -15,7 +16,6 @@ import {
import React, { DetailedHTMLProps, HTMLAttributes } from 'react';
import { useNavigate } from 'react-router-dom';
import { useExplorerState } from '../../hooks/useExplorerState';
import { Shortcut } from '../primitive/Shortcut';
import { DefaultProps } from '../primitive/types';
@ -50,14 +50,14 @@ const TopBarButton: React.FC<TopBarButtonProps> = ({ icon: Icon, ...props }) =>
};
export const TopBar: React.FC<TopBarProps> = (props) => {
const { locationId } = useExplorerState();
const { mutate: generateThumbsForLocation } = useBridgeCommand('GenerateThumbsForLocation', {
const { locationId } = useExplorerStore();
const { mutate: generateThumbsForLocation } = useLibraryCommand('GenerateThumbsForLocation', {
onMutate: (data) => {
console.log('GenerateThumbsForLocation', data);
}
});
const { mutate: identifyUniqueFiles } = useBridgeCommand('IdentifyUniqueFiles', {
const { mutate: identifyUniqueFiles } = useLibraryCommand('IdentifyUniqueFiles', {
onMutate: (data) => {
console.log('IdentifyUniqueFiles', data);
},

View file

@ -1,6 +1,6 @@
import { DotsVerticalIcon, RefreshIcon } from '@heroicons/react/outline';
import { CogIcon, TrashIcon } from '@heroicons/react/solid';
import { command, useBridgeCommand } from '@sd/client';
import { TrashIcon } from '@heroicons/react/solid';
import { useLibraryCommand } from '@sd/client';
import { LocationResource } from '@sd/core';
import { Button } from '@sd/ui';
import clsx from 'clsx';
@ -16,9 +16,9 @@ interface LocationListItemProps {
export default function LocationListItem({ location }: LocationListItemProps) {
const [hide, setHide] = useState(false);
const { mutate: locRescan } = useBridgeCommand('LocRescan');
const { mutate: locRescan } = useLibraryCommand('LocRescan');
const { mutate: deleteLoc, isLoading: locDeletePending } = useBridgeCommand('LocDelete', {
const { mutate: deleteLoc, isLoading: locDeletePending } = useLibraryCommand('LocDelete', {
onSuccess: () => {
setHide(true);
}

View file

@ -5,5 +5,5 @@ interface SettingsContainerProps {
}
export const SettingsContainer: React.FC<SettingsContainerProps> = (props) => {
return <div className="flex flex-col flex-grow max-w-4xl space-y-4 w-ful">{props.children}</div>;
return <div className="flex flex-col flex-grow max-w-4xl space-y-6 w-ful">{props.children}</div>;
};

View file

@ -1,15 +1,19 @@
import React from 'react';
import React, { ReactNode } from 'react';
interface SettingsHeaderProps {
title: string;
description: string;
rightArea?: ReactNode;
}
export const SettingsHeader: React.FC<SettingsHeaderProps> = (props) => {
return (
<div className="mt-3 mb-3">
<h1 className="text-2xl font-bold">{props.title}</h1>
<p className="mt-1 text-sm text-gray-400">{props.description}</p>
<div className="flex mt-3 mb-3">
<div className="flex-grow">
<h1 className="text-2xl font-bold">{props.title}</h1>
<p className="mt-1 text-sm text-gray-400">{props.description}</p>
</div>
{props.rightArea}
<hr className="mt-4 border-gray-550" />
</div>
);

View file

@ -0,0 +1,40 @@
import clsx from 'clsx';
import React from 'react';
import { Outlet } from 'react-router';
interface SettingsScreenContainerProps {
children: React.ReactNode;
}
export const SettingsIcon = ({ component: Icon, ...props }: any) => (
<Icon weight="bold" {...props} className={clsx('w-4 h-4 mr-2', props.className)} />
);
export const SettingsHeading: React.FC<{ className?: string; children: string }> = ({
children,
className
}) => (
<div className={clsx('mt-5 mb-1 ml-1 text-xs font-semibold text-gray-300', className)}>
{children}
</div>
);
export const SettingsScreenContainer: React.FC<SettingsScreenContainerProps> = (props) => {
return (
<div className="flex flex-row w-full">
<div className="h-full border-r max-w-[200px] flex-shrink-0 border-gray-100 w-60 dark:border-gray-550">
<div data-tauri-drag-region className="w-full h-7" />
<div className="p-5 pt-0">{props.children}</div>
</div>
<div className="w-full">
<div data-tauri-drag-region className="w-full h-7" />
<div className="flex flex-grow-0 w-full h-full max-h-screen custom-scroll page-scroll">
<div className="flex flex-grow px-12 pb-5">
<Outlet />
<div className="block h-20" />
</div>
</div>
</div>
</div>
);
};

View file

@ -1,46 +0,0 @@
import { transport } from '@sd/client';
import { CoreEvent } from '@sd/core';
import { useContext, useEffect } from 'react';
import { useQueryClient } from 'react-query';
import { AppPropsContext } from '../AppPropsContext';
import { useExplorerState } from './useExplorerState';
export function useCoreEvents() {
const client = useQueryClient();
const { addNewThumbnail } = useExplorerState();
useEffect(() => {
function handleCoreEvent(e: CoreEvent) {
switch (e?.key) {
case 'NewThumbnail':
addNewThumbnail(e.data.cas_id);
break;
case 'InvalidateQuery':
case 'InvalidateQueryDebounced':
let query = [e.data.key];
// TODO: find a way to make params accessible in TS
// also this method will only work for queries that use the whole params obj as the second key
// @ts-expect-error
if (e.data.params) {
// @ts-expect-error
query.push(e.data.params);
}
client.invalidateQueries(e.data.key);
break;
default:
break;
}
}
// check Tauri Event type
transport?.on('core_event', handleCoreEvent);
return () => {
transport?.off('core_event', handleCoreEvent);
};
// listen('core_event', (e: { payload: CoreEvent }) => {
// });
}, [transport]);
}

View file

@ -1,5 +1,6 @@
import { AppProps, Platform } from '@sd/client';
import App from './App';
import { AppProps, Platform } from './AppPropsContext';
export type { AppProps, Platform };

View file

@ -1,21 +1,22 @@
import { useBridgeCommand, useBridgeQuery } from '@sd/client';
import { useBridgeQuery, useLibraryCommand, useLibraryQuery } from '@sd/client';
import { AppPropsContext } from '@sd/client';
import { Button } from '@sd/ui';
import React, { useContext } from 'react';
import { AppPropsContext } from '../AppPropsContext';
import CodeBlock from '../components/primitive/Codeblock';
export const DebugScreen: React.FC<{}> = (props) => {
const appPropsContext = useContext(AppPropsContext);
const { data: client } = useBridgeQuery('NodeGetState');
const { data: nodeState } = useBridgeQuery('NodeGetState');
const { data: libraryState } = useBridgeQuery('NodeGetLibraries');
const { data: jobs } = useBridgeQuery('JobGetRunning');
const { data: jobHistory } = useBridgeQuery('JobGetHistory');
const { data: jobHistory } = useLibraryQuery('JobGetHistory');
// const { mutate: purgeDB } = useBridgeCommand('PurgeDatabase', {
// onMutate: () => {
// alert('Database purged');
// }
// });
const { mutate: identifyFiles } = useBridgeCommand('IdentifyUniqueFiles');
const { mutate: identifyFiles } = useLibraryCommand('IdentifyUniqueFiles');
return (
<div className="flex flex-col w-full h-screen custom-scroll page-scroll">
<div data-tauri-drag-region className="flex flex-shrink-0 w-full h-5" />
@ -27,8 +28,8 @@ export const DebugScreen: React.FC<{}> = (props) => {
variant="gray"
size="sm"
onClick={() => {
if (client && appPropsContext?.onOpen) {
appPropsContext.onOpen(client.data_path);
if (nodeState && appPropsContext?.onOpen) {
appPropsContext.onOpen(nodeState.data_path);
}
}}
>
@ -39,8 +40,10 @@ export const DebugScreen: React.FC<{}> = (props) => {
<CodeBlock src={{ ...jobs }} />
<h1 className="text-sm font-bold ">Job History</h1>
<CodeBlock src={{ ...jobHistory }} />
<h1 className="text-sm font-bold ">Client State</h1>
<CodeBlock src={{ ...client }} />
<h1 className="text-sm font-bold ">Node State</h1>
<CodeBlock src={{ ...nodeState }} />
<h1 className="text-sm font-bold ">Libraries</h1>
<CodeBlock src={{ ...libraryState }} />
</div>
</div>
);

View file

@ -1,11 +1,11 @@
import { useBridgeQuery } from '@sd/client';
import { useLibraryQuery } from '@sd/client';
import { useExplorerStore } from '@sd/client';
import React from 'react';
import { useParams, useSearchParams } from 'react-router-dom';
import { FileList } from '../components/file/FileList';
import { Inspector } from '../components/file/Inspector';
import { TopBar } from '../components/layout/TopBar';
import { useExplorerState } from '../hooks/useExplorerState';
export const ExplorerScreen: React.FC<{}> = () => {
let [searchParams] = useSearchParams();
@ -16,13 +16,13 @@ export const ExplorerScreen: React.FC<{}> = () => {
const [limit, setLimit] = React.useState(100);
const { selectedRowIndex } = useExplorerState();
const { selectedRowIndex } = useExplorerStore();
// Current Location
const { data: currentLocation } = useBridgeQuery('SysGetLocation', { id: location_id });
const { data: currentLocation } = useLibraryQuery('SysGetLocation', { id: location_id });
// Current Directory
const { data: currentDir } = useBridgeQuery(
const { data: currentDir } = useLibraryQuery(
'LibGetExplorerDir',
{ location_id: location_id!, path, limit },
{ enabled: !!location_id }

View file

@ -1,5 +1,6 @@
import { PlusIcon } from '@heroicons/react/solid';
import { useBridgeQuery } from '@sd/client';
import { DatabaseIcon, ExclamationCircleIcon, PlusIcon } from '@heroicons/react/solid';
import { useBridgeQuery, useLibraryQuery } from '@sd/client';
import { AppPropsContext } from '@sd/client';
import { Statistics } from '@sd/core';
import { Button, Input } from '@sd/ui';
import byteSize from 'byte-size';
@ -10,7 +11,6 @@ import Skeleton from 'react-loading-skeleton';
import 'react-loading-skeleton/dist/skeleton.css';
import create from 'zustand';
import { AppPropsContext } from '../AppPropsContext';
import { Device } from '../components/device/Device';
import Dialog from '../components/layout/Dialog';
@ -102,7 +102,7 @@ const StatItem: React.FC<StatItemProps> = (props) => {
export const OverviewScreen = () => {
const { data: libraryStatistics, isLoading: isStatisticsLoading } =
useBridgeQuery('GetLibraryStatistics');
useLibraryQuery('GetLibraryStatistics');
const { data: nodeState } = useBridgeQuery('NodeGetState');
const { overviewStats, setOverviewStats } = useOverviewState();
@ -157,7 +157,17 @@ export const OverviewScreen = () => {
{/* STAT HEADER */}
<div className="flex w-full">
{/* STAT CONTAINER */}
<div className="flex pb-4 overflow-hidden">
<div className="flex -mb-1 overflow-hidden">
{!libraryStatistics && (
<div className="mb-2 ml-2">
<div className="font-semibold text-gray-200">
<ExclamationCircleIcon className="inline w-4 h-4 mr-1 -mt-1 " /> Missing library
</div>
<span className="text-xs text-gray-400 ">
Ensure the library you have loaded still exists on disk
</span>
</div>
)}
{Object.entries(overviewStats).map(([key, value]) => {
if (!displayableStatItems.includes(key)) return null;
@ -171,8 +181,9 @@ export const OverviewScreen = () => {
);
})}
</div>
<div className="flex-grow" />
<div className="space-x-2">
<div className="space-x-2 ">
<Dialog
title="Add Device"
description="Connect a new device to your library. Either enter another device's code or copy this one."
@ -205,7 +216,7 @@ export const OverviewScreen = () => {
</Dialog>
</div>
</div>
<div className="flex flex-col pb-4 space-y-4">
<div className="flex flex-col pb-4 mt-4 space-y-4">
<Device
name={`James' MacBook Pro`}
size="1TB"

View file

@ -1,92 +0,0 @@
import {
CloudIcon,
CogIcon,
KeyIcon,
LockClosedIcon,
TagIcon,
TerminalIcon,
UsersIcon
} from '@heroicons/react/outline';
import clsx from 'clsx';
import { Database, HardDrive, PaintBrush } from 'phosphor-react';
import React from 'react';
import { Outlet } from 'react-router-dom';
import { SidebarLink } from '../components/file/Sidebar';
const Icon = ({ component: Icon, ...props }: any) => (
<Icon weight="bold" {...props} className={clsx('w-4 h-4 mr-2', props.className)} />
);
const Heading: React.FC<{ className?: string; children: string }> = ({ children, className }) => (
<div className={clsx('mt-5 mb-1 ml-1 text-xs font-semibold text-gray-300', className)}>
{children}
</div>
);
export const SettingsScreen: React.FC<{}> = () => {
return (
<div className="flex flex-row w-full">
<div className="h-full border-r max-w-[200px] flex-shrink-0 border-gray-100 w-60 dark:border-gray-550">
<div data-tauri-drag-region className="w-full h-7" />
<div className="p-5 pt-0">
<Heading className="mt-0">Client</Heading>
<SidebarLink to="/settings/general">
<Icon component={CogIcon} />
General
</SidebarLink>
<SidebarLink to="/settings/security">
<Icon component={LockClosedIcon} />
Security
</SidebarLink>
<SidebarLink to="/settings/appearance">
<Icon component={PaintBrush} />
Appearance
</SidebarLink>
<SidebarLink to="/settings/experimental">
<Icon component={TerminalIcon} />
Experimental
</SidebarLink>
<Heading>Library</Heading>
<SidebarLink to="/settings/library">
<Icon component={Database} />
Database
</SidebarLink>
<SidebarLink to="/settings/locations">
<Icon component={HardDrive} />
Locations
</SidebarLink>
<SidebarLink to="/settings/keys">
<Icon component={KeyIcon} />
Keys
</SidebarLink>
<SidebarLink to="/settings/tags">
<Icon component={TagIcon} />
Tags
</SidebarLink>
<Heading>Cloud</Heading>
<SidebarLink to="/settings/sync">
<Icon component={CloudIcon} />
Sync
</SidebarLink>
<SidebarLink to="/settings/contacts">
<Icon component={UsersIcon} />
Contacts
</SidebarLink>
</div>
</div>
<div className="w-full">
<div data-tauri-drag-region className="w-full h-7" />
<div className="flex flex-grow-0 w-full h-full max-h-screen custom-scroll page-scroll">
<div className="flex flex-grow px-12 pb-5">
<Outlet />
<div className="block h-20" />
</div>
</div>
</div>
</div>
);
};

View file

@ -0,0 +1,42 @@
import { CogIcon, DatabaseIcon, KeyIcon, TagIcon } from '@heroicons/react/outline';
import { HardDrive, ShareNetwork } from 'phosphor-react';
import React from 'react';
import { SidebarLink } from '../../components/file/Sidebar';
import {
SettingsHeading,
SettingsIcon,
SettingsScreenContainer
} from '../../components/settings/SettingsScreenContainer';
export const CurrentLibrarySettings: React.FC = () => {
return (
<SettingsScreenContainer>
<SettingsHeading className="!mt-0">Library Settings</SettingsHeading>
<SidebarLink to="/library-settings/general">
<SettingsIcon component={CogIcon} />
General
</SidebarLink>
<SidebarLink to="/library-settings/locations">
<SettingsIcon component={HardDrive} />
Locations
</SidebarLink>
<SidebarLink to="/library-settings/tags">
<SettingsIcon component={TagIcon} />
Tags
</SidebarLink>
<SidebarLink to="/library-settings/keys">
<SettingsIcon component={KeyIcon} />
Keys
</SidebarLink>
<SidebarLink to="/library-settings/backups">
<SettingsIcon component={DatabaseIcon} />
Backups
</SidebarLink>
<SidebarLink to="/library-settings/backups">
<SettingsIcon component={ShareNetwork} />
Sync
</SidebarLink>
</SettingsScreenContainer>
);
};

View file

@ -1,40 +0,0 @@
import { useBridgeQuery } from '@sd/client';
import React from 'react';
import { InputContainer } from '../../components/primitive/InputContainer';
import Listbox from '../../components/primitive/Listbox';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
export default function GeneralSettings() {
const { data: volumes } = useBridgeQuery('SysGetVolumes');
return (
<SettingsContainer>
<SettingsHeader
title="General Settings"
description="Basic settings related to this client."
/>
<InputContainer title="Volumes" description="A list of volumes running on this device.">
<div className="flex flex-row space-x-2">
<div className="flex flex-grow">
<Listbox
options={
volumes?.map((volume) => {
const name = volume.name && volume.name.length ? volume.name : volume.mount_point;
return {
key: name,
option: name,
description: volume.mount_point
};
}) ?? []
}
/>
</div>
</div>
</InputContainer>
{/* <div className="">{JSON.stringify({ config })}</div> */}
</SettingsContainer>
);
}

View file

@ -1,32 +0,0 @@
import React from 'react';
import { Toggle } from '../../components/primitive';
import { InputContainer } from '../../components/primitive/InputContainer';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
// type LibrarySecurity = 'public' | 'password' | 'vault';
export default function LibrarySettings() {
// const locations = useBridgeQuery("SysGetLocation")
const [encryptOnCloud, setEncryptOnCloud] = React.useState<boolean>(false);
return (
<SettingsContainer>
{/* <Button size="sm">Add Location</Button> */}
<SettingsHeader
title="Library database"
description="The database contains all library data and file metadata."
/>
<InputContainer
mini
title="Encrypt on cloud"
description="Enable if library contains sensitive data and should not be synced to the cloud without full encryption."
>
<div className="flex items-center h-full pl-10">
<Toggle value={encryptOnCloud} onChange={setEncryptOnCloud} size={'sm'} />
</div>
</InputContainer>
</SettingsContainer>
);
}

View file

@ -1,23 +0,0 @@
import { Button } from '@sd/ui';
import React from 'react';
import { InputContainer } from '../../components/primitive/InputContainer';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
export default function SecuritySettings() {
return (
<SettingsContainer>
<SettingsHeader title="Security" description="Keep your client safe." />
<InputContainer
title="Vault"
description="You'll need to set a passphrase to enable the vault."
>
<div className="flex flex-row">
<Button variant="primary">Enable Vault</Button>
{/*<Input className="flex-grow" value="jeff" placeholder="/users/jamie/Desktop" />*/}
</div>
</InputContainer>
</SettingsContainer>
);
}

View file

@ -0,0 +1,83 @@
import {
CogIcon,
CollectionIcon,
GlobeAltIcon,
KeyIcon,
TerminalIcon
} from '@heroicons/react/outline';
import { HardDrive, PaintBrush, ShareNetwork } from 'phosphor-react';
import React from 'react';
import { SidebarLink } from '../../components/file/Sidebar';
import {
SettingsHeading,
SettingsIcon,
SettingsScreenContainer
} from '../../components/settings/SettingsScreenContainer';
// Top-level settings navigation screen.
// Renders the sidebar of links to every settings sub-page, grouped under
// "Client", "Node" and "Developer" headings. The commented-out groups
// below ("Library", "Cloud") are planned sections that are not wired up yet.
export const SettingsScreen: React.FC = () => {
	return (
		<SettingsScreenContainer>
			{/* Client-scoped settings (this machine's UI preferences) */}
			<SettingsHeading className="!mt-0">Client</SettingsHeading>
			<SidebarLink to="/settings/general">
				<SettingsIcon component={CogIcon} />
				General
			</SidebarLink>
			<SidebarLink to="/settings/appearance">
				<SettingsIcon component={PaintBrush} />
				Appearance
			</SidebarLink>
			{/* Node-scoped settings (networking, libraries, security) */}
			<SettingsHeading>Node</SettingsHeading>
			<SidebarLink to="/settings/nodes">
				<SettingsIcon component={GlobeAltIcon} />
				Nodes
			</SidebarLink>
			<SidebarLink to="/settings/p2p">
				<SettingsIcon component={ShareNetwork} />
				P2P
			</SidebarLink>
			<SidebarLink to="/settings/library">
				<SettingsIcon component={CollectionIcon} />
				Libraries
			</SidebarLink>
			<SidebarLink to="/settings/security">
				<SettingsIcon component={KeyIcon} />
				Security
			</SidebarLink>
			{/* Developer tooling */}
			<SettingsHeading>Developer</SettingsHeading>
			<SidebarLink to="/settings/experimental">
				<SettingsIcon component={TerminalIcon} />
				Experimental
			</SidebarLink>
			{/* <SettingsHeading>Library</SettingsHeading>
			<SidebarLink to="/settings/library">
				<SettingsIcon component={CollectionIcon} />
				My Libraries
			</SidebarLink>
			<SidebarLink to="/settings/locations">
				<SettingsIcon component={HardDrive} />
				Locations
			</SidebarLink>
			<SidebarLink to="/settings/keys">
				<SettingsIcon component={KeyIcon} />
				Keys
			</SidebarLink>
			<SidebarLink to="/settings/tags">
				<SettingsIcon component={TagIcon} />
				Tags
			</SidebarLink> */}
			{/* <SettingsHeading>Cloud</SettingsHeading>
			<SidebarLink to="/settings/sync">
				<SettingsIcon component={CloudIcon} />
				Sync
			</SidebarLink>
			<SidebarLink to="/settings/contacts">
				<SettingsIcon component={UsersIcon} />
				Contacts
			</SidebarLink> */}
		</SettingsScreenContainer>
	);
};

View file

@ -1,7 +1,7 @@
import React from 'react';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function AppearanceSettings() {
return (

View file

@ -0,0 +1,35 @@
import React from 'react';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function GeneralSettings() {
// const { data: volumes } = useBridgeQuery('SysGetVolumes');
return (
<SettingsContainer>
<SettingsHeader
title="General Settings"
description="General settings related to this client."
/>
{/* <InputContainer title="Volumes" description="A list of volumes running on this device.">
<div className="flex flex-row space-x-2">
<div className="flex flex-grow">
<Listbox
options={
volumes?.map((volume) => {
const name = volume.name && volume.name.length ? volume.name : volume.mount_point;
return {
key: name,
option: name,
description: volume.mount_point
};
}) ?? []
}
/>
</div>
</div>
</InputContainer> */}
</SettingsContainer>
);
}

View file

@ -1,7 +1,7 @@
import React from 'react';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function ContactsSettings() {
return (

View file

@ -1,7 +1,7 @@
import React from 'react';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function KeysSettings() {
return (

View file

@ -0,0 +1,91 @@
import { useBridgeCommand, useBridgeQuery } from '@sd/client';
import { useCurrentLibrary } from '@sd/client';
import { Button, Input } from '@sd/ui';
import React, { useCallback, useEffect, useState } from 'react';
import { useDebounce } from 'use-debounce';
import { Toggle } from '../../../components/primitive';
import { InputContainer } from '../../../components/primitive/InputContainer';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
// Settings page for the currently active library: rename/describe it,
// toggle database encryption, and (eventually) delete it.
// Name/description edits are debounced and pushed via the EditLibrary command.
export default function LibraryGeneralSettings() {
	const { currentLibrary, libraries, currentLibraryUuid } = useCurrentLibrary();
	const { mutate: editLibrary } = useBridgeCommand('EditLibrary');

	const [name, setName] = useState('');
	const [description, setDescription] = useState('');
	const [encryptLibrary, setEncryptLibrary] = useState(false);

	// Debounce text edits so we don't fire an EditLibrary command per keystroke.
	const [nameDebounced] = useDebounce(name, 500);
	const [descriptionDebounced] = useDebounce(description, 500);

	useEffect(() => {
		if (currentLibrary) {
			const { name, description } = currentLibrary.config;
			// currentLibrary must be loaded, name must not be empty, and must be different from the current
			if (nameDebounced && (nameDebounced !== name || descriptionDebounced !== description)) {
				editLibrary({
					id: currentLibraryUuid!,
					name: nameDebounced,
					description: descriptionDebounced
				});
			}
		}
		// Intentionally depends only on the debounced values: reacting to
		// `currentLibrary` here would re-submit immediately after every edit.
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [nameDebounced, descriptionDebounced]);

	// Re-sync the local form whenever the library list refreshes. Fall back to
	// '' so the inputs always stay controlled even if a config field is unset.
	useEffect(() => {
		if (currentLibrary) {
			setName(currentLibrary.config.name ?? '');
			setDescription(currentLibrary.config.description ?? '');
		}
		// eslint-disable-next-line react-hooks/exhaustive-deps
	}, [libraries]);

	return (
		<SettingsContainer>
			<SettingsHeader
				title="Library Settings"
				description="General settings related to the currently active library."
			/>
			<div className="flex flex-row pb-3 space-x-5">
				<div className="flex flex-col flex-grow ">
					<span className="mt-2 mb-1 text-xs font-semibold text-gray-300">Name</span>
					{/* Fix: dropped `defaultValue` — it is ignored on a controlled input
					    and triggers React's controlled/uncontrolled warning. */}
					<Input value={name} onChange={(e) => setName(e.target.value)} />
				</div>
				<div className="flex flex-col flex-grow">
					<span className="mt-2 mb-1 text-xs font-semibold text-gray-300">Description</span>
					<Input
						value={description}
						onChange={(e) => setDescription(e.target.value)}
						placeholder=""
					/>
				</div>
			</div>
			<InputContainer
				mini
				title="Encrypt Library"
				description="Enable encryption for this library, this will only encrypt the Spacedrive database, not the files themselves."
			>
				<div className="flex items-center ml-3">
					<Toggle value={encryptLibrary} onChange={setEncryptLibrary} />
				</div>
			</InputContainer>
			<InputContainer
				title="Delete Library"
				description="This is permanent, your files will not be deleted, only the Spacedrive library."
			>
				<div className="mt-2">
					{/* Not yet wired to a delete command */}
					<Button size="sm" variant="colored" className="bg-red-500 border-red-500">
						Delete Library
					</Button>
				</div>
			</InputContainer>
		</SettingsContainer>
	);
}

View file

@ -0,0 +1,55 @@
import { PlusIcon } from '@heroicons/react/solid';
import { useBridgeQuery, useLibraryCommand, useLibraryQuery } from '@sd/client';
import { AppPropsContext } from '@sd/client';
import { Button } from '@sd/ui';
import React, { useContext } from 'react';
import LocationListItem from '../../../components/location/LocationListItem';
import { InputContainer } from '../../../components/primitive/InputContainer';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
// const exampleLocations = [
// { option: 'Macintosh HD', key: 'macintosh_hd' },
// { option: 'LaCie External', key: 'lacie_external' },
// { option: 'Seagate 8TB', key: 'seagate_8tb' }
// ];
// Locations settings page: lists every indexed storage location and lets the
// user register a new one via the platform's native directory picker.
export default function LocationSettings() {
	const { data: locations } = useLibraryQuery('SysGetLocations');
	const appProps = useContext(AppPropsContext);
	const { mutate: createLocation } = useLibraryCommand('LocCreate');

	// Open the directory picker, then create a location for the chosen path.
	const handleAddLocation = () => {
		appProps?.openDialog({ directory: true }).then((result) => {
			if (result) createLocation({ path: result as string });
		});
	};

	return (
		<SettingsContainer>
			{/*<Button size="sm">Add Location</Button>*/}
			<SettingsHeader
				title="Locations"
				description="Manage your storage locations."
				rightArea={
					<div className="flex-row space-x-2">
						<Button variant="primary" size="sm" onClick={handleAddLocation}>
							Add Location
						</Button>
					</div>
				}
			/>
			<div className="grid space-y-2">
				{locations?.map((location) => (
					<LocationListItem key={location.id} location={location} />
				))}
			</div>
		</SettingsContainer>
	);
}

View file

@ -0,0 +1,14 @@
import { Button } from '@sd/ui';
import React from 'react';
import { InputContainer } from '../../../components/primitive/InputContainer';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function SecuritySettings() {
return (
<SettingsContainer>
<SettingsHeader title="Security" description="Keep your client safe." />
</SettingsContainer>
);
}

View file

@ -1,7 +1,7 @@
import React from 'react';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function SharingSettings() {
return (

View file

@ -1,7 +1,7 @@
import React from 'react';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function SyncSettings() {
return (

View file

@ -1,7 +1,7 @@
import React from 'react';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function TagsSettings() {
return (

View file

@ -1,14 +1,12 @@
import React from 'react';
import { useNodeStore } from '../../components/device/Stores';
import { Toggle } from '../../components/primitive';
import { InputContainer } from '../../components/primitive/InputContainer';
import { SettingsContainer } from '../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../components/settings/SettingsHeader';
import { useNodeStore } from '../../../components/device/Stores';
import { Toggle } from '../../../components/primitive';
import { InputContainer } from '../../../components/primitive/InputContainer';
import { SettingsContainer } from '../../../components/settings/SettingsContainer';
import { SettingsHeader } from '../../../components/settings/SettingsHeader';
export default function ExperimentalSettings() {
// const locations = useBridgeQuery("SysGetLocation")
const { isExperimental, setIsExperimental } = useNodeStore();
return (

Some files were not shown because too many files have changed in this diff Show more