improvements to spacedrive:// custom URI protocol (#550)

* fix `spacedrive://` custom protocol on Windows
(hopefully)

* custom protocol using `http::Response` + fix broken web

* import patches before App on web

* use `http::Request` for input to `handle_custom_uri`

* break `handle_custom_uri` into dedicated file + error handling

* serving files via custom protocol

* cargo fmt because vscode did cringe

* lru cache to reduce video chunk request time

* add `getCustomUriURL` helper to JS

* clippy be like

* remove duplicate Open buttons in context menu

* fix Linux 🙏

* no shot

* fix Windows custom URI passing (hopefully)

* better fix for custom uri on Linux

* upgrade Tauri for `linux-protocol-headers` feature

* switch url replacement order

* prevent React dev tools script being added in prod to desktop

* remove React devtools from html

* upgrade Tauri; required upgrading rspc, Axum, PCR

* pass typecheck + less cringe bigint

* clippy is love, clippy is life

* Typecheck plz

* fix bigint to number conversion

* use httpz + localhost server for Linux

* clippy be right

* Remove console.log

* [wip] proper auth

* fix Linux sidebar padding

* Secure Axum server with random

* Extracting app setup specific to linux to a different file

* remove outdated comment

* Some tweaks on custom_uri.rs

* file_path_with_location doesn't need to be a named include

* fix typo

* factually wrong comment

* Change `unwrap` to `expect`

* bruh

---------

Co-authored-by: Ericson Soares <ericson.ds999@gmail.com>
This commit is contained in:
Oscar Beaumont 2023-02-14 13:27:11 +08:00 committed by GitHub
parent f47a2d58e5
commit a9fceae819
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
57 changed files with 1150 additions and 645 deletions

545
Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -12,25 +12,25 @@ members = [
]
[workspace.dependencies]
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust.git", tag = "0.6.4", features = [
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "c965b89f1a07a6931d90f4b5556421f7ffcda03b", features = [
"rspc",
"sqlite-create-many",
"migrations",
"sqlite",
], default-features = false }
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust.git", tag = "0.6.4", features = [
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "c965b89f1a07a6931d90f4b5556421f7ffcda03b", features = [
"rspc",
"sqlite-create-many",
"migrations",
"sqlite",
], default-features = false }
prisma-client-rust-sdk = { git = "https://github.com/Brendonovich/prisma-client-rust.git", tag = "0.6.4", features = [
prisma-client-rust-sdk = { git = "https://github.com/Brendonovich/prisma-client-rust", rev = "c965b89f1a07a6931d90f4b5556421f7ffcda03b", features = [
"sqlite",
], default-features = false }
rspc = { version = "0.1.2" }
normi = { version = "0.0.1" }
specta = { version = "0.0.4" }
specta = { version = "0.0.6" }
httpz = { version = "0.0.3" }
swift-rs = { git = "https://github.com/Brendonovich/swift-rs.git", rev = "833e29ba333f1dfe303eaa21de78c4f8c5a3f2ff" }
@ -40,6 +40,6 @@ tokio = { version = "1.25.0" }
# We use this patch so we can compile for the IOS simulator on M1
openssl-sys = { git = "https://github.com/spacedriveapp/rust-openssl", rev = "92c3dec225a9e984884d5b30a517e5d44a24d03b" }
rspc = { git = "https://github.com/oscartbeaumont/rspc", rev = "6243b5b6a1376940a40318340e5eaef22e4a2c22" } # TODO: Move back to crates.io when new jsonrpc executor + `tokio::spawn` in the Tauri IPC plugin is released
normi = { git = "https://github.com/oscartbeaumont/rspc", rev = "6243b5b6a1376940a40318340e5eaef22e4a2c22" } # TODO: When normi is released on crates.io
specta = { git = "https://github.com/oscartbeaumont/rspc", rev = "6243b5b6a1376940a40318340e5eaef22e4a2c22" } # TODO: When normi is released on crates.io
rspc = { git = "https://github.com/oscartbeaumont/rspc", rev = "c03872c0ba29d2429e9c059dfb235cdd03e15e8c" } # TODO: Move back to crates.io when new jsonrpc executor + `tokio::spawn` in the Tauri IPC plugin + upgraded Tauri version is released
specta = { git = "https://github.com/oscartbeaumont/rspc", rev = "c03872c0ba29d2429e9c059dfb235cdd03e15e8c" }
httpz = { git = "https://github.com/oscartbeaumont/httpz", rev = "a5185f2ed2fdefeb2f582dce38a692a1bf76d1d6" }

View file

@ -18,13 +18,13 @@
"@sd/client": "workspace:*",
"@sd/interface": "workspace:*",
"@sd/ui": "workspace:*",
"@tauri-apps/api": "1.1.0",
"@tauri-apps/api": "1.2.0",
"react": "^18.2.0",
"react-dom": "^18.2.0"
},
"devDependencies": {
"@sd/config": "workspace:*",
"@tauri-apps/cli": "1.1.1",
"@tauri-apps/cli": "1.2.3",
"@types/babel-core": "^6.25.7",
"@types/react": "^18.0.21",
"@types/react-dom": "^18.0.6",

View file

@ -10,13 +10,22 @@ edition = "2021"
build = "build.rs"
[dependencies]
tauri = { version = "1.1.1", features = ["api-all", "macos-private-api"] }
tauri = { version = "1.2.4", features = ["api-all", "linux-protocol-headers", "macos-private-api"] }
rspc = { workspace = true, features = ["tauri"] }
httpz = { workspace = true, features = ["axum", "tauri"] } # TODO: The `axum` feature should be only enabled on Linux but this currently can't be done: https://github.com/rust-lang/cargo/issues/1197
sd-core = { path = "../../../core", features = ["ffmpeg", "location-watcher"] }
tokio = { workspace = true, features = ["sync"] }
window-shadows = "0.2.0"
tracing = "0.1.36"
serde = "1.0.145"
percent-encoding = "2.2.0"
http = "0.2.8"
[target.'cfg(target_os = "linux")'.dependencies]
server = { path = "../../server" }
axum = "0.6.4"
rand = "0.8.5"
url = "2.1.1"
[target.'cfg(target_os = "macos")'.dependencies]
swift-rs.workspace = true

View file

@ -0,0 +1,95 @@
use std::{
net::{SocketAddr, TcpListener},
sync::Arc,
};
use sd_core::Node;
use axum::{
extract::State,
http::{Request, StatusCode},
middleware::{self, Next},
response::{IntoResponse, Response},
routing::get,
};
use httpz::{Endpoint, HttpEndpoint};
use rand::{distributions::Alphanumeric, Rng};
use tauri::{plugin::TauriPlugin, Builder, Runtime};
use tracing::debug;
use url::Url;
/// Linux-only app setup: serves the custom-URI content over a localhost Axum
/// server instead of a webview custom protocol (the Linux webview ignores
/// response headers / can't stream — see the workaround comment in main.rs).
///
/// Returns the `Builder` with a plugin attached that injects the server's URL
/// and auth token into the webview as globals.
pub(super) async fn setup<R: Runtime>(
	app: Builder<R>,
	node: Arc<Node>,
	endpoint: Endpoint<impl HttpEndpoint>,
) -> Builder<R> {
	// Resolves when the node shuts down, used to gracefully stop the server.
	let signal = server::utils::axum_shutdown_signal(node);

	// Random 10-char alphanumeric token; every request must present it (query
	// param or Authorization header) so other local processes can't hit the
	// server even though it's bound to loopback.
	let auth_token: String = rand::thread_rng()
		.sample_iter(&Alphanumeric)
		.take(10)
		.map(char::from)
		.collect();

	let axum_app = axum::Router::new()
		.route("/", get(|| async { "Spacedrive Server!" }))
		.nest("/spacedrive", endpoint.axum())
		// Auth applies to the routes above; the fallback below is unguarded.
		.route_layer(middleware::from_fn_with_state(
			auth_token.clone(),
			auth_middleware,
		))
		.fallback(|| async { "404 Not Found: We're past the event horizon..." });

	// Only allow current device to access it and randomise port (port 0 lets
	// the OS pick a free one).
	let listener = TcpListener::bind("127.0.0.1:0").expect("Error creating localhost server!");
	let listen_addr = listener
		.local_addr()
		.expect("Error getting localhost server listen addr!");

	debug!("Localhost server listening on: http://{:?}", listen_addr);

	// Run the server in the background for the lifetime of the node.
	tokio::spawn(async move {
		axum::Server::from_tcp(listener)
			.expect("error creating HTTP server!")
			.serve(axum_app.into_make_service())
			.with_graceful_shutdown(signal)
			.await
			.expect("Error with HTTP server!");
	});

	// Expose the server address + token to the frontend via an init script.
	app.plugin(spacedrive_plugin_init(&auth_token, listen_addr))
}
/// Axum middleware guarding the localhost server: accepts the request only if
/// it carries the expected auth token, either as a `?token=` query parameter
/// or as a raw `Authorization` header value; otherwise responds 401.
async fn auth_middleware<B>(
	State(auth_token): State<String>,
	request: Request<B>,
	next: Next<B>,
) -> Response {
	// NOTE(review): `request.uri()` on a server is typically path+query only;
	// `Url::parse` on it would fail and panic here — confirm the URI is
	// absolute in this setup before trusting this `unwrap`.
	let url = Url::parse(&request.uri().to_string()).unwrap();
	if let Some((_, v)) = url.query_pairs().find(|(k, _)| k == "token") {
		if v == auth_token {
			return next.run(request).await;
		}
	} else if let Some(v) = request
		// NOTE(review): because of the `else if`, a present-but-wrong `token`
		// query param means the Authorization header is never consulted —
		// confirm that's intended.
		.headers()
		.get("Authorization")
		.and_then(|v| v.to_str().ok())
	{
		if v == auth_token {
			return next.run(request).await;
		}
	}

	(StatusCode::UNAUTHORIZED, "Unauthorized!").into_response()
}
/// Builds a Tauri plugin whose init script runs in the webview before the app
/// loads, exposing the localhost server's base URL and auth token as globals.
/// NOTE(review): confirm the frontend reads these exact global names
/// (`__SD_CUSTOM_SERVER_AUTH_TOKEN__` / `__SD_CUSTOM_URI_SERVER__`).
pub fn spacedrive_plugin_init<R: Runtime>(
	auth_token: &str,
	listen_addr: SocketAddr,
) -> TauriPlugin<R> {
	tauri::plugin::Builder::new("spacedrive")
		.js_init_script(format!(
			r#"window.__SD_CUSTOM_SERVER_AUTH_TOKEN__ = "{auth_token}"; window.__SD_CUSTOM_URI_SERVER__ = "http://{listen_addr}";"#
		))
		.build()
}

View file

@ -3,24 +3,20 @@
windows_subsystem = "windows"
)]
use std::error::Error;
use std::path::PathBuf;
use std::time::Duration;
use std::{error::Error, path::PathBuf, sync::Arc, time::Duration};
use sd_core::Node;
use tauri::async_runtime::block_on;
use tauri::{
api::path,
http::{ResponseBuilder, Uri},
Manager, RunEvent,
};
use tokio::task::block_in_place;
use tokio::time::sleep;
use sd_core::{custom_uri::create_custom_uri_endpoint, Node};
use tauri::{api::path, async_runtime::block_on, Manager, RunEvent};
use tokio::{task::block_in_place, time::sleep};
use tracing::{debug, error};
#[cfg(target_os = "macos")]
mod macos;
#[cfg(target_os = "linux")]
mod app_linux;
mod menu;
#[tauri::command(async)]
@ -41,26 +37,21 @@ async fn main() -> Result<(), Box<dyn Error>> {
let (node, router) = Node::new(data_dir).await?;
let app = tauri::Builder::default()
.plugin(rspc::integrations::tauri::plugin(router, {
let node = node.clone();
move || node.get_request_context()
}))
.register_uri_scheme_protocol("spacedrive", {
let node = node.clone();
move |_, req| {
let url = req.uri().parse::<Uri>().unwrap();
let mut path = url.path().split('/').collect::<Vec<_>>();
path[0] = url.host().unwrap(); // The first forward slash causes an empty item and we replace it with the URL's host which you expect to be at the start
let app = tauri::Builder::default().plugin(rspc::integrations::tauri::plugin(router, {
let node = Arc::clone(&node);
move || node.get_request_context()
}));
let (status_code, content_type, body) =
block_in_place(|| block_on(node.handle_custom_uri(path)));
ResponseBuilder::new()
.status(status_code)
.mimetype(content_type)
.body(body)
}
})
// This is a super cringe workaround for: https://github.com/tauri-apps/tauri/issues/3725 & https://bugs.webkit.org/show_bug.cgi?id=146351#c5
let endpoint = create_custom_uri_endpoint(Arc::clone(&node));
#[cfg(target_os = "linux")]
let app = app_linux::setup(app, Arc::clone(&node), endpoint).await;
#[cfg(not(target_os = "linux"))]
let app = app.register_uri_scheme_protocol("spacedrive", endpoint.tauri_uri_scheme("spacedrive"));
let app = app
.setup(|app| {
let app = app.handle();
app.windows().iter().for_each(|(_, window)| {
@ -71,7 +62,9 @@ async fn main() -> Result<(), Box<dyn Error>> {
async move {
sleep(Duration::from_secs(3)).await;
if !window.is_visible().unwrap_or(true) {
println!("Window did not emit `app_ready` event fast enough. Showing window...");
println!(
"Window did not emit `app_ready` event fast enough. Showing window..."
);
let _ = window.show();
}
}

View file

@ -2,6 +2,7 @@ import { loggerLink } from '@rspc/client';
import { tauriLink } from '@rspc/tauri';
import { dialog, invoke, os, shell } from '@tauri-apps/api';
import { listen } from '@tauri-apps/api/event';
import { convertFileSrc } from '@tauri-apps/api/tauri';
import { useEffect } from 'react';
import { getDebugState, hooks, queryClient } from '@sd/client';
import SpacedriveInterface, { OperatingSystem, Platform, PlatformProvider } from '@sd/interface';
@ -30,9 +31,29 @@ async function getOs(): Promise<OperatingSystem> {
}
}
// Base URL + auth token for the localhost custom-URI server. These globals are
// injected by the Rust `spacedrive_plugin_init` on Linux; on macOS/Windows
// they are undefined and we fall back to the `spacedrive://` protocol.
let customUriServerUrl = (window as any).__SD_CUSTOM_URI_SERVER__ as string | undefined;
// Fix: must read the same key the Rust side injects
// (`__SD_CUSTOM_SERVER_AUTH_TOKEN__`); previously this read
// `__SD_CUSTOM_URI_TOKEN__`, which is never set, so the token was always
// undefined and Linux requests were sent without authentication.
const customUriAuthToken = (window as any).__SD_CUSTOM_SERVER_AUTH_TOKEN__ as string | undefined;

// Normalise to a trailing slash so paths can be appended directly.
if (customUriServerUrl && !customUriServerUrl.endsWith('/')) {
	customUriServerUrl += '/';
}

/**
 * Builds a URL for a custom-URI resource (`thumbnail/...`, `file/...`):
 * via the localhost server (with auth token) when one was injected, otherwise
 * via Tauri's custom-protocol URL conversion.
 */
function getCustomUriURL(path: string): string {
	if (customUriServerUrl) {
		const queryParams = customUriAuthToken
			? `?token=${encodeURIComponent(customUriAuthToken)}`
			: '';
		return `${customUriServerUrl}spacedrive/${path}${queryParams}`;
	} else {
		return convertFileSrc(path, 'spacedrive');
	}
}
const platform: Platform = {
platform: 'tauri',
getThumbnailUrlById: (casId) => `spacedrive://thumbnail/${encodeURIComponent(casId)}`,
getThumbnailUrlById: (casId) => getCustomUriURL(`thumbnail/${casId}`),
getFileUrl: (libraryId, locationLocalId, filePathId) =>
getCustomUriURL(`file/${libraryId}/${locationLocalId}/${filePathId}`),
openLink: shell.open,
getOs,
openDirectoryPickerDialog: () => dialog.open({ directory: true }),

View file

@ -8,8 +8,6 @@
</head>
<body style="overflow: hidden">
<div id="root"></div>
<!-- Script for React devtools. TODO: Make sure this isn't included in production builds. -->
<script src="http://localhost:8097"></script>
<script type="module" src="./index.tsx"></script>
</body>
</html>

View file

@ -6,6 +6,13 @@ import '@sd/ui/style';
import '~/patches';
import App from './App';
// React dev tools extension
if (import.meta.env.DEV) {
var script = document.createElement('script');
script.src = 'http://localhost:8097';
document.head.appendChild(script);
}
const root = ReactDOM.createRoot(document.getElementById('root') as HTMLElement);
root.render(
<React.StrictMode>

View file

@ -36,10 +36,7 @@ pub extern "system" fn Java_com_spacedrive_app_SDCore_registerCoreEventListener(
if let Err(err) = result {
// TODO: Send rspc error or something here so we can show this in the UI.
// TODO: Maybe reinitialise the core cause it could be in an invalid state?
println!(
"Error in Java_com_spacedrive_app_SDCore_registerCoreEventListener: {:?}",
err
);
println!("Error in Java_com_spacedrive_app_SDCore_registerCoreEventListener: {err:?}");
}
}
@ -86,9 +83,7 @@ pub extern "system" fn Java_com_spacedrive_app_SDCore_handleCoreMsg(
)
.unwrap();
}
Err(_) => {
// TODO: handle error
}
Err(err) => error!(err),
});
});

View file

@ -95,7 +95,7 @@ pub fn spawn_core_event_listener(callback: impl Fn(String) + Send + 'static) {
let data = match to_string(&event) {
Ok(json) => json,
Err(err) => {
println!("Failed to serialize event: {}", err);
println!("Failed to serialize event: {err}");
continue;
}
};

View file

@ -13,8 +13,8 @@ const StatItemNames: Partial<Record<keyof Statistics, string>> = {
total_bytes_free: 'Free space'
};
const StatItem: FC<{ title: string; bytes: number }> = ({ title, bytes }) => {
const { value, unit } = byteSize(+bytes);
const StatItem: FC<{ title: string; bytes: bigint }> = ({ title, bytes }) => {
const { value, unit } = byteSize(Number(bytes)); // TODO: This BigInt to Number conversion will truncate the number if the number is too large. `byteSize` doesn't support BigInt so we are gonna need to come up with a longer term solution at some point.
const count = useCounter({ name: title, end: Number(value) });
@ -51,12 +51,13 @@ const OverviewStats = () => {
return libraryStatistics ? (
<ScrollView horizontal showsHorizontalScrollIndicator={false}>
{Object.entries(libraryStatistics).map(([key, bytes]) => {
{Object.entries(libraryStatistics).map(([key, bytesRaw]) => {
if (!displayableStatItems.includes(key)) return null;
let bytes = BigInt(bytesRaw);
if (key === 'total_bytes_free') {
bytes = sizeInfo.freeSpace;
bytes = BigInt(sizeInfo.freeSpace);
} else if (key === 'total_bytes_capacity') {
bytes = sizeInfo.totalSpace;
bytes = BigInt(sizeInfo.totalSpace);
}
return <StatItem key={key} title={StatItemNames[key as keyof Statistics]!} bytes={bytes} />;
})}

View file

@ -4,11 +4,7 @@ import { resetStore } from '@sd/client';
// TODO: Add "media"
export type ExplorerLayoutMode = 'list' | 'grid';
export enum ExplorerKind {
Location,
Tag,
Space
}
export type ExplorerKind = 'Location' | 'Tag' | 'Space';
const state = {
locationId: null as number | null,

View file

@ -6,7 +6,10 @@ edition = "2021"
[dependencies]
sd-core = { path = "../../core", features = ["ffmpeg"] }
rspc = { workspace = true, features = ["axum"] }
axum = "0.5.16"
httpz = { workspace = true, features = ["axum"] }
axum = "0.6.4"
tokio = { workspace = true, features = ["sync", "rt-multi-thread", "signal"] }
tracing = "0.1.36"
ctrlc = "3.2.3"
http = "0.2.8"
hyper = "0.14.23"

1
apps/server/src/lib.rs Normal file
View file

@ -0,0 +1 @@
pub mod utils;

View file

@ -1,12 +1,7 @@
use std::{env, net::SocketAddr, path::Path};
use axum::{
extract,
handler::Handler,
http::{header::CONTENT_TYPE, HeaderMap, StatusCode},
routing::get,
};
use sd_core::Node;
use axum::routing::get;
use sd_core::{custom_uri::create_custom_uri_endpoint, Node};
use tracing::info;
mod utils;
@ -39,29 +34,15 @@ async fn main() {
let app = axum::Router::new()
.route("/", get(|| async { "Spacedrive Server!" }))
.route("/health", get(|| async { "OK" }))
.route("/spacedrive/*id", {
let node = node.clone();
get(|extract::Path(path): extract::Path<String>| async move {
let (status_code, content_type, body) = node
.handle_custom_uri(path.split('/').skip(1).collect())
.await;
(
StatusCode::from_u16(status_code).unwrap(),
{
let mut headers = HeaderMap::new();
headers.insert(CONTENT_TYPE, content_type.parse().unwrap());
headers
},
body,
)
})
})
.route(
"/rspc/:id",
.nest(
"/spacedrive",
create_custom_uri_endpoint(node.clone()).axum(),
)
.nest(
"/rspc",
router.endpoint(move || node.get_request_context()).axum(),
)
.fallback((|| async { "404 Not Found: We're past the event horizon..." }).into_service());
.fallback(|| async { "404 Not Found: We're past the event horizon..." });
let mut addr = "[::]:8080".parse::<SocketAddr>().unwrap(); // This listens on IPv6 and IPv4
addr.set_port(port);

View file

@ -23,11 +23,16 @@ const client = hooks.createClient({
});
const http = isDev ? 'http' : 'https';
const spacedriveProtocol = `${http}://${serverOrigin}/spacedrive`;
const platform: Platform = {
platform: 'web',
getThumbnailUrlById: (casId) =>
`${http}://${serverOrigin}/spacedrive/thumbnail/${encodeURIComponent(casId)}.webp`,
`${spacedriveProtocol}/thumbnail/${encodeURIComponent(casId)}.webp`,
getFileUrl: (libraryId, locationLocalId, filePathId) =>
`${spacedriveProtocol}/file/${encodeURIComponent(libraryId)}/${encodeURIComponent(
locationLocalId
)}/${encodeURIComponent(filePathId)}`,
openLink: (url) => window.open(url, '_blank')?.focus(),
demoMode: true
};

View file

@ -3,8 +3,8 @@ import React, { Suspense } from 'react';
import ReactDOM from 'react-dom/client';
import '@sd/ui/style';
// THIS MUST GO BEFORE importing the App
import '~/patches';
import App from './App';
import './patches';
const root = ReactDOM.createRoot(document.getElementById('root') as HTMLElement);
root.render(

View file

@ -10,15 +10,10 @@ rust-version = "1.67.0"
[features]
default = ["p2p"]
p2p = [
] # This feature controls whether the Spacedrive Core contains the Peer to Peer syncing engine (It isn't required for the hosted core so we can disable it).
mobile = [
] # This feature allows features to be disabled when the Core is running on mobile.
p2p = [] # This feature controls whether the Spacedrive Core contains the Peer to Peer syncing engine (It isn't required for the hosted core so we can disable it).
mobile = [] # This feature allows features to be disabled when the Core is running on mobile.
android = ["dep:tracing-android"]
ffmpeg = [
"dep:ffmpeg-next",
"dep:sd-ffmpeg",
] # This feature controls whether the Spacedrive Core contains functionality which requires FFmpeg.
ffmpeg = ["dep:ffmpeg-next", "dep:sd-ffmpeg"] # This feature controls whether the Spacedrive Core contains functionality which requires FFmpeg.
location-watcher = ["dep:notify"]
[dependencies]
@ -37,8 +32,8 @@ blake3 = "1.3.1"
# Project dependencies
rspc = { workspace = true, features = ["uuid", "chrono", "tracing"] }
httpz = { workspace = true }
prisma-client-rust = { workspace = true }
normi = { workspace = true }
specta = { workspace = true }
uuid = { version = "1.1.2", features = ["v4", "serde"] }
sysinfo = "0.26.4"
@ -73,6 +68,9 @@ notify = { version = "5.0.0", default-features = false, features = [
"macos_fsevent",
], optional = true }
uhlc = "0.5.1"
http-range = "0.1.5"
mini-moka = "0.10.0"
serde_with = "2.2.0"
dashmap = { version = "5.4.0", features = ["serde"] }
[dev-dependencies]

View file

@ -7,5 +7,5 @@ fn main() {
.expect("error getting git hash. Does `git rev-parse --short HEAD` work for you?");
let git_hash = String::from_utf8(output.stdout)
.expect("Error passing output of `git rev-parse --short HEAD`");
println!("cargo:rustc-env=GIT_HASH={}", git_hash);
println!("cargo:rustc-env=GIT_HASH={git_hash}");
}

View file

@ -342,7 +342,7 @@ pub(crate) fn mount() -> RouterBuilder {
invalidate_query!(library, "keys.list");
invalidate_query!(library, "keys.listMounted");
Ok(updated_keys.len())
Ok(TryInto::<u32>::try_into(updated_keys.len()).unwrap()) // We convert from `usize` (bigint type) to `u32` (number type) because rspc doesn't support bigints.
})
})
.library_mutation("changeMasterPassword", |t| {

View file

@ -9,7 +9,7 @@ use crate::{
use std::path::PathBuf;
use rspc::{self, internal::MiddlewareBuilderLike, ErrorCode, Type};
use rspc::{self, ErrorCode, RouterBuilderLike, Type};
use serde::{Deserialize, Serialize};
use super::{utils::LibraryRequest, Ctx, RouterBuilder};
@ -27,11 +27,11 @@ pub enum ExplorerContext {
pub enum ExplorerItem {
Path {
has_thumbnail: bool,
item: Box<file_path_with_object::Data>,
item: file_path_with_object::Data,
},
Object {
has_thumbnail: bool,
item: Box<object_with_file_paths::Data>,
item: object_with_file_paths::Data,
},
}
@ -45,12 +45,7 @@ file_path::include!(file_path_with_object { object });
object::include!(object_with_file_paths { file_paths });
indexer_rules_in_location::include!(indexer_rules_in_location_with_rules { indexer_rule });
// TODO(@Oscar): This return type sucks. Add an upstream rspc solution.
pub(crate) fn mount() -> rspc::RouterBuilder<
Ctx,
(),
impl MiddlewareBuilderLike<Ctx, LayerContext = Ctx> + Send + 'static,
> {
pub(crate) fn mount() -> impl RouterBuilderLike<Ctx> {
<RouterBuilder>::new()
.library_query("list", |t| {
t(|_, _: (), library| async move {
@ -149,7 +144,7 @@ pub(crate) fn mount() -> rspc::RouterBuilder<
items.push(ExplorerItem::Path {
has_thumbnail,
item: Box::new(file_path),
item: file_path,
});
}

View file

@ -42,7 +42,6 @@ mod keys;
mod libraries;
mod locations;
mod nodes;
mod normi;
mod tags;
pub mod utils;
pub mod volumes;
@ -90,15 +89,14 @@ pub(crate) fn mount() -> Arc<Router> {
})
})
})
.merge("normi.", normi::mount())
.merge("library.", libraries::mount())
.merge("volumes.", volumes::mount())
.merge("tags.", tags::mount())
.merge("nodes.", nodes::mount())
.merge("keys.", keys::mount())
.merge("locations.", locations::mount())
.merge("files.", files::mount())
.merge("jobs.", jobs::mount())
.yolo_merge("library.", libraries::mount())
.yolo_merge("volumes.", volumes::mount())
.yolo_merge("tags.", tags::mount())
.yolo_merge("nodes.", nodes::mount())
.yolo_merge("keys.", keys::mount())
.yolo_merge("locations.", locations::mount())
.yolo_merge("files.", files::mount())
.yolo_merge("jobs.", jobs::mount())
// TODO: Scope the invalidate queries to a specific library (filtered server side)
.subscription("invalidateQuery", |t| {
t(|ctx, _: ()| {

View file

@ -1,91 +0,0 @@
use normi::{typed, Object};
use rspc::Type;
use serde::Serialize;
use super::RouterBuilder;
#[derive(Serialize, Type, Object)]
#[normi(rename = "org")]
pub struct Organisation {
#[normi(id)]
pub id: String,
pub name: String,
#[normi(refr)]
pub users: Vec<User>,
#[normi(refr)]
pub owner: User,
pub non_normalised_data: Vec<()>,
}
#[derive(Serialize, Type, Object)]
pub struct User {
#[normi(id)]
pub id: String,
pub name: String,
}
#[derive(Serialize, Type, Object)]
pub struct CompositeId {
#[normi(id)]
pub org_id: String,
#[normi(id)]
pub user_id: String,
}
pub fn mount() -> RouterBuilder {
RouterBuilder::new()
.query("version", |t| t(|_, _: ()| "0.1.0"))
.query("userSync", |t| {
t.resolver(|_, _: ()| User {
id: "1".to_string(),
name: "Monty Beaumont".to_string(),
})
.map(typed)
})
.query("user", |t| {
t.resolver(|_, _: ()| async move {
Ok(User {
id: "1".to_string(),
name: "Monty Beaumont".to_string(),
})
})
.map(typed)
})
.query("org", |t| {
t.resolver(|_, _: ()| async move {
Ok(Organisation {
id: "org-1".into(),
name: "Org 1".into(),
users: vec![
User {
id: "user-1".into(),
name: "Monty Beaumont".into(),
},
User {
id: "user-2".into(),
name: "Millie Beaumont".into(),
},
User {
id: "user-3".into(),
name: "Oscar Beaumont".into(),
},
],
owner: User {
id: "user-1".into(),
name: "Monty Beaumont".into(),
},
non_normalised_data: vec![(), ()],
})
})
.map(typed)
})
.query("composite", |t| {
t.resolver(|_, _: ()| async move {
Ok(CompositeId {
org_id: "org-1".into(),
user_id: "user-1".into(),
})
})
.map(typed)
})
}

View file

@ -79,7 +79,7 @@ pub(crate) fn mount() -> RouterBuilder {
items.push(ExplorerItem::Object {
has_thumbnail,
item: Box::new(object),
item: object,
});
}

View file

@ -32,7 +32,7 @@ impl InvalidateOperationEvent {
#[allow(dead_code)]
pub(crate) struct InvalidationRequest {
pub key: &'static str,
pub arg_ty: Option<DataType>,
pub input_ty: Option<DataType>,
pub macro_src: &'static str,
}
@ -60,8 +60,8 @@ impl InvalidRequests {
let queries = r.queries();
for req in &invalidate_requests.queries {
if let Some(query_ty) = queries.get(req.key) {
if let Some(arg) = &req.arg_ty {
if &query_ty.ty.arg_ty != arg {
if let Some(input) = &req.input_ty {
if &query_ty.ty.input != input {
panic!(
"Error at '{}': Attempted to invalid query '{}' but the argument type does not match the type defined on the router.",
req.macro_src, req.key
@ -104,8 +104,8 @@ macro_rules! invalidate_query {
.queries
.push(crate::api::utils::InvalidationRequest {
key: $key,
arg_ty: None,
macro_src: concat!(file!(), ":", line!()),
input_ty: None,
macro_src: concat!(file!(), ":", line!()),
})
}
}
@ -115,8 +115,8 @@ macro_rules! invalidate_query {
crate::api::utils::InvalidateOperationEvent::dangerously_create($key, serde_json::Value::Null)
))
}};
($ctx:expr, $key:literal: $arg_ty:ty, $arg:expr $(,)?) => {{
let _: $arg_ty = $arg; // Assert the type the user provided is correct
($ctx:expr, $key:literal: $input_ty:ty, $input:expr $(,)?) => {{
let _: $input_ty = $input; // Assert the type the user provided is correct
let ctx: &crate::library::LibraryContext = &$ctx; // Assert the context is the correct type
#[cfg(debug_assertions)]
@ -129,7 +129,7 @@ macro_rules! invalidate_query {
.queries
.push(crate::api::utils::InvalidationRequest {
key: $key,
arg_ty: Some(<$arg_ty as rspc::internal::specta::Type>::reference(rspc::internal::specta::DefOpts {
input_ty: Some(<$input_ty as rspc::internal::specta::Type>::reference(rspc::internal::specta::DefOpts {
parent_inline: false,
type_map: &mut rspc::internal::specta::TypeDefs::new(),
}, &[])),
@ -139,7 +139,7 @@ macro_rules! invalidate_query {
}
// The error are ignored here because they aren't mission critical. If they fail the UI might be outdated for a bit.
let _ = serde_json::to_value($arg)
let _ = serde_json::to_value($input)
.map(|v|
ctx.emit(crate::api::CoreEvent::InvalidateOperation(
crate::api::utils::InvalidateOperationEvent::dangerously_create($key, v),

View file

@ -67,7 +67,7 @@ pub trait LibraryRequest {
}
// Note: This will break with middleware context switching but that's fine for now
impl<TMiddleware> LibraryRequest for rspc::RouterBuilder<Ctx, (), TMiddleware>
impl<TMiddleware> LibraryRequest for rspc::RouterBuilder<Ctx, TMiddleware>
where
TMiddleware: MiddlewareBuilderLike<Ctx, LayerContext = Ctx> + Send + 'static,
{

300
core/src/custom_uri.rs Normal file
View file

@ -0,0 +1,300 @@
use crate::{prisma::file_path, Node};
use std::{cmp::min, io, path::PathBuf, str::FromStr, sync::Arc};
use http_range::HttpRange;
use httpz::{
http::{Method, Response, StatusCode},
Endpoint, GenericEndpoint, HttpEndpoint, Request,
};
use mini_moka::sync::Cache;
use once_cell::sync::Lazy;
use prisma_client_rust::QueryError;
use thiserror::Error;
use tokio::{
fs::{self, File},
io::{AsyncReadExt, AsyncSeekExt, SeekFrom},
};
use tracing::{error, warn};
use uuid::Uuid;
// This LRU cache allows us to avoid doing a DB lookup on every request.
// The main advantage of this LRU Cache is for video files. Video files are fetch in multiple chunks and the cache prevents a DB lookup on every chunk reducing the request time from 15-25ms to 1-10ms.
type MetadataCacheKey = (Uuid, i32, i32);
static FILE_METADATA_CACHE: Lazy<Cache<MetadataCacheKey, (PathBuf, Option<String>)>> =
Lazy::new(|| Cache::new(100));
// TODO: We should listen to events when deleting or moving a location and evict the cache accordingly.
// TODO: Probs use this cache in rspc queries too!
/// Entry point for all `spacedrive://` custom-URI requests. Splits the URI
/// path into segments and dispatches on the first one (`thumbnail` or `file`).
async fn handler(node: Arc<Node>, req: Request) -> Result<Response<Vec<u8>>, HandleCustomUriError> {
	// Drop the single leading '/' if present so the operation name lands in
	// `path[0]`; if there is none, split the path as-is.
	let path = req
		.uri()
		.path()
		.strip_prefix('/')
		.unwrap_or_else(|| req.uri().path())
		.split('/')
		.collect::<Vec<_>>();

	match path.first() {
		Some(&"thumbnail") => handle_thumbnail(&node, &path).await,
		Some(&"file") => handle_file(&node, &path, &req).await,
		_ => Err(HandleCustomUriError::BadRequest("Invalid operation!")),
	}
}
/// Serves a thumbnail by cas id: reads
/// `<data_dir>/thumbnails/<cas_id>.webp` and returns it as `image/webp`.
/// Responds 404 if the file doesn't exist, 400 if the cas id segment is
/// missing.
async fn handle_thumbnail(
	node: &Node,
	path: &[&str],
) -> Result<Response<Vec<u8>>, HandleCustomUriError> {
	let file_cas_id = path
		.get(1)
		.ok_or_else(|| HandleCustomUriError::BadRequest("Invalid number of parameters!"))?;
	let filename = node
		.config
		.data_directory()
		.join("thumbnails")
		.join(file_cas_id)
		.with_extension("webp");

	// Map a missing file to 404; propagate any other I/O error as 500.
	let buf = fs::read(&filename).await.map_err(|err| {
		if err.kind() == io::ErrorKind::NotFound {
			HandleCustomUriError::NotFound("file")
		} else {
			err.into()
		}
	})?;

	Ok(Response::builder()
		.header("Content-Type", "image/webp")
		.status(StatusCode::OK)
		.body(buf)?)
}
async fn handle_file(
node: &Node,
path: &[&str],
req: &Request,
) -> Result<Response<Vec<u8>>, HandleCustomUriError> {
let library_id = path
.get(1)
.and_then(|id| Uuid::from_str(id).ok())
.ok_or_else(|| {
HandleCustomUriError::BadRequest("Invalid number of parameters. Missing library_id!")
})?;
let location_id = path
.get(2)
.and_then(|id| id.parse::<i32>().ok())
.ok_or_else(|| {
HandleCustomUriError::BadRequest("Invalid number of parameters. Missing location_id!")
})?;
let file_path_id = path
.get(3)
.and_then(|id| id.parse::<i32>().ok())
.ok_or_else(|| {
HandleCustomUriError::BadRequest("Invalid number of parameters. Missing file_path_id!")
})?;
let lru_cache_key = (library_id, location_id, file_path_id);
let (file_path_materialized_path, extension) =
if let Some(entry) = FILE_METADATA_CACHE.get(&lru_cache_key) {
entry
} else {
let library = node
.library_manager
.get_ctx(library_id)
.await
.ok_or_else(|| HandleCustomUriError::NotFound("library"))?;
let file_path = library
.db
.file_path()
.find_unique(file_path::location_id_id(location_id, file_path_id))
.include(file_path::include!({ location }))
.exec()
.await?
.ok_or_else(|| HandleCustomUriError::NotFound("object"))?;
let lru_entry = (
PathBuf::from(file_path.location.local_path.ok_or_else(|| {
warn!(
"Location '{}' doesn't have local path set",
file_path.location_id
);
HandleCustomUriError::BadRequest("Location doesn't have `local_path` set!")
})?)
.join(&file_path.materialized_path),
file_path.extension,
);
FILE_METADATA_CACHE.insert(lru_cache_key, lru_entry.clone());
lru_entry
};
let mut file = File::open(file_path_materialized_path)
.await
.map_err(|err| {
if err.kind() == io::ErrorKind::NotFound {
HandleCustomUriError::NotFound("file")
} else {
err.into()
}
})?;
let metadata = file.metadata().await?;
// TODO: This should be determined from magic bytes when the file is indexed and stored it in the DB on the file path
let (mime_type, is_video) = match extension.as_deref() {
Some("mp4") => ("video/mp4", true),
Some("webm") => ("video/webm", true),
Some("mkv") => ("video/x-matroska", true),
Some("avi") => ("video/x-msvideo", true),
Some("mov") => ("video/quicktime", true),
Some("png") => ("image/png", false),
Some("jpg") => ("image/jpeg", false),
Some("jpeg") => ("image/jpeg", false),
Some("gif") => ("image/gif", false),
Some("webp") => ("image/webp", false),
Some("svg") => ("image/svg+xml", false),
_ => {
return Err(HandleCustomUriError::BadRequest(
"TODO: This filetype is not supported because of the missing mime type!",
));
}
};
if is_video {
let mut response = Response::builder();
let mut status_code = 200;
// if the webview sent a range header, we need to send a 206 in return
let buf = if let Some(range) = req.headers().get("range") {
let mut buf = Vec::new();
let file_size = metadata.len();
let range = HttpRange::parse(
range
.to_str()
.map_err(|_| HandleCustomUriError::BadRequest("Error passing range header!"))?,
file_size,
)
.map_err(|_| HandleCustomUriError::BadRequest("Error passing range!"))?;
// let's support only 1 range for now
let first_range = range.first();
if let Some(range) = first_range {
let mut real_length = range.length;
// Cap the chunk size per request (especially important on WebView2)
// so a single range request never reads an oversized slice of the file
if range.length > file_size / 3 {
// max size sent (400kb / request)
// as it's local file system we can afford to read more often
real_length = min(file_size - range.start, 1024 * 400);
}
// Index of the last byte to read; HTTP ranges are inclusive, so the
// final byte is `start + length - 1` in the Content-Range header
let last_byte = range.start + real_length - 1;
status_code = 206;
// Only macOS and Windows are supported, if you set headers in linux they are ignored
response = response
.header("Connection", "Keep-Alive")
.header("Accept-Ranges", "bytes")
.header("Content-Length", real_length)
.header(
"Content-Range",
format!("bytes {}-{}/{}", range.start, last_byte, file_size),
);
// FIXME: Add ETag support (caching on the webview)
file.seek(SeekFrom::Start(range.start)).await?;
file.take(real_length).read_to_end(&mut buf).await?;
} else {
file.read_to_end(&mut buf).await?;
}
buf
} else {
// Linux is mega cringe and doesn't support streaming so we just load the whole file into memory and return it
let mut buf = Vec::with_capacity(metadata.len() as usize);
file.read_to_end(&mut buf).await?;
buf
};
Ok(response
.header("Content-type", mime_type)
.status(status_code)
.body(buf)?)
} else {
let mut buf = Vec::with_capacity(metadata.len() as usize);
file.read_to_end(&mut buf).await?;
Ok(Response::builder()
.header("Content-Type", mime_type)
.status(StatusCode::OK)
.body(buf)?)
}
}
/// Builds the httpz endpoint backing the `spacedrive://` custom URI protocol.
///
/// Every path (`/*any`) is matched on both GET and POST. Each request is
/// dispatched to [`handler`]; on failure the [`HandleCustomUriError`] is
/// converted into its corresponding HTTP error response instead of
/// propagating out of the endpoint.
pub fn create_custom_uri_endpoint(node: Arc<Node>) -> Endpoint<impl HttpEndpoint> {
	let methods = [Method::GET, Method::POST];
	GenericEndpoint::new("/*any", methods, move |request: Request| {
		// Each invocation gets its own handle to the node for the async block.
		let local_node = Arc::clone(&node);
		async move {
			match handler(local_node, request).await {
				Ok(response) => response,
				Err(err) => err.into(),
			}
		}
	})
}
/// Errors that can occur while serving a `spacedrive://` custom URI request.
///
/// Each variant is mapped to a plain-text HTTP error response by the
/// `From<HandleCustomUriError> for Response<Vec<u8>>` impl below.
#[derive(Error, Debug)]
pub enum HandleCustomUriError {
	/// Failed to construct an `http` request/response (returned as 500).
	#[error("error creating http request/response: {0}")]
	Http(#[from] httpz::http::Error),
	/// Filesystem I/O failure while reading the requested file (returned as 500).
	#[error("io error: {0}")]
	Io(#[from] io::Error),
	/// Database query failure (returned as 500).
	#[error("query error: {0}")]
	QueryError(#[from] QueryError),
	/// Malformed or unsupported request; the message is sent back verbatim
	/// as the 400 response body.
	#[error("{0}")]
	BadRequest(&'static str),
	/// The named resource (e.g. "library", "object", "file") does not exist
	/// (returned as 404).
	#[error("resource '{0}' not found")]
	NotFound(&'static str),
}
impl From<HandleCustomUriError> for Response<Vec<u8>> {
	/// Converts an error into the plain-text HTTP response sent back to the
	/// webview: internal failures (`Http`, `Io`, `QueryError`) map to 500,
	/// `BadRequest` to 400 with the message as the body, and `NotFound` to 404.
	fn from(value: HandleCustomUriError) -> Self {
		// Log server-side failures and select the status code + body for each
		// case, then build the response once rather than repeating the builder
		// chain in every arm.
		let (status, body) = match value {
			HandleCustomUriError::Http(err) => {
				error!("Error creating http request/response: {}", err);
				(
					StatusCode::INTERNAL_SERVER_ERROR,
					b"Internal Server Error".to_vec(),
				)
			}
			HandleCustomUriError::Io(err) => {
				error!("IO error: {}", err);
				(
					StatusCode::INTERNAL_SERVER_ERROR,
					b"Internal Server Error".to_vec(),
				)
			}
			HandleCustomUriError::QueryError(err) => {
				error!("Query error: {}", err);
				(
					StatusCode::INTERNAL_SERVER_ERROR,
					b"Internal Server Error".to_vec(),
				)
			}
			HandleCustomUriError::BadRequest(msg) => {
				error!("Bad request: {}", msg);
				(StatusCode::BAD_REQUEST, msg.as_bytes().to_vec())
			}
			HandleCustomUriError::NotFound(resource) => (
				StatusCode::NOT_FOUND,
				format!("Resource '{resource}' not found").into_bytes(),
			),
		};
		Response::builder()
			.header("Content-Type", "text/plain")
			.status(status)
			.body(body)
			// This can only fail on an invalid header or status code, all of
			// which are hardcoded above.
			.expect("internal error building hardcoded HTTP error response")
	}
}

View file

@ -7,15 +7,12 @@ use util::secure_temp_keystore::SecureTempKeystore;
use std::{path::Path, sync::Arc};
use thiserror::Error;
use tokio::{
fs::{self, File},
io::AsyncReadExt,
sync::broadcast,
};
use tokio::{fs, sync::broadcast};
use tracing::{error, info};
use tracing_subscriber::{prelude::*, EnvFilter};
pub mod api;
pub mod custom_uri;
pub(crate) mod job;
pub(crate) mod library;
pub(crate) mod location;
@ -187,50 +184,6 @@ impl Node {
}
}
// Note: this system doesn't use chunked encoding which could prove a problem with large files but I can't see an easy way to do chunked encoding with Tauri custom URIs.
pub async fn handle_custom_uri(
&self,
path: Vec<&str>,
) -> (
u16, /* Status Code */
&str, /* Content-Type */
Vec<u8>, /* Body */
) {
match path.first().copied() {
Some("thumbnail") => {
if path.len() != 2 {
return (
400,
"text/html",
b"Bad Request: Invalid number of parameters".to_vec(),
);
}
let filename = Path::new(&self.config.data_directory())
.join("thumbnails")
.join(path[1] /* file_cas_id */)
.with_extension("webp");
match File::open(&filename).await {
Ok(mut file) => {
let mut buf = match fs::metadata(&filename).await {
Ok(metadata) => Vec::with_capacity(metadata.len() as usize),
Err(_) => Vec::new(),
};
file.read_to_end(&mut buf).await.unwrap();
(200, "image/webp", buf)
}
Err(_) => (404, "text/html", b"File Not Found".to_vec()),
}
}
_ => (
400,
"text/html",
b"Bad Request: Invalid operation!".to_vec(),
),
}
}
pub async fn shutdown(&self) {
info!("Spacedrive shutting down...");
self.jobs.pause().await;

View file

@ -1,6 +1,6 @@
use std::{
fs::File,
io::{BufReader, Seek, SeekFrom},
io::{BufReader, Seek},
path::PathBuf,
};
@ -35,7 +35,7 @@ impl LibraryConfig {
Self::migrate_config(base_config.version, file_dir)?;
file.seek(SeekFrom::Start(0))?;
file.rewind()?;
Ok(serde_json::from_reader(BufReader::new(&mut file))?)
}

View file

@ -2,7 +2,7 @@ use rspc::Type;
use serde::{Deserialize, Serialize};
use std::{
fs::File,
io::{self, BufReader, Seek, SeekFrom, Write},
io::{self, BufReader, Seek, Write},
path::{Path, PathBuf},
sync::Arc,
};
@ -66,7 +66,7 @@ impl NodeConfig {
// SAFETY: This is just for display purposes so it doesn't matter if it's lossy
Ok(hostname) => hostname.to_string_lossy().into_owned(),
Err(err) => {
eprintln!("Falling back to default node name as an error occurred getting your systems hostname: '{}'", err);
eprintln!("Falling back to default node name as an error occurred getting your systems hostname: '{err}'");
"my-spacedrive".into()
}
},
@ -123,7 +123,7 @@ impl NodeConfigManager {
Self::migrate_config(base_config.version, path)?;
file.seek(SeekFrom::Start(0))?;
file.rewind()?;
Ok(serde_json::from_reader(BufReader::new(&mut file))?)
}
false => {

View file

@ -3,6 +3,7 @@ use crate::job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, Wo
use std::{hash::Hash, path::PathBuf};
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
use specta::Type;
use tokio::{fs::OpenOptions, io::AsyncWriteExt};
use tracing::{trace, warn};
@ -11,10 +12,13 @@ use super::{context_menu_fs_info, FsInfo};
pub struct FileEraserJob {}
#[serde_as]
#[derive(Serialize, Deserialize, Hash, Type)]
pub struct FileEraserJobInit {
pub location_id: i32,
pub path_id: i32,
#[specta(type = String)]
#[serde_as(as = "DisplayFromStr")]
pub passes: usize,
}

View file

@ -2,15 +2,21 @@ use crate::{library::LibraryContext, prisma::volume::*};
use rspc::Type;
use serde::{Deserialize, Serialize};
use serde_with::{serde_as, DisplayFromStr};
use std::process::Command;
use sysinfo::{DiskExt, System, SystemExt};
use thiserror::Error;
#[serde_as]
#[derive(Serialize, Deserialize, Debug, Default, Clone, Type)]
pub struct Volume {
pub name: String,
pub mount_point: String,
#[specta(type = String)]
#[serde_as(as = "DisplayFromStr")]
pub total_capacity: u64,
#[specta(type = String)]
#[serde_as(as = "DisplayFromStr")]
pub available_capacity: u64,
pub is_removable: bool,
pub disk_type: Option<String>,

View file

@ -7,6 +7,10 @@ description = "A library to handle cryptographic functions within Spacedrive"
edition = "2021"
rust-version = "1.67.0"
[features]
rspc = ["dep:rspc"]
serde = ["dep:serde", "dep:serde_json", "dep:serde-big-array", "uuid/serde"]
[dependencies]
# rng
rand = "0.8.5"
@ -41,7 +45,6 @@ dashmap = "5.4.0"
# optional, for support with rspc
rspc = { workspace = true, features = ["uuid"], optional = true }
specta = { workspace = true, optional = true }
# for asynchronous crypto
tokio = { workspace = true, features = ["io-util", "rt-multi-thread", "sync"] }
@ -62,14 +65,10 @@ tokio = { workspace = true, features = [
"macros",
] } # features needed for examples
[features]
rspc = ["dep:rspc", "dep:specta"]
serde = ["dep:serde", "dep:serde_json", "dep:serde-big-array", "uuid/serde"]
# [[bench]]
# name = "aes-256-gcm"
# path = "benches/aes-256-gcm.rs"
# harness = false
[[bench]]
name = "aes-256-gcm"
path = "benches/aes-256-gcm.rs"
harness = false
# [[bench]]
# name = "xchacha20-poly1305"

View file

@ -25,7 +25,7 @@ use tokio::io::{AsyncReadExt, AsyncWriteExt};
derive(serde::Serialize),
derive(serde::Deserialize)
)]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub enum Algorithm {
XChaCha20Poly1305,
Aes256Gcm,

View file

@ -30,7 +30,7 @@ use balloon_hash::Balloon;
derive(serde::Serialize),
derive(serde::Deserialize)
)]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub enum Params {
Standard,
Hardened,
@ -45,7 +45,7 @@ pub enum Params {
derive(serde::Deserialize),
serde(tag = "name", content = "params")
)]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub enum HashingAlgorithm {
Argon2id(Params),
BalloonBlake3(Params),

View file

@ -62,7 +62,7 @@ use super::{
/// This is a stored key, and can be freely written to Prisma/another database.
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub struct StoredKey {
pub uuid: Uuid, // uuid for identification. shared with mounted keys
pub version: StoredKeyVersion,
@ -81,7 +81,7 @@ pub struct StoredKey {
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub enum StoredKeyType {
User,
Root,
@ -89,7 +89,7 @@ pub enum StoredKeyType {
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub enum StoredKeyVersion {
V1,
}

View file

@ -1,6 +1,6 @@
//! This is Spacedrive's Apple OS keyring integration. It has no strict dependencies.
//!
//! This has been tested on MacOS, but should work just the same for iOS (according to the `security_framework` documentation)
//! This has been tested on macOS, but should work just the same for iOS (according to the `security_framework` documentation)
use super::{Identifier, Keyring};
use crate::{primitives::types::SecretKeyString, Error, Protected, Result};
@ -13,19 +13,19 @@ pub struct AppleKeyring;
impl Keyring for AppleKeyring {
fn insert(&self, identifier: Identifier, value: SecretKeyString) -> Result<()> {
set_generic_password(
&identifier.application,
identifier.application,
&identifier.to_apple_account(),
value.expose().as_bytes(),
)
.map_err(Error::AppleKeyringError)
}
fn retrieve(&self, identifier: Identifier) -> Result<Protected<Vec<u8>>> {
get_generic_password(&identifier.application, &identifier.to_apple_account())
get_generic_password(identifier.application, &identifier.to_apple_account())
.map(Protected::new)
.map_err(Error::AppleKeyringError)
}
fn delete(&self, identifier: Identifier) -> Result<()> {
delete_generic_password(&identifier.application, &identifier.to_apple_account())
delete_generic_password(identifier.application, &identifier.to_apple_account())
.map_err(Error::AppleKeyringError)
}
}

View file

@ -6,7 +6,7 @@ use crate::{crypto::stream::Algorithm, keys::hashing::HashingAlgorithm, Error, P
#[derive(Clone, Copy, Eq, PartialEq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub enum Nonce {
XChaCha20Poly1305([u8; 20]),
Aes256Gcm([u8; 8]),
@ -220,7 +220,7 @@ use serde_big_array::BigArray;
use super::{to_array, ENCRYPTED_KEY_LEN, KEY_LEN, SALT_LEN, SECRET_KEY_LEN};
#[derive(Clone, PartialEq, Eq)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub struct EncryptedKey(
#[cfg_attr(feature = "serde", serde(with = "BigArray"))] // salt used for file data
pub [u8; ENCRYPTED_KEY_LEN],
@ -244,7 +244,7 @@ impl TryFrom<Vec<u8>> for EncryptedKey {
#[derive(Clone, PartialEq, Eq, Copy)]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub struct Salt(pub [u8; SALT_LEN]);
impl Salt {
@ -274,7 +274,7 @@ impl TryFrom<Vec<u8>> for Salt {
#[derive(Clone)]
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
#[cfg_attr(feature = "rspc", derive(specta::Type))]
#[cfg_attr(feature = "rspc", derive(rspc::Type))]
pub struct OnboardingConfig {
pub password: Protected<String>,
pub algorithm: Algorithm,

View file

@ -97,12 +97,16 @@ where
}
}
#[cfg(feature = "rspc")]
use rspc::internal::specta;
#[cfg(feature = "rspc")]
impl<T> specta::Type for Protected<T>
where
T: specta::Type + Zeroize,
{
const NAME: &'static str = T::NAME;
const SID: specta::TypeSid = specta::sid!();
const IMPL_LOCATION: specta::ImplLocation = specta::impl_location!();
fn inline(opts: specta::DefOpts, generics: &[specta::DataType]) -> specta::DataType {
T::inline(opts, generics)
@ -112,7 +116,7 @@ where
T::reference(opts, generics)
}
fn definition(opts: specta::DefOpts) -> specta::DataType {
fn definition(opts: specta::DefOpts) -> specta::DataTypeExt {
T::definition(opts)
}
}

View file

@ -4,7 +4,7 @@ use crate::extensions::{CodeExtension, Extension, VideoExtension};
use std::{ffi::OsStr, io::SeekFrom, path::Path};
use tokio::{
fs::{self, File},
fs::File,
io::{AsyncReadExt, AsyncSeekExt},
};

View file

@ -104,7 +104,7 @@ impl<TP2PManager: P2PManager> Mdns<TP2PManager> {
let service_info = ServiceInfo::new(
&self.service_type,
peer_id_str,
&format!("{}.", peer_id_str),
&format!("{peer_id_str}."),
&(self
.nm
.lan_addrs

View file

@ -27,7 +27,7 @@ impl PeerId {
let peer_id = digest(&ring::digest::SHA1_FOR_LEGACY_USE_ONLY, &cert.0)
.as_ref()
.iter()
.map(|b| format!("{:02x}", b))
.map(|b| format!("{b:02x}"))
.collect();
Self(peer_id)

View file

@ -8,6 +8,7 @@ pub enum AttributeFieldValue<'a> {
List(Vec<&'a str>),
}
#[allow(unused)]
impl AttributeFieldValue<'_> {
pub fn as_single(&self) -> Option<&str> {
match self {
@ -44,10 +45,6 @@ pub fn model_attributes(model: &dml::Model) -> Vec<Attribute> {
model
.documentation
.as_ref()
.map(|docs| {
docs.lines()
.flat_map(|line| Attribute::parse(line))
.collect()
})
.map(|docs| docs.lines().flat_map(Attribute::parse).collect())
.unwrap_or_default()
}

View file

@ -13,6 +13,7 @@ struct SDSyncGenerator {}
type FieldVec<'a> = Vec<&'a dml::Field>;
#[derive(Debug)]
#[allow(unused)]
enum ModelSyncType<'a> {
Local {
id: FieldVec<'a>,
@ -79,7 +80,7 @@ impl PrismaGenerator for SDSyncGenerator {
let model_modules = args.dml.models().map(|model| {
let model_name_snake = snake_ident(&model.name);
let attributes = model_attributes(&model);
let attributes = model_attributes(model);
let sync_id = attributes
.iter()

View file

@ -8,7 +8,7 @@ publish = false
[dependencies]
serde_json = "1.0.85"
serde = { version = "1.0.145", features = ["derive"] }
axum = "0.5.16"
axum = "0.6.4"
rspc = { workspace = true, features = ["axum"] }
tokio = { workspace = true, features = ["full"] }
prisma-client-rust = { workspace = true }

View file

@ -75,7 +75,7 @@ pub enum CRDTOperationType {
Owned(OwnedOperation),
}
#[derive(Serialize, Deserialize, Clone, Type)]
#[derive(Serialize, Deserialize, Clone)]
pub struct CRDTOperation {
pub node: Uuid,
pub timestamp: NTP64,

View file

@ -4,36 +4,31 @@
export type Procedures = {
queries:
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number, pub_id: Array<number>, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: Array<FilePath>, media_data: MediaData | null } | null } |
{ key: "jobs.getHistory", input: LibraryArgs<null>, result: Array<JobReport> } |
{ key: "jobs.getRunning", input: LibraryArgs<null>, result: Array<JobReport> } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number, pub_id: number[], name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: FilePath[], media_data: MediaData | null } | null } |
{ key: "jobs.getHistory", input: LibraryArgs<null>, result: JobReport[] } |
{ key: "jobs.getRunning", input: LibraryArgs<null>, result: JobReport[] } |
{ key: "jobs.isRunning", input: LibraryArgs<null>, result: boolean } |
{ key: "keys.getDefault", input: LibraryArgs<null>, result: string | null } |
{ key: "keys.getKey", input: LibraryArgs<string>, result: string } |
{ key: "keys.getSecretKey", input: LibraryArgs<null>, result: string | null } |
{ key: "keys.isKeyManagerUnlocking", input: LibraryArgs<null>, result: boolean | null } |
{ key: "keys.isUnlocked", input: LibraryArgs<null>, result: boolean } |
{ key: "keys.list", input: LibraryArgs<null>, result: Array<StoredKey> } |
{ key: "keys.listMounted", input: LibraryArgs<null>, result: Array<string> } |
{ key: "keys.list", input: LibraryArgs<null>, result: StoredKey[] } |
{ key: "keys.listMounted", input: LibraryArgs<null>, result: string[] } |
{ key: "library.getStatistics", input: LibraryArgs<null>, result: Statistics } |
{ key: "library.list", input: never, result: Array<LibraryConfigWrapped> } |
{ key: "locations.getById", input: LibraryArgs<number>, result: { id: number, pub_id: Array<number>, node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, indexer_rules: Array<IndexerRulesInLocation> } | null } |
{ key: "library.list", input: never, result: LibraryConfigWrapped[] } |
{ key: "locations.getById", input: LibraryArgs<number>, result: { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, indexer_rules: IndexerRulesInLocation[] } | null } |
{ key: "locations.getExplorerData", input: LibraryArgs<LocationExplorerArgs>, result: ExplorerData } |
{ key: "locations.indexer_rules.get", input: LibraryArgs<number>, result: IndexerRule } |
{ key: "locations.indexer_rules.list", input: LibraryArgs<null>, result: Array<IndexerRule> } |
{ key: "locations.indexer_rules.listForLocation", input: LibraryArgs<number>, result: Array<IndexerRule> } |
{ key: "locations.list", input: LibraryArgs<null>, result: Array<{ id: number, pub_id: Array<number>, node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, node: Node }> } |
{ key: "locations.indexer_rules.list", input: LibraryArgs<null>, result: IndexerRule[] } |
{ key: "locations.indexer_rules.listForLocation", input: LibraryArgs<number>, result: IndexerRule[] } |
{ key: "locations.list", input: LibraryArgs<null>, result: { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string, node: Node }[] } |
{ key: "nodeState", input: never, result: NodeState } |
{ key: "normi.composite", input: never, result: NormalisedCompositeId } |
{ key: "normi.org", input: never, result: NormalisedOrganisation } |
{ key: "normi.user", input: never, result: NormalisedUser } |
{ key: "normi.userSync", input: never, result: NormalisedUser } |
{ key: "normi.version", input: never, result: string } |
{ key: "tags.get", input: LibraryArgs<number>, result: Tag | null } |
{ key: "tags.getExplorerData", input: LibraryArgs<number>, result: ExplorerData } |
{ key: "tags.getForObject", input: LibraryArgs<number>, result: Array<Tag> } |
{ key: "tags.list", input: LibraryArgs<null>, result: Array<Tag> } |
{ key: "volumes.list", input: never, result: Array<Volume> },
{ key: "tags.getForObject", input: LibraryArgs<number>, result: Tag[] } |
{ key: "tags.list", input: LibraryArgs<null>, result: Tag[] } |
{ key: "volumes.list", input: never, result: Volume[] },
mutations:
{ key: "files.copyFiles", input: LibraryArgs<FileCopierJobInit>, result: null } |
{ key: "files.cutFiles", input: LibraryArgs<FileCutterJobInit>, result: null } |
@ -82,141 +77,183 @@ export type Procedures = {
subscriptions:
{ key: "invalidateQuery", input: never, result: InvalidateOperationEvent } |
{ key: "jobs.newThumbnail", input: LibraryArgs<null>, result: string } |
{ key: "locations.online", input: never, result: Array<Array<number>> }
{ key: "locations.online", input: never, result: number[][] }
};
/**
* These are all possible algorithms that can be used for encryption and decryption
*/
export type Algorithm = "XChaCha20Poly1305" | "Aes256Gcm"
export type AuthOption = { type: "Password", value: string } | { type: "TokenizedPassword", value: string }
export interface AutomountUpdateArgs { uuid: string, status: boolean }
export type AutomountUpdateArgs = { uuid: string, status: boolean }
export interface BuildInfo { version: string, commit: string }
export type BuildInfo = { version: string, commit: string }
export interface ConfigMetadata { version: string | null }
/**
* ConfigMetadata is a part of node configuration that is loaded before the main configuration and contains information about the schema of the config.
* This allows us to migrate breaking changes to the config format between Spacedrive releases.
*/
export type ConfigMetadata = { version: string | null }
export interface CreateLibraryArgs { name: string, auth: AuthOption, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm }
export type CreateLibraryArgs = { name: string, auth: AuthOption, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm }
export interface EditLibraryArgs { id: string, name: string | null, description: string | null }
export type EditLibraryArgs = { id: string, name: string | null, description: string | null }
export type EncryptedKey = Array<number>
export type EncryptedKey = number[]
export type ExplorerContext = { type: "Location" } & Location | { type: "Tag" } & Tag
export type ExplorerContext = ({ type: "Location" } & Location) | ({ type: "Tag" } & Tag)
export interface ExplorerData { context: ExplorerContext, items: Array<ExplorerItem> }
export type ExplorerData = { context: ExplorerContext, items: ExplorerItem[] }
export type ExplorerItem = { type: "Path", has_thumbnail: boolean, item: FilePathWithObject } | { type: "Object", has_thumbnail: boolean, item: ObjectWithFilePaths }
export type ExplorerItem = { type: "Path", has_thumbnail: boolean, item: file_path_with_object } | { type: "Object", has_thumbnail: boolean, item: object_with_file_paths }
export interface FileCopierJobInit { source_location_id: number, source_path_id: number, target_location_id: number, target_path: string, target_file_name_suffix: string | null }
export type FileCopierJobInit = { source_location_id: number, source_path_id: number, target_location_id: number, target_path: string, target_file_name_suffix: string | null }
export interface FileCutterJobInit { source_location_id: number, source_path_id: number, target_location_id: number, target_path: string }
export type FileCutterJobInit = { source_location_id: number, source_path_id: number, target_location_id: number, target_path: string }
export interface FileDecryptorJobInit { location_id: number, path_id: number, mount_associated_key: boolean, output_path: string | null, password: string | null, save_to_library: boolean | null }
export type FileDecryptorJobInit = { location_id: number, path_id: number, mount_associated_key: boolean, output_path: string | null, password: string | null, save_to_library: boolean | null }
export interface FileDeleterJobInit { location_id: number, path_id: number }
export type FileDeleterJobInit = { location_id: number, path_id: number }
export interface FileEncryptorJobInit { location_id: number, path_id: number, key_uuid: string, algorithm: Algorithm, metadata: boolean, preview_media: boolean, output_path: string | null }
export type FileEncryptorJobInit = { location_id: number, path_id: number, key_uuid: string, algorithm: Algorithm, metadata: boolean, preview_media: boolean, output_path: string | null }
export interface FileEraserJobInit { location_id: number, path_id: number, passes: number }
export type FileEraserJobInit = { location_id: number, path_id: number, passes: string }
export interface FilePath { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string }
export type FilePath = { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string }
export interface GenerateThumbsForLocationArgs { id: number, path: string }
export type GenerateThumbsForLocationArgs = { id: number, path: string }
export interface GetArgs { id: number }
export type GetArgs = { id: number }
/**
* This defines all available password hashing algorithms.
*/
export type HashingAlgorithm = { name: "Argon2id", params: Params } | { name: "BalloonBlake3", params: Params }
export interface IdentifyUniqueFilesArgs { id: number, path: string }
export type IdentifyUniqueFilesArgs = { id: number, path: string }
export interface IndexerRule { id: number, kind: number, name: string, parameters: Array<number>, date_created: string, date_modified: string }
export type IndexerRule = { id: number, kind: number, name: string, parameters: number[], date_created: string, date_modified: string }
export interface IndexerRuleCreateArgs { kind: RuleKind, name: string, parameters: Array<number> }
/**
* `IndexerRuleCreateArgs` is the argument received from the client using rspc to create a new indexer rule.
* Note that `parameters` field **MUST** be a JSON object serialized to bytes.
*
* In case of `RuleKind::AcceptFilesByGlob` or `RuleKind::RejectFilesByGlob`, it will be a
* single string containing a glob pattern.
*
* In case of `RuleKind::AcceptIfChildrenDirectoriesArePresent` or `RuleKind::RejectIfChildrenDirectoriesArePresent` the
* `parameters` field must be a vector of strings containing the names of the directories.
*/
export type IndexerRuleCreateArgs = { kind: RuleKind, name: string, parameters: number[] }
export interface IndexerRulesInLocation { date_created: string, location_id: number, indexer_rule_id: number }
export type IndexerRulesInLocation = { date_created: string, location_id: number, indexer_rule_id: number }
export interface InvalidateOperationEvent { key: string, arg: any }
export type InvalidateOperationEvent = { key: string, arg: any }
export interface JobReport { id: string, name: string, data: Array<number> | null, metadata: any | null, date_created: string, date_modified: string, status: JobStatus, task_count: number, completed_task_count: number, message: string, seconds_elapsed: number }
export type JobReport = { id: string, name: string, data: number[] | null, metadata: any | null, date_created: string, date_modified: string, status: JobStatus, task_count: number, completed_task_count: number, message: string, seconds_elapsed: number }
export type JobStatus = "Queued" | "Running" | "Completed" | "Canceled" | "Failed" | "Paused"
export interface KeyAddArgs { algorithm: Algorithm, hashing_algorithm: HashingAlgorithm, key: string, library_sync: boolean, automount: boolean }
export type KeyAddArgs = { algorithm: Algorithm, hashing_algorithm: HashingAlgorithm, key: string, library_sync: boolean, automount: boolean }
export interface LibraryArgs<T> { library_id: string, arg: T }
/**
* Can wrap a query argument to require it to contain a `library_id` and provide helpers for working with libraries.
*/
export type LibraryArgs<T> = { library_id: string, arg: T }
export interface LibraryConfig { version: string | null, name: string, description: string }
/**
* LibraryConfig holds the configuration for a specific library. This is stored as a '{uuid}.sdlibrary' file.
*/
export type LibraryConfig = ({ version: string | null }) & { name: string, description: string }
export interface LibraryConfigWrapped { uuid: string, config: LibraryConfig }
export type LibraryConfigWrapped = { uuid: string, config: LibraryConfig }
export interface Location { id: number, pub_id: Array<number>, node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string }
export type Location = { id: number, pub_id: number[], node_id: number, name: string | null, local_path: string | null, total_capacity: number | null, available_capacity: number | null, is_archived: boolean, generate_preview_media: boolean, sync_preview_media: boolean, hidden: boolean, date_created: string }
export interface LocationCreateArgs { path: string, indexer_rules_ids: Array<number> }
/**
* `LocationCreateArgs` is the argument received from the client using `rspc` to create a new location.
* It has the actual path and a vector of indexer rules ids, to create many-to-many relationships
* between the location and indexer rules.
*/
export type LocationCreateArgs = { path: string, indexer_rules_ids: number[] }
export interface LocationExplorerArgs { location_id: number, path: string, limit: number, cursor: string | null }
export type LocationExplorerArgs = { location_id: number, path: string, limit: number, cursor: string | null }
export interface LocationUpdateArgs { id: number, name: string | null, generate_preview_media: boolean | null, sync_preview_media: boolean | null, hidden: boolean | null, indexer_rules_ids: Array<number> }
/**
* `LocationUpdateArgs` is the argument received from the client using `rspc` to update a location.
* It contains the id of the location to be updated, possible a name to change the current location's name
* and a vector of indexer rules ids to add or remove from the location.
*
* It is important to note that only the indexer rule ids in this vector will be used from now on.
* Old rules that aren't in this vector will be purged.
*/
export type LocationUpdateArgs = { id: number, name: string | null, generate_preview_media: boolean | null, sync_preview_media: boolean | null, hidden: boolean | null, indexer_rules_ids: number[] }
export interface MasterPasswordChangeArgs { password: string, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm }
export type MasterPasswordChangeArgs = { password: string, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm }
export interface MediaData { id: number, pixel_width: number | null, pixel_height: number | null, longitude: number | null, latitude: number | null, fps: number | null, capture_device_make: string | null, capture_device_model: string | null, capture_device_software: string | null, duration_seconds: number | null, codecs: string | null, streams: number | null }
export type MediaData = { id: number, pixel_width: number | null, pixel_height: number | null, longitude: number | null, latitude: number | null, fps: number | null, capture_device_make: string | null, capture_device_model: string | null, capture_device_software: string | null, duration_seconds: number | null, codecs: string | null, streams: number | null }
export interface Node { id: number, pub_id: Array<number>, name: string, platform: number, version: string | null, last_seen: string, timezone: string | null, date_created: string }
export type Node = { id: number, pub_id: number[], name: string, platform: number, version: string | null, last_seen: string, timezone: string | null, date_created: string }
export interface NodeConfig { version: string | null, id: string, name: string, p2p_port: number | null }
/**
* NodeConfig is the configuration for a node. This is shared between all libraries and is stored in a JSON file on disk.
*/
export type NodeConfig = ({ version: string | null }) & { id: string, name: string, p2p_port: number | null }
export interface NodeState { version: string | null, id: string, name: string, p2p_port: number | null, data_path: string }
export type NodeState = (({ version: string | null }) & { id: string, name: string, p2p_port: number | null }) & { data_path: string }
export type Nonce = { XChaCha20Poly1305: Array<number> } | { Aes256Gcm: Array<number> }
export type Nonce = { XChaCha20Poly1305: number[] } | { Aes256Gcm: number[] }
export interface NormalisedCompositeId { $type: string, $id: any, org_id: string, user_id: string }
export type Object = { id: number, pub_id: number[], name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string }
export interface NormalisedOrganisation { $type: string, $id: any, id: string, name: string, users: NormalizedVec<NormalisedUser>, owner: NormalisedUser, non_normalised_data: Array<null> }
export interface NormalisedUser { $type: string, $id: any, id: string, name: string }
export interface NormalizedVec<T> { $type: string, edges: Array<T> }
export interface Object { id: number, pub_id: Array<number>, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string }
export interface ObjectValidatorArgs { id: number, path: string }
export type ObjectValidatorArgs = { id: number, path: string }
/**
* These parameters define the password-hashing level.
*
* The harder the parameter, the longer the password will take to hash.
*/
export type Params = "Standard" | "Hardened" | "Paranoid"
export interface RestoreBackupArgs { password: string, secret_key: string, path: string }
export type RestoreBackupArgs = { password: string, secret_key: string, path: string }
export type RuleKind = "AcceptFilesByGlob" | "RejectFilesByGlob" | "AcceptIfChildrenDirectoriesArePresent" | "RejectIfChildrenDirectoriesArePresent"
export type Salt = Array<number>
export type Salt = number[]
export interface SetFavoriteArgs { id: number, favorite: boolean }
export type SetFavoriteArgs = { id: number, favorite: boolean }
export interface SetNoteArgs { id: number, note: string | null }
export type SetNoteArgs = { id: number, note: string | null }
export interface Statistics { id: number, date_captured: string, total_object_count: number, library_db_size: string, total_bytes_used: string, total_bytes_capacity: string, total_unique_bytes: string, total_bytes_free: string, preview_media_bytes: string }
export type Statistics = { id: number, date_captured: string, total_object_count: number, library_db_size: string, total_bytes_used: string, total_bytes_capacity: string, total_unique_bytes: string, total_bytes_free: string, preview_media_bytes: string }
export interface StoredKey { uuid: string, version: StoredKeyVersion, key_type: StoredKeyType, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm, content_salt: Salt, master_key: EncryptedKey, master_key_nonce: Nonce, key_nonce: Nonce, key: Array<number>, salt: Salt, memory_only: boolean, automount: boolean }
/**
* This is a stored key, and can be freely written to Prisma/another database.
*/
export type StoredKey = { uuid: string, version: StoredKeyVersion, key_type: StoredKeyType, algorithm: Algorithm, hashing_algorithm: HashingAlgorithm, content_salt: Salt, master_key: EncryptedKey, master_key_nonce: Nonce, key_nonce: Nonce, key: number[], salt: Salt, memory_only: boolean, automount: boolean }
export type StoredKeyType = "User" | "Root"
export type StoredKeyVersion = "V1"
export interface Tag { id: number, pub_id: Array<number>, name: string | null, color: string | null, total_objects: number | null, redundancy_goal: number | null, date_created: string, date_modified: string }
export type Tag = { id: number, pub_id: number[], name: string | null, color: string | null, total_objects: number | null, redundancy_goal: number | null, date_created: string, date_modified: string }
export interface TagAssignArgs { object_id: number, tag_id: number, unassign: boolean }
export type TagAssignArgs = { object_id: number, tag_id: number, unassign: boolean }
export interface TagCreateArgs { name: string, color: string }
export type TagCreateArgs = { name: string, color: string }
export interface TagUpdateArgs { id: number, name: string | null, color: string | null }
export type TagUpdateArgs = { id: number, name: string | null, color: string | null }
export interface TokenizeKeyArgs { secret_key: string }
export type TokenizeKeyArgs = { secret_key: string }
export interface TokenizeResponse { token: string }
export type TokenizeResponse = { token: string }
export interface UnlockKeyManagerArgs { password: string, secret_key: string }
export type UnlockKeyManagerArgs = { password: string, secret_key: string }
export interface Volume { name: string, mount_point: string, total_capacity: bigint, available_capacity: bigint, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean }
export type Volume = { name: string, mount_point: string, total_capacity: string, available_capacity: string, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean }
export interface FilePathWithObject { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string, object: Object | null }
export type file_path_with_object = { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string, object: Object | null }
export interface ObjectWithFilePaths { id: number, pub_id: Array<number>, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: Array<FilePath> }
export type object_with_file_paths = { id: number, pub_id: number[], name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: FilePath[] }

View file

@ -29,7 +29,7 @@ export const EraseFileDialog = (props: EraseDialogProps) => {
eraseFile.mutateAsync({
location_id: props.location_id,
path_id: props.path_id,
passes: data.passes
passes: data.passes.toString()
})
);

View file

@ -17,10 +17,16 @@ import {
TrashSimple
} from 'phosphor-react';
import { PropsWithChildren, useMemo } from 'react';
import { ExplorerItem, useLibraryMutation, useLibraryQuery } from '@sd/client';
import {
ExplorerItem,
getLibraryIdRaw,
useDebugState,
useLibraryMutation,
useLibraryQuery
} from '@sd/client';
import { ContextMenu as CM } from '@sd/ui';
import { dialogManager } from '@sd/ui';
import { CutCopyType, getExplorerStore, useExplorerStore } from '~/hooks/useExplorerStore';
import { getExplorerStore, useExplorerStore } from '~/hooks/useExplorerStore';
import { useOperatingSystem } from '~/hooks/useOperatingSystem';
import { useExplorerParams } from '~/screens/LocationExplorer';
import { usePlatform } from '~/util/Platform';
@ -144,7 +150,7 @@ export function ExplorerContextMenu(props: PropsWithChildren) {
keybind="⌘V"
hidden={!store.cutCopyState.active}
onClick={(e) => {
if (store.cutCopyState.actionType == CutCopyType.Copy) {
if (store.cutCopyState.actionType == 'Copy') {
store.locationId &&
copyFiles.mutate({
source_location_id: store.cutCopyState.sourceLocationId,
@ -209,6 +215,7 @@ export interface FileItemContextMenuProps extends PropsWithChildren {
export function FileItemContextMenu({ data, ...props }: FileItemContextMenuProps) {
const store = useExplorerStore();
const params = useExplorerParams();
const platform = usePlatform();
const objectData = data ? (isObject(data) ? data.item : data.item.object) : null;
const isUnlockedQuery = useLibraryQuery(['keys.isUnlocked']);
@ -224,7 +231,19 @@ export function FileItemContextMenu({ data, ...props }: FileItemContextMenuProps
return (
<div className="relative">
<CM.ContextMenu trigger={props.children}>
<CM.Item label="Open" keybind="⌘O" />
<CM.Item
label="Open"
keybind="⌘O"
onClick={(e) => {
// TODO: Replace this with a proper UI
window.location.href = platform.getFileUrl(
getLibraryIdRaw()!,
store.locationId!,
data.item.id
);
}}
icon={Copy}
/>
<CM.Item label="Open with..." />
<CM.Separator />
@ -256,7 +275,7 @@ export function FileItemContextMenu({ data, ...props }: FileItemContextMenuProps
getExplorerStore().cutCopyState = {
sourceLocationId: store.locationId!,
sourcePathId: data.item.id,
actionType: CutCopyType.Cut,
actionType: 'Cut',
active: true
};
}}
@ -270,7 +289,7 @@ export function FileItemContextMenu({ data, ...props }: FileItemContextMenuProps
getExplorerStore().cutCopyState = {
sourceLocationId: store.locationId!,
sourcePathId: data.item.id,
actionType: CutCopyType.Copy,
actionType: 'Copy',
active: true
};
}}

View file

@ -39,7 +39,7 @@ import {
tw
} from '@sd/ui';
import { useOperatingSystem } from '~/hooks/useOperatingSystem';
import { usePlatform } from '~/util/Platform';
import { OperatingSystem, usePlatform } from '~/util/Platform';
import AddLocationDialog from '../dialog/AddLocationDialog';
import CreateLibraryDialog from '../dialog/CreateLibraryDialog';
import { Folder } from '../icons/Folder';
@ -449,16 +449,17 @@ const Icon = ({ component: Icon, ...props }: any) => (
);
// cute little helper to decrease code clutter
const macOnly = (platform: string | undefined, classnames: string) =>
const macOnly = (platform: OperatingSystem | undefined, classnames: string) =>
platform === 'macOS' ? classnames : '';
function WindowControls() {
const { platform } = usePlatform();
const os = useOperatingSystem();
const showControls = window.location.search.includes('showControls');
if (platform === 'tauri' || showControls) {
return (
<div data-tauri-drag-region className="flex-shrink-0 h-7">
<div data-tauri-drag-region className={clsx('flex-shrink-0', macOnly(os, 'h-7'))}>
{/* We do not provide the onClick handlers for 'MacTrafficLights' because this is only used in demo mode */}
{showControls && <MacTrafficLights className="z-50 absolute top-[13px] left-[13px]" />}
</div>

View file

@ -1,8 +1,7 @@
import { getOnboardingStore, unlockOnboardingScreen, useOnboardingStore } from '@sd/client';
import clsx from 'clsx';
import { useEffect } from 'react';
import { useNavigate } from 'react-router';
import { getOnboardingStore, unlockOnboardingScreen, useOnboardingStore } from '@sd/client';
import { ONBOARDING_SCREENS } from './OnboardingRoot';
import { useCurrentOnboardingScreenKey } from './helpers/screens';

View file

@ -1,10 +1,9 @@
import BloomOne from '@sd/assets/images/bloom-one.png';
import { getOnboardingStore } from '@sd/client';
import { tw } from '@sd/ui';
import clsx from 'clsx';
import { ComponentType, useEffect } from 'react';
import { Outlet, useNavigate } from 'react-router';
import { getOnboardingStore } from '@sd/client';
import { tw } from '@sd/ui';
import { useOperatingSystem } from '../../hooks/useOperatingSystem';
import OnboardingCreatingLibrary from './OnboardingCreatingLibrary';
import OnboardingMasterPassword from './OnboardingMasterPassword';

View file

@ -10,10 +10,7 @@ export enum ExplorerKind {
Space
}
export enum CutCopyType {
Cut,
Copy
}
export type CutCopyType = 'Cut' | 'Copy';
const state = {
locationId: null as number | null,
@ -30,7 +27,7 @@ const state = {
cutCopyState: {
sourceLocationId: 0,
sourcePathId: 0,
actionType: CutCopyType.Cut,
actionType: 'Cut',
active: false
}
};

View file

@ -28,7 +28,7 @@ import { usePlatform } from '~/util/Platform';
interface StatItemProps {
title: string;
bytes: string;
bytes: bigint;
isLoading: boolean;
}
@ -76,9 +76,9 @@ onLibraryChange((newLibraryId) => {
const StatItem: React.FC<StatItemProps> = (props) => {
const { library } = useCurrentLibrary();
const { title, bytes = '0', isLoading } = props;
const { title, bytes = BigInt('0'), isLoading } = props;
const size = byteSize(+bytes);
const size = byteSize(Number(bytes)); // TODO: This BigInt to Number conversion will truncate the number if the number is too large. `byteSize` doesn't support BigInt so we are gonna need to come up with a longer term solution at some point.
const count = useCounter({
name: title,
end: +size.value,
@ -101,7 +101,7 @@ const StatItem: React.FC<StatItemProps> = (props) => {
<div
className={clsx(
'flex flex-col flex-shrink-0 w-32 px-4 py-3 duration-75 transform rounded-md cursor-default ',
!+bytes && 'hidden'
!bytes && 'hidden'
)}
>
<span className="text-sm text-gray-400">{title}</span>
@ -160,7 +160,7 @@ export default function OverviewScreen() {
<StatItem
key={library?.uuid + ' ' + key}
title={StatItemNames[key as keyof Statistics]!}
bytes={value}
bytes={BigInt(value)}
isLoading={platform.demoMode === true ? false : isStatisticsLoading}
/>
);

View file

@ -7,6 +7,7 @@ export type OperatingSystem = 'browser' | 'linux' | 'macOS' | 'windows' | 'unkno
export type Platform = {
platform: 'web' | 'tauri'; // This represents the specific platform implementation
getThumbnailUrlById: (casId: string) => string;
getFileUrl: (libraryId: string, locationLocalId: number, filePathId: number) => string;
openLink: (url: string) => void;
demoMode?: boolean; // TODO: Remove this in favour of demo mode being handled at the React Query level
getOs?(): Promise<OperatingSystem>;

View file

@ -43,8 +43,8 @@ importers:
'@sd/config': workspace:*
'@sd/interface': workspace:*
'@sd/ui': workspace:*
'@tauri-apps/api': 1.1.0
'@tauri-apps/cli': 1.1.1
'@tauri-apps/api': 1.2.0
'@tauri-apps/cli': 1.2.3
'@types/babel-core': ^6.25.7
'@types/react': ^18.0.21
'@types/react-dom': ^18.0.6
@ -59,16 +59,16 @@ importers:
vite-tsconfig-paths: ^4.0.3
dependencies:
'@rspc/client': 0.0.0-main-7c0a67c1
'@rspc/tauri': 0.0.0-main-7c0a67c1_@tauri-apps+api@1.1.0
'@rspc/tauri': 0.0.0-main-7c0a67c1_@tauri-apps+api@1.2.0
'@sd/client': link:../../packages/client
'@sd/interface': link:../../packages/interface
'@sd/ui': link:../../packages/ui
'@tauri-apps/api': 1.1.0
'@tauri-apps/api': 1.2.0
react: 18.2.0
react-dom: 18.2.0_react@18.2.0
devDependencies:
'@sd/config': link:../../packages/config
'@tauri-apps/cli': 1.1.1
'@tauri-apps/cli': 1.2.3
'@types/babel-core': 6.25.7
'@types/react': 18.0.27
'@types/react-dom': 18.0.10
@ -5707,13 +5707,13 @@ packages:
'@tanstack/react-query': 4.22.0
dev: false
/@rspc/tauri/0.0.0-main-7c0a67c1_@tauri-apps+api@1.1.0:
/@rspc/tauri/0.0.0-main-7c0a67c1_@tauri-apps+api@1.2.0:
resolution: {integrity: sha512-GnTAGcVV1FWp4Cs5n3wK0x/etrOTGbUHHq1M2sqLiG2Nfq2ej8bI5e5HTVhDgXD+PCGN38zYV3u8rEYlxNAMpA==}
peerDependencies:
'@tauri-apps/api': ^1.0.2
dependencies:
'@rspc/client': 0.0.0-main-7c0a67c1
'@tauri-apps/api': 1.1.0
'@tauri-apps/api': 1.2.0
dev: false
/@segment/loosely-validate-event/2.0.0:
@ -7616,13 +7616,13 @@ packages:
engines: {node: '>=12'}
dev: false
/@tauri-apps/api/1.1.0:
resolution: {integrity: sha512-n13pIqdPd3KtaMmmAcrU7BTfdMtIlGNnfZD0dNX8L4p8dgmuNyikm6JAA+yCpl9gqq6I8x5cV2Y0muqdgD0cWw==}
engines: {node: '>= 12.22.0', npm: '>= 6.6.0', yarn: '>= 1.19.1'}
/@tauri-apps/api/1.2.0:
resolution: {integrity: sha512-lsI54KI6HGf7VImuf/T9pnoejfgkNoXveP14pVV7XarrQ46rOejIVJLFqHI9sRReJMGdh2YuCoI3cc/yCWCsrw==}
engines: {node: '>= 14.6.0', npm: '>= 6.6.0', yarn: '>= 1.19.1'}
dev: false
/@tauri-apps/cli-darwin-arm64/1.1.1:
resolution: {integrity: sha512-qBG11ig525/qf0f5OQxn0ON3hT8YdpTfpa4Y4kVqBJhdW50R5fadPv6tv5Dpl2TS2X7nWh/zg5mEXYoCK3HZ9w==}
/@tauri-apps/cli-darwin-arm64/1.2.3:
resolution: {integrity: sha512-phJN3fN8FtZZwqXg08bcxfq1+X1JSDglLvRxOxB7VWPq+O5SuB8uLyssjJsu+PIhyZZnIhTGdjhzLSFhSXfLsw==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [darwin]
@ -7630,8 +7630,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-darwin-x64/1.1.1:
resolution: {integrity: sha512-M3dMsp78OdxisbTwAWGvy3jIb3uqThtQcUYVvqOu9LeEOHyldOBFDSht+6PTBpaJLAHFMQK2rmNxiWgigklJaA==}
/@tauri-apps/cli-darwin-x64/1.2.3:
resolution: {integrity: sha512-jFZ/y6z8z6v4yliIbXKBXA7BJgtZVMsITmEXSuD6s5+eCOpDhQxbRkr6CA+FFfr+/r96rWSDSgDenDQuSvPAKw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [darwin]
@ -7639,8 +7639,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-linux-arm-gnueabihf/1.1.1:
resolution: {integrity: sha512-LYlvdAd73cq+yTi6rw7j/DWIvDpeApwgQkIn+HYsNNeFhyFmABU7tmw+pekK3W3nHAkYAJ69Rl4ZdoxdNGKmHg==}
/@tauri-apps/cli-linux-arm-gnueabihf/1.2.3:
resolution: {integrity: sha512-C7h5vqAwXzY0kRGSU00Fj8PudiDWFCiQqqUNI1N+fhCILrzWZB9TPBwdx33ZfXKt/U4+emdIoo/N34v3TiAOmQ==}
engines: {node: '>= 10'}
cpu: [arm]
os: [linux]
@ -7648,8 +7648,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-linux-arm64-gnu/1.1.1:
resolution: {integrity: sha512-o/hbMQIKuFI7cTNpeQBHD/OCNJOBIci78faKms/t6AstLXx0QJuRHDk477Rg6VVy/I3BBKbyATALbmcTq+ti0A==}
/@tauri-apps/cli-linux-arm64-gnu/1.2.3:
resolution: {integrity: sha512-buf1c8sdkuUzVDkGPQpyUdAIIdn5r0UgXU6+H5fGPq/Xzt5K69JzXaeo6fHsZEZghbV0hOK+taKV4J0m30UUMQ==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
@ -7657,8 +7657,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-linux-arm64-musl/1.1.1:
resolution: {integrity: sha512-8Ci4qlDnXIp93XqUrtzFCBDatUzPHpZq7L3bociUbWpvy/bnlzxp1C/C+vwdc4uS1MiAp9v3BFgrU4i0f0Z3QQ==}
/@tauri-apps/cli-linux-arm64-musl/1.2.3:
resolution: {integrity: sha512-x88wPS9W5xAyk392vc4uNHcKBBvCp0wf4H9JFMF9OBwB7vfd59LbQCFcPSu8f0BI7bPrOsyHqspWHuFL8ojQEA==}
engines: {node: '>= 10'}
cpu: [arm64]
os: [linux]
@ -7666,8 +7666,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-linux-x64-gnu/1.1.1:
resolution: {integrity: sha512-ES4Bkx2JAI8+dDNDJswhLS3yqt+yT/4C6UfGOPIHFxcXUh6fe36eUllrTt+HLRS9xTZbYnteJy7ebq2TqMkaxw==}
/@tauri-apps/cli-linux-x64-gnu/1.2.3:
resolution: {integrity: sha512-ZMz1jxEVe0B4/7NJnlPHmwmSIuwiD6ViXKs8F+OWWz2Y4jn5TGxWKFg7DLx5OwQTRvEIZxxT7lXHi5CuTNAxKg==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
@ -7675,8 +7675,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-linux-x64-musl/1.1.1:
resolution: {integrity: sha512-qrN1WOMAaDl+LE8P8iO0+DYlrWNTc9jIu/CsnVY/LImTn79ZPxEkcVBo0UGeKRI7f10TfvkVmLCBLxTz8QhEyA==}
/@tauri-apps/cli-linux-x64-musl/1.2.3:
resolution: {integrity: sha512-B/az59EjJhdbZDzawEVox0LQu2ZHCZlk8rJf85AMIktIUoAZPFbwyiUv7/zjzA/sY6Nb58OSJgaPL2/IBy7E0A==}
engines: {node: '>= 10'}
cpu: [x64]
os: [linux]
@ -7684,8 +7684,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-win32-ia32-msvc/1.1.1:
resolution: {integrity: sha512-vw7VOmrQlywHhFV3pf54udf2FRNj9dg9WP1gL0My55FnB+w+PWS9Ipm871kX5qepmChdnZHKq9fsqE2uTjX//Q==}
/@tauri-apps/cli-win32-ia32-msvc/1.2.3:
resolution: {integrity: sha512-ypdO1OdC5ugNJAKO2m3sb1nsd+0TSvMS9Tr5qN/ZSMvtSduaNwrcZ3D7G/iOIanrqu/Nl8t3LYlgPZGBKlw7Ng==}
engines: {node: '>= 10'}
cpu: [ia32]
os: [win32]
@ -7693,8 +7693,8 @@ packages:
dev: true
optional: true
/@tauri-apps/cli-win32-x64-msvc/1.1.1:
resolution: {integrity: sha512-OukxlLLi3AoCN4ABnqCDTiiC7xJGWukAjrKCIx7wFISrLjNfsrnH7/UOzuopfGpZChSe2c+AamVmcpBfVsEmJA==}
/@tauri-apps/cli-win32-x64-msvc/1.2.3:
resolution: {integrity: sha512-CsbHQ+XhnV/2csOBBDVfH16cdK00gNyNYUW68isedmqcn8j+s0e9cQ1xXIqi+Hue3awp8g3ImYN5KPepf3UExw==}
engines: {node: '>= 10'}
cpu: [x64]
os: [win32]
@ -7702,20 +7702,20 @@ packages:
dev: true
optional: true
/@tauri-apps/cli/1.1.1:
resolution: {integrity: sha512-80kjMEMPBwLYCp0tTKSquy90PHHGGBvZsneNr3B/mWxNsvjzA1C0vOyGJGFrJuT2OmkvrdvuJZ5mch5hL8O1Xg==}
/@tauri-apps/cli/1.2.3:
resolution: {integrity: sha512-erxtXuPhMEGJPBtnhPILD4AjuT81GZsraqpFvXAmEJZ2p8P6t7MVBifCL8LznRknznM3jn90D3M8RNBP3wcXTw==}
engines: {node: '>= 10'}
hasBin: true
optionalDependencies:
'@tauri-apps/cli-darwin-arm64': 1.1.1
'@tauri-apps/cli-darwin-x64': 1.1.1
'@tauri-apps/cli-linux-arm-gnueabihf': 1.1.1
'@tauri-apps/cli-linux-arm64-gnu': 1.1.1
'@tauri-apps/cli-linux-arm64-musl': 1.1.1
'@tauri-apps/cli-linux-x64-gnu': 1.1.1
'@tauri-apps/cli-linux-x64-musl': 1.1.1
'@tauri-apps/cli-win32-ia32-msvc': 1.1.1
'@tauri-apps/cli-win32-x64-msvc': 1.1.1
'@tauri-apps/cli-darwin-arm64': 1.2.3
'@tauri-apps/cli-darwin-x64': 1.2.3
'@tauri-apps/cli-linux-arm-gnueabihf': 1.2.3
'@tauri-apps/cli-linux-arm64-gnu': 1.2.3
'@tauri-apps/cli-linux-arm64-musl': 1.2.3
'@tauri-apps/cli-linux-x64-gnu': 1.2.3
'@tauri-apps/cli-linux-x64-musl': 1.2.3
'@tauri-apps/cli-win32-ia32-msvc': 1.2.3
'@tauri-apps/cli-win32-x64-msvc': 1.2.3
dev: true
/@testing-library/dom/8.20.0: