diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml
index 1c8c5ffe7..158f2d152 100644
--- a/.github/ISSUE_TEMPLATE/bug_report.yml
+++ b/.github/ISSUE_TEMPLATE/bug_report.yml
@@ -1,6 +1,6 @@
name: 🐞 Bug Report
description: Report a bug
-labels:
+labels:
- kind/bug
- status/needs-triage
@@ -43,8 +43,8 @@ body:
id: info
attributes:
label: Platform and versions
- description: "Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if revelant."
- render: shell
+ description: 'Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if relevant.'
+ render: Shell
validations:
required: true
@@ -52,8 +52,8 @@ body:
id: logs
attributes:
label: Stack trace
- render: shell
-
+ render: Shell
+
- type: textarea
id: context
attributes:
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 5ec4697aa..911f5b2bc 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,3 +1,5 @@
+# tell yaml plugin that this is the config file and not a template of its own:
+# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
blank_issues_enabled: false
contact_links:
- name: 📝 Report Typo
@@ -11,4 +13,4 @@ contact_links:
about: Suggest any ideas you have using our discussion forums.
- name: 💬 Discord Chat
url: https://discord.gg/gTaF2Z44f5
- about: Ask questions and talk to other Spacedrive users and the maintainers
\ No newline at end of file
+ about: Ask questions and talk to other Spacedrive users and the maintainers
diff --git a/.github/actions/build-server-image/action.yml b/.github/actions/build-server-image/action.yml
index 6b6f86e38..e4ffc3dce 100644
--- a/.github/actions/build-server-image/action.yml
+++ b/.github/actions/build-server-image/action.yml
@@ -1,4 +1,4 @@
-name: Build Server Image
+name: Build Server Image
description: Builds and publishes the docker image for the Spacedrive server
inputs:
gh_token:
diff --git a/.github/actions/install-ffmpeg-macos/index.js b/.github/actions/install-ffmpeg-macos/index.js
index 700837043..38812de3c 100644
--- a/.github/actions/install-ffmpeg-macos/index.js
+++ b/.github/actions/install-ffmpeg-macos/index.js
@@ -3,6 +3,6 @@ const core = require('@actions/core');
const exec = require('@actions/exec');
const github = require('@actions/github');
-// const folders =
+// const folders =
exec.exec('brew', ['install', 'ffmpeg']);
diff --git a/.github/actions/install-ffmpeg-macos/package.json b/.github/actions/install-ffmpeg-macos/package.json
index 6d0070abe..a752be00f 100644
--- a/.github/actions/install-ffmpeg-macos/package.json
+++ b/.github/actions/install-ffmpeg-macos/package.json
@@ -1,17 +1,17 @@
{
- "name": "install-ffmpeg-macos",
- "version": "0.0.0",
- "description": "",
- "main": "index.js",
- "scripts": {
- "test": "echo \"Error: no test specified\" && exit 1"
- },
- "keywords": [],
- "author": "Brendan Allan",
- "license": "ISC",
- "dependencies": {
- "@actions/core": "^1.6.0",
- "@actions/exec": "^1.1.1",
- "@actions/github": "^5.0.1"
- }
+ "name": "install-ffmpeg-macos",
+ "version": "0.0.0",
+ "description": "",
+ "main": "index.js",
+ "scripts": {
+ "test": "echo \"Error: no test specified\" && exit 1"
+ },
+ "keywords": [],
+ "author": "Brendan Allan",
+ "license": "ISC",
+ "dependencies": {
+ "@actions/core": "^1.6.0",
+ "@actions/exec": "^1.1.1",
+ "@actions/github": "^5.0.1"
+ }
}
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index eb3361eda..b87b20af0 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -1,10 +1,9 @@
-
-
+
Closes #(issue)
diff --git a/.rustfmt.toml b/.rustfmt.toml
index 7f591ca55..b6bfaa825 100644
--- a/.rustfmt.toml
+++ b/.rustfmt.toml
@@ -1,5 +1,4 @@
hard_tabs = true
-tab_spaces = 4
match_block_trailing_comma = true
max_width = 90
newline_style = "Unix"
diff --git a/.vscode/settings.json b/.vscode/settings.json
index d2be09262..05a5dc79c 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -1,26 +1,28 @@
{
- "cSpell.words": [
- "actix",
- "bpfrpt",
- "consts",
- "creationdate",
- "ipfs",
- "Keepsafe",
- "pathctx",
- "prismjs",
- "proptype",
- "quicktime",
- "repr",
- "Roadmap",
- "svgr",
- "tailwindcss",
- "trivago",
- "tsparticles",
- "upsert"
- ],
- "[rust]": {
- "editor.defaultFormatter": "matklad.rust-analyzer"
- },
- "rust-analyzer.procMacro.enable": true,
- "rust-analyzer.diagnostics.experimental.enable": false
+ "cSpell.words": [
+ "actix",
+ "bpfrpt",
+ "consts",
+ "creationdate",
+ "ipfs",
+ "Keepsafe",
+ "pathctx",
+ "prismjs",
+ "proptype",
+ "quicktime",
+ "repr",
+ "Roadmap",
+ "svgr",
+ "tailwindcss",
+ "trivago",
+ "tsparticles",
+ "upsert"
+ ],
+ "[rust]": {
+ "editor.defaultFormatter": "matklad.rust-analyzer"
+ },
+ "rust-analyzer.procMacro.enable": true,
+ "rust-analyzer.diagnostics.experimental.enable": false,
+ "rust-analyzer.inlayHints.parameterHints.enable": false,
+ "rust-analyzer.inlayHints.typeHints.enable": false
}
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 5df1a2aa4..bb617bbf5 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,4 +1,3 @@
-
# Contributor Covenant Code of Conduct
## Our Pledge
@@ -18,23 +17,23 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:
-* Demonstrating empathy and kindness toward other people
-* Being respectful of differing opinions, viewpoints, and experiences
-* Giving and gracefully accepting constructive feedback
-* Accepting responsibility and apologizing to those affected by our mistakes,
+- Demonstrating empathy and kindness toward other people
+- Being respectful of differing opinions, viewpoints, and experiences
+- Giving and gracefully accepting constructive feedback
+- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
-* Focusing on what is best not just for us as individuals, but for the
+- Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
-* The use of sexualized language or imagery, and sexual attention or
+- The use of sexualized language or imagery, and sexual attention or
advances of any kind
-* Trolling, insulting or derogatory comments, and personal or political attacks
-* Public or private harassment
-* Publishing others' private information, such as a physical or email
+- Trolling, insulting or derogatory comments, and personal or political attacks
+- Public or private harassment
+- Publishing others' private information, such as a physical or email
address, without their explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
+- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
@@ -107,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
-standards, including sustained inappropriate behavior, harassment of an
+standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
@@ -119,15 +118,15 @@ This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
-Community Impact Guidelines were inspired by
-[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
+Community Impact Guidelines were inspired by
+[Mozilla's code of conduct enforcement ladder][mozilla coc].
For answers to common questions about this code of conduct, see the FAQ at
-[https://www.contributor-covenant.org/faq][FAQ]. Translations are available
+[https://www.contributor-covenant.org/faq][faq]. Translations are available
at [https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
-[Mozilla CoC]: https://github.com/mozilla/diversity
-[FAQ]: https://www.contributor-covenant.org/faq
+[mozilla coc]: https://github.com/mozilla/diversity
+[faq]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 41df6d1a5..b8ba04560 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -61,17 +61,18 @@ If you are having issues ensure you are using the following versions of Rust and
### Pull Request
When you're finished with the changes, create a pull request, also known as a PR.
-- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
+
+- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
- Don't forget to [link PR to issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) if you are solving one.
- Enable the checkbox to [allow maintainer edits](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork) so the branch can be updated for a merge.
-Once you submit your PR, a team member will review your proposal. We may ask questions or request for additional information.
+ Once you submit your PR, a team member will review your proposal. We may ask questions or request additional information.
- We may ask for changes to be made before a PR can be merged, either using [suggested changes](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/incorporating-feedback-in-your-pull-request) or pull request comments. You can apply suggested changes directly through the UI. You can make any other changes in your fork, then commit them to your branch.
- As you update your PR and apply changes, mark each conversation as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations).
- If you run into any merge issues, checkout this [git tutorial](https://lab.github.com/githubtraining/managing-merge-conflicts) to help you resolve merge conflicts and other issues.
### Your PR is merged!
-Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:.
+Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:.
Once your PR is merged, your contributions will be included in the next release of the application.
diff --git a/README.md b/README.md
index ebf04950e..1d66cefc2 100644
--- a/README.md
+++ b/README.md
@@ -38,7 +38,6 @@ Organize files across many devices in one place. From cloud services to offline
For independent creatives, hoarders and those that want to own their digital footprint. Spacedrive provides a file management experience like no other, and it's completely free.
-
diff --git a/apps/desktop/package.json b/apps/desktop/package.json
index 0aaedc26f..e72ead71a 100644
--- a/apps/desktop/package.json
+++ b/apps/desktop/package.json
@@ -1,41 +1,41 @@
{
- "name": "@sd/desktop",
- "version": "1.0.0",
- "main": "index.js",
- "license": "MIT",
- "private": true,
- "scripts": {
- "vite": "vite",
- "dev": "concurrently \"pnpm tauri dev\" \"vite\"",
- "tauri": "tauri",
- "build": "vite build"
- },
- "dependencies": {
- "@sd/client": "workspace:*",
- "@sd/core": "workspace:*",
- "@sd/interface": "workspace:*",
- "@sd/ui": "workspace:*",
- "@tauri-apps/api": "^1.0.0-rc.3",
- "react": "^18.0.0",
- "react-dom": "^18.0.0"
- },
- "devDependencies": {
- "@tauri-apps/cli": "^1.0.0-rc.8",
- "@tauri-apps/tauricon": "github:tauri-apps/tauricon",
- "@types/babel-core": "^6.25.7",
- "@types/byte-size": "^8.1.0",
- "@types/react": "^18.0.8",
- "@types/react-dom": "^18.0.0",
- "@types/react-router-dom": "^5.3.3",
- "@types/react-window": "^1.8.5",
- "@types/tailwindcss": "^3.0.10",
- "@vitejs/plugin-react": "^1.3.1",
- "concurrently": "^7.1.0",
- "prettier": "^2.6.2",
- "sass": "^1.50.0",
- "typescript": "^4.6.3",
- "vite": "^2.9.5",
- "vite-plugin-filter-replace": "^0.1.9",
- "vite-plugin-svgr": "^1.1.0"
- }
+ "name": "@sd/desktop",
+ "version": "1.0.0",
+ "main": "index.js",
+ "license": "MIT",
+ "private": true,
+ "scripts": {
+ "vite": "vite",
+ "dev": "concurrently \"pnpm tauri dev\" \"vite\"",
+ "tauri": "tauri",
+ "build": "vite build"
+ },
+ "dependencies": {
+ "@sd/client": "workspace:*",
+ "@sd/core": "workspace:*",
+ "@sd/interface": "workspace:*",
+ "@sd/ui": "workspace:*",
+ "@tauri-apps/api": "^1.0.0-rc.3",
+ "react": "^18.0.0",
+ "react-dom": "^18.0.0"
+ },
+ "devDependencies": {
+ "@tauri-apps/cli": "^1.0.0-rc.8",
+ "@tauri-apps/tauricon": "github:tauri-apps/tauricon",
+ "@types/babel-core": "^6.25.7",
+ "@types/byte-size": "^8.1.0",
+ "@types/react": "^18.0.8",
+ "@types/react-dom": "^18.0.0",
+ "@types/react-router-dom": "^5.3.3",
+ "@types/react-window": "^1.8.5",
+ "@types/tailwindcss": "^3.0.10",
+ "@vitejs/plugin-react": "^1.3.1",
+ "concurrently": "^7.1.0",
+ "prettier": "^2.6.2",
+ "sass": "^1.50.0",
+ "typescript": "^4.6.3",
+ "vite": "^2.9.5",
+ "vite-plugin-filter-replace": "^0.1.9",
+ "vite-plugin-svgr": "^1.1.0"
+ }
}
diff --git a/apps/desktop/src-tauri/rustfmt.toml b/apps/desktop/src-tauri/rustfmt.toml
index 136f5f330..a231bfab7 100644
--- a/apps/desktop/src-tauri/rustfmt.toml
+++ b/apps/desktop/src-tauri/rustfmt.toml
@@ -1,6 +1,5 @@
max_width = 100
-hard_tabs = false
-tab_spaces = 2
+hard_tabs = true
newline_style = "Auto"
use_small_heuristics = "Default"
reorder_imports = true
diff --git a/apps/desktop/src-tauri/src/build.rs b/apps/desktop/src-tauri/src/build.rs
index 5e6666d8b..5aba44c8d 100644
--- a/apps/desktop/src-tauri/src/build.rs
+++ b/apps/desktop/src-tauri/src/build.rs
@@ -1,11 +1,11 @@
// use swift_rs::build_utils::{link_swift, link_swift_package};
fn main() {
- // HOTFIX: compile the swift code for arm64
- // std::env::set_var("CARGO_CFG_TARGET_ARCH", "arm64");
+ // HOTFIX: compile the swift code for arm64
+ // std::env::set_var("CARGO_CFG_TARGET_ARCH", "arm64");
- // link_swift();
- // link_swift_package("swift-lib", "../../../packages/macos/");
+ // link_swift();
+ // link_swift_package("swift-lib", "../../../packages/macos/");
- tauri_build::build();
+ tauri_build::build();
}
diff --git a/apps/desktop/src-tauri/src/main.rs b/apps/desktop/src-tauri/src/main.rs
index fdc82d8cd..a9383971a 100644
--- a/apps/desktop/src-tauri/src/main.rs
+++ b/apps/desktop/src-tauri/src/main.rs
@@ -11,106 +11,106 @@ use window::WindowExt;
#[tauri::command(async)]
async fn client_query_transport(
- core: tauri::State<'_, CoreController>,
- data: ClientQuery,
+ core: tauri::State<'_, CoreController>,
+ data: ClientQuery,
) -> Result {
- match core.query(data).await {
- Ok(response) => Ok(response),
- Err(err) => {
- println!("query error: {:?}", err);
- Err(err.to_string())
- }
- }
+ match core.query(data).await {
+ Ok(response) => Ok(response),
+ Err(err) => {
+ println!("query error: {:?}", err);
+ Err(err.to_string())
+ }
+ }
}
#[tauri::command(async)]
async fn client_command_transport(
- core: tauri::State<'_, CoreController>,
- data: ClientCommand,
+ core: tauri::State<'_, CoreController>,
+ data: ClientCommand,
) -> Result {
- match core.command(data).await {
- Ok(response) => Ok(response),
- Err(err) => {
- println!("command error: {:?}", err);
- Err(err.to_string())
- }
- }
+ match core.command(data).await {
+ Ok(response) => Ok(response),
+ Err(err) => {
+ println!("command error: {:?}", err);
+ Err(err.to_string())
+ }
+ }
}
#[tauri::command(async)]
async fn app_ready(app_handle: tauri::AppHandle) {
- let window = app_handle.get_window("main").unwrap();
+ let window = app_handle.get_window("main").unwrap();
- window.show().unwrap();
+ window.show().unwrap();
- #[cfg(target_os = "macos")]
- {
- std::thread::sleep(std::time::Duration::from_millis(1000));
- println!("fixing shadow for, {:?}", window.ns_window().unwrap());
- window.fix_shadow();
- }
+ #[cfg(target_os = "macos")]
+ {
+ std::thread::sleep(std::time::Duration::from_millis(1000));
+ println!("fixing shadow for, {:?}", window.ns_window().unwrap());
+ window.fix_shadow();
+ }
}
#[tokio::main]
async fn main() {
- let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
- // create an instance of the core
- let (mut node, mut event_receiver) = Node::new(data_dir).await;
- // run startup tasks
- node.initializer().await;
- // extract the node controller
- let controller = node.get_controller();
- // throw the node into a dedicated thread
- tokio::spawn(async move {
- node.start().await;
- });
- // create tauri app
- tauri::Builder::default()
- // pass controller to the tauri state manager
- .manage(controller)
- .setup(|app| {
- let app = app.handle();
+ let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
+ // create an instance of the core
+ let (mut node, mut event_receiver) = Node::new(data_dir).await;
+ // run startup tasks
+ node.initializer().await;
+ // extract the node controller
+ let controller = node.get_controller();
+ // throw the node into a dedicated thread
+ tokio::spawn(async move {
+ node.start().await;
+ });
+ // create tauri app
+ tauri::Builder::default()
+ // pass controller to the tauri state manager
+ .manage(controller)
+ .setup(|app| {
+ let app = app.handle();
- app.windows().iter().for_each(|(_, window)| {
- window.hide().unwrap();
+ app.windows().iter().for_each(|(_, window)| {
+ window.hide().unwrap();
- #[cfg(target_os = "windows")]
- window.set_decorations(true).unwrap();
+ #[cfg(target_os = "windows")]
+ window.set_decorations(true).unwrap();
- #[cfg(target_os = "macos")]
- window.set_transparent_titlebar(true, true);
- });
+ #[cfg(target_os = "macos")]
+ window.set_transparent_titlebar(true, true);
+ });
- // core event transport
- tokio::spawn(async move {
- let mut last = Instant::now();
- // handle stream output
- while let Some(event) = event_receiver.recv().await {
- match event {
- CoreEvent::InvalidateQueryDebounced(_) => {
- let current = Instant::now();
- if current.duration_since(last) > Duration::from_millis(1000 / 60) {
- last = current;
- app.emit_all("core_event", &event).unwrap();
- }
- }
- event => {
- app.emit_all("core_event", &event).unwrap();
- }
- }
- }
- });
+ // core event transport
+ tokio::spawn(async move {
+ let mut last = Instant::now();
+ // handle stream output
+ while let Some(event) = event_receiver.recv().await {
+ match event {
+ CoreEvent::InvalidateQueryDebounced(_) => {
+ let current = Instant::now();
+ if current.duration_since(last) > Duration::from_millis(1000 / 60) {
+ last = current;
+ app.emit_all("core_event", &event).unwrap();
+ }
+ }
+ event => {
+ app.emit_all("core_event", &event).unwrap();
+ }
+ }
+ }
+ });
- Ok(())
- })
- .on_menu_event(|event| menu::handle_menu_event(event))
- .on_window_event(|event| window::handle_window_event(event))
- .invoke_handler(tauri::generate_handler![
- client_query_transport,
- client_command_transport,
- app_ready,
- ])
- .menu(menu::get_menu())
- .run(tauri::generate_context!())
- .expect("error while running tauri application");
+ Ok(())
+ })
+ .on_menu_event(|event| menu::handle_menu_event(event))
+ .on_window_event(|event| window::handle_window_event(event))
+ .invoke_handler(tauri::generate_handler![
+ client_query_transport,
+ client_command_transport,
+ app_ready,
+ ])
+ .menu(menu::get_menu())
+ .run(tauri::generate_context!())
+ .expect("error while running tauri application");
}
diff --git a/apps/desktop/src-tauri/src/menu.rs b/apps/desktop/src-tauri/src/menu.rs
index 52db3400a..ffd1a4b2e 100644
--- a/apps/desktop/src-tauri/src/menu.rs
+++ b/apps/desktop/src-tauri/src/menu.rs
@@ -3,88 +3,88 @@ use std::env::consts;
use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu, WindowMenuEvent, Wry};
pub(crate) fn get_menu() -> Menu {
- match consts::OS {
- "linux" => Menu::new(),
- "macos" => custom_menu_bar(),
- _ => Menu::new(),
- }
+ match consts::OS {
+ "linux" => Menu::new(),
+ "macos" => custom_menu_bar(),
+ _ => Menu::new(),
+ }
}
fn custom_menu_bar() -> Menu {
- // let quit = CustomMenuItem::new("quit".to_string(), "Quit");
- // let close = CustomMenuItem::new("close".to_string(), "Close");
- // let jeff = CustomMenuItem::new("jeff".to_string(), "Jeff");
- // let submenu = Submenu::new(
- // "File",
- // Menu::new().add_item(quit).add_item(close).add_item(jeff),
- // );
- let spacedrive = Submenu::new(
- "Spacedrive",
- Menu::new()
- .add_native_item(MenuItem::About(
- "Spacedrive".to_string(),
- AboutMetadata::new(),
- )) // TODO: fill out about metadata
- .add_native_item(MenuItem::Separator)
- .add_native_item(MenuItem::Services)
- .add_native_item(MenuItem::Separator)
- .add_native_item(MenuItem::Hide)
- .add_native_item(MenuItem::HideOthers)
- .add_native_item(MenuItem::ShowAll)
- .add_native_item(MenuItem::Separator)
- .add_native_item(MenuItem::Quit),
- );
+ // let quit = CustomMenuItem::new("quit".to_string(), "Quit");
+ // let close = CustomMenuItem::new("close".to_string(), "Close");
+ // let jeff = CustomMenuItem::new("jeff".to_string(), "Jeff");
+ // let submenu = Submenu::new(
+ // "File",
+ // Menu::new().add_item(quit).add_item(close).add_item(jeff),
+ // );
+ let spacedrive = Submenu::new(
+ "Spacedrive",
+ Menu::new()
+ .add_native_item(MenuItem::About(
+ "Spacedrive".to_string(),
+ AboutMetadata::new(),
+ )) // TODO: fill out about metadata
+ .add_native_item(MenuItem::Separator)
+ .add_native_item(MenuItem::Services)
+ .add_native_item(MenuItem::Separator)
+ .add_native_item(MenuItem::Hide)
+ .add_native_item(MenuItem::HideOthers)
+ .add_native_item(MenuItem::ShowAll)
+ .add_native_item(MenuItem::Separator)
+ .add_native_item(MenuItem::Quit),
+ );
- let file = Submenu::new(
- "File",
- Menu::new()
- .add_item(
- CustomMenuItem::new("new_window".to_string(), "New Window")
- .accelerator("CmdOrCtrl+N")
- .disabled(),
- )
- .add_item(
- CustomMenuItem::new("close".to_string(), "Close Window").accelerator("CmdOrCtrl+W"),
- ),
- );
- let edit = Submenu::new(
- "Edit",
- Menu::new()
- .add_native_item(MenuItem::Copy)
- .add_native_item(MenuItem::Paste),
- );
- let view = Submenu::new(
- "View",
- Menu::new()
- .add_item(
- CustomMenuItem::new("command_pallete".to_string(), "Command Pallete")
- .accelerator("CmdOrCtrl+P"),
- )
- .add_item(CustomMenuItem::new("layout".to_string(), "Layout").disabled()),
- );
- let window = Submenu::new(
- "Window",
- Menu::new().add_native_item(MenuItem::EnterFullScreen),
- );
+ let file = Submenu::new(
+ "File",
+ Menu::new()
+ .add_item(
+ CustomMenuItem::new("new_window".to_string(), "New Window")
+ .accelerator("CmdOrCtrl+N")
+ .disabled(),
+ )
+ .add_item(
+ CustomMenuItem::new("close".to_string(), "Close Window").accelerator("CmdOrCtrl+W"),
+ ),
+ );
+ let edit = Submenu::new(
+ "Edit",
+ Menu::new()
+ .add_native_item(MenuItem::Copy)
+ .add_native_item(MenuItem::Paste),
+ );
+ let view = Submenu::new(
+ "View",
+ Menu::new()
+ .add_item(
+ CustomMenuItem::new("command_pallete".to_string(), "Command Pallete")
+ .accelerator("CmdOrCtrl+P"),
+ )
+ .add_item(CustomMenuItem::new("layout".to_string(), "Layout").disabled()),
+ );
+ let window = Submenu::new(
+ "Window",
+ Menu::new().add_native_item(MenuItem::EnterFullScreen),
+ );
- let menu = Menu::new()
- .add_submenu(spacedrive)
- .add_submenu(file)
- .add_submenu(edit)
- .add_submenu(view)
- .add_submenu(window);
+ let menu = Menu::new()
+ .add_submenu(spacedrive)
+ .add_submenu(file)
+ .add_submenu(edit)
+ .add_submenu(view)
+ .add_submenu(window);
- menu
+ menu
}
pub(crate) fn handle_menu_event(event: WindowMenuEvent) {
- match event.menu_item_id() {
- "quit" => {
- std::process::exit(0);
- }
- "close" => {
- event.window().close().unwrap();
- }
- _ => {}
- }
+ match event.menu_item_id() {
+ "quit" => {
+ std::process::exit(0);
+ }
+ "close" => {
+ event.window().close().unwrap();
+ }
+ _ => {}
+ }
}
diff --git a/apps/desktop/src-tauri/src/window.rs b/apps/desktop/src-tauri/src/window.rs
index 77f56c679..676c98292 100644
--- a/apps/desktop/src-tauri/src/window.rs
+++ b/apps/desktop/src-tauri/src/window.rs
@@ -1,93 +1,93 @@
use tauri::{GlobalWindowEvent, Runtime, Window, Wry};
pub(crate) fn handle_window_event(event: GlobalWindowEvent) {
- match event.event() {
- _ => {}
- }
+ match event.event() {
+ _ => {}
+ }
}
pub trait WindowExt {
- #[cfg(target_os = "macos")]
- fn set_toolbar(&self, shown: bool);
- #[cfg(target_os = "macos")]
- fn set_transparent_titlebar(&self, transparent: bool, large: bool);
- #[cfg(target_os = "macos")]
- fn fix_shadow(&self);
+ #[cfg(target_os = "macos")]
+ fn set_toolbar(&self, shown: bool);
+ #[cfg(target_os = "macos")]
+ fn set_transparent_titlebar(&self, transparent: bool, large: bool);
+ #[cfg(target_os = "macos")]
+ fn fix_shadow(&self);
}
impl WindowExt for Window {
- #[cfg(target_os = "macos")]
- fn set_toolbar(&self, shown: bool) {
- use cocoa::{
- appkit::{NSToolbar, NSWindow},
- base::{nil, NO},
- foundation::NSString,
- };
+ #[cfg(target_os = "macos")]
+ fn set_toolbar(&self, shown: bool) {
+ use cocoa::{
+ appkit::{NSToolbar, NSWindow},
+ base::{nil, NO},
+ foundation::NSString,
+ };
- unsafe {
- let id = self.ns_window().unwrap() as cocoa::base::id;
+ unsafe {
+ let id = self.ns_window().unwrap() as cocoa::base::id;
- if shown {
- let toolbar =
- NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat"));
- toolbar.setShowsBaselineSeparator_(NO);
- id.setToolbar_(toolbar);
- } else {
- id.setToolbar_(nil);
- }
- }
- }
+ if shown {
+ let toolbar =
+ NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat"));
+ toolbar.setShowsBaselineSeparator_(NO);
+ id.setToolbar_(toolbar);
+ } else {
+ id.setToolbar_(nil);
+ }
+ }
+ }
- #[cfg(target_os = "macos")]
- fn set_transparent_titlebar(&self, transparent: bool, large: bool) {
- use cocoa::{
- appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility},
- base::{NO, YES},
- };
+ #[cfg(target_os = "macos")]
+ fn set_transparent_titlebar(&self, transparent: bool, large: bool) {
+ use cocoa::{
+ appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility},
+ base::{NO, YES},
+ };
- unsafe {
- let id = self.ns_window().unwrap() as cocoa::base::id;
+ unsafe {
+ let id = self.ns_window().unwrap() as cocoa::base::id;
- let mut style_mask = id.styleMask();
- // println!("existing style mask, {:#?}", style_mask);
- style_mask.set(
- NSWindowStyleMask::NSFullSizeContentViewWindowMask,
- transparent,
- );
- style_mask.set(
- NSWindowStyleMask::NSTexturedBackgroundWindowMask,
- transparent,
- );
- style_mask.set(
- NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask,
- transparent && large,
- );
- id.setStyleMask_(style_mask);
+ let mut style_mask = id.styleMask();
+ // println!("existing style mask, {:#?}", style_mask);
+ style_mask.set(
+ NSWindowStyleMask::NSFullSizeContentViewWindowMask,
+ transparent,
+ );
+ style_mask.set(
+ NSWindowStyleMask::NSTexturedBackgroundWindowMask,
+ transparent,
+ );
+ style_mask.set(
+ NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask,
+ transparent && large,
+ );
+ id.setStyleMask_(style_mask);
- if large {
- self.set_toolbar(true);
- }
+ if large {
+ self.set_toolbar(true);
+ }
- id.setTitleVisibility_(if transparent {
- NSWindowTitleVisibility::NSWindowTitleHidden
- } else {
- NSWindowTitleVisibility::NSWindowTitleVisible
- });
+ id.setTitleVisibility_(if transparent {
+ NSWindowTitleVisibility::NSWindowTitleHidden
+ } else {
+ NSWindowTitleVisibility::NSWindowTitleVisible
+ });
- id.setTitlebarAppearsTransparent_(if transparent { YES } else { NO });
- }
- }
+ id.setTitlebarAppearsTransparent_(if transparent { YES } else { NO });
+ }
+ }
- #[cfg(target_os = "macos")]
- fn fix_shadow(&self) {
- use cocoa::appkit::NSWindow;
+ #[cfg(target_os = "macos")]
+ fn fix_shadow(&self) {
+ use cocoa::appkit::NSWindow;
- unsafe {
- let id = self.ns_window().unwrap() as cocoa::base::id;
+ unsafe {
+ let id = self.ns_window().unwrap() as cocoa::base::id;
- println!("recomputing shadow for window {:?}", id.title());
+ println!("recomputing shadow for window {:?}", id.title());
- id.invalidateShadow();
- }
- }
+ id.invalidateShadow();
+ }
+ }
}
diff --git a/apps/desktop/src-tauri/tauri.conf.json b/apps/desktop/src-tauri/tauri.conf.json
index 34a5757fc..efbb3361b 100644
--- a/apps/desktop/src-tauri/tauri.conf.json
+++ b/apps/desktop/src-tauri/tauri.conf.json
@@ -1,83 +1,83 @@
{
- "package": {
- "productName": "Spacedrive",
- "version": "0.1.0"
- },
- "build": {
- "distDir": "../dist",
- "devPath": "http://localhost:8001",
- "beforeDevCommand": "",
- "beforeBuildCommand": ""
- },
- "tauri": {
- "macOSPrivateApi": true,
- "bundle": {
- "active": true,
- "targets": "all",
- "identifier": "app.spacedrive.desktop",
- "icon": [
- "icons/32x32.png",
- "icons/128x128.png",
- "icons/128x128@2x.png",
- "icons/icon.icns",
- "icons/icon.ico"
- ],
- "resources": [],
- "externalBin": [],
- "copyright": "Jamie Pine",
- "shortDescription": "The Universal File Explorer",
- "longDescription": "A cross-platform file explorer, powered by an open source virtual distributed filesystem.",
- "deb": {
- "depends": [],
- "useBootstrapper": false
- },
- "macOS": {
- "frameworks": [],
- "minimumSystemVersion": "",
- "useBootstrapper": false,
- "exceptionDomain": "",
- "signingIdentity": null,
- "entitlements": null
- },
- "windows": {
- "certificateThumbprint": null,
- "digestAlgorithm": "sha256",
- "timestampUrl": ""
- }
- },
- "updater": {
- "active": false
- },
- "allowlist": {
- "all": true,
- "protocol": {
- "assetScope": ["*"]
- },
- "dialog": {
- "all": true,
- "open": true,
- "save": true
- }
- },
- "windows": [
- {
- "title": "Spacedrive",
- "width": 1200,
- "height": 725,
- "minWidth": 700,
- "minHeight": 500,
- "resizable": true,
- "fullscreen": false,
- "alwaysOnTop": false,
- "focus": false,
- "fileDropEnabled": false,
- "decorations": true,
- "transparent": true,
- "center": true
- }
- ],
- "security": {
- "csp": "default-src asset: https://asset.localhost blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
- }
- }
+ "package": {
+ "productName": "Spacedrive",
+ "version": "0.1.0"
+ },
+ "build": {
+ "distDir": "../dist",
+ "devPath": "http://localhost:8001",
+ "beforeDevCommand": "",
+ "beforeBuildCommand": ""
+ },
+ "tauri": {
+ "macOSPrivateApi": true,
+ "bundle": {
+ "active": true,
+ "targets": "all",
+ "identifier": "app.spacedrive.desktop",
+ "icon": [
+ "icons/32x32.png",
+ "icons/128x128.png",
+ "icons/128x128@2x.png",
+ "icons/icon.icns",
+ "icons/icon.ico"
+ ],
+ "resources": [],
+ "externalBin": [],
+ "copyright": "Jamie Pine",
+ "shortDescription": "The Universal File Explorer",
+ "longDescription": "A cross-platform file explorer, powered by an open source virtual distributed filesystem.",
+ "deb": {
+ "depends": [],
+ "useBootstrapper": false
+ },
+ "macOS": {
+ "frameworks": [],
+ "minimumSystemVersion": "",
+ "useBootstrapper": false,
+ "exceptionDomain": "",
+ "signingIdentity": null,
+ "entitlements": null
+ },
+ "windows": {
+ "certificateThumbprint": null,
+ "digestAlgorithm": "sha256",
+ "timestampUrl": ""
+ }
+ },
+ "updater": {
+ "active": false
+ },
+ "allowlist": {
+ "all": true,
+ "protocol": {
+ "assetScope": ["*"]
+ },
+ "dialog": {
+ "all": true,
+ "open": true,
+ "save": true
+ }
+ },
+ "windows": [
+ {
+ "title": "Spacedrive",
+ "width": 1200,
+ "height": 725,
+ "minWidth": 700,
+ "minHeight": 500,
+ "resizable": true,
+ "fullscreen": false,
+ "alwaysOnTop": false,
+ "focus": false,
+ "fileDropEnabled": false,
+ "decorations": true,
+ "transparent": true,
+ "center": true
+ }
+ ],
+ "security": {
+ "csp": "default-src asset: https://asset.localhost blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
+ }
+ }
}
diff --git a/apps/desktop/src-tauri/tauri.linux.conf.json b/apps/desktop/src-tauri/tauri.linux.conf.json
index 8ab285e3d..16a6e4a2d 100644
--- a/apps/desktop/src-tauri/tauri.linux.conf.json
+++ b/apps/desktop/src-tauri/tauri.linux.conf.json
@@ -1,74 +1,74 @@
{
- "package": {
- "productName": "Spacedrive",
- "version": "0.1.0"
- },
- "build": {
- "distDir": "../dist",
- "devPath": "http://localhost:8001",
- "beforeDevCommand": "",
- "beforeBuildCommand": ""
- },
- "tauri": {
- "bundle": {
- "active": true,
- "targets": "all",
- "identifier": "co.spacedrive.desktop",
- "icon": ["icons/icon.icns"],
- "resources": [],
- "externalBin": [],
- "copyright": "Jamie Pine",
- "shortDescription": "Your personal virtual cloud.",
- "longDescription": "Spacedrive is an open source virtual filesystem, a personal cloud powered by your everyday devices. Feature-rich benefits of the cloud, only its owned and hosted by you with security, privacy and ownership as a foundation. Spacedrive makes it possible to create a limitless directory of your digital life that will stand the test of time.",
- "deb": {
- "depends": [],
- "useBootstrapper": false
- },
- "macOS": {
- "frameworks": [],
- "minimumSystemVersion": "",
- "useBootstrapper": false,
- "exceptionDomain": "",
- "signingIdentity": null,
- "entitlements": null
- },
- "windows": {
- "certificateThumbprint": null,
- "digestAlgorithm": "sha256",
- "timestampUrl": ""
- }
- },
- "updater": {
- "active": false
- },
- "allowlist": {
- "all": true,
- "os": {
- "all": true
- },
- "dialog": {
- "all": true,
- "open": true,
- "save": true
- }
- },
- "windows": [
- {
- "title": "Spacedrive",
- "width": 1250,
- "height": 625,
- "resizable": true,
- "fullscreen": false,
- "alwaysOnTop": false,
- "focus": true,
- "fileDropEnabled": false,
- "decorations": true,
- "transparent": false,
- "center": true
- }
- ],
- "security": {
- "csp": "default-src asset: blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
- }
- }
+ "package": {
+ "productName": "Spacedrive",
+ "version": "0.1.0"
+ },
+ "build": {
+ "distDir": "../dist",
+ "devPath": "http://localhost:8001",
+ "beforeDevCommand": "",
+ "beforeBuildCommand": ""
+ },
+ "tauri": {
+ "bundle": {
+ "active": true,
+ "targets": "all",
+ "identifier": "co.spacedrive.desktop",
+ "icon": ["icons/icon.icns"],
+ "resources": [],
+ "externalBin": [],
+ "copyright": "Jamie Pine",
+ "shortDescription": "Your personal virtual cloud.",
+		"longDescription": "Spacedrive is an open source virtual filesystem, a personal cloud powered by your everyday devices. Feature-rich benefits of the cloud, only it's owned and hosted by you with security, privacy and ownership as a foundation. Spacedrive makes it possible to create a limitless directory of your digital life that will stand the test of time.",
+ "deb": {
+ "depends": [],
+ "useBootstrapper": false
+ },
+ "macOS": {
+ "frameworks": [],
+ "minimumSystemVersion": "",
+ "useBootstrapper": false,
+ "exceptionDomain": "",
+ "signingIdentity": null,
+ "entitlements": null
+ },
+ "windows": {
+ "certificateThumbprint": null,
+ "digestAlgorithm": "sha256",
+ "timestampUrl": ""
+ }
+ },
+ "updater": {
+ "active": false
+ },
+ "allowlist": {
+ "all": true,
+ "os": {
+ "all": true
+ },
+ "dialog": {
+ "all": true,
+ "open": true,
+ "save": true
+ }
+ },
+ "windows": [
+ {
+ "title": "Spacedrive",
+ "width": 1250,
+ "height": 625,
+ "resizable": true,
+ "fullscreen": false,
+ "alwaysOnTop": false,
+ "focus": true,
+ "fileDropEnabled": false,
+ "decorations": true,
+ "transparent": false,
+ "center": true
+ }
+ ],
+ "security": {
+ "csp": "default-src asset: blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
+ }
+ }
}
diff --git a/apps/desktop/src/index.html b/apps/desktop/src/index.html
index f493787ea..4d441a0c7 100644
--- a/apps/desktop/src/index.html
+++ b/apps/desktop/src/index.html
@@ -1,13 +1,13 @@
-
-
-
-
- Spacedrive
-
-
-
-
-
+
+
+
+
+ Spacedrive
+
+
+
+
+
diff --git a/apps/desktop/src/index.tsx b/apps/desktop/src/index.tsx
index 34835a780..423e5c79e 100644
--- a/apps/desktop/src/index.tsx
+++ b/apps/desktop/src/index.tsx
@@ -15,79 +15,79 @@ import { appWindow } from '@tauri-apps/api/window';
// bind state to core via Tauri
class Transport extends BaseTransport {
- constructor() {
- super();
+ constructor() {
+ super();
- listen('core_event', (e: Event) => {
- this.emit('core_event', e.payload);
- });
- }
- async query(query: ClientQuery) {
- return await invoke('client_query_transport', { data: query });
- }
- async command(query: ClientCommand) {
- return await invoke('client_command_transport', { data: query });
- }
+ listen('core_event', (e: Event) => {
+ this.emit('core_event', e.payload);
+ });
+ }
+ async query(query: ClientQuery) {
+ return await invoke('client_query_transport', { data: query });
+ }
+ async command(query: ClientCommand) {
+ return await invoke('client_command_transport', { data: query });
+ }
}
function App() {
- function getPlatform(platform: string): Platform {
- switch (platform) {
- case 'darwin':
- return 'macOS';
- case 'win32':
- return 'windows';
- case 'linux':
- return 'linux';
- default:
- return 'browser';
- }
- }
+ function getPlatform(platform: string): Platform {
+ switch (platform) {
+ case 'darwin':
+ return 'macOS';
+ case 'win32':
+ return 'windows';
+ case 'linux':
+ return 'linux';
+ default:
+ return 'browser';
+ }
+ }
- const [platform, setPlatform] = useState('macOS');
- const [focused, setFocused] = useState(true);
+ const [platform, setPlatform] = useState('macOS');
+ const [focused, setFocused] = useState(true);
- useEffect(() => {
- os.platform().then((platform) => setPlatform(getPlatform(platform)));
- invoke('app_ready');
- }, []);
+ useEffect(() => {
+ os.platform().then((platform) => setPlatform(getPlatform(platform)));
+ invoke('app_ready');
+ }, []);
- useEffect(() => {
- const unlistenFocus = listen('tauri://focus', () => setFocused(true));
- const unlistenBlur = listen('tauri://blur', () => setFocused(false));
+ useEffect(() => {
+ const unlistenFocus = listen('tauri://focus', () => setFocused(true));
+ const unlistenBlur = listen('tauri://blur', () => setFocused(false));
- return () => {
- unlistenFocus.then((unlisten) => unlisten());
- unlistenBlur.then((unlisten) => unlisten());
- };
- }, []);
+ return () => {
+ unlistenFocus.then((unlisten) => unlisten());
+ unlistenBlur.then((unlisten) => unlisten());
+ };
+ }, []);
- return (
- {
- return dialog.open(options);
- }}
- isFocused={focused}
- onClose={() => appWindow.close()}
- onFullscreen={() => appWindow.setFullscreen(true)}
- onMinimize={() => appWindow.minimize()}
- onOpen={(path: string) => shell.open(path)}
- />
- );
+ return (
+ {
+ return dialog.open(options);
+ }}
+ isFocused={focused}
+ onClose={() => appWindow.close()}
+ onFullscreen={() => appWindow.setFullscreen(true)}
+ onMinimize={() => appWindow.minimize()}
+ onOpen={(path: string) => shell.open(path)}
+ />
+ );
}
const root = createRoot(document.getElementById('root')!);
root.render(
-
-
-
+
+
+
);
diff --git a/apps/desktop/src/vite-env.d.ts b/apps/desktop/src/vite-env.d.ts
index 04e148c9d..16334d7f0 100644
--- a/apps/desktop/src/vite-env.d.ts
+++ b/apps/desktop/src/vite-env.d.ts
@@ -1,7 +1,7 @@
///
declare interface ImportMetaEnv {
- VITE_OS: string;
+ VITE_OS: string;
}
declare module '@babel/core' {}
diff --git a/apps/desktop/tsconfig.json b/apps/desktop/tsconfig.json
index c44e3731e..168f12434 100644
--- a/apps/desktop/tsconfig.json
+++ b/apps/desktop/tsconfig.json
@@ -1,5 +1,5 @@
{
- "extends": "../../packages/config/interface.tsconfig.json",
- "compilerOptions": {},
- "include": ["src"]
+ "extends": "../../packages/config/interface.tsconfig.json",
+ "compilerOptions": {},
+ "include": ["src"]
}
diff --git a/apps/desktop/vite.config.ts b/apps/desktop/vite.config.ts
index d669f544a..a787cf66e 100644
--- a/apps/desktop/vite.config.ts
+++ b/apps/desktop/vite.config.ts
@@ -1,27 +1,27 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
import { name, version } from './package.json';
-import svg from "vite-plugin-svgr"
+import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/
export default defineConfig({
- server: {
- port: 8001
- },
- plugins: [
- //@ts-ignore
- react({
- jsxRuntime: 'classic'
- }),
- svg({ svgrOptions: { icon: true } })
- ],
- root: 'src',
- publicDir: '../../packages/interface/src/assets',
- define: {
- pkgJson: { name, version }
- },
- build: {
- outDir: '../dist',
- assetsDir: '.'
- }
+ server: {
+ port: 8001
+ },
+ plugins: [
+ //@ts-ignore
+ react({
+ jsxRuntime: 'classic'
+ }),
+ svg({ svgrOptions: { icon: true } })
+ ],
+ root: 'src',
+ publicDir: '../../packages/interface/src/assets',
+ define: {
+ pkgJson: { name, version }
+ },
+ build: {
+ outDir: '../dist',
+ assetsDir: '.'
+ }
});
diff --git a/apps/landing/index.html b/apps/landing/index.html
index 46e87dcfc..1f739c216 100644
--- a/apps/landing/index.html
+++ b/apps/landing/index.html
@@ -1,23 +1,23 @@
-
-
-
-
- Spacedrive — A file manager from the future.
-
-
-
-
-
-
-
-
-
+
+
+
+
+ Spacedrive — A file manager from the future.
+
+
+
+
+
+
+
+
+
diff --git a/apps/landing/package.json b/apps/landing/package.json
index a9b21cdc8..358b46af4 100644
--- a/apps/landing/package.json
+++ b/apps/landing/package.json
@@ -1,59 +1,59 @@
{
- "name": "@sd/landing",
- "private": true,
- "version": "0.0.0",
- "scripts": {
- "dev": "vite",
- "build": "vite build",
- "serve": "vite preview"
- },
- "dependencies": {
- "@fontsource/inter": "^4.5.7",
- "@headlessui/react": "^1.5.0",
- "@heroicons/react": "^1.0.6",
- "@icons-pack/react-simple-icons": "^4.6.1",
- "@sd/client": "workspace:*",
- "@sd/core": "workspace:*",
- "@sd/interface": "workspace:*",
- "@sd/ui": "workspace:*",
- "@tailwindcss/typography": "^0.5.2",
- "@types/compression": "^1.7.2",
- "@types/express": "^4.17.13",
- "clsx": "^1.1.1",
- "compression": "^1.7.4",
- "express": "^4.17.3",
- "phosphor-react": "^1.4.1",
- "prismjs": "^1.28.0",
- "react": "^18.0.0",
- "react-device-detect": "^2.2.2",
- "react-dom": "^18.0.0",
- "react-helmet": "^6.1.0",
- "react-router-dom": "6.3.0",
- "react-tsparticles": "^2.0.6",
- "simple-icons": "^6.19.0",
- "tsparticles": "^2.0.6"
- },
- "devDependencies": {
- "@babel/preset-react": "^7.16.7",
- "@types/lodash": "^4.14.182",
- "@types/prismjs": "^1.26.0",
- "@types/react": "^18.0.8",
- "@types/react-dom": "^18.0.0",
- "@types/react-helmet": "^6.1.5",
- "@vitejs/plugin-react": "^1.3.1",
- "autoprefixer": "^10.4.4",
- "nodemon": "^2.0.15",
- "postcss": "^8.4.12",
- "sass": "^1.50.0",
- "tailwind": "^4.0.0",
- "ts-node": "^10.7.0",
- "typescript": "^4.6.3",
- "vite": "^2.9.5",
- "vite-plugin-markdown": "^2.0.2",
- "vite-plugin-md": "^0.13.0",
- "vite-plugin-pages": "^0.23.0",
- "vite-plugin-pages-sitemap": "^1.2.2",
- "vite-plugin-ssr": "^0.3.64",
- "vite-plugin-svgr": "^1.1.0"
- }
+ "name": "@sd/landing",
+ "private": true,
+ "version": "0.0.0",
+ "scripts": {
+ "dev": "vite",
+ "build": "vite build",
+ "serve": "vite preview"
+ },
+ "dependencies": {
+ "@fontsource/inter": "^4.5.7",
+ "@headlessui/react": "^1.5.0",
+ "@heroicons/react": "^1.0.6",
+ "@icons-pack/react-simple-icons": "^4.6.1",
+ "@sd/client": "workspace:*",
+ "@sd/core": "workspace:*",
+ "@sd/interface": "workspace:*",
+ "@sd/ui": "workspace:*",
+ "@tailwindcss/typography": "^0.5.2",
+ "@types/compression": "^1.7.2",
+ "@types/express": "^4.17.13",
+ "clsx": "^1.1.1",
+ "compression": "^1.7.4",
+ "express": "^4.17.3",
+ "phosphor-react": "^1.4.1",
+ "prismjs": "^1.28.0",
+ "react": "^18.0.0",
+ "react-device-detect": "^2.2.2",
+ "react-dom": "^18.0.0",
+ "react-helmet": "^6.1.0",
+ "react-router-dom": "6.3.0",
+ "react-tsparticles": "^2.0.6",
+ "simple-icons": "^6.19.0",
+ "tsparticles": "^2.0.6"
+ },
+ "devDependencies": {
+ "@babel/preset-react": "^7.16.7",
+ "@types/lodash": "^4.14.182",
+ "@types/prismjs": "^1.26.0",
+ "@types/react": "^18.0.8",
+ "@types/react-dom": "^18.0.0",
+ "@types/react-helmet": "^6.1.5",
+ "@vitejs/plugin-react": "^1.3.1",
+ "autoprefixer": "^10.4.4",
+ "nodemon": "^2.0.15",
+ "postcss": "^8.4.12",
+ "sass": "^1.50.0",
+ "tailwind": "^4.0.0",
+ "ts-node": "^10.7.0",
+ "typescript": "^4.6.3",
+ "vite": "^2.9.5",
+ "vite-plugin-markdown": "^2.0.2",
+ "vite-plugin-md": "^0.13.0",
+ "vite-plugin-pages": "^0.23.0",
+ "vite-plugin-pages-sitemap": "^1.2.2",
+ "vite-plugin-ssr": "^0.3.64",
+ "vite-plugin-svgr": "^1.1.0"
+ }
}
diff --git a/apps/landing/src/atom-one.css b/apps/landing/src/atom-one.css
index 9e4e86c27..b8457e47e 100644
--- a/apps/landing/src/atom-one.css
+++ b/apps/landing/src/atom-one.css
@@ -28,413 +28,416 @@
* --syntax-cursor-line: hsla(220, 100%, 80%, 0.04);
*/
- code[class*="language-"],
- pre[class*="language-"] {
- background: hsl(220, 9%, 6%);
- color: hsl(220, 14%, 71%);
- text-shadow: 0 1px rgba(0, 0, 0, 0.3);
- font-family: "Fira Code", "Fira Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace;
- direction: ltr;
- text-align: left;
- white-space: pre;
- word-spacing: normal;
- word-break: normal;
- line-height: 1.5;
- -moz-tab-size: 2;
- -o-tab-size: 2;
- tab-size: 2;
- -webkit-hyphens: none;
- -moz-hyphens: none;
- -ms-hyphens: none;
- hyphens: none;
- }
-
- /* Selection */
- code[class*="language-"]::-moz-selection,
- code[class*="language-"] *::-moz-selection,
- pre[class*="language-"] *::-moz-selection {
- background: hsl(220, 13%, 28%);
- color: inherit;
- text-shadow: none;
- }
-
- code[class*="language-"]::selection,
- code[class*="language-"] *::selection,
- pre[class*="language-"] *::selection {
- background: hsl(220, 13%, 28%);
- color: inherit;
- text-shadow: none;
- }
-
- /* Code blocks */
- pre[class*="language-"] {
- padding: 1em;
- margin: 0.5em 0;
- overflow: auto;
- border-radius: 0.3em;
- }
-
- /* Inline code */
- :not(pre) > code[class*="language-"] {
- padding: 0.2em 0.3em;
- border-radius: 0.3em;
- white-space: normal;
- }
-
- /* Print */
- @media print {
- code[class*="language-"],
- pre[class*="language-"] {
- text-shadow: none;
- }
- }
-
- .token.comment,
- .token.prolog,
- .token.cdata {
- color: hsl(220, 10%, 40%);
- }
-
- .token.doctype,
- .token.punctuation,
- .token.entity {
- color: hsl(220, 14%, 71%);
- }
-
- .token.attr-name,
- .token.class-name,
- .token.boolean,
- .token.constant,
- .token.number,
- .token.atrule {
- color: hsl(29, 54%, 61%);
- }
-
- .token.keyword {
- color: hsl(286, 60%, 67%);
- }
-
- .token.property,
- .token.tag,
- .token.symbol,
- .token.deleted,
- .token.important {
- color: hsl(355, 65%, 65%);
- }
-
- .token.selector,
- .token.string,
- .token.char,
- .token.builtin,
- .token.inserted,
- .token.regex,
- .token.attr-value,
- .token.attr-value > .token.punctuation {
- color: hsl(95, 38%, 62%);
- }
-
- .token.variable,
- .token.operator,
- .token.function {
- color: hsl(207, 82%, 66%);
- }
-
- .token.url {
- color: hsl(187, 47%, 55%);
- }
-
- /* HTML overrides */
- .token.attr-value > .token.punctuation.attr-equals,
- .token.special-attr > .token.attr-value > .token.value.css {
- color: hsl(220, 14%, 71%);
- }
-
- /* CSS overrides */
- .language-css .token.selector {
- color: hsl(355, 65%, 65%);
- }
-
- .language-css .token.property {
- color: hsl(220, 14%, 71%);
- }
-
- .language-css .token.function,
- .language-css .token.url > .token.function {
- color: hsl(187, 47%, 55%);
- }
-
- .language-css .token.url > .token.string.url {
- color: hsl(95, 38%, 62%);
- }
-
- .language-css .token.important,
- .language-css .token.atrule .token.rule {
- color: hsl(286, 60%, 67%);
- }
-
- /* JS overrides */
- .language-javascript .token.operator {
- color: hsl(286, 60%, 67%);
- }
-
- .language-javascript .token.template-string > .token.interpolation > .token.interpolation-punctuation.punctuation {
- color: hsl(5, 48%, 51%);
- }
-
- /* JSON overrides */
- .language-json .token.operator {
- color: hsl(220, 14%, 71%);
- }
-
- .language-json .token.null.keyword {
- color: hsl(29, 54%, 61%);
- }
-
- /* MD overrides */
- .language-markdown .token.url,
- .language-markdown .token.url > .token.operator,
- .language-markdown .token.url-reference.url > .token.string {
- color: hsl(220, 14%, 71%);
- }
-
- .language-markdown .token.url > .token.content {
- color: hsl(207, 82%, 66%);
- }
-
- .language-markdown .token.url > .token.url,
- .language-markdown .token.url-reference.url {
- color: hsl(187, 47%, 55%);
- }
-
- .language-markdown .token.blockquote.punctuation,
- .language-markdown .token.hr.punctuation {
- color: hsl(220, 10%, 40%);
- font-style: italic;
- }
-
- .language-markdown .token.code-snippet {
- color: hsl(95, 38%, 62%);
- }
-
- .language-markdown .token.bold .token.content {
- color: hsl(29, 54%, 61%);
- }
-
- .language-markdown .token.italic .token.content {
- color: hsl(286, 60%, 67%);
- }
-
- .language-markdown .token.strike .token.content,
- .language-markdown .token.strike .token.punctuation,
- .language-markdown .token.list.punctuation,
- .language-markdown .token.title.important > .token.punctuation {
- color: hsl(355, 65%, 65%);
- }
-
- /* General */
- .token.bold {
- font-weight: bold;
- }
-
- .token.comment,
- .token.italic {
- font-style: italic;
- }
-
- .token.entity {
- cursor: help;
- }
-
- .token.namespace {
- opacity: 0.8;
- }
-
- /* Plugin overrides */
- /* Selectors should have higher specificity than those in the plugins' default stylesheets */
-
- /* Show Invisibles plugin overrides */
- .token.token.tab:not(:empty):before,
- .token.token.cr:before,
- .token.token.lf:before,
- .token.token.space:before {
- color: hsla(220, 14%, 71%, 0.15);
- text-shadow: none;
- }
-
- /* Toolbar plugin overrides */
- /* Space out all buttons and move them away from the right edge of the code block */
- div.code-toolbar > .toolbar.toolbar > .toolbar-item {
- margin-right: 0.4em;
- }
-
- /* Styling the buttons */
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > button,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > a,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > span {
- background: hsl(220, 13%, 26%);
- color: hsl(220, 9%, 55%);
- padding: 0.1em 0.4em;
- border-radius: 0.3em;
- }
-
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:hover,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:focus,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:hover,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:focus,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:hover,
- div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:focus {
- background: hsl(220, 13%, 28%);
- color: hsl(220, 14%, 71%);
- }
-
- /* Line Highlight plugin overrides */
- /* The highlighted line itself */
- .line-highlight.line-highlight {
- background: hsla(220, 100%, 80%, 0.04);
- }
-
- /* Default line numbers in Line Highlight plugin */
- .line-highlight.line-highlight:before,
- .line-highlight.line-highlight[data-end]:after {
- background: hsl(220, 13%, 26%);
- color: hsl(220, 14%, 71%);
- padding: 0.1em 0.6em;
- border-radius: 0.3em;
- box-shadow: 0 2px 0 0 rgba(0, 0, 0, 0.2); /* same as Toolbar plugin default */
- }
-
- /* Hovering over a linkable line number (in the gutter area) */
- /* Requires Line Numbers plugin as well */
- pre[id].linkable-line-numbers.linkable-line-numbers span.line-numbers-rows > span:hover:before {
- background-color: hsla(220, 100%, 80%, 0.04);
- }
-
- /* Line Numbers and Command Line plugins overrides */
- /* Line separating gutter from coding area */
- .line-numbers.line-numbers .line-numbers-rows,
- .command-line .command-line-prompt {
- border-right-color: hsla(220, 14%, 71%, 0.15);
- }
-
- /* Stuff in the gutter */
- .line-numbers .line-numbers-rows > span:before,
- .command-line .command-line-prompt > span:before {
- color: hsl(220, 14%, 45%);
- }
-
- /* Match Braces plugin overrides */
- /* Note: Outline colour is inherited from the braces */
- .rainbow-braces .token.token.punctuation.brace-level-1,
- .rainbow-braces .token.token.punctuation.brace-level-5,
- .rainbow-braces .token.token.punctuation.brace-level-9 {
- color: hsl(355, 65%, 65%);
- }
-
- .rainbow-braces .token.token.punctuation.brace-level-2,
- .rainbow-braces .token.token.punctuation.brace-level-6,
- .rainbow-braces .token.token.punctuation.brace-level-10 {
- color: hsl(95, 38%, 62%);
- }
-
- .rainbow-braces .token.token.punctuation.brace-level-3,
- .rainbow-braces .token.token.punctuation.brace-level-7,
- .rainbow-braces .token.token.punctuation.brace-level-11 {
- color: hsl(207, 82%, 66%);
- }
-
- .rainbow-braces .token.token.punctuation.brace-level-4,
- .rainbow-braces .token.token.punctuation.brace-level-8,
- .rainbow-braces .token.token.punctuation.brace-level-12 {
- color: hsl(286, 60%, 67%);
- }
-
- /* Diff Highlight plugin overrides */
- /* Taken from https://github.com/atom/github/blob/master/styles/variables.less */
- pre.diff-highlight > code .token.token.deleted:not(.prefix),
- pre > code.diff-highlight .token.token.deleted:not(.prefix) {
- background-color: hsla(353, 100%, 66%, 0.15);
- }
-
- pre.diff-highlight > code .token.token.deleted:not(.prefix)::-moz-selection,
- pre.diff-highlight > code .token.token.deleted:not(.prefix) *::-moz-selection,
- pre > code.diff-highlight .token.token.deleted:not(.prefix)::-moz-selection,
- pre > code.diff-highlight .token.token.deleted:not(.prefix) *::-moz-selection {
- background-color: hsla(353, 95%, 66%, 0.25);
- }
-
- pre.diff-highlight > code .token.token.deleted:not(.prefix)::selection,
- pre.diff-highlight > code .token.token.deleted:not(.prefix) *::selection,
- pre > code.diff-highlight .token.token.deleted:not(.prefix)::selection,
- pre > code.diff-highlight .token.token.deleted:not(.prefix) *::selection {
- background-color: hsla(353, 95%, 66%, 0.25);
- }
-
- pre.diff-highlight > code .token.token.inserted:not(.prefix),
- pre > code.diff-highlight .token.token.inserted:not(.prefix) {
- background-color: hsla(137, 100%, 55%, 0.15);
- }
-
- pre.diff-highlight > code .token.token.inserted:not(.prefix)::-moz-selection,
- pre.diff-highlight > code .token.token.inserted:not(.prefix) *::-moz-selection,
- pre > code.diff-highlight .token.token.inserted:not(.prefix)::-moz-selection,
- pre > code.diff-highlight .token.token.inserted:not(.prefix) *::-moz-selection {
- background-color: hsla(135, 73%, 55%, 0.25);
- }
-
- pre.diff-highlight > code .token.token.inserted:not(.prefix)::selection,
- pre.diff-highlight > code .token.token.inserted:not(.prefix) *::selection,
- pre > code.diff-highlight .token.token.inserted:not(.prefix)::selection,
- pre > code.diff-highlight .token.token.inserted:not(.prefix) *::selection {
- background-color: hsla(135, 73%, 55%, 0.25);
- }
-
- /* Previewers plugin overrides */
- /* Based on https://github.com/atom-community/atom-ide-datatip/blob/master/styles/atom-ide-datatips.less and https://github.com/atom/atom/blob/master/packages/one-dark-ui */
- /* Border around popup */
- .prism-previewer.prism-previewer:before,
- .prism-previewer-gradient.prism-previewer-gradient div {
- border-color: hsl(224, 13%, 17%);
- }
-
- /* Angle and time should remain as circles and are hence not included */
- .prism-previewer-color.prism-previewer-color:before,
- .prism-previewer-gradient.prism-previewer-gradient div,
- .prism-previewer-easing.prism-previewer-easing:before {
- border-radius: 0.3em;
- }
-
- /* Triangles pointing to the code */
- .prism-previewer.prism-previewer:after {
- border-top-color: hsl(224, 13%, 17%);
- }
-
- .prism-previewer-flipped.prism-previewer-flipped.after {
- border-bottom-color: hsl(224, 13%, 17%);
- }
-
- /* Background colour within the popup */
- .prism-previewer-angle.prism-previewer-angle:before,
- .prism-previewer-time.prism-previewer-time:before,
- .prism-previewer-easing.prism-previewer-easing {
- background: hsl(219, 13%, 22%);
- }
-
- /* For angle, this is the positive area (eg. 90deg will display one quadrant in this colour) */
- /* For time, this is the alternate colour */
- .prism-previewer-angle.prism-previewer-angle circle,
- .prism-previewer-time.prism-previewer-time circle {
- stroke: hsl(220, 14%, 71%);
- stroke-opacity: 1;
- }
-
- /* Stroke colours of the handle, direction point, and vector itself */
- .prism-previewer-easing.prism-previewer-easing circle,
- .prism-previewer-easing.prism-previewer-easing path,
- .prism-previewer-easing.prism-previewer-easing line {
- stroke: hsl(220, 14%, 71%);
- }
-
- /* Fill colour of the handle */
- .prism-previewer-easing.prism-previewer-easing circle {
- fill: transparent;
- }
\ No newline at end of file
+code[class*='language-'],
+pre[class*='language-'] {
+ background: hsl(220, 9%, 6%);
+ color: hsl(220, 14%, 71%);
+ text-shadow: 0 1px rgba(0, 0, 0, 0.3);
+ font-family: 'Fira Code', 'Fira Mono', Menlo, Consolas, 'DejaVu Sans Mono', monospace;
+ direction: ltr;
+ text-align: left;
+ white-space: pre;
+ word-spacing: normal;
+ word-break: normal;
+ line-height: 1.5;
+ -moz-tab-size: 2;
+ -o-tab-size: 2;
+ tab-size: 2;
+ -webkit-hyphens: none;
+ -moz-hyphens: none;
+ -ms-hyphens: none;
+ hyphens: none;
+}
+
+/* Selection */
+code[class*='language-']::-moz-selection,
+code[class*='language-'] *::-moz-selection,
+pre[class*='language-'] *::-moz-selection {
+ background: hsl(220, 13%, 28%);
+ color: inherit;
+ text-shadow: none;
+}
+
+code[class*='language-']::selection,
+code[class*='language-'] *::selection,
+pre[class*='language-'] *::selection {
+ background: hsl(220, 13%, 28%);
+ color: inherit;
+ text-shadow: none;
+}
+
+/* Code blocks */
+pre[class*='language-'] {
+ padding: 1em;
+ margin: 0.5em 0;
+ overflow: auto;
+ border-radius: 0.3em;
+}
+
+/* Inline code */
+:not(pre) > code[class*='language-'] {
+ padding: 0.2em 0.3em;
+ border-radius: 0.3em;
+ white-space: normal;
+}
+
+/* Print */
+@media print {
+ code[class*='language-'],
+ pre[class*='language-'] {
+ text-shadow: none;
+ }
+}
+
+.token.comment,
+.token.prolog,
+.token.cdata {
+ color: hsl(220, 10%, 40%);
+}
+
+.token.doctype,
+.token.punctuation,
+.token.entity {
+ color: hsl(220, 14%, 71%);
+}
+
+.token.attr-name,
+.token.class-name,
+.token.boolean,
+.token.constant,
+.token.number,
+.token.atrule {
+ color: hsl(29, 54%, 61%);
+}
+
+.token.keyword {
+ color: hsl(286, 60%, 67%);
+}
+
+.token.property,
+.token.tag,
+.token.symbol,
+.token.deleted,
+.token.important {
+ color: hsl(355, 65%, 65%);
+}
+
+.token.selector,
+.token.string,
+.token.char,
+.token.builtin,
+.token.inserted,
+.token.regex,
+.token.attr-value,
+.token.attr-value > .token.punctuation {
+ color: hsl(95, 38%, 62%);
+}
+
+.token.variable,
+.token.operator,
+.token.function {
+ color: hsl(207, 82%, 66%);
+}
+
+.token.url {
+ color: hsl(187, 47%, 55%);
+}
+
+/* HTML overrides */
+.token.attr-value > .token.punctuation.attr-equals,
+.token.special-attr > .token.attr-value > .token.value.css {
+ color: hsl(220, 14%, 71%);
+}
+
+/* CSS overrides */
+.language-css .token.selector {
+ color: hsl(355, 65%, 65%);
+}
+
+.language-css .token.property {
+ color: hsl(220, 14%, 71%);
+}
+
+.language-css .token.function,
+.language-css .token.url > .token.function {
+ color: hsl(187, 47%, 55%);
+}
+
+.language-css .token.url > .token.string.url {
+ color: hsl(95, 38%, 62%);
+}
+
+.language-css .token.important,
+.language-css .token.atrule .token.rule {
+ color: hsl(286, 60%, 67%);
+}
+
+/* JS overrides */
+.language-javascript .token.operator {
+ color: hsl(286, 60%, 67%);
+}
+
+.language-javascript
+ .token.template-string
+ > .token.interpolation
+ > .token.interpolation-punctuation.punctuation {
+ color: hsl(5, 48%, 51%);
+}
+
+/* JSON overrides */
+.language-json .token.operator {
+ color: hsl(220, 14%, 71%);
+}
+
+.language-json .token.null.keyword {
+ color: hsl(29, 54%, 61%);
+}
+
+/* MD overrides */
+.language-markdown .token.url,
+.language-markdown .token.url > .token.operator,
+.language-markdown .token.url-reference.url > .token.string {
+ color: hsl(220, 14%, 71%);
+}
+
+.language-markdown .token.url > .token.content {
+ color: hsl(207, 82%, 66%);
+}
+
+.language-markdown .token.url > .token.url,
+.language-markdown .token.url-reference.url {
+ color: hsl(187, 47%, 55%);
+}
+
+.language-markdown .token.blockquote.punctuation,
+.language-markdown .token.hr.punctuation {
+ color: hsl(220, 10%, 40%);
+ font-style: italic;
+}
+
+.language-markdown .token.code-snippet {
+ color: hsl(95, 38%, 62%);
+}
+
+.language-markdown .token.bold .token.content {
+ color: hsl(29, 54%, 61%);
+}
+
+.language-markdown .token.italic .token.content {
+ color: hsl(286, 60%, 67%);
+}
+
+.language-markdown .token.strike .token.content,
+.language-markdown .token.strike .token.punctuation,
+.language-markdown .token.list.punctuation,
+.language-markdown .token.title.important > .token.punctuation {
+ color: hsl(355, 65%, 65%);
+}
+
+/* General */
+.token.bold {
+ font-weight: bold;
+}
+
+.token.comment,
+.token.italic {
+ font-style: italic;
+}
+
+.token.entity {
+ cursor: help;
+}
+
+.token.namespace {
+ opacity: 0.8;
+}
+
+/* Plugin overrides */
+/* Selectors should have higher specificity than those in the plugins' default stylesheets */
+
+/* Show Invisibles plugin overrides */
+.token.token.tab:not(:empty):before,
+.token.token.cr:before,
+.token.token.lf:before,
+.token.token.space:before {
+ color: hsla(220, 14%, 71%, 0.15);
+ text-shadow: none;
+}
+
+/* Toolbar plugin overrides */
+/* Space out all buttons and move them away from the right edge of the code block */
+div.code-toolbar > .toolbar.toolbar > .toolbar-item {
+ margin-right: 0.4em;
+}
+
+/* Styling the buttons */
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > button,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > a,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > span {
+ background: hsl(220, 13%, 26%);
+ color: hsl(220, 9%, 55%);
+ padding: 0.1em 0.4em;
+ border-radius: 0.3em;
+}
+
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:hover,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:focus,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:hover,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:focus,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:hover,
+div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:focus {
+ background: hsl(220, 13%, 28%);
+ color: hsl(220, 14%, 71%);
+}
+
+/* Line Highlight plugin overrides */
+/* The highlighted line itself */
+.line-highlight.line-highlight {
+ background: hsla(220, 100%, 80%, 0.04);
+}
+
+/* Default line numbers in Line Highlight plugin */
+.line-highlight.line-highlight:before,
+.line-highlight.line-highlight[data-end]:after {
+ background: hsl(220, 13%, 26%);
+ color: hsl(220, 14%, 71%);
+ padding: 0.1em 0.6em;
+ border-radius: 0.3em;
+ box-shadow: 0 2px 0 0 rgba(0, 0, 0, 0.2); /* same as Toolbar plugin default */
+}
+
+/* Hovering over a linkable line number (in the gutter area) */
+/* Requires Line Numbers plugin as well */
+pre[id].linkable-line-numbers.linkable-line-numbers span.line-numbers-rows > span:hover:before {
+ background-color: hsla(220, 100%, 80%, 0.04);
+}
+
+/* Line Numbers and Command Line plugins overrides */
+/* Line separating gutter from coding area */
+.line-numbers.line-numbers .line-numbers-rows,
+.command-line .command-line-prompt {
+ border-right-color: hsla(220, 14%, 71%, 0.15);
+}
+
+/* Stuff in the gutter */
+.line-numbers .line-numbers-rows > span:before,
+.command-line .command-line-prompt > span:before {
+ color: hsl(220, 14%, 45%);
+}
+
+/* Match Braces plugin overrides */
+/* Note: Outline colour is inherited from the braces */
+.rainbow-braces .token.token.punctuation.brace-level-1,
+.rainbow-braces .token.token.punctuation.brace-level-5,
+.rainbow-braces .token.token.punctuation.brace-level-9 {
+ color: hsl(355, 65%, 65%);
+}
+
+.rainbow-braces .token.token.punctuation.brace-level-2,
+.rainbow-braces .token.token.punctuation.brace-level-6,
+.rainbow-braces .token.token.punctuation.brace-level-10 {
+ color: hsl(95, 38%, 62%);
+}
+
+.rainbow-braces .token.token.punctuation.brace-level-3,
+.rainbow-braces .token.token.punctuation.brace-level-7,
+.rainbow-braces .token.token.punctuation.brace-level-11 {
+ color: hsl(207, 82%, 66%);
+}
+
+.rainbow-braces .token.token.punctuation.brace-level-4,
+.rainbow-braces .token.token.punctuation.brace-level-8,
+.rainbow-braces .token.token.punctuation.brace-level-12 {
+ color: hsl(286, 60%, 67%);
+}
+
+/* Diff Highlight plugin overrides */
+/* Taken from https://github.com/atom/github/blob/master/styles/variables.less */
+pre.diff-highlight > code .token.token.deleted:not(.prefix),
+pre > code.diff-highlight .token.token.deleted:not(.prefix) {
+ background-color: hsla(353, 100%, 66%, 0.15);
+}
+
+pre.diff-highlight > code .token.token.deleted:not(.prefix)::-moz-selection,
+pre.diff-highlight > code .token.token.deleted:not(.prefix) *::-moz-selection,
+pre > code.diff-highlight .token.token.deleted:not(.prefix)::-moz-selection,
+pre > code.diff-highlight .token.token.deleted:not(.prefix) *::-moz-selection {
+ background-color: hsla(353, 95%, 66%, 0.25);
+}
+
+pre.diff-highlight > code .token.token.deleted:not(.prefix)::selection,
+pre.diff-highlight > code .token.token.deleted:not(.prefix) *::selection,
+pre > code.diff-highlight .token.token.deleted:not(.prefix)::selection,
+pre > code.diff-highlight .token.token.deleted:not(.prefix) *::selection {
+ background-color: hsla(353, 95%, 66%, 0.25);
+}
+
+pre.diff-highlight > code .token.token.inserted:not(.prefix),
+pre > code.diff-highlight .token.token.inserted:not(.prefix) {
+ background-color: hsla(137, 100%, 55%, 0.15);
+}
+
+pre.diff-highlight > code .token.token.inserted:not(.prefix)::-moz-selection,
+pre.diff-highlight > code .token.token.inserted:not(.prefix) *::-moz-selection,
+pre > code.diff-highlight .token.token.inserted:not(.prefix)::-moz-selection,
+pre > code.diff-highlight .token.token.inserted:not(.prefix) *::-moz-selection {
+ background-color: hsla(135, 73%, 55%, 0.25);
+}
+
+pre.diff-highlight > code .token.token.inserted:not(.prefix)::selection,
+pre.diff-highlight > code .token.token.inserted:not(.prefix) *::selection,
+pre > code.diff-highlight .token.token.inserted:not(.prefix)::selection,
+pre > code.diff-highlight .token.token.inserted:not(.prefix) *::selection {
+ background-color: hsla(135, 73%, 55%, 0.25);
+}
+
+/* Previewers plugin overrides */
+/* Based on https://github.com/atom-community/atom-ide-datatip/blob/master/styles/atom-ide-datatips.less and https://github.com/atom/atom/blob/master/packages/one-dark-ui */
+/* Border around popup */
+.prism-previewer.prism-previewer:before,
+.prism-previewer-gradient.prism-previewer-gradient div {
+ border-color: hsl(224, 13%, 17%);
+}
+
+/* Angle and time should remain as circles and are hence not included */
+.prism-previewer-color.prism-previewer-color:before,
+.prism-previewer-gradient.prism-previewer-gradient div,
+.prism-previewer-easing.prism-previewer-easing:before {
+ border-radius: 0.3em;
+}
+
+/* Triangles pointing to the code */
+.prism-previewer.prism-previewer:after {
+ border-top-color: hsl(224, 13%, 17%);
+}
+
+.prism-previewer-flipped.prism-previewer-flipped.after {
+ border-bottom-color: hsl(224, 13%, 17%);
+}
+
+/* Background colour within the popup */
+.prism-previewer-angle.prism-previewer-angle:before,
+.prism-previewer-time.prism-previewer-time:before,
+.prism-previewer-easing.prism-previewer-easing {
+ background: hsl(219, 13%, 22%);
+}
+
+/* For angle, this is the positive area (eg. 90deg will display one quadrant in this colour) */
+/* For time, this is the alternate colour */
+.prism-previewer-angle.prism-previewer-angle circle,
+.prism-previewer-time.prism-previewer-time circle {
+ stroke: hsl(220, 14%, 71%);
+ stroke-opacity: 1;
+}
+
+/* Stroke colours of the handle, direction point, and vector itself */
+.prism-previewer-easing.prism-previewer-easing circle,
+.prism-previewer-easing.prism-previewer-easing path,
+.prism-previewer-easing.prism-previewer-easing line {
+ stroke: hsl(220, 14%, 71%);
+}
+
+/* Fill colour of the handle */
+.prism-previewer-easing.prism-previewer-easing circle {
+ fill: transparent;
+}
diff --git a/apps/landing/src/components/AppEmbed.tsx b/apps/landing/src/components/AppEmbed.tsx
index 229835b05..4503a60f1 100644
--- a/apps/landing/src/components/AppEmbed.tsx
+++ b/apps/landing/src/components/AppEmbed.tsx
@@ -4,93 +4,93 @@ import { useEffect } from 'react';
import { isMobile } from 'react-device-detect';
export default function AppEmbed() {
- const [showApp, setShowApp] = useState(false);
- const [iFrameAppReady, setIframeAppReady] = useState(false);
- const [forceImg, setForceImg] = useState(false);
- const [imgFallback, setImageFallback] = useState(false);
- const iFrame = useRef(null);
+ const [showApp, setShowApp] = useState(false);
+ const [iFrameAppReady, setIframeAppReady] = useState(false);
+ const [forceImg, setForceImg] = useState(false);
+ const [imgFallback, setImageFallback] = useState(false);
+ const iFrame = useRef(null);
- function handleResize() {
- if (window.innerWidth < 1000) {
- setForceImg(true);
- } else if (forceImg) {
- setForceImg(false);
- }
- }
+ function handleResize() {
+ if (window.innerWidth < 1000) {
+ setForceImg(true);
+ } else if (forceImg) {
+ setForceImg(false);
+ }
+ }
- useEffect(() => {
- window.addEventListener('resize', handleResize);
- handleResize();
- return () => window.removeEventListener('resize', handleResize);
- }, []);
+ useEffect(() => {
+ window.addEventListener('resize', handleResize);
+ handleResize();
+ return () => window.removeEventListener('resize', handleResize);
+ }, []);
- function handleEvent(e: any) {
- if (e.data === 'spacedrive-hello') {
- if (!iFrameAppReady) setIframeAppReady(true);
- }
- }
+ function handleEvent(e: any) {
+ if (e.data === 'spacedrive-hello') {
+ if (!iFrameAppReady) setIframeAppReady(true);
+ }
+ }
- // after five minutes kill the live demo
- useEffect(() => {
- const timer = setTimeout(() => {
- setIframeAppReady(false);
- }, 300000);
- return () => clearTimeout(timer);
- }, []);
+ // after five minutes kill the live demo
+ useEffect(() => {
+ const timer = setTimeout(() => {
+ setIframeAppReady(false);
+ }, 300000);
+ return () => clearTimeout(timer);
+ }, []);
- useEffect(() => {
- window.addEventListener('message', handleEvent, false);
- setShowApp(true);
+ useEffect(() => {
+ window.addEventListener('message', handleEvent, false);
+ setShowApp(true);
- return () => window.removeEventListener('message', handleEvent);
- }, []);
+ return () => window.removeEventListener('message', handleEvent);
+ }, []);
- useEffect(() => {
- setTimeout(() => {
- if (!iFrameAppReady) setImageFallback(true);
- }, 1500);
- }, []);
+ useEffect(() => {
+ setTimeout(() => {
+ if (!iFrameAppReady) setImageFallback(true);
+ }, 1500);
+ }, []);
- const renderImage = (imgFallback && !iFrameAppReady) || forceImg;
+ const renderImage = (imgFallback && !iFrameAppReady) || forceImg;
- const renderBloom = renderImage || iFrameAppReady;
+ const renderBloom = renderImage || iFrameAppReady;
- return (
-
- {renderBloom && (
-
- )}
-
-
- {showApp && !forceImg && (
-
- )}
+ return (
+
+ {renderBloom && (
+
+ )}
+
+
+ {showApp && !forceImg && (
+
+ )}
- {renderImage &&
}
-
-
-
- );
+ {renderImage &&
}
+
+
+
+ );
}
diff --git a/apps/landing/src/components/Bubbles.tsx b/apps/landing/src/components/Bubbles.tsx
index 067ad4d47..1e6ff5030 100644
--- a/apps/landing/src/components/Bubbles.tsx
+++ b/apps/landing/src/components/Bubbles.tsx
@@ -3,70 +3,70 @@ import Particles from 'react-tsparticles';
import { loadFull } from 'tsparticles';
export const Bubbles = () => {
- const particlesInit = async (main: any) => {
- console.log(main);
- await loadFull(main);
- };
+ const particlesInit = async (main: any) => {
+ console.log(main);
+ await loadFull(main);
+ };
- const particlesLoaded = (container: any) => {
- console.log(container);
- };
+ const particlesLoaded = (container: any) => {
+ console.log(container);
+ };
- return (
- //@ts-ignore
-
- );
+ return (
+ //@ts-ignore
+
+ );
};
diff --git a/apps/landing/src/components/Footer.tsx b/apps/landing/src/components/Footer.tsx
index ef91d32ed..dc2a1fbd1 100644
--- a/apps/landing/src/components/Footer.tsx
+++ b/apps/landing/src/components/Footer.tsx
@@ -1,103 +1,103 @@
import React from 'react';
import { ReactComponent as AppLogo } from '../assets/app-logo.svg';
import {
- Twitter,
- Discord,
- Instagram,
- Github,
- Opencollective,
- Twitch
+ Twitter,
+ Discord,
+ Instagram,
+ Github,
+ Opencollective,
+ Twitch
} from '@icons-pack/react-simple-icons';
function FooterLink(props: { children: string | JSX.Element; link: string }) {
- return (
-
- {props.children}
-
- );
+ return (
+
+ {props.children}
+
+ );
}
export function Footer() {
- return (
-
+ );
}
diff --git a/apps/landing/src/components/Markdown.tsx b/apps/landing/src/components/Markdown.tsx
index 99a10c06e..ae6ff2c75 100644
--- a/apps/landing/src/components/Markdown.tsx
+++ b/apps/landing/src/components/Markdown.tsx
@@ -5,20 +5,20 @@ import 'prismjs/components/prism-rust';
import '../atom-one.css';
interface MarkdownPageProps {
- children: React.ReactNode;
+ children: React.ReactNode;
}
function MarkdownPage(props: MarkdownPageProps) {
- useEffect(() => {
- Prism.highlightAll();
- }, []);
- return (
-
- );
+ useEffect(() => {
+ Prism.highlightAll();
+ }, []);
+ return (
+
+ );
}
export default MarkdownPage;
diff --git a/apps/landing/src/components/NavBar.tsx b/apps/landing/src/components/NavBar.tsx
index 362c9f884..37dc9beda 100644
--- a/apps/landing/src/components/NavBar.tsx
+++ b/apps/landing/src/components/NavBar.tsx
@@ -5,125 +5,125 @@ import { Link, List, MapPin, Question } from 'phosphor-react';
import { ReactComponent as AppLogo } from '../assets/app-logo.svg';
import { Discord, Github } from '@icons-pack/react-simple-icons';
import {
- ClockIcon,
- CogIcon,
- HeartIcon,
- LockClosedIcon,
- MapIcon,
- QuestionMarkCircleIcon
+ ClockIcon,
+ CogIcon,
+ HeartIcon,
+ LockClosedIcon,
+ MapIcon,
+ QuestionMarkCircleIcon
} from '@heroicons/react/solid';
function NavLink(props: { link?: string; children: string }) {
- return (
-
- {props.children}
-
- );
+ return (
+
+ {props.children}
+
+ );
}
export default function NavBar() {
- const [isAtTop, setIsAtTop] = useState(window.pageYOffset < 20);
+ const [isAtTop, setIsAtTop] = useState(window.pageYOffset < 20);
- function onScroll(event: Event) {
- if (window.pageYOffset < 20) setIsAtTop(true);
- else if (isAtTop) setIsAtTop(false);
- }
+ function onScroll(event: Event) {
+ if (window.pageYOffset < 20) setIsAtTop(true);
+ else if (isAtTop) setIsAtTop(false);
+ }
- useEffect(() => {
- window.addEventListener('scroll', onScroll);
- return () => window.removeEventListener('scroll', onScroll);
- }, []);
+ useEffect(() => {
+ window.addEventListener('scroll', onScroll);
+ return () => window.removeEventListener('scroll', onScroll);
+ }, []);
- return (
-
-
-
-
-
- Spacedrive
- {/* ALPHA */}
-
-
+ return (
+
+
+
+
+
+ Spacedrive
+ {/* ALPHA */}
+
+
-
-
Roadmap
-
FAQ
-
Team
- {/*
Changelog
+
+ Roadmap
+ FAQ
+ Team
+ {/* Changelog
Privacy */}
- Sponsor us
-
-
- (window.location.href = 'https://github.com/spacedriveapp/spacedrive')
- },
- {
- name: 'Join Discord',
- icon: Discord,
- onPress: () => (window.location.href = 'https://discord.gg/gTaF2Z44f5')
- }
- ],
- [
- {
- name: 'Roadmap',
- icon: MapIcon,
- onPress: () => (window.location.href = '/roadmap'),
- selected: window.location.href.includes('/roadmap')
- },
- {
- name: 'FAQ',
- icon: QuestionMarkCircleIcon,
- onPress: () => (window.location.href = '/faq'),
- selected: window.location.href.includes('/faq')
- },
- // {
- // name: 'Changelog',
- // icon: ClockIcon,
- // onPress: () => (window.location.href = '/changelog'),
- // selected: window.location.href.includes('/changelog')
- // },
- // {
- // name: 'Privacy',
- // icon: LockClosedIcon,
- // onPress: () => (window.location.href = '/privacy'),
- // selected: window.location.href.includes('/privacy')
- // },
- {
- name: 'Sponsor us',
- icon: HeartIcon,
- onPress: () => (window.location.href = 'https://opencollective.com/spacedrive')
- }
- ]
- ]}
- buttonIcon={
}
- buttonProps={{ className: '!p-1 ml-[140px]' }}
- />
-
-
-
- );
+
Sponsor us
+
+
+ (window.location.href = 'https://github.com/spacedriveapp/spacedrive')
+ },
+ {
+ name: 'Join Discord',
+ icon: Discord,
+ onPress: () => (window.location.href = 'https://discord.gg/gTaF2Z44f5')
+ }
+ ],
+ [
+ {
+ name: 'Roadmap',
+ icon: MapIcon,
+ onPress: () => (window.location.href = '/roadmap'),
+ selected: window.location.href.includes('/roadmap')
+ },
+ {
+ name: 'FAQ',
+ icon: QuestionMarkCircleIcon,
+ onPress: () => (window.location.href = '/faq'),
+ selected: window.location.href.includes('/faq')
+ },
+ // {
+ // name: 'Changelog',
+ // icon: ClockIcon,
+ // onPress: () => (window.location.href = '/changelog'),
+ // selected: window.location.href.includes('/changelog')
+ // },
+ // {
+ // name: 'Privacy',
+ // icon: LockClosedIcon,
+ // onPress: () => (window.location.href = '/privacy'),
+ // selected: window.location.href.includes('/privacy')
+ // },
+ {
+ name: 'Sponsor us',
+ icon: HeartIcon,
+ onPress: () => (window.location.href = 'https://opencollective.com/spacedrive')
+ }
+ ]
+ ]}
+ buttonIcon={
}
+ buttonProps={{ className: '!p-1 ml-[140px]' }}
+ />
+
+
+
+ );
}
diff --git a/apps/landing/src/main.tsx b/apps/landing/src/main.tsx
index eb2304ed2..6bfb8c9af 100644
--- a/apps/landing/src/main.tsx
+++ b/apps/landing/src/main.tsx
@@ -11,33 +11,33 @@ import './style.scss';
import { Button } from '@sd/ui';
function App() {
- return (
- Loading...
}>
-
-
- Skip to content
-
+ return (
+
Loading...}>
+
+
+ Skip to content
+
-
-
- {useRoutes(routes)}
-
-
-
-
- );
+
+
+ {useRoutes(routes)}
+
+
+
+
+ );
}
const root = createRoot(document.getElementById('root')!);
root.render(
-
-
-
-
-
+
+
+
+
+
);
diff --git a/apps/landing/src/pages/[...all].tsx b/apps/landing/src/pages/[...all].tsx
index 3680c7cca..291204cd7 100644
--- a/apps/landing/src/pages/[...all].tsx
+++ b/apps/landing/src/pages/[...all].tsx
@@ -5,31 +5,31 @@ import { Button } from '@sd/ui';
import { SmileyXEyes } from 'phosphor-react';
function Page() {
- return (
-
-
- Not Found - Spacedrive
-
-
-
-
In the quantum realm this page potentially exists.
-
In other words, thats a 404.
-
-
- ← Back
-
-
- Discover Spacedrive →
-
-
-
-
-
- );
+ return (
+
+
+ Not Found - Spacedrive
+
+
+
+
In the quantum realm this page potentially exists.
+
In other words, thats a 404.
+
+
+ ← Back
+
+
+ Discover Spacedrive →
+
+
+
+
+
+ );
}
export default Page;
diff --git a/apps/landing/src/pages/changelog.tsx b/apps/landing/src/pages/changelog.tsx
index 76b1588d7..49e11cdf9 100644
--- a/apps/landing/src/pages/changelog.tsx
+++ b/apps/landing/src/pages/changelog.tsx
@@ -4,15 +4,15 @@ import { ReactComponent as Content } from '~/docs/changelog/index.md';
import { Helmet } from 'react-helmet';
function Page() {
- return (
-
-
- Changelog - Spacedrive
-
-
-
-
- );
+ return (
+
+
+ Changelog - Spacedrive
+
+
+
+
+ );
}
export default Page;
diff --git a/apps/landing/src/pages/docs/architecture/distributed-data-sync.tsx b/apps/landing/src/pages/docs/architecture/distributed-data-sync.tsx
index c10371873..d11d9f643 100644
--- a/apps/landing/src/pages/docs/architecture/distributed-data-sync.tsx
+++ b/apps/landing/src/pages/docs/architecture/distributed-data-sync.tsx
@@ -4,18 +4,18 @@ import { ReactComponent as Content } from '~/docs/architecture/distributed-data-
import { Helmet } from 'react-helmet';
function Page() {
- return (
-
-
- Distributed Data Sync - Spacedrive Documentation
-
-
-
-
- );
+ return (
+
+
+ Distributed Data Sync - Spacedrive Documentation
+
+
+
+
+ );
}
export default Page;
diff --git a/apps/landing/src/pages/faq.tsx b/apps/landing/src/pages/faq.tsx
index f492bf06f..affeabf1a 100644
--- a/apps/landing/src/pages/faq.tsx
+++ b/apps/landing/src/pages/faq.tsx
@@ -4,15 +4,15 @@ import { ReactComponent as Content } from '~/docs/product/faq.md';
import { Helmet } from 'react-helmet';
function Page() {
- return (
-
-
- FAQ - Spacedrive
-
-
-
-
- );
+ return (
+
+
+ FAQ - Spacedrive
+
+
+
+
+ );
}
export default Page;
diff --git a/apps/landing/src/pages/index.tsx b/apps/landing/src/pages/index.tsx
index 41e8dafda..f08bf3c37 100644
--- a/apps/landing/src/pages/index.tsx
+++ b/apps/landing/src/pages/index.tsx
@@ -6,98 +6,98 @@ import clsx from 'clsx';
import AppEmbed from '../components/AppEmbed';
interface SectionProps {
- orientation: 'left' | 'right';
- heading?: string;
- description?: string | React.ReactNode;
- children?: React.ReactNode;
- className?: string;
+ orientation: 'left' | 'right';
+ heading?: string;
+ description?: string | React.ReactNode;
+ children?: React.ReactNode;
+ className?: string;
}
function Section(props: SectionProps = { orientation: 'left' }) {
- let info = (
-
- {props.heading &&
{props.heading} }
- {props.description &&
{props.description}
}
-
- );
- return (
-
- {props.orientation === 'right' ? (
- <>
- {info}
- {props.children}
- >
- ) : (
- <>
- {props.children}
- {info}
- >
- )}
-
- );
+ let info = (
+
+ {props.heading &&
{props.heading} }
+ {props.description &&
{props.description}
}
+
+ );
+ return (
+
+ {props.orientation === 'right' ? (
+ <>
+ {info}
+ {props.children}
+ >
+ ) : (
+ <>
+ {props.children}
+ {info}
+ >
+ )}
+
+ );
}
function Page() {
- return (
- <>
-
+ return (
+ <>
+
-
- A file explorer from the future.
-
-
- Combine your drives and clouds into one database that you can organize and explore from any
- device.
-
-
- Designed for creators, hoarders and the painfully disorganized.
-
-
-
-
-
- Star on GitHub
-
-
-
- Coming soon on macOS, Windows and Linux.
-
- Shortly after to iOS & Android.
-
+
+ A file explorer from the future.
+
+
+ Combine your drives and clouds into one database that you can organize and explore from any
+ device.
+
+
+ Designed for creators, hoarders and the painfully disorganized.
+
+
+
+
+
+ Star on GitHub
+
+
+
+ Coming soon on macOS, Windows and Linux.
+
+ Shortly after to iOS & Android.
+
-
-
- Spacedrive accounts for every file you own, uniquely fingerprinting and extracting
- metadata so you can sort, tag, backup and share files without limitations of any one
- cloud provider.
-
-
-
- Find out more →
-
- >
- }
- />
-
- >
- );
+
+
+ Spacedrive accounts for every file you own, uniquely fingerprinting and extracting
+ metadata so you can sort, tag, backup and share files without limitations of any one
+ cloud provider.
+
+
+
+ Find out more →
+
+ >
+ }
+ />
+
+ >
+ );
}
export default Page;
diff --git a/apps/landing/src/pages/roadmap.tsx b/apps/landing/src/pages/roadmap.tsx
index a2d6970b9..7a67107b1 100644
--- a/apps/landing/src/pages/roadmap.tsx
+++ b/apps/landing/src/pages/roadmap.tsx
@@ -5,18 +5,18 @@ import { Helmet } from 'react-helmet';
import { ReactComponent as Folder } from '../../../../packages/interface/src/assets/svg/folder.svg';
function Page() {
- return (
-
-
- Roadmap - Spacedrive
-
-
-
-
-
-
-
- );
+ return (
+
+
+ Roadmap - Spacedrive
+
+
+
+
+
+
+
+ );
}
export default Page;
diff --git a/apps/landing/src/pages/team.tsx b/apps/landing/src/pages/team.tsx
index 36c389aea..d737e0146 100644
--- a/apps/landing/src/pages/team.tsx
+++ b/apps/landing/src/pages/team.tsx
@@ -4,17 +4,17 @@ import { ReactComponent as Content } from '~/docs/product/credits.md';
import { Helmet } from 'react-helmet';
function Page() {
- return (
-
-
- Our Team - Spacedrive
-
-
-
-
-
-
- );
+ return (
+
+
+ Our Team - Spacedrive
+
+
+
+
+
+
+ );
}
export default Page;
diff --git a/apps/landing/src/style.scss b/apps/landing/src/style.scss
index fcc5b724c..7385140a4 100644
--- a/apps/landing/src/style.scss
+++ b/apps/landing/src/style.scss
@@ -1,88 +1,84 @@
html {
- @apply bg-black;
- -ms-overflow-style: none; /* IE and Edge */
- scrollbar-width: none; /* Firefox */
- &::-webkit-scrollbar {
- display: none;
- }
+ @apply bg-black;
+ -ms-overflow-style: none; /* IE and Edge */
+ scrollbar-width: none; /* Firefox */
+ &::-webkit-scrollbar {
+ display: none;
+ }
}
-
.landing-img {
- background-image: url('/app.png');
- background-size: contain;
- background-repeat: no-repeat;
- background-position: center top;
+ background-image: url('/app.png');
+ background-size: contain;
+ background-repeat: no-repeat;
+ background-position: center top;
}
.fade-in-app-embed {
- animation: fadeInUp 3s;
- -webkit-animation: fadeInUp 3s;
- -moz-animation: fadeInUp 3s;
- -o-animation: fadeInUp 3s;
- -ms-animation: fadeInUp 3s;
+ animation: fadeInUp 3s;
+ -webkit-animation: fadeInUp 3s;
+ -moz-animation: fadeInUp 3s;
+ -o-animation: fadeInUp 3s;
+ -ms-animation: fadeInUp 3s;
}
-
-
.fade-in-heading {
- animation: fadeInUp 1s;
+ animation: fadeInUp 1s;
}
@keyframes fadeInUp {
- 0% {
- opacity:0;
- // transform: translateY(10px);
-}
- 100% {
- opacity:1;
+ 0% {
+ opacity: 0;
+ // transform: translateY(10px);
+ }
+ 100% {
+ opacity: 1;
- // transform: translateY(0px);
- }
+ // transform: translateY(0px);
+ }
}
-
.bloom {
- @apply absolute w-96 h-96;
- will-change: opacity;
- opacity: 0;
- filter: blur(160px);
- border-radius: 50%;
- transform: scale(1.5);
- animation-name: bloomBurst;
- animation-duration: 1s;
- animation-timing-function: ease-in-out;
- animation-fill-mode: forwards;
- animation-iteration-count: 1;
- animation-direction: forwards;
- &.bloom-one {
- background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca);
- animation-delay: 500ms;
- }
- &.bloom-two {
- background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1D054B);
- animation-delay: 300ms;
- }
- &.bloom-three {
- background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1D054B);
- animation-delay: 1100ms;
- }
+ @apply absolute w-96 h-96;
+ will-change: opacity;
+ opacity: 0;
+ filter: blur(160px);
+ border-radius: 50%;
+ transform: scale(1.5);
+ animation-name: bloomBurst;
+ animation-duration: 1s;
+ animation-timing-function: ease-in-out;
+ animation-fill-mode: forwards;
+ animation-iteration-count: 1;
+ animation-direction: forwards;
+ &.bloom-one {
+ background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca);
+ animation-delay: 500ms;
+ }
+ &.bloom-two {
+ background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1d054b);
+ animation-delay: 300ms;
+ }
+ &.bloom-three {
+ background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1d054b);
+ animation-delay: 1100ms;
+ }
}
@keyframes bloomBurst {
- from {
- opacity: 0;
- }
- 40% {
- opacity: 1;
- }
- to {
- opacity: 0.6;
- }
+ from {
+ opacity: 0;
+ }
+ 40% {
+ opacity: 1;
+ }
+ to {
+ opacity: 0.6;
+ }
}
.shadow-iframe {
- box-shadow: 0px 0px 100px 0px rgba(0,0,0,0.5);
+ box-shadow: 0px 0px 100px 0px rgba(0, 0, 0, 0.5);
}
// Gradient colors
@@ -90,4 +86,4 @@ html {
// #7A1D77
// #8E4CAB
// #1D054B
-// #9A3F8C
\ No newline at end of file
+// #9A3F8C
diff --git a/apps/landing/src/vite-env.d.ts b/apps/landing/src/vite-env.d.ts
index 8ceb097b1..21141b149 100644
--- a/apps/landing/src/vite-env.d.ts
+++ b/apps/landing/src/vite-env.d.ts
@@ -2,24 +2,24 @@
///
interface ImportMetaEnv {
- readonly VITE_SDWEB_BASE_URL: string;
+ readonly VITE_SDWEB_BASE_URL: string;
}
interface ImportMeta {
- readonly env: ImportMetaEnv;
+ readonly env: ImportMetaEnv;
}
declare module '*.md' {
- // "unknown" would be more detailed depends on how you structure frontmatter
- const attributes: Record;
+ // "unknown" would be more detailed depends on how you structure frontmatter
+ const attributes: Record;
- // When "Mode.TOC" is requested
- const toc: { level: string; content: string }[];
+ // When "Mode.TOC" is requested
+ const toc: { level: string; content: string }[];
- // When "Mode.HTML" is requested
- const html: string;
+ // When "Mode.HTML" is requested
+ const html: string;
- // When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
- import React from 'react';
- const ReactComponent: React.VFC;
+ // When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
+ import React from 'react';
+ const ReactComponent: React.VFC;
}
diff --git a/apps/landing/tsconfig.json b/apps/landing/tsconfig.json
index c44e3731e..168f12434 100644
--- a/apps/landing/tsconfig.json
+++ b/apps/landing/tsconfig.json
@@ -1,5 +1,5 @@
{
- "extends": "../../packages/config/interface.tsconfig.json",
- "compilerOptions": {},
- "include": ["src"]
+ "extends": "../../packages/config/interface.tsconfig.json",
+ "compilerOptions": {},
+ "include": ["src"]
}
diff --git a/apps/landing/vercel.json b/apps/landing/vercel.json
index 3a48e56ba..8f728db13 100644
--- a/apps/landing/vercel.json
+++ b/apps/landing/vercel.json
@@ -1,3 +1,3 @@
{
- "rewrites": [{ "source": "/(.*)", "destination": "/" }]
+ "rewrites": [{ "source": "/(.*)", "destination": "/" }]
}
diff --git a/apps/landing/vite.config.ts b/apps/landing/vite.config.ts
index d53d7b7bc..4bb5154ca 100644
--- a/apps/landing/vite.config.ts
+++ b/apps/landing/vite.config.ts
@@ -6,23 +6,23 @@ import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/
export default defineConfig({
- // @ts-ignore
- plugins: [
- react(),
- pages({
- dirs: 'src/pages'
- // onRoutesGenerated: (routes) => generateSitemap({ routes })
- }),
- svg(),
- md({ mode: [Mode.REACT] })
- ],
- resolve: {
- alias: {
- '~/docs': __dirname + '../../../docs'
- }
- },
- server: {
- port: 8003
- },
- publicDir: 'public'
+ // @ts-ignore
+ plugins: [
+ react(),
+ pages({
+ dirs: 'src/pages'
+ // onRoutesGenerated: (routes) => generateSitemap({ routes })
+ }),
+ svg(),
+ md({ mode: [Mode.REACT] })
+ ],
+ resolve: {
+ alias: {
+ '~/docs': __dirname + '../../../docs'
+ }
+ },
+ server: {
+ port: 8003
+ },
+ publicDir: 'public'
});
diff --git a/apps/mobile/package.json b/apps/mobile/package.json
index 84d6df566..82e4e7fec 100644
--- a/apps/mobile/package.json
+++ b/apps/mobile/package.json
@@ -1,6 +1,6 @@
{
- "name": "mobile",
- "version": "0.0.0",
- "main": "index.js",
- "license": "MIT"
+ "name": "mobile",
+ "version": "0.0.0",
+ "main": "index.js",
+ "license": "MIT"
}
diff --git a/apps/server/package.json b/apps/server/package.json
index c9212ac1e..0dc984fc1 100644
--- a/apps/server/package.json
+++ b/apps/server/package.json
@@ -1,6 +1,6 @@
{
- "name": "@sd/server",
- "version": "0.0.0",
- "main": "index.js",
- "license": "MIT"
+ "name": "@sd/server",
+ "version": "0.0.0",
+ "main": "index.js",
+ "license": "MIT"
}
diff --git a/apps/web/package.json b/apps/web/package.json
index aa153cdb1..5bf57d074 100644
--- a/apps/web/package.json
+++ b/apps/web/package.json
@@ -1,31 +1,31 @@
{
- "name": "@sd/web",
- "private": true,
- "version": "0.0.0",
- "scripts": {
- "dev": "vite",
- "build": "vite build",
- "preview": "vite preview"
- },
- "dependencies": {
- "@fontsource/inter": "^4.5.7",
- "@sd/client": "*",
- "@sd/core": "*",
- "@sd/interface": "*",
- "@sd/ui": "*",
- "react": "^18.0.0",
- "react-dom": "^18.0.0"
- },
- "devDependencies": {
- "@types/react": "^18.0.8",
- "@types/react-dom": "^18.0.0",
- "@vitejs/plugin-react": "^1.3.1",
- "autoprefixer": "^10.4.4",
- "postcss": "^8.4.12",
- "tailwind": "^4.0.0",
- "typescript": "^4.6.3",
- "vite": "^2.9.5",
- "vite-plugin-svgr": "^1.1.0",
- "vite-plugin-tsconfig-paths": "^1.0.5"
- }
+ "name": "@sd/web",
+ "private": true,
+ "version": "0.0.0",
+ "scripts": {
+ "dev": "vite",
+ "build": "vite build",
+ "preview": "vite preview"
+ },
+ "dependencies": {
+ "@fontsource/inter": "^4.5.7",
+ "@sd/client": "*",
+ "@sd/core": "*",
+ "@sd/interface": "*",
+ "@sd/ui": "*",
+ "react": "^18.0.0",
+ "react-dom": "^18.0.0"
+ },
+ "devDependencies": {
+ "@types/react": "^18.0.8",
+ "@types/react-dom": "^18.0.0",
+ "@vitejs/plugin-react": "^1.3.1",
+ "autoprefixer": "^10.4.4",
+ "postcss": "^8.4.12",
+ "tailwind": "^4.0.0",
+ "typescript": "^4.6.3",
+ "vite": "^2.9.5",
+ "vite-plugin-svgr": "^1.1.0",
+ "vite-plugin-tsconfig-paths": "^1.0.5"
+ }
}
diff --git a/apps/web/public/manifest.json b/apps/web/public/manifest.json
index 6a73e4bdc..a5d9bf516 100644
--- a/apps/web/public/manifest.json
+++ b/apps/web/public/manifest.json
@@ -1,25 +1,25 @@
{
- "short_name": "Spacedrive",
- "name": "Spacedrive",
- "icons": [
- {
- "src": "favicon.ico",
- "sizes": "64x64 32x32 24x24 16x16",
- "type": "image/x-icon"
- },
- {
- "src": "logo192.png",
- "type": "image/png",
- "sizes": "192x192"
- },
- {
- "src": "logo512.png",
- "type": "image/png",
- "sizes": "512x512"
- }
- ],
- "start_url": ".",
- "display": "standalone",
- "theme_color": "#000000",
- "background_color": "#ffffff"
+ "short_name": "Spacedrive",
+ "name": "Spacedrive",
+ "icons": [
+ {
+ "src": "favicon.ico",
+ "sizes": "64x64 32x32 24x24 16x16",
+ "type": "image/x-icon"
+ },
+ {
+ "src": "logo192.png",
+ "type": "image/png",
+ "sizes": "192x192"
+ },
+ {
+ "src": "logo512.png",
+ "type": "image/png",
+ "sizes": "512x512"
+ }
+ ],
+ "start_url": ".",
+ "display": "standalone",
+ "theme_color": "#000000",
+ "background_color": "#ffffff"
}
diff --git a/apps/web/src/App.tsx b/apps/web/src/App.tsx
index c471715c4..4a80ecfcd 100644
--- a/apps/web/src/App.tsx
+++ b/apps/web/src/App.tsx
@@ -10,83 +10,83 @@ const randomId = () => Math.random().toString(36).slice(2);
// bind state to core via Tauri
class Transport extends BaseTransport {
- requestMap = new Map void>();
+ requestMap = new Map void>();
- constructor() {
- super();
+ constructor() {
+ super();
- websocket.addEventListener('message', (event) => {
- if (!event.data) return;
+ websocket.addEventListener('message', (event) => {
+ if (!event.data) return;
- const { id, payload } = JSON.parse(event.data);
+ const { id, payload } = JSON.parse(event.data);
- const { type, data } = payload;
- if (type === 'event') {
- this.emit('core_event', data);
- } else if (type === 'query' || type === 'command') {
- if (this.requestMap.has(id)) {
- this.requestMap.get(id)?.(data);
- this.requestMap.delete(id);
- }
- }
- });
- }
- async query(query: ClientQuery) {
- const id = randomId();
- let resolve: (data: any) => void;
+ const { type, data } = payload;
+ if (type === 'event') {
+ this.emit('core_event', data);
+ } else if (type === 'query' || type === 'command') {
+ if (this.requestMap.has(id)) {
+ this.requestMap.get(id)?.(data);
+ this.requestMap.delete(id);
+ }
+ }
+ });
+ }
+ async query(query: ClientQuery) {
+ const id = randomId();
+ let resolve: (data: any) => void;
- const promise = new Promise((res) => {
- resolve = res;
- });
+ const promise = new Promise((res) => {
+ resolve = res;
+ });
- // @ts-ignore
- this.requestMap.set(id, resolve);
+ // @ts-ignore
+ this.requestMap.set(id, resolve);
- websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
+ websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
- return await promise;
- }
- async command(command: ClientCommand) {
- const id = randomId();
- let resolve: (data: any) => void;
+ return await promise;
+ }
+ async command(command: ClientCommand) {
+ const id = randomId();
+ let resolve: (data: any) => void;
- const promise = new Promise((res) => {
- resolve = res;
- });
+ const promise = new Promise((res) => {
+ resolve = res;
+ });
- // @ts-ignore
- this.requestMap.set(id, resolve);
+ // @ts-ignore
+ this.requestMap.set(id, resolve);
- websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
+ websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
- return await promise;
- }
+ return await promise;
+ }
}
function App() {
- useEffect(() => {
- window.parent.postMessage('spacedrive-hello', '*');
- }, []);
+ useEffect(() => {
+ window.parent.postMessage('spacedrive-hello', '*');
+ }, []);
- return (
-
- {/* */}
- {
- return Promise.resolve([]);
- }}
- />
-
- );
+ return (
+
+ {/* */}
+ {
+ return Promise.resolve([]);
+ }}
+ />
+
+ );
}
export default App;
diff --git a/apps/web/src/env.d.ts b/apps/web/src/env.d.ts
index be63506a6..06e6e15b4 100644
--- a/apps/web/src/env.d.ts
+++ b/apps/web/src/env.d.ts
@@ -1,9 +1,9 @@
///
interface ImportMetaEnv {
- readonly VITE_SDSERVER_BASE_URL: string;
+ readonly VITE_SDSERVER_BASE_URL: string;
}
interface ImportMeta {
- readonly env: ImportMetaEnv;
+ readonly env: ImportMetaEnv;
}
diff --git a/apps/web/src/index.html b/apps/web/src/index.html
index 66a5bb247..6237f0bed 100644
--- a/apps/web/src/index.html
+++ b/apps/web/src/index.html
@@ -1,12 +1,12 @@
-
-
- Spacedrive
-
-
-
-
-
-
+
+
+ Spacedrive
+
+
+
+
+
+
diff --git a/apps/web/src/index.tsx b/apps/web/src/index.tsx
index b18b0963a..1a2c714a6 100644
--- a/apps/web/src/index.tsx
+++ b/apps/web/src/index.tsx
@@ -5,7 +5,7 @@ import '@sd/ui/style';
const root = ReactDOM.createRoot(document.getElementById('root') as HTMLElement);
root.render(
-
-
-
+
+
+
);
diff --git a/apps/web/tsconfig.json b/apps/web/tsconfig.json
index c44e3731e..168f12434 100644
--- a/apps/web/tsconfig.json
+++ b/apps/web/tsconfig.json
@@ -1,5 +1,5 @@
{
- "extends": "../../packages/config/interface.tsconfig.json",
- "compilerOptions": {},
- "include": ["src"]
+ "extends": "../../packages/config/interface.tsconfig.json",
+ "compilerOptions": {},
+ "include": ["src"]
}
diff --git a/apps/web/vercel.json b/apps/web/vercel.json
index 3a48e56ba..8f728db13 100644
--- a/apps/web/vercel.json
+++ b/apps/web/vercel.json
@@ -1,3 +1,3 @@
{
- "rewrites": [{ "source": "/(.*)", "destination": "/" }]
+ "rewrites": [{ "source": "/(.*)", "destination": "/" }]
}
diff --git a/apps/web/vite.config.ts b/apps/web/vite.config.ts
index 8e0b71c72..effd9e0e7 100644
--- a/apps/web/vite.config.ts
+++ b/apps/web/vite.config.ts
@@ -7,24 +7,24 @@ import { name, version } from './package.json';
// https://vitejs.dev/config/
export default defineConfig({
- server: {
- port: 8002
- },
- plugins: [
- // @ts-ignore
- react({
- jsxRuntime: 'classic'
- }),
- svg({ svgrOptions: { icon: true } }),
- tsconfigPaths()
- ],
- root: 'src',
- publicDir: '../../packages/interface/src/assets',
- define: {
- pkgJson: { name, version }
- },
- build: {
- outDir: '../dist',
- assetsDir: '.'
- }
+ server: {
+ port: 8002
+ },
+ plugins: [
+ // @ts-ignore
+ react({
+ jsxRuntime: 'classic'
+ }),
+ svg({ svgrOptions: { icon: true } }),
+ tsconfigPaths()
+ ],
+ root: 'src',
+ publicDir: '../../packages/interface/src/assets',
+ define: {
+ pkgJson: { name, version }
+ },
+ build: {
+ outDir: '../dist',
+ assetsDir: '.'
+ }
});
diff --git a/core/.rustfmt.toml b/core/.rustfmt.toml
index 3901c3cce..411a5f052 100644
--- a/core/.rustfmt.toml
+++ b/core/.rustfmt.toml
@@ -1,6 +1,5 @@
max_width = 100
-hard_tabs = false
-tab_spaces = 2
+hard_tabs = true
newline_style = "Unix"
use_small_heuristics = "Default"
reorder_imports = true
diff --git a/core/bindings/Client.ts b/core/bindings/Client.ts
index 2c0d9f32f..4660f7e8d 100644
--- a/core/bindings/Client.ts
+++ b/core/bindings/Client.ts
@@ -1,3 +1,10 @@
-import type { Platform } from "./Platform";
+import type { Platform } from './Platform';
-export interface Client { uuid: string, name: string, platform: Platform, tcp_address: string, last_seen: string, last_synchronized: string, }
\ No newline at end of file
+export interface Client {
+ uuid: string;
+ name: string;
+ platform: Platform;
+ tcp_address: string;
+ last_seen: string;
+ last_synchronized: string;
+}
diff --git a/core/bindings/ClientCommand.ts b/core/bindings/ClientCommand.ts
index d6b37840c..3ff84de41 100644
--- a/core/bindings/ClientCommand.ts
+++ b/core/bindings/ClientCommand.ts
@@ -1,2 +1,14 @@
-
-export type ClientCommand = { key: "FileRead", params: { id: number, } } | { key: "FileDelete", params: { id: number, } } | { key: "LibDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles" };
\ No newline at end of file
+export type ClientCommand =
+ | { key: 'FileRead'; params: { id: number } }
+ | { key: 'FileDelete'; params: { id: number } }
+ | { key: 'LibDelete'; params: { id: number } }
+ | { key: 'TagCreate'; params: { name: string; color: string } }
+ | { key: 'TagUpdate'; params: { name: string; color: string } }
+ | { key: 'TagAssign'; params: { file_id: number; tag_id: number } }
+ | { key: 'TagDelete'; params: { id: number } }
+ | { key: 'LocCreate'; params: { path: string } }
+ | { key: 'LocUpdate'; params: { id: number; name: string | null } }
+ | { key: 'LocDelete'; params: { id: number } }
+ | { key: 'SysVolumeUnmount'; params: { id: number } }
+ | { key: 'GenerateThumbsForLocation'; params: { id: number; path: string } }
+ | { key: 'IdentifyUniqueFiles' };
diff --git a/core/bindings/ClientQuery.ts b/core/bindings/ClientQuery.ts
index 21439f03c..67c0f608d 100644
--- a/core/bindings/ClientQuery.ts
+++ b/core/bindings/ClientQuery.ts
@@ -1,2 +1,10 @@
-
-export type ClientQuery = { key: "ClientGetState" } | { key: "SysGetVolumes" } | { key: "LibGetTags" } | { key: "JobGetRunning" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" };
\ No newline at end of file
+export type ClientQuery =
+ | { key: 'ClientGetState' }
+ | { key: 'SysGetVolumes' }
+ | { key: 'LibGetTags' }
+ | { key: 'JobGetRunning' }
+ | { key: 'JobGetHistory' }
+ | { key: 'SysGetLocations' }
+ | { key: 'SysGetLocation'; params: { id: number } }
+ | { key: 'LibGetExplorerDir'; params: { location_id: number; path: string; limit: number } }
+ | { key: 'GetLibraryStatistics' };
diff --git a/core/bindings/ClientState.ts b/core/bindings/ClientState.ts
index 34d84b404..ce5c2d7af 100644
--- a/core/bindings/ClientState.ts
+++ b/core/bindings/ClientState.ts
@@ -1,3 +1,11 @@
-import type { LibraryState } from "./LibraryState";
+import type { LibraryState } from './LibraryState';
-export interface ClientState { client_uuid: string, client_id: number, client_name: string, data_path: string, tcp_port: number, libraries: Array, current_library_uuid: string, }
\ No newline at end of file
+export interface ClientState {
+ client_uuid: string;
+ client_id: number;
+ client_name: string;
+ data_path: string;
+ tcp_port: number;
+ libraries: Array;
+ current_library_uuid: string;
+}
diff --git a/core/bindings/CoreEvent.ts b/core/bindings/CoreEvent.ts
index 043bfa562..ac9b9d2b8 100644
--- a/core/bindings/CoreEvent.ts
+++ b/core/bindings/CoreEvent.ts
@@ -1,4 +1,10 @@
-import type { ClientQuery } from "./ClientQuery";
-import type { CoreResource } from "./CoreResource";
+import type { ClientQuery } from './ClientQuery';
+import type { CoreResource } from './CoreResource';
-export type CoreEvent = { key: "InvalidateQuery", data: ClientQuery } | { key: "InvalidateQueryDebounced", data: ClientQuery } | { key: "InvalidateResource", data: CoreResource } | { key: "NewThumbnail", data: { cas_id: string, } } | { key: "Log", data: { message: string, } } | { key: "DatabaseDisconnected", data: { reason: string | null, } };
\ No newline at end of file
+export type CoreEvent =
+ | { key: 'InvalidateQuery'; data: ClientQuery }
+ | { key: 'InvalidateQueryDebounced'; data: ClientQuery }
+ | { key: 'InvalidateResource'; data: CoreResource }
+ | { key: 'NewThumbnail'; data: { cas_id: string } }
+ | { key: 'Log'; data: { message: string } }
+ | { key: 'DatabaseDisconnected'; data: { reason: string | null } };
diff --git a/core/bindings/CoreResource.ts b/core/bindings/CoreResource.ts
index 3c2f70c91..0ae2fc774 100644
--- a/core/bindings/CoreResource.ts
+++ b/core/bindings/CoreResource.ts
@@ -1,5 +1,11 @@
-import type { File } from "./File";
-import type { JobReport } from "./JobReport";
-import type { LocationResource } from "./LocationResource";
+import type { File } from './File';
+import type { JobReport } from './JobReport';
+import type { LocationResource } from './LocationResource';
-export type CoreResource = "Client" | "Library" | { Location: LocationResource } | { File: File } | { Job: JobReport } | "Tag";
\ No newline at end of file
+export type CoreResource =
+ | 'Client'
+ | 'Library'
+ | { Location: LocationResource }
+ | { File: File }
+ | { Job: JobReport }
+ | 'Tag';
diff --git a/core/bindings/CoreResponse.ts b/core/bindings/CoreResponse.ts
index 0efa68e79..04c755350 100644
--- a/core/bindings/CoreResponse.ts
+++ b/core/bindings/CoreResponse.ts
@@ -1,8 +1,18 @@
-import type { ClientState } from "./ClientState";
-import type { DirectoryWithContents } from "./DirectoryWithContents";
-import type { JobReport } from "./JobReport";
-import type { LocationResource } from "./LocationResource";
-import type { Statistics } from "./Statistics";
-import type { Volume } from "./Volume";
+import type { ClientState } from './ClientState';
+import type { DirectoryWithContents } from './DirectoryWithContents';
+import type { JobReport } from './JobReport';
+import type { LocationResource } from './LocationResource';
+import type { Statistics } from './Statistics';
+import type { Volume } from './Volume';
-export type CoreResponse = { key: "Success", data: null } | { key: "SysGetVolumes", data: Array } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "ClientGetState", data: ClientState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array } | { key: "JobGetHistory", data: Array } | { key: "GetLibraryStatistics", data: Statistics };
\ No newline at end of file
+export type CoreResponse =
+ | { key: 'Success'; data: null }
+ | { key: 'SysGetVolumes'; data: Array }
+ | { key: 'SysGetLocation'; data: LocationResource }
+ | { key: 'SysGetLocations'; data: Array }
+ | { key: 'LibGetExplorerDir'; data: DirectoryWithContents }
+ | { key: 'ClientGetState'; data: ClientState }
+ | { key: 'LocCreate'; data: LocationResource }
+ | { key: 'JobGetRunning'; data: Array }
+ | { key: 'JobGetHistory'; data: Array }
+ | { key: 'GetLibraryStatistics'; data: Statistics };
diff --git a/core/bindings/DirectoryWithContents.ts b/core/bindings/DirectoryWithContents.ts
index 89c0fb64f..b8af1220c 100644
--- a/core/bindings/DirectoryWithContents.ts
+++ b/core/bindings/DirectoryWithContents.ts
@@ -1,3 +1,6 @@
-import type { FilePath } from "./FilePath";
+import type { FilePath } from './FilePath';
-export interface DirectoryWithContents { directory: FilePath, contents: Array, }
\ No newline at end of file
+export interface DirectoryWithContents {
+ directory: FilePath;
+ contents: Array;
+}
diff --git a/core/bindings/EncryptionAlgorithm.ts b/core/bindings/EncryptionAlgorithm.ts
index d5b7d9889..d38357bd2 100644
--- a/core/bindings/EncryptionAlgorithm.ts
+++ b/core/bindings/EncryptionAlgorithm.ts
@@ -1,2 +1 @@
-
-export type EncryptionAlgorithm = "None" | "AES128" | "AES192" | "AES256";
\ No newline at end of file
+export type EncryptionAlgorithm = 'None' | 'AES128' | 'AES192' | 'AES256';
diff --git a/core/bindings/File.ts b/core/bindings/File.ts
index 81017797f..6260db13d 100644
--- a/core/bindings/File.ts
+++ b/core/bindings/File.ts
@@ -1,5 +1,24 @@
-import type { EncryptionAlgorithm } from "./EncryptionAlgorithm";
-import type { FileKind } from "./FileKind";
-import type { FilePath } from "./FilePath";
+import type { EncryptionAlgorithm } from './EncryptionAlgorithm';
+import type { FileKind } from './FileKind';
+import type { FilePath } from './FilePath';
-export interface File { id: number, cas_id: string, integrity_checksum: string | null, size_in_bytes: string, kind: FileKind, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, encryption: EncryptionAlgorithm, ipfs_id: string | null, comment: string | null, date_created: string, date_modified: string, date_indexed: string, paths: Array, }
\ No newline at end of file
+export interface File {
+ id: number;
+ cas_id: string;
+ integrity_checksum: string | null;
+ size_in_bytes: string;
+ kind: FileKind;
+ hidden: boolean;
+ favorite: boolean;
+ important: boolean;
+ has_thumbnail: boolean;
+ has_thumbstrip: boolean;
+ has_video_preview: boolean;
+ encryption: EncryptionAlgorithm;
+ ipfs_id: string | null;
+ comment: string | null;
+ date_created: string;
+ date_modified: string;
+ date_indexed: string;
+ paths: Array;
+}
diff --git a/core/bindings/FileKind.ts b/core/bindings/FileKind.ts
index 1e0ec7b6c..90dbc6630 100644
--- a/core/bindings/FileKind.ts
+++ b/core/bindings/FileKind.ts
@@ -1,2 +1,10 @@
-
-export type FileKind = "Unknown" | "Directory" | "Package" | "Archive" | "Image" | "Video" | "Audio" | "Plaintext" | "Alias";
\ No newline at end of file
+export type FileKind =
+ | 'Unknown'
+ | 'Directory'
+ | 'Package'
+ | 'Archive'
+ | 'Image'
+ | 'Video'
+ | 'Audio'
+ | 'Plaintext'
+ | 'Alias';
diff --git a/core/bindings/FilePath.ts b/core/bindings/FilePath.ts
index 33626e50a..5a6d49ee1 100644
--- a/core/bindings/FilePath.ts
+++ b/core/bindings/FilePath.ts
@@ -1,2 +1,16 @@
-
-export interface FilePath { id: number, is_dir: boolean, location_id: number, materialized_path: string, name: string, extension: string | null, file_id: number | null, parent_id: number | null, temp_cas_id: string | null, has_local_thumbnail: boolean, date_created: string, date_modified: string, date_indexed: string, permissions: string | null, }
\ No newline at end of file
+export interface FilePath {
+ id: number;
+ is_dir: boolean;
+ location_id: number;
+ materialized_path: string;
+ name: string;
+ extension: string | null;
+ file_id: number | null;
+ parent_id: number | null;
+ temp_cas_id: string | null;
+ has_local_thumbnail: boolean;
+ date_created: string;
+ date_modified: string;
+ date_indexed: string;
+ permissions: string | null;
+}
diff --git a/core/bindings/JobReport.ts b/core/bindings/JobReport.ts
index a2adc7462..1b805c687 100644
--- a/core/bindings/JobReport.ts
+++ b/core/bindings/JobReport.ts
@@ -1,3 +1,12 @@
-import type { JobStatus } from "./JobStatus";
+import type { JobStatus } from './JobStatus';
-export interface JobReport { id: string, date_created: string, date_modified: string, status: JobStatus, task_count: number, completed_task_count: number, message: string, seconds_elapsed: string, }
\ No newline at end of file
+export interface JobReport {
+ id: string;
+ date_created: string;
+ date_modified: string;
+ status: JobStatus;
+ task_count: number;
+ completed_task_count: number;
+ message: string;
+ seconds_elapsed: string;
+}
diff --git a/core/bindings/JobStatus.ts b/core/bindings/JobStatus.ts
index 8fc11c5dc..8cfc3f318 100644
--- a/core/bindings/JobStatus.ts
+++ b/core/bindings/JobStatus.ts
@@ -1,2 +1 @@
-
-export type JobStatus = "Queued" | "Running" | "Completed" | "Canceled" | "Failed";
\ No newline at end of file
+export type JobStatus = 'Queued' | 'Running' | 'Completed' | 'Canceled' | 'Failed';
diff --git a/core/bindings/LibraryState.ts b/core/bindings/LibraryState.ts
index 17acd81a2..f58dd547e 100644
--- a/core/bindings/LibraryState.ts
+++ b/core/bindings/LibraryState.ts
@@ -1,2 +1,6 @@
-
-export interface LibraryState { library_uuid: string, library_id: number, library_path: string, offline: boolean, }
\ No newline at end of file
+export interface LibraryState {
+ library_uuid: string;
+ library_id: number;
+ library_path: string;
+ offline: boolean;
+}
diff --git a/core/bindings/LocationResource.ts b/core/bindings/LocationResource.ts
index 70bd1e15a..ce74ad548 100644
--- a/core/bindings/LocationResource.ts
+++ b/core/bindings/LocationResource.ts
@@ -1,2 +1,10 @@
-
-export interface LocationResource { id: number, name: string | null, path: string | null, total_capacity: number | null, available_capacity: number | null, is_removable: boolean | null, is_online: boolean, date_created: string, }
\ No newline at end of file
+export interface LocationResource {
+ id: number;
+ name: string | null;
+ path: string | null;
+ total_capacity: number | null;
+ available_capacity: number | null;
+ is_removable: boolean | null;
+ is_online: boolean;
+ date_created: string;
+}
diff --git a/core/bindings/Platform.ts b/core/bindings/Platform.ts
index a25022b64..e460baab5 100644
--- a/core/bindings/Platform.ts
+++ b/core/bindings/Platform.ts
@@ -1,2 +1 @@
-
-export type Platform = "Unknown" | "Windows" | "MacOS" | "Linux" | "IOS" | "Android";
\ No newline at end of file
+export type Platform = 'Unknown' | 'Windows' | 'MacOS' | 'Linux' | 'IOS' | 'Android';
diff --git a/core/bindings/Statistics.ts b/core/bindings/Statistics.ts
index d3af91139..76147c3c7 100644
--- a/core/bindings/Statistics.ts
+++ b/core/bindings/Statistics.ts
@@ -1,2 +1,9 @@
-
-export interface Statistics { total_file_count: number, total_bytes_used: string, total_bytes_capacity: string, total_bytes_free: string, total_unique_bytes: string, preview_media_bytes: string, library_db_size: string, }
\ No newline at end of file
+export interface Statistics {
+ total_file_count: number;
+ total_bytes_used: string;
+ total_bytes_capacity: string;
+ total_bytes_free: string;
+ total_unique_bytes: string;
+ preview_media_bytes: string;
+ library_db_size: string;
+}
diff --git a/core/bindings/Volume.ts b/core/bindings/Volume.ts
index 4b59fefd3..493547009 100644
--- a/core/bindings/Volume.ts
+++ b/core/bindings/Volume.ts
@@ -1,2 +1,10 @@
-
-export interface Volume { name: string, mount_point: string, total_capacity: bigint, available_capacity: bigint, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean, }
\ No newline at end of file
+export interface Volume {
+ name: string;
+ mount_point: string;
+ total_capacity: bigint;
+ available_capacity: bigint;
+ is_removable: boolean;
+ disk_type: string | null;
+ file_system: string | null;
+ is_root_filesystem: boolean;
+}
diff --git a/core/derive/src/lib.rs b/core/derive/src/lib.rs
index 6b77d73c6..2d107dab2 100644
--- a/core/derive/src/lib.rs
+++ b/core/derive/src/lib.rs
@@ -15,28 +15,28 @@ use syn::{parse_macro_input, Data, DeriveInput};
/// ```
#[proc_macro_derive(PropertyOperationApply)]
pub fn property_operation_apply(input: TokenStream) -> TokenStream {
- let DeriveInput { ident, data, .. } = parse_macro_input!(input);
+ let DeriveInput { ident, data, .. } = parse_macro_input!(input);
- if let Data::Enum(data) = data {
- let impls = data.variants.iter().map(|variant| {
- let variant_ident = &variant.ident;
- quote! {
- #ident::#variant_ident(method) => method.apply(ctx),
- }
- });
+ if let Data::Enum(data) = data {
+ let impls = data.variants.iter().map(|variant| {
+ let variant_ident = &variant.ident;
+ quote! {
+ #ident::#variant_ident(method) => method.apply(ctx),
+ }
+ });
- let expanded = quote! {
- impl #ident {
- fn apply(operation: CrdtCtx, ctx: self::engine::SyncContext) {
- match operation.resource {
- #(#impls)*
- };
- }
- }
- };
+ let expanded = quote! {
+ impl #ident {
+ fn apply(operation: CrdtCtx, ctx: self::engine::SyncContext) {
+ match operation.resource {
+ #(#impls)*
+ };
+ }
+ }
+ };
- TokenStream::from(expanded)
- } else {
- panic!("The 'PropertyOperationApply' macro can only be used on enums!");
- }
+ TokenStream::from(expanded)
+ } else {
+ panic!("The 'PropertyOperationApply' macro can only be used on enums!");
+ }
}
diff --git a/core/package.json b/core/package.json
index b8d39544c..cdddf09f3 100644
--- a/core/package.json
+++ b/core/package.json
@@ -1,18 +1,18 @@
{
- "name": "@sd/core",
- "version": "0.0.0",
- "main": "index.js",
- "license": "MIT",
- "scripts": {
- "codegen": "cargo test && ts-node ./scripts/bindingsIndex.ts",
- "build": "cargo build",
- "test": "cargo test",
- "test:log": "cargo test -- --nocapture",
- "prisma": "cargo prisma"
- },
- "devDependencies": {
- "@types/node": "^17.0.23",
- "ts-node": "^10.7.0",
- "typescript": "^4.6.3"
- }
+ "name": "@sd/core",
+ "version": "0.0.0",
+ "main": "index.js",
+ "license": "MIT",
+ "scripts": {
+ "codegen": "cargo test && ts-node ./scripts/bindingsIndex.ts",
+ "build": "cargo build",
+ "test": "cargo test",
+ "test:log": "cargo test -- --nocapture",
+ "prisma": "cargo prisma"
+ },
+ "devDependencies": {
+ "@types/node": "^17.0.23",
+ "ts-node": "^10.7.0",
+ "typescript": "^4.6.3"
+ }
}
diff --git a/core/prisma/src/main.rs b/core/prisma/src/main.rs
index b873b532b..96e11d24f 100644
--- a/core/prisma/src/main.rs
+++ b/core/prisma/src/main.rs
@@ -1,3 +1,3 @@
fn main() {
- prisma_client_rust_cli::run();
+ prisma_client_rust_cli::run();
}
diff --git a/core/scripts/bindingsIndex.ts b/core/scripts/bindingsIndex.ts
index d7830e079..addea2bf6 100644
--- a/core/scripts/bindingsIndex.ts
+++ b/core/scripts/bindingsIndex.ts
@@ -2,29 +2,29 @@ import * as fs from 'fs/promises';
import * as path from 'path';
(async function main() {
- async function exists(path: string) {
- try {
- await fs.access(path);
- return true;
- } catch {
- return false;
- }
- }
+ async function exists(path: string) {
+ try {
+ await fs.access(path);
+ return true;
+ } catch {
+ return false;
+ }
+ }
- const files = await fs.readdir(path.join(__dirname, '../bindings'));
- const bindings = files.filter((f) => f.endsWith('.ts'));
- let str = '';
- // str += `export * from './types';\n`;
+ const files = await fs.readdir(path.join(__dirname, '../bindings'));
+ const bindings = files.filter((f) => f.endsWith('.ts'));
+ let str = '';
+ // str += `export * from './types';\n`;
- for (let binding of bindings) {
- str += `export * from './bindings/${binding.split('.')[0]}';\n`;
- }
+ for (let binding of bindings) {
+ str += `export * from './bindings/${binding.split('.')[0]}';\n`;
+ }
- let indexExists = await exists(path.join(__dirname, '../index.ts'));
+ let indexExists = await exists(path.join(__dirname, '../index.ts'));
- if (indexExists) {
- await fs.rm(path.join(__dirname, '../index.ts'));
- }
+ if (indexExists) {
+ await fs.rm(path.join(__dirname, '../index.ts'));
+ }
- await fs.writeFile(path.join(__dirname, '../index.ts'), str);
+ await fs.writeFile(path.join(__dirname, '../index.ts'), str);
})();
diff --git a/core/src/crypto/encryption.rs b/core/src/crypto/encryption.rs
index 9771faf01..78b744561 100644
--- a/core/src/crypto/encryption.rs
+++ b/core/src/crypto/encryption.rs
@@ -6,8 +6,8 @@ use ts_rs::TS;
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum EncryptionAlgorithm {
- None = 0,
- AES128 = 1,
- AES192 = 2,
- AES256 = 3,
+ None = 0,
+ AES128 = 1,
+ AES192 = 2,
+ AES256 = 3,
}
diff --git a/core/src/db/migrate.rs b/core/src/db/migrate.rs
index f89265b24..c7b72f4fe 100644
--- a/core/src/db/migrate.rs
+++ b/core/src/db/migrate.rs
@@ -11,140 +11,142 @@ const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_tab
static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations");
pub fn sha256_digest(mut reader: R) -> Result {
- let mut context = Context::new(&SHA256);
- let mut buffer = [0; 1024];
- loop {
- let count = reader.read(&mut buffer)?;
- if count == 0 {
- break;
- }
- context.update(&buffer[..count]);
- }
- Ok(context.finish())
+ let mut context = Context::new(&SHA256);
+ let mut buffer = [0; 1024];
+ loop {
+ let count = reader.read(&mut buffer)?;
+ if count == 0 {
+ break;
+ }
+ context.update(&buffer[..count]);
+ }
+ Ok(context.finish())
}
pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
- let client = &ctx.database;
+ let client = &ctx.database;
- match client
- ._query_raw::(
- "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
- )
- .await
- {
- Ok(data) => {
- if data.len() == 0 {
- #[cfg(debug_assertions)]
- println!("Migration table does not exist");
- // execute migration
- match client._execute_raw(INIT_MIGRATION).await {
- Ok(_) => {}
- Err(e) => {
- println!("Failed to create migration table: {}", e);
- }
- };
+ match client
+ ._query_raw::(
+ "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
+ )
+ .await
+ {
+ Ok(data) => {
+ if data.len() == 0 {
+ #[cfg(debug_assertions)]
+ println!("Migration table does not exist");
+ // execute migration
+ match client._execute_raw(INIT_MIGRATION).await {
+ Ok(_) => {}
+ Err(e) => {
+ println!("Failed to create migration table: {}", e);
+ }
+ };
- let value: Vec = client
- ._query_raw("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'")
- .await
- .unwrap();
+ let value: Vec = client
+ ._query_raw(
+ "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
+ )
+ .await
+ .unwrap();
- #[cfg(debug_assertions)]
- println!("Migration table created: {:?}", value);
- } else {
- #[cfg(debug_assertions)]
- println!("Migration table exists: {:?}", data);
- }
+ #[cfg(debug_assertions)]
+ println!("Migration table created: {:?}", value);
+ } else {
+ #[cfg(debug_assertions)]
+ println!("Migration table exists: {:?}", data);
+ }
- let mut migration_subdirs = MIGRATIONS_DIR
- .dirs()
- .filter(|subdir| {
- subdir
- .path()
- .file_name()
- .map(|name| name != OsStr::new("migration_table"))
- .unwrap_or(false)
- })
- .collect::>();
+ let mut migration_subdirs = MIGRATIONS_DIR
+ .dirs()
+ .filter(|subdir| {
+ subdir
+ .path()
+ .file_name()
+ .map(|name| name != OsStr::new("migration_table"))
+ .unwrap_or(false)
+ })
+ .collect::>();
- migration_subdirs.sort_by(|a, b| {
- let a_name = a.path().file_name().unwrap().to_str().unwrap();
- let b_name = b.path().file_name().unwrap().to_str().unwrap();
+ migration_subdirs.sort_by(|a, b| {
+ let a_name = a.path().file_name().unwrap().to_str().unwrap();
+ let b_name = b.path().file_name().unwrap().to_str().unwrap();
- let a_time = a_name[..14].parse::().unwrap();
- let b_time = b_name[..14].parse::().unwrap();
+ let a_time = a_name[..14].parse::().unwrap();
+ let b_time = b_name[..14].parse::().unwrap();
- a_time.cmp(&b_time)
- });
+ a_time.cmp(&b_time)
+ });
- for subdir in migration_subdirs {
- println!("{:?}", subdir.path());
- let migration_file = subdir
- .get_file(subdir.path().join("./migration.sql"))
- .unwrap();
- let migration_sql = migration_file.contents_utf8().unwrap();
+ for subdir in migration_subdirs {
+ println!("{:?}", subdir.path());
+ let migration_file = subdir
+ .get_file(subdir.path().join("./migration.sql"))
+ .unwrap();
+ let migration_sql = migration_file.contents_utf8().unwrap();
- let digest = sha256_digest(BufReader::new(migration_file.contents()))?;
- // create a lowercase hash from
- let checksum = HEXLOWER.encode(digest.as_ref());
- let name = subdir.path().file_name().unwrap().to_str().unwrap();
+ let digest = sha256_digest(BufReader::new(migration_file.contents()))?;
+ // create a lowercase hash from
+ let checksum = HEXLOWER.encode(digest.as_ref());
+ let name = subdir.path().file_name().unwrap().to_str().unwrap();
- // get existing migration by checksum, if it doesn't exist run the migration
- let existing_migration = client
- .migration()
- .find_unique(migration::checksum::equals(checksum.clone()))
- .exec()
- .await?;
+ // get existing migration by checksum, if it doesn't exist run the migration
+ let existing_migration = client
+ .migration()
+ .find_unique(migration::checksum::equals(checksum.clone()))
+ .exec()
+ .await?;
- if existing_migration.is_none() {
- #[cfg(debug_assertions)]
- println!("Running migration: {}", name);
+ if existing_migration.is_none() {
+ #[cfg(debug_assertions)]
+ println!("Running migration: {}", name);
- let steps = migration_sql.split(";").collect::>();
- let steps = &steps[0..steps.len() - 1];
+ let steps = migration_sql.split(";").collect::>();
+ let steps = &steps[0..steps.len() - 1];
- client
- .migration()
- .create(
- migration::name::set(name.to_string()),
- migration::checksum::set(checksum.clone()),
- vec![],
- )
- .exec()
- .await?;
+ client
+ .migration()
+ .create(
+ migration::name::set(name.to_string()),
+ migration::checksum::set(checksum.clone()),
+ vec![],
+ )
+ .exec()
+ .await?;
- for (i, step) in steps.iter().enumerate() {
- match client._execute_raw(&format!("{};", step)).await {
- Ok(_) => {
- #[cfg(debug_assertions)]
- println!("Step {} ran successfully", i);
- client
- .migration()
- .find_unique(migration::checksum::equals(checksum.clone()))
- .update(vec![migration::steps_applied::set(i as i32 + 1)])
- .exec()
- .await?;
- }
- Err(e) => {
- println!("Error running migration: {}", name);
- println!("{}", e);
- break;
- }
- }
- }
+ for (i, step) in steps.iter().enumerate() {
+ match client._execute_raw(&format!("{};", step)).await {
+ Ok(_) => {
+ #[cfg(debug_assertions)]
+ println!("Step {} ran successfully", i);
+ client
+ .migration()
+ .find_unique(migration::checksum::equals(checksum.clone()))
+ .update(vec![migration::steps_applied::set(i as i32 + 1)])
+ .exec()
+ .await?;
+ }
+ Err(e) => {
+ println!("Error running migration: {}", name);
+ println!("{}", e);
+ break;
+ }
+ }
+ }
- #[cfg(debug_assertions)]
- println!("Migration {} recorded successfully", name);
- } else {
- #[cfg(debug_assertions)]
- println!("Migration {} already exists", name);
- }
- }
- }
- Err(err) => {
- panic!("Failed to check migration table existence: {:?}", err);
- }
- }
+ #[cfg(debug_assertions)]
+ println!("Migration {} recorded successfully", name);
+ } else {
+ #[cfg(debug_assertions)]
+ println!("Migration {} already exists", name);
+ }
+ }
+ }
+ Err(err) => {
+ panic!("Failed to check migration table existence: {:?}", err);
+ }
+ }
- Ok(())
+ Ok(())
}
diff --git a/core/src/db/mod.rs b/core/src/db/mod.rs
index c1cbf7961..c76b40580 100644
--- a/core/src/db/mod.rs
+++ b/core/src/db/mod.rs
@@ -4,17 +4,17 @@ pub mod migrate;
#[derive(Error, Debug)]
pub enum DatabaseError {
- #[error("Failed to connect to database")]
- MissingConnection,
- #[error("Unable find current_library in the client config")]
- MalformedConfig,
- #[error("Unable to initialize the Prisma client")]
- ClientError(#[from] prisma::NewClientError),
+ #[error("Failed to connect to database")]
+ MissingConnection,
+ #[error("Unable find current_library in the client config")]
+ MalformedConfig,
+ #[error("Unable to initialize the Prisma client")]
+ ClientError(#[from] prisma::NewClientError),
}
pub async fn create_connection(path: &str) -> Result {
- println!("Creating database connection: {:?}", path);
- let client = prisma::new_client_with_url(&format!("file:{}", &path)).await?;
+ println!("Creating database connection: {:?}", path);
+ let client = prisma::new_client_with_url(&format!("file:{}", &path)).await?;
- Ok(client)
+ Ok(client)
}
diff --git a/core/src/encode/metadata.rs b/core/src/encode/metadata.rs
index 93e782cdd..1d1147600 100644
--- a/core/src/encode/metadata.rs
+++ b/core/src/encode/metadata.rs
@@ -5,132 +5,132 @@ use std::{ffi::OsStr, path::Path};
#[derive(Default, Debug)]
pub struct MediaItem {
- pub created_at: Option,
- pub brand: Option,
- pub model: Option,
- pub duration_seconds: f64,
- pub best_video_stream_index: usize,
- pub best_audio_stream_index: usize,
- pub best_subtitle_stream_index: usize,
- pub steams: Vec,
+ pub created_at: Option,
+ pub brand: Option,
+ pub model: Option,
+ pub duration_seconds: f64,
+ pub best_video_stream_index: usize,
+ pub best_audio_stream_index: usize,
+ pub best_subtitle_stream_index: usize,
+ pub steams: Vec,
}
#[derive(Debug)]
pub struct Stream {
- pub codec: String,
- pub frames: f64,
- pub duration_seconds: f64,
- pub kind: Option,
+ pub codec: String,
+ pub frames: f64,
+ pub duration_seconds: f64,
+ pub kind: Option,
}
#[derive(Debug)]
pub enum StreamKind {
- Video(VideoStream),
- Audio(AudioStream),
+ Video(VideoStream),
+ Audio(AudioStream),
}
#[derive(Debug)]
pub struct VideoStream {
- pub width: u32,
- pub height: u32,
- pub aspect_ratio: String,
- pub format: format::Pixel,
- pub bitrate: usize,
+ pub width: u32,
+ pub height: u32,
+ pub aspect_ratio: String,
+ pub format: format::Pixel,
+ pub bitrate: usize,
}
#[derive(Debug)]
pub struct AudioStream {
- pub channels: u16,
- pub format: format::Sample,
- pub bitrate: usize,
- pub rate: u32,
+ pub channels: u16,
+ pub format: format::Sample,
+ pub bitrate: usize,
+ pub rate: u32,
}
fn extract(iter: &mut Iter, key: &str) -> Option {
- iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
+ iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
}
pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> {
- ffmpeg::init().unwrap();
+ ffmpeg::init().unwrap();
- let mut name = Path::new(path)
- .file_name()
- .and_then(OsStr::to_str)
- .map(ToString::to_string)
- .unwrap_or(String::new());
+ let mut name = Path::new(path)
+ .file_name()
+ .and_then(OsStr::to_str)
+ .map(ToString::to_string)
+ .unwrap_or(String::new());
- // strip to exact potential date length and attempt to parse
- name = name.chars().take(19).collect();
- // specifically OBS uses this format for time, other checks could be added
- let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
+ // strip to exact potential date length and attempt to parse
+ name = name.chars().take(19).collect();
+ // specifically OBS uses this format for time, other checks could be added
+ let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
- match ffmpeg::format::input(&path) {
- Ok(context) => {
- let mut media_item = MediaItem::default();
- let metadata = context.metadata();
- let mut iter = metadata.iter();
+ match ffmpeg::format::input(&path) {
+ Ok(context) => {
+ let mut media_item = MediaItem::default();
+ let metadata = context.metadata();
+ let mut iter = metadata.iter();
- // creation_time is usually the creation date of the file
- media_item.created_at = extract(&mut iter, "creation_time");
- // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
- media_item.created_at = extract(&mut iter, "creationdate");
- // fallback to potential time if exists
- if media_item.created_at.is_none() {
- media_item.created_at = potential_date.map(|d| d.to_string()).ok();
- }
- // origin metadata
- media_item.brand = extract(&mut iter, "major_brand");
- media_item.brand = extract(&mut iter, "make");
- media_item.model = extract(&mut iter, "model");
+ // creation_time is usually the creation date of the file
+ media_item.created_at = extract(&mut iter, "creation_time");
+ // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
+ media_item.created_at = extract(&mut iter, "creationdate");
+ // fallback to potential time if exists
+ if media_item.created_at.is_none() {
+ media_item.created_at = potential_date.map(|d| d.to_string()).ok();
+ }
+ // origin metadata
+ media_item.brand = extract(&mut iter, "major_brand");
+ media_item.brand = extract(&mut iter, "make");
+ media_item.model = extract(&mut iter, "model");
- if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
- media_item.best_video_stream_index = stream.index();
- }
- if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
- media_item.best_audio_stream_index = stream.index();
- }
- if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
- media_item.best_subtitle_stream_index = stream.index();
- }
- media_item.duration_seconds =
- context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
+ if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
+ media_item.best_video_stream_index = stream.index();
+ }
+ if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
+ media_item.best_audio_stream_index = stream.index();
+ }
+ if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
+ media_item.best_subtitle_stream_index = stream.index();
+ }
+ media_item.duration_seconds =
+ context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
- for stream in context.streams() {
- let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
+ for stream in context.streams() {
+ let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
- let mut stream_item = Stream {
- codec: codec.id().name().to_string(),
- frames: stream.frames() as f64,
- duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
- kind: None,
- };
+ let mut stream_item = Stream {
+ codec: codec.id().name().to_string(),
+ frames: stream.frames() as f64,
+ duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
+ kind: None,
+ };
- if codec.medium() == ffmpeg::media::Type::Video {
- if let Ok(video) = codec.decoder().video() {
- stream_item.kind = Some(StreamKind::Video(VideoStream {
- bitrate: video.bit_rate(),
- format: video.format(),
- width: video.width(),
- height: video.height(),
- aspect_ratio: video.aspect_ratio().to_string(),
- }));
- }
- } else if codec.medium() == ffmpeg::media::Type::Audio {
- if let Ok(audio) = codec.decoder().audio() {
- stream_item.kind = Some(StreamKind::Audio(AudioStream {
- channels: audio.channels(),
- bitrate: audio.bit_rate(),
- rate: audio.rate(),
- format: audio.format(),
- }));
- }
- }
- media_item.steams.push(stream_item);
- }
- println!("{:#?}", media_item);
- }
+ if codec.medium() == ffmpeg::media::Type::Video {
+ if let Ok(video) = codec.decoder().video() {
+ stream_item.kind = Some(StreamKind::Video(VideoStream {
+ bitrate: video.bit_rate(),
+ format: video.format(),
+ width: video.width(),
+ height: video.height(),
+ aspect_ratio: video.aspect_ratio().to_string(),
+ }));
+ }
+ } else if codec.medium() == ffmpeg::media::Type::Audio {
+ if let Ok(audio) = codec.decoder().audio() {
+ stream_item.kind = Some(StreamKind::Audio(AudioStream {
+ channels: audio.channels(),
+ bitrate: audio.bit_rate(),
+ rate: audio.rate(),
+ format: audio.format(),
+ }));
+ }
+ }
+ media_item.steams.push(stream_item);
+ }
+ println!("{:#?}", media_item);
+ }
- Err(error) => println!("error: {}", error),
- }
- Ok(())
+ Err(error) => println!("error: {}", error),
+ }
+ Ok(())
}
diff --git a/core/src/encode/thumb.rs b/core/src/encode/thumb.rs
index a82f4ceaa..b8ebd1110 100644
--- a/core/src/encode/thumb.rs
+++ b/core/src/encode/thumb.rs
@@ -1,9 +1,9 @@
use crate::job::jobs::JobReportUpdate;
use crate::node::state;
use crate::{
- job::{jobs::Job, worker::WorkerContext},
- prisma::file_path,
- CoreContext,
+ job::{jobs::Job, worker::WorkerContext},
+ prisma::file_path,
+ CoreContext,
};
use crate::{sys, CoreEvent};
use anyhow::Result;
@@ -15,9 +15,9 @@ use webp::*;
#[derive(Debug, Clone)]
pub struct ThumbnailJob {
- pub location_id: i32,
- pub path: String,
- pub background: bool,
+ pub location_id: i32,
+ pub path: String,
+ pub background: bool,
}
static THUMBNAIL_SIZE_FACTOR: f32 = 0.2;
@@ -26,133 +26,136 @@ pub static THUMBNAIL_CACHE_DIR_NAME: &str = "thumbnails";
#[async_trait::async_trait]
impl Job for ThumbnailJob {
- fn name(&self) -> &'static str {
- "file_identifier"
- }
- async fn run(&self, ctx: WorkerContext) -> Result<()> {
- let config = state::get();
- let core_ctx = ctx.core_ctx.clone();
+ fn name(&self) -> &'static str {
+ "file_identifier"
+ }
+ async fn run(&self, ctx: WorkerContext) -> Result<()> {
+ let config = state::get();
+ let core_ctx = ctx.core_ctx.clone();
- let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
+ let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
- fs::create_dir_all(
- Path::new(&config.data_path)
- .join(THUMBNAIL_CACHE_DIR_NAME)
- .join(format!("{}", self.location_id)),
- )?;
+ fs::create_dir_all(
+ Path::new(&config.data_path)
+ .join(THUMBNAIL_CACHE_DIR_NAME)
+ .join(format!("{}", self.location_id)),
+ )?;
- let root_path = location.path.unwrap();
+ let root_path = location.path.unwrap();
- let image_files = get_images(&core_ctx, self.location_id, &self.path).await?;
+ let image_files = get_images(&core_ctx, self.location_id, &self.path).await?;
- let location_id = location.id.clone();
+ let location_id = location.id.clone();
- println!("Found {:?} files", image_files.len());
+ println!("Found {:?} files", image_files.len());
- let is_background = self.background.clone();
+ let is_background = self.background.clone();
- tokio::task::spawn_blocking(move || {
- ctx.progress(vec![
- JobReportUpdate::TaskCount(image_files.len()),
- JobReportUpdate::Message(format!("Preparing to process {} files", image_files.len())),
- ]);
+ tokio::task::spawn_blocking(move || {
+ ctx.progress(vec![
+ JobReportUpdate::TaskCount(image_files.len()),
+ JobReportUpdate::Message(format!(
+ "Preparing to process {} files",
+ image_files.len()
+ )),
+ ]);
- for (i, image_file) in image_files.iter().enumerate() {
- ctx.progress(vec![JobReportUpdate::Message(format!(
- "Processing {}",
- image_file.materialized_path.clone()
- ))]);
- let path = format!("{}{}", root_path, image_file.materialized_path);
- println!("image_file {:?}", image_file);
+ for (i, image_file) in image_files.iter().enumerate() {
+ ctx.progress(vec![JobReportUpdate::Message(format!(
+ "Processing {}",
+ image_file.materialized_path.clone()
+ ))]);
+ let path = format!("{}{}", root_path, image_file.materialized_path);
+ println!("image_file {:?}", image_file);
- let cas_id = match image_file.file() {
- Ok(i) => i.unwrap().cas_id.clone(),
- Err(_) => todo!(),
- };
+ let cas_id = match image_file.file() {
+ Ok(i) => i.unwrap().cas_id.clone(),
+ Err(_) => todo!(),
+ };
- // Define and write the WebP-encoded file to a given path
- let output_path = Path::new(&config.data_path)
- .join(THUMBNAIL_CACHE_DIR_NAME)
- .join(format!("{}", location_id))
- .join(&cas_id)
- .with_extension("webp");
+ // Define and write the WebP-encoded file to a given path
+ let output_path = Path::new(&config.data_path)
+ .join(THUMBNAIL_CACHE_DIR_NAME)
+ .join(format!("{}", location_id))
+ .join(&cas_id)
+ .with_extension("webp");
- // check if file exists at output path
- if !output_path.exists() {
- println!("writing {:?} to {}", output_path, path);
- generate_thumbnail(&path, &output_path)
- .map_err(|e| {
- println!("error generating thumb {:?}", e);
- })
- .unwrap_or(());
+ // check if file exists at output path
+ if !output_path.exists() {
+ println!("writing {:?} to {}", output_path, path);
+ generate_thumbnail(&path, &output_path)
+ .map_err(|e| {
+ println!("error generating thumb {:?}", e);
+ })
+ .unwrap_or(());
- ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
+ ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
- if !is_background {
- block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }));
- };
- } else {
- println!("Thumb exists, skipping... {}", output_path.display());
- }
- }
- })
- .await?;
+ if !is_background {
+ block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }));
+ };
+ } else {
+ println!("Thumb exists, skipping... {}", output_path.display());
+ }
+ }
+ })
+ .await?;
- Ok(())
- }
+ Ok(())
+ }
}
pub fn generate_thumbnail(file_path: &str, output_path: &PathBuf) -> Result<()> {
- // Using `image` crate, open the included .jpg file
- let img = image::open(file_path)?;
- let (w, h) = img.dimensions();
- // Optionally, resize the existing photo and convert back into DynamicImage
- let img: DynamicImage = image::DynamicImage::ImageRgba8(imageops::resize(
- &img,
- (w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
- (h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
- imageops::FilterType::Triangle,
- ));
- // Create the WebP encoder for the above image
- let encoder: Encoder = Encoder::from_image(&img).map_err(|_| anyhow::anyhow!("jeff"))?;
+ // Using `image` crate, open the included .jpg file
+ let img = image::open(file_path)?;
+ let (w, h) = img.dimensions();
+ // Optionally, resize the existing photo and convert back into DynamicImage
+ let img: DynamicImage = image::DynamicImage::ImageRgba8(imageops::resize(
+ &img,
+ (w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
+ (h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
+ imageops::FilterType::Triangle,
+ ));
+ // Create the WebP encoder for the above image
+ let encoder: Encoder = Encoder::from_image(&img).map_err(|_| anyhow::anyhow!("jeff"))?;
- // Encode the image at a specified quality 0-100
- let webp: WebPMemory = encoder.encode(THUMBNAIL_QUALITY);
+ // Encode the image at a specified quality 0-100
+ let webp: WebPMemory = encoder.encode(THUMBNAIL_QUALITY);
- println!("Writing to {}", output_path.display());
+ println!("Writing to {}", output_path.display());
- std::fs::write(&output_path, &*webp)?;
+ std::fs::write(&output_path, &*webp)?;
- Ok(())
+ Ok(())
}
pub async fn get_images(
- ctx: &CoreContext,
- location_id: i32,
- path: &str,
+ ctx: &CoreContext,
+ location_id: i32,
+ path: &str,
) -> Result> {
- let mut params = vec![
- file_path::location_id::equals(location_id),
- file_path::extension::in_vec(vec![
- "png".to_string(),
- "jpeg".to_string(),
- "jpg".to_string(),
- "gif".to_string(),
- "webp".to_string(),
- ]),
- ];
+ let mut params = vec![
+ file_path::location_id::equals(location_id),
+ file_path::extension::in_vec(vec![
+ "png".to_string(),
+ "jpeg".to_string(),
+ "jpg".to_string(),
+ "gif".to_string(),
+ "webp".to_string(),
+ ]),
+ ];
- if !path.is_empty() {
- params.push(file_path::materialized_path::starts_with(path.to_string()))
- }
+ if !path.is_empty() {
+ params.push(file_path::materialized_path::starts_with(path.to_string()))
+ }
- let image_files = ctx
- .database
- .file_path()
- .find_many(params)
- .with(file_path::file::fetch())
- .exec()
- .await?;
+ let image_files = ctx
+ .database
+ .file_path()
+ .find_many(params)
+ .with(file_path::file::fetch())
+ .exec()
+ .await?;
- Ok(image_files)
+ Ok(image_files)
}
diff --git a/core/src/encode/vthumb.rs b/core/src/encode/vthumb.rs
index e69de29bb..8b1378917 100644
--- a/core/src/encode/vthumb.rs
+++ b/core/src/encode/vthumb.rs
@@ -0,0 +1 @@
+
diff --git a/core/src/encode/vthumbstrip.rs b/core/src/encode/vthumbstrip.rs
index e69de29bb..8b1378917 100644
--- a/core/src/encode/vthumbstrip.rs
+++ b/core/src/encode/vthumbstrip.rs
@@ -0,0 +1 @@
+
diff --git a/core/src/file/cas/checksum.rs b/core/src/file/cas/checksum.rs
index 7b7eedaa3..ea016fd62 100644
--- a/core/src/file/cas/checksum.rs
+++ b/core/src/file/cas/checksum.rs
@@ -16,62 +16,62 @@ static SAMPLE_COUNT: u64 = 4;
static SAMPLE_SIZE: u64 = 10000;
fn read_at(file: &File, offset: u64, size: u64) -> Result> {
- let mut buf = vec![0u8; size as usize];
+ let mut buf = vec![0u8; size as usize];
- #[cfg(target_family = "unix")]
- file.read_exact_at(&mut buf, offset)?;
+ #[cfg(target_family = "unix")]
+ file.read_exact_at(&mut buf, offset)?;
- #[cfg(target_family = "windows")]
- file.seek_read(&mut buf, offset)?;
+ #[cfg(target_family = "windows")]
+ file.seek_read(&mut buf, offset)?;
- Ok(buf)
+ Ok(buf)
}
pub fn generate_cas_id(path: &str, size: u64) -> Result {
- // open file reference
- let file = File::open(path)?;
+ // open file reference
+ let file = File::open(path)?;
- let mut context = Context::new(&SHA256);
+ let mut context = Context::new(&SHA256);
- // include the file size in the checksum
- context.update(&size.to_le_bytes());
+ // include the file size in the checksum
+ context.update(&size.to_le_bytes());
- // if size is small enough, just read the whole thing
- if SAMPLE_COUNT * SAMPLE_SIZE > size {
- let buf = read_at(&file, 0, size.try_into()?)?;
- context.update(&buf);
- } else {
- // loop over samples
- for i in 0..SAMPLE_COUNT {
- let buf = read_at(&file, (size / SAMPLE_COUNT) * i, SAMPLE_SIZE.try_into()?)?;
- context.update(&buf);
- }
- // sample end of file
- let buf = read_at(&file, size - SAMPLE_SIZE, SAMPLE_SIZE.try_into()?)?;
- context.update(&buf);
- }
+ // if size is small enough, just read the whole thing
+ if SAMPLE_COUNT * SAMPLE_SIZE > size {
+ let buf = read_at(&file, 0, size.try_into()?)?;
+ context.update(&buf);
+ } else {
+ // loop over samples
+ for i in 0..SAMPLE_COUNT {
+ let buf = read_at(&file, (size / SAMPLE_COUNT) * i, SAMPLE_SIZE.try_into()?)?;
+ context.update(&buf);
+ }
+ // sample end of file
+ let buf = read_at(&file, size - SAMPLE_SIZE, SAMPLE_SIZE.try_into()?)?;
+ context.update(&buf);
+ }
- let digest = context.finish();
- let hex = HEXLOWER.encode(digest.as_ref());
+ let digest = context.finish();
+ let hex = HEXLOWER.encode(digest.as_ref());
- Ok(hex)
+ Ok(hex)
}
pub fn full_checksum(path: &str) -> Result {
- // read file as buffer and convert to digest
- let mut reader = BufReader::new(File::open(path).unwrap());
- let mut context = Context::new(&SHA256);
- let mut buffer = [0; 1024];
- loop {
- let count = reader.read(&mut buffer)?;
- if count == 0 {
- break;
- }
- context.update(&buffer[..count]);
- }
- let digest = context.finish();
- // create a lowercase hash from
- let hex = HEXLOWER.encode(digest.as_ref());
+ // read file as buffer and convert to digest
+ let mut reader = BufReader::new(File::open(path).unwrap());
+ let mut context = Context::new(&SHA256);
+ let mut buffer = [0; 1024];
+ loop {
+ let count = reader.read(&mut buffer)?;
+ if count == 0 {
+ break;
+ }
+ context.update(&buffer[..count]);
+ }
+ let digest = context.finish();
+ // create a lowercase hash from
+ let hex = HEXLOWER.encode(digest.as_ref());
- Ok(hex)
+ Ok(hex)
}
diff --git a/core/src/file/cas/identifier.rs b/core/src/file/cas/identifier.rs
index 051d171bc..5dbad17f4 100644
--- a/core/src/file/cas/identifier.rs
+++ b/core/src/file/cas/identifier.rs
@@ -2,22 +2,22 @@ use std::fs;
use crate::job::jobs::JobReportUpdate;
use crate::{
- file::FileError,
- job::{jobs::Job, worker::WorkerContext},
- prisma::{file_path},
- CoreContext,
+ file::FileError,
+ job::{jobs::Job, worker::WorkerContext},
+ prisma::file_path,
+ CoreContext,
};
use anyhow::Result;
use futures::executor::block_on;
-use serde::{Deserialize, Serialize};
use prisma_client_rust::Direction;
+use serde::{Deserialize, Serialize};
use super::checksum::generate_cas_id;
#[derive(Deserialize, Serialize, Debug)]
pub struct FileCreated {
- pub id: i32,
- pub cas_id: String,
+ pub id: i32,
+ pub cas_id: String,
}
#[derive(Debug)]
@@ -25,24 +25,24 @@ pub struct FileIdentifierJob;
#[async_trait::async_trait]
impl Job for FileIdentifierJob {
- fn name(&self) -> &'static str {
- "file_identifier"
- }
- async fn run(&self, ctx: WorkerContext) -> Result<()> {
- println!("Identifying files");
- let total_count = count_orphan_file_paths(&ctx.core_ctx).await?;
- println!("Found {} orphan file paths", total_count);
+ fn name(&self) -> &'static str {
+ "file_identifier"
+ }
+ async fn run(&self, ctx: WorkerContext) -> Result<()> {
+ println!("Identifying files");
+ let total_count = count_orphan_file_paths(&ctx.core_ctx).await?;
+ println!("Found {} orphan file paths", total_count);
- let task_count = (total_count as f64 / 100f64).ceil() as usize;
+ let task_count = (total_count as f64 / 100f64).ceil() as usize;
- println!("Will process {} tasks", task_count);
+ println!("Will process {} tasks", task_count);
- // update job with total task count based on orphan file_paths count
- ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
+ // update job with total task count based on orphan file_paths count
+ ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
- let db = ctx.core_ctx.database.clone();
+ let db = ctx.core_ctx.database.clone();
- let ctx = tokio::task::spawn_blocking(move || {
+ let ctx = tokio::task::spawn_blocking(move || {
let mut completed: usize = 0;
let mut cursor: i32 = 1;
@@ -102,69 +102,68 @@ impl Job for FileIdentifierJob {
ctx
}).await?;
- let remaining = count_orphan_file_paths(&ctx.core_ctx).await?;
+ let remaining = count_orphan_file_paths(&ctx.core_ctx).await?;
- println!("Finished with {} files remaining because your code is bad.", remaining);
+ println!(
+ "Finished with {} files remaining because your code is bad.",
+ remaining
+ );
- // if remaining > 0 {
- // ctx.core_ctx.spawn_job(Box::new(FileIdentifierJob));
- // }
+ // if remaining > 0 {
+ // ctx.core_ctx.spawn_job(Box::new(FileIdentifierJob));
+ // }
- Ok(())
- }
+ Ok(())
+ }
}
#[derive(Deserialize, Serialize, Debug)]
struct CountRes {
- count: Option,
+ count: Option,
}
pub async fn count_orphan_file_paths(ctx: &CoreContext) -> Result {
- let db = &ctx.database;
- let files_count = db
- ._query_raw::(
- r#"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE"#,
- )
- .await?;
- Ok(files_count[0].count.unwrap_or(0))
+ let db = &ctx.database;
+ let files_count = db
+ ._query_raw::(
+ r#"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE"#,
+ )
+ .await?;
+ Ok(files_count[0].count.unwrap_or(0))
}
pub async fn get_orphan_file_paths(
- ctx: &CoreContext,
- cursor: i32,
+ ctx: &CoreContext,
+ cursor: i32,
) -> Result, FileError> {
- let db = &ctx.database;
- println!("cursor: {:?}", cursor);
- let files = db
- .file_path()
- .find_many(vec![
- file_path::file_id::equals(None),
- file_path::is_dir::equals(false),
- ])
- .order_by(file_path::id::order(Direction::Asc))
- .cursor(file_path::id::cursor(cursor))
- .take(100)
- .exec()
- .await?;
- Ok(files)
+ let db = &ctx.database;
+ println!("cursor: {:?}", cursor);
+ let files = db
+ .file_path()
+ .find_many(vec![
+ file_path::file_id::equals(None),
+ file_path::is_dir::equals(false),
+ ])
+ .order_by(file_path::id::order(Direction::Asc))
+ .cursor(file_path::id::cursor(cursor))
+ .take(100)
+ .exec()
+ .await?;
+ Ok(files)
}
pub fn prepare_file_values(file_path: &file_path::Data) -> Result {
- let metadata = fs::metadata(&file_path.materialized_path)?;
- let cas_id = {
- if !file_path.is_dir {
- // TODO: remove unwrap
- let mut x = generate_cas_id(&file_path.materialized_path, metadata.len()).unwrap();
- x.truncate(16);
- x
- } else {
- "".to_string()
- }
- };
- // TODO: add all metadata
- Ok(format!(
- "(\"{}\",\"{}\")",
- cas_id,
- "0"
- ))
+ let metadata = fs::metadata(&file_path.materialized_path)?;
+ let cas_id = {
+ if !file_path.is_dir {
+ // TODO: remove unwrap
+ let mut x = generate_cas_id(&file_path.materialized_path, metadata.len()).unwrap();
+ x.truncate(16);
+ x
+ } else {
+ "".to_string()
+ }
+ };
+ // TODO: add all metadata
+ Ok(format!("(\"{}\",\"{}\")", cas_id, "0"))
}
diff --git a/core/src/file/explorer/open.rs b/core/src/file/explorer/open.rs
index 3d4a2d43c..2ff867955 100644
--- a/core/src/file/explorer/open.rs
+++ b/core/src/file/explorer/open.rs
@@ -1,62 +1,62 @@
use crate::{
- encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
- file::{DirectoryWithContents, File, FileError},
- node::state,
- prisma::{file, file_path},
- sys::locations::get_location,
- CoreContext,
+ encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
+ file::{DirectoryWithContents, File, FileError},
+ node::state,
+ prisma::{file, file_path},
+ sys::locations::get_location,
+ CoreContext,
};
use std::path::Path;
pub async fn open_dir(
- ctx: &CoreContext,
- location_id: &i32,
- path: &str,
+ ctx: &CoreContext,
+ location_id: &i32,
+ path: &str,
) -> Result {
- let db = &ctx.database;
- let config = state::get();
+ let db = &ctx.database;
+ let config = state::get();
- // get location
- let location = get_location(ctx, location_id.clone()).await?;
+ // get location
+ let location = get_location(ctx, location_id.clone()).await?;
- let directory = db
- .file_path()
- .find_first(vec![
- file_path::location_id::equals(location.id),
- file_path::materialized_path::equals(path.into()),
- file_path::is_dir::equals(true),
- ])
- .exec()
- .await?
- .ok_or(FileError::DirectoryNotFound(path.to_string()))?;
+ let directory = db
+ .file_path()
+ .find_first(vec![
+ file_path::location_id::equals(location.id),
+ file_path::materialized_path::equals(path.into()),
+ file_path::is_dir::equals(true),
+ ])
+ .exec()
+ .await?
+ .ok_or(FileError::DirectoryNotFound(path.to_string()))?;
- // TODO: this is incorrect, we need to query on file paths
- let files: Vec = db
- .file()
- .find_many(vec![file::paths::some(vec![file_path::parent_id::equals(
- Some(directory.id),
- )])])
- .exec()
- .await?
- .into_iter()
- .map(Into::into)
- .collect();
+ // TODO: this is incorrect, we need to query on file paths
+ let files: Vec = db
+ .file()
+ .find_many(vec![file::paths::some(vec![file_path::parent_id::equals(
+ Some(directory.id),
+ )])])
+ .exec()
+ .await?
+ .into_iter()
+ .map(Into::into)
+ .collect();
- let mut contents: Vec = vec![];
+ let mut contents: Vec = vec![];
- for mut file in files {
- let thumb_path = Path::new(&config.data_path)
- .join(THUMBNAIL_CACHE_DIR_NAME)
- .join(format!("{}", location.id))
- .join(file.cas_id.clone())
- .with_extension("webp");
+ for mut file in files {
+ let thumb_path = Path::new(&config.data_path)
+ .join(THUMBNAIL_CACHE_DIR_NAME)
+ .join(format!("{}", location.id))
+ .join(file.cas_id.clone())
+ .with_extension("webp");
- file.has_thumbnail = thumb_path.exists();
- contents.push(file);
- }
+ file.has_thumbnail = thumb_path.exists();
+ contents.push(file);
+ }
- Ok(DirectoryWithContents {
- directory: directory.into(),
- contents,
- })
+ Ok(DirectoryWithContents {
+ directory: directory.into(),
+ contents,
+ })
}
diff --git a/core/src/file/indexer/mod.rs b/core/src/file/indexer/mod.rs
index 0aa7f14dc..27a423a49 100644
--- a/core/src/file/indexer/mod.rs
+++ b/core/src/file/indexer/mod.rs
@@ -1,6 +1,6 @@
use crate::job::{
- jobs::{Job, JobReportUpdate},
- worker::WorkerContext,
+ jobs::{Job, JobReportUpdate},
+ worker::WorkerContext,
};
use anyhow::Result;
@@ -12,28 +12,28 @@ pub use {pathctx::PathContext, scan::scan_path};
#[derive(Debug)]
pub struct IndexerJob {
- pub path: String,
+ pub path: String,
}
#[async_trait::async_trait]
impl Job for IndexerJob {
- fn name(&self) -> &'static str {
- "indexer"
- }
- async fn run(&self, ctx: WorkerContext) -> Result<()> {
- let core_ctx = ctx.core_ctx.clone();
- scan_path(&core_ctx, self.path.as_str(), move |p| {
- ctx.progress(
- p.iter()
- .map(|p| match p.clone() {
- ScanProgress::ChunkCount(c) => JobReportUpdate::TaskCount(c),
- ScanProgress::SavedChunks(p) => JobReportUpdate::CompletedTaskCount(p),
- ScanProgress::Message(m) => JobReportUpdate::Message(m),
- })
- .collect(),
- )
- })
- .await?;
- Ok(())
- }
+ fn name(&self) -> &'static str {
+ "indexer"
+ }
+ async fn run(&self, ctx: WorkerContext) -> Result<()> {
+ let core_ctx = ctx.core_ctx.clone();
+ scan_path(&core_ctx, self.path.as_str(), move |p| {
+ ctx.progress(
+ p.iter()
+ .map(|p| match p.clone() {
+ ScanProgress::ChunkCount(c) => JobReportUpdate::TaskCount(c),
+ ScanProgress::SavedChunks(p) => JobReportUpdate::CompletedTaskCount(p),
+ ScanProgress::Message(m) => JobReportUpdate::Message(m),
+ })
+ .collect(),
+ )
+ })
+ .await?;
+ Ok(())
+ }
}
diff --git a/core/src/file/indexer/pathctx.rs b/core/src/file/indexer/pathctx.rs
index d329ed24c..aa6a28444 100644
--- a/core/src/file/indexer/pathctx.rs
+++ b/core/src/file/indexer/pathctx.rs
@@ -1,13 +1,13 @@
// PathContext provides the indexer with instruction to handle particular directory structures and identify rich context.
pub struct PathContext {
- // an app specific key "com.github.repo"
- pub key: String,
- pub name: String,
- pub is_dir: bool,
- // possible file extensions for this path
- pub extensions: Vec,
- // sub-paths that must be found
- pub must_contain_sub_paths: Vec,
- // sub-paths that are ignored
- pub always_ignored_sub_paths: Option,
+ // an app specific key "com.github.repo"
+ pub key: String,
+ pub name: String,
+ pub is_dir: bool,
+ // possible file extensions for this path
+ pub extensions: Vec,
+ // sub-paths that must be found
+ pub must_contain_sub_paths: Vec,
+ // sub-paths that are ignored
+ pub always_ignored_sub_paths: Option,
}
diff --git a/core/src/file/indexer/scan.rs b/core/src/file/indexer/scan.rs
index cba7335eb..68cf2e135 100644
--- a/core/src/file/indexer/scan.rs
+++ b/core/src/file/indexer/scan.rs
@@ -10,283 +10,283 @@ use walkdir::{DirEntry, WalkDir};
#[derive(Clone)]
pub enum ScanProgress {
- ChunkCount(usize),
- SavedChunks(usize),
- Message(String),
+ ChunkCount(usize),
+ SavedChunks(usize),
+ Message(String),
}
static BATCH_SIZE: usize = 100;
// creates a vector of valid path buffers from a directory
pub async fn scan_path(
- ctx: &CoreContext,
- path: &str,
- on_progress: impl Fn(Vec) + Send + Sync + 'static,
+ ctx: &CoreContext,
+ path: &str,
+ on_progress: impl Fn(Vec) + Send + Sync + 'static,
) -> Result<()> {
- let db = &ctx.database;
- let path = path.to_string();
+ let db = &ctx.database;
+ let path = path.to_string();
- let location = create_location(&ctx, &path).await?;
+ let location = create_location(&ctx, &path).await?;
- // query db to highers id, so we can increment it for the new files indexed
- #[derive(Deserialize, Serialize, Debug)]
- struct QueryRes {
- id: Option,
- }
- // grab the next id so we can increment in memory for batch inserting
- let first_file_id = match db
- ._query_raw::(r#"SELECT MAX(id) id FROM file_paths"#)
- .await
- {
- Ok(rows) => rows[0].id.unwrap_or(0),
- Err(e) => Err(anyhow!("Error querying for next file id: {}", e))?,
- };
+ // query db for the highest id, so we can increment it for the new files indexed
+ #[derive(Deserialize, Serialize, Debug)]
+ struct QueryRes {
+ id: Option,
+ }
+ // grab the next id so we can increment in memory for batch inserting
+ let first_file_id = match db
+ ._query_raw::(r#"SELECT MAX(id) id FROM file_paths"#)
+ .await
+ {
+ Ok(rows) => rows[0].id.unwrap_or(0),
+ Err(e) => Err(anyhow!("Error querying for next file id: {}", e))?,
+ };
- //check is path is a directory
- if !PathBuf::from(&path).is_dir() {
- return Err(anyhow::anyhow!("{} is not a directory", &path));
- }
- let dir_path = path.clone();
+ // check if path is a directory
+ if !PathBuf::from(&path).is_dir() {
+ return Err(anyhow::anyhow!("{} is not a directory", &path));
+ }
+ let dir_path = path.clone();
- // spawn a dedicated thread to scan the directory for performance
- let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
- // store every valid path discovered
- let mut paths: Vec<(PathBuf, i32, Option, bool)> = Vec::new();
- // store a hashmap of directories to their file ids for fast lookup
- let mut dirs: HashMap = HashMap::new();
- // begin timer for logging purposes
- let scan_start = Instant::now();
+ // spawn a dedicated thread to scan the directory for performance
+ let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
+ // store every valid path discovered
+ let mut paths: Vec<(PathBuf, i32, Option, bool)> = Vec::new();
+ // store a hashmap of directories to their file ids for fast lookup
+ let mut dirs: HashMap = HashMap::new();
+ // begin timer for logging purposes
+ let scan_start = Instant::now();
- let mut next_file_id = first_file_id;
- let mut get_id = || {
- next_file_id += 1;
- next_file_id
- };
- // walk through directory recursively
- for entry in WalkDir::new(&dir_path).into_iter().filter_entry(|dir| {
- let approved =
- !is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
- approved
- }) {
- // extract directory entry or log and continue if failed
- let entry = match entry {
- Ok(entry) => entry,
- Err(e) => {
- println!("Error reading file {}", e);
- continue;
- }
- };
- let path = entry.path();
+ let mut next_file_id = first_file_id;
+ let mut get_id = || {
+ next_file_id += 1;
+ next_file_id
+ };
+ // walk through directory recursively
+ for entry in WalkDir::new(&dir_path).into_iter().filter_entry(|dir| {
+ let approved =
+ !is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
+ approved
+ }) {
+ // extract directory entry or log and continue if failed
+ let entry = match entry {
+ Ok(entry) => entry,
+ Err(e) => {
+ println!("Error reading file {}", e);
+ continue;
+ }
+ };
+ let path = entry.path();
- println!("found: {:?}", path);
+ println!("found: {:?}", path);
- let parent_path = path
- .parent()
- .unwrap_or(Path::new(""))
- .to_str()
- .unwrap_or("");
- let parent_dir_id = dirs.get(&*parent_path);
+ let parent_path = path
+ .parent()
+ .unwrap_or(Path::new(""))
+ .to_str()
+ .unwrap_or("");
+ let parent_dir_id = dirs.get(&*parent_path);
- let str = match path.as_os_str().to_str() {
- Some(str) => str,
- None => {
- println!("Error reading file {}", &path.display());
- continue;
- }
- };
+ let str = match path.as_os_str().to_str() {
+ Some(str) => str,
+ None => {
+ println!("Error reading file {}", &path.display());
+ continue;
+ }
+ };
- on_progress(vec![
- ScanProgress::Message(format!("{}", str)),
- ScanProgress::ChunkCount(paths.len() / BATCH_SIZE),
- ]);
+ on_progress(vec![
+ ScanProgress::Message(format!("{}", str)),
+ ScanProgress::ChunkCount(paths.len() / BATCH_SIZE),
+ ]);
- let file_id = get_id();
- let file_type = entry.file_type();
- let is_dir = file_type.is_dir();
+ let file_id = get_id();
+ let file_type = entry.file_type();
+ let is_dir = file_type.is_dir();
- if is_dir || file_type.is_file() {
- paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir));
- }
+ if is_dir || file_type.is_file() {
+ paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir));
+ }
- if is_dir {
- let _path = match path.to_str() {
- Some(path) => path.to_owned(),
- None => continue,
- };
- dirs.insert(_path, file_id);
- }
- }
- (paths, scan_start, on_progress)
- })
- .await
- .unwrap();
+ if is_dir {
+ let _path = match path.to_str() {
+ Some(path) => path.to_owned(),
+ None => continue,
+ };
+ dirs.insert(_path, file_id);
+ }
+ }
+ (paths, scan_start, on_progress)
+ })
+ .await
+ .unwrap();
- let db_write_start = Instant::now();
- let scan_read_time = scan_start.elapsed();
+ let db_write_start = Instant::now();
+ let scan_read_time = scan_start.elapsed();
- for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() {
- on_progress(vec![
- ScanProgress::SavedChunks(i as usize),
- ScanProgress::Message(format!(
- "Writing {} of {} to library",
- i * chunk.len(),
- paths.len(),
- )),
- ]);
+ for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() {
+ on_progress(vec![
+ ScanProgress::SavedChunks(i as usize),
+ ScanProgress::Message(format!(
+ "Writing {} of {} to library",
+ i * chunk.len(),
+ paths.len(),
+ )),
+ ]);
- // vector to store active models
- let mut files: Vec = Vec::new();
- for (file_path, file_id, parent_dir_id, is_dir) in chunk {
- files.push(
- match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) {
- Ok(file) => file,
- Err(e) => {
- println!("Error creating file model from path {:?}: {}", file_path, e);
- continue;
- }
- },
- );
- }
- let raw_sql = format!(
- r#"
+ // vector to store active models
+ let mut files: Vec = Vec::new();
+ for (file_path, file_id, parent_dir_id, is_dir) in chunk {
+ files.push(
+ match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) {
+ Ok(file) => file,
+ Err(e) => {
+ println!("Error creating file model from path {:?}: {}", file_path, e);
+ continue;
+ }
+ },
+ );
+ }
+ let raw_sql = format!(
+ r#"
INSERT INTO file_paths (id, is_dir, location_id, materialized_path, name, extension, parent_id)
VALUES {}
"#,
- files.join(", ")
- );
- // println!("{}", raw_sql);
- let count = db._execute_raw(&raw_sql).await;
- println!("Inserted {:?} records", count);
- }
- println!(
- "scan of {:?} completed in {:?}. {:?} files found. db write completed in {:?}",
- &path,
- scan_read_time,
- paths.len(),
- db_write_start.elapsed()
- );
- Ok(())
+ files.join(", ")
+ );
+ // println!("{}", raw_sql);
+ let count = db._execute_raw(&raw_sql).await;
+ println!("Inserted {:?} records", count);
+ }
+ println!(
+ "scan of {:?} completed in {:?}. {:?} files found. db write completed in {:?}",
+ &path,
+ scan_read_time,
+ paths.len(),
+ db_write_start.elapsed()
+ );
+ Ok(())
}
// reads a file at a path and creates an ActiveModel with metadata
fn prepare_values(
- file_path: &PathBuf,
- id: i32,
- location: &LocationResource,
- parent_id: &Option,
- is_dir: bool,
+ file_path: &PathBuf,
+ id: i32,
+ location: &LocationResource,
+ parent_id: &Option,
+ is_dir: bool,
) -> Result {
- // let metadata = fs::metadata(&file_path)?;
- let location_path = location.path.as_ref().unwrap().as_str();
- // let size = metadata.len();
- let name;
- let extension;
+ // let metadata = fs::metadata(&file_path)?;
+ let location_path = location.path.as_ref().unwrap().as_str();
+ // let size = metadata.len();
+ let name;
+ let extension;
- // if the 'file_path' is not a directory, then get the extension and name.
+ // if the 'file_path' is not a directory, then get the extension and name.
- // if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names
- // - being interpreted as file extensions
- if is_dir {
- extension = "".to_string();
- name = extract_name(file_path.file_name());
- } else {
- extension = extract_name(file_path.extension());
- name = extract_name(file_path.file_stem());
- }
+ // if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names
+ // - being interpreted as file extensions
+ if is_dir {
+ extension = "".to_string();
+ name = extract_name(file_path.file_name());
+ } else {
+ extension = extract_name(file_path.extension());
+ name = extract_name(file_path.file_stem());
+ }
- let materialized_path = match file_path.to_str() {
- Some(p) => p
- .clone()
- .strip_prefix(&location_path)
- // .and_then(|p| p.strip_suffix(format!("{}{}", name, extension).as_str()))
- .unwrap_or_default(),
- None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())),
- };
+ let materialized_path = match file_path.to_str() {
+ Some(p) => p
+ .clone()
+ .strip_prefix(&location_path)
+ // .and_then(|p| p.strip_suffix(format!("{}{}", name, extension).as_str()))
+ .unwrap_or_default(),
+ None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())),
+ };
- // let cas_id = {
- // if !metadata.is_dir() {
- // // TODO: remove unwrap, skip and make sure to continue loop
- // let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
- // x.truncate(16);
- // x
- // } else {
- // "".to_string()
- // }
- // };
+ // let cas_id = {
+ // if !metadata.is_dir() {
+ // // TODO: remove unwrap, skip and make sure to continue loop
+ // let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
+ // x.truncate(16);
+ // x
+ // } else {
+ // "".to_string()
+ // }
+ // };
- // let date_created: DateTime = metadata.created().unwrap().into();
- // let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
+ // let date_created: DateTime = metadata.created().unwrap().into();
+ // let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
- let values = format!(
- "({}, {}, {}, \"{}\", \"{}\", \"{}\", {})",
- id,
- is_dir,
- location.id,
- materialized_path,
- name,
- extension.to_lowercase(),
- parent_id
- .clone()
- .map(|id| format!("\"{}\"", &id))
- .unwrap_or("NULL".to_string()),
- // parsed_date_created,
- // cas_id
- );
+ let values = format!(
+ "({}, {}, {}, \"{}\", \"{}\", \"{}\", {})",
+ id,
+ is_dir,
+ location.id,
+ materialized_path,
+ name,
+ extension.to_lowercase(),
+ parent_id
+ .clone()
+ .map(|id| format!("\"{}\"", &id))
+ .unwrap_or("NULL".to_string()),
+ // parsed_date_created,
+ // cas_id
+ );
- println!("{}", values);
+ println!("{}", values);
- Ok(values)
+ Ok(values)
}
// extract name from OsStr returned by PathBuff
fn extract_name(os_string: Option<&OsStr>) -> String {
- os_string
- .unwrap_or_default()
- .to_str()
- .unwrap_or_default()
- .to_owned()
+ os_string
+ .unwrap_or_default()
+ .to_str()
+ .unwrap_or_default()
+ .to_owned()
}
fn is_hidden(entry: &DirEntry) -> bool {
- entry
- .file_name()
- .to_str()
- .map(|s| s.starts_with("."))
- .unwrap_or(false)
+ entry
+ .file_name()
+ .to_str()
+ .map(|s| s.starts_with("."))
+ .unwrap_or(false)
}
fn is_library(entry: &DirEntry) -> bool {
- entry
- .path()
- .to_str()
- // make better this is shit
- .map(|s| s.contains("/Library/"))
- .unwrap_or(false)
+ entry
+ .path()
+ .to_str()
+ // TODO: replace this crude substring check with a more robust path rule
+ .map(|s| s.contains("/Library/"))
+ .unwrap_or(false)
}
fn is_node_modules(entry: &DirEntry) -> bool {
- entry
- .file_name()
- .to_str()
- .map(|s| s.contains("node_modules"))
- .unwrap_or(false)
+ entry
+ .file_name()
+ .to_str()
+ .map(|s| s.contains("node_modules"))
+ .unwrap_or(false)
}
fn is_app_bundle(entry: &DirEntry) -> bool {
- let is_dir = entry.metadata().unwrap().is_dir();
- let contains_dot = entry
- .file_name()
- .to_str()
- .map(|s| s.contains(".app") | s.contains(".bundle"))
- .unwrap_or(false);
+ let is_dir = entry.metadata().unwrap().is_dir();
+ let contains_dot = entry
+ .file_name()
+ .to_str()
+ .map(|s| s.contains(".app") | s.contains(".bundle"))
+ .unwrap_or(false);
- let is_app_bundle = is_dir && contains_dot;
- // if is_app_bundle {
- // let path_buff = entry.path();
- // let path = path_buff.to_str().unwrap();
+ let is_app_bundle = is_dir && contains_dot;
+ // if is_app_bundle {
+ // let path_buff = entry.path();
+ // let path = path_buff.to_str().unwrap();
- // self::path(&path, );
- // }
+ // self::path(&path, );
+ // }
- is_app_bundle
+ is_app_bundle
}
diff --git a/core/src/file/mod.rs b/core/src/file/mod.rs
index 7992b49ed..43f612eda 100644
--- a/core/src/file/mod.rs
+++ b/core/src/file/mod.rs
@@ -4,9 +4,9 @@ use thiserror::Error;
use ts_rs::TS;
use crate::{
- crypto::encryption::EncryptionAlgorithm,
- prisma::{self, file, file_path},
- sys::SysError,
+ crypto::encryption::EncryptionAlgorithm,
+ prisma::{self, file, file_path},
+ sys::SysError,
};
pub mod cas;
pub mod explorer;
@@ -17,133 +17,133 @@ pub mod watcher;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct File {
- pub id: i32,
- pub cas_id: String,
- pub integrity_checksum: Option,
- pub size_in_bytes: String,
- pub kind: FileKind,
+ pub id: i32,
+ pub cas_id: String,
+ pub integrity_checksum: Option,
+ pub size_in_bytes: String,
+ pub kind: FileKind,
- pub hidden: bool,
- pub favorite: bool,
- pub important: bool,
- pub has_thumbnail: bool,
- pub has_thumbstrip: bool,
- pub has_video_preview: bool,
- // pub encryption: EncryptionAlgorithm,
- pub ipfs_id: Option,
- pub comment: Option,
+ pub hidden: bool,
+ pub favorite: bool,
+ pub important: bool,
+ pub has_thumbnail: bool,
+ pub has_thumbstrip: bool,
+ pub has_video_preview: bool,
+ // pub encryption: EncryptionAlgorithm,
+ pub ipfs_id: Option,
+ pub comment: Option,
- #[ts(type = "string")]
- pub date_created: chrono::DateTime,
- #[ts(type = "string")]
- pub date_modified: chrono::DateTime,
- #[ts(type = "string")]
- pub date_indexed: chrono::DateTime,
+ #[ts(type = "string")]
+ pub date_created: chrono::DateTime,
+ #[ts(type = "string")]
+ pub date_modified: chrono::DateTime,
+ #[ts(type = "string")]
+ pub date_indexed: chrono::DateTime,
- pub paths: Vec,
- // pub media_data: Option,
- // pub tags: Vec,
- // pub label: Vec,
+ pub paths: Vec,
+ // pub media_data: Option,
+ // pub tags: Vec,
+ // pub label: Vec,
}
// A physical file path
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct FilePath {
- pub id: i32,
- pub is_dir: bool,
- pub location_id: i32,
- pub materialized_path: String,
- pub name: String,
- pub extension: Option,
- pub file_id: Option,
- pub parent_id: Option,
- // pub temp_cas_id: Option,
- pub has_local_thumbnail: bool,
- #[ts(type = "string")]
- pub date_created: chrono::DateTime,
- #[ts(type = "string")]
- pub date_modified: chrono::DateTime,
- #[ts(type = "string")]
- pub date_indexed: chrono::DateTime,
+ pub id: i32,
+ pub is_dir: bool,
+ pub location_id: i32,
+ pub materialized_path: String,
+ pub name: String,
+ pub extension: Option,
+ pub file_id: Option,
+ pub parent_id: Option,
+ // pub temp_cas_id: Option,
+ pub has_local_thumbnail: bool,
+ #[ts(type = "string")]
+ pub date_created: chrono::DateTime,
+ #[ts(type = "string")]
+ pub date_modified: chrono::DateTime,
+ #[ts(type = "string")]
+ pub date_indexed: chrono::DateTime,
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum FileKind {
- Unknown = 0,
- Directory = 1,
- Package = 2,
- Archive = 3,
- Image = 4,
- Video = 5,
- Audio = 6,
- Plaintext = 7,
- Alias = 8,
+ Unknown = 0,
+ Directory = 1,
+ Package = 2,
+ Archive = 3,
+ Image = 4,
+ Video = 5,
+ Audio = 6,
+ Plaintext = 7,
+ Alias = 8,
}
impl Into for file::Data {
- fn into(self) -> File {
- File {
- id: self.id,
- cas_id: self.cas_id,
- integrity_checksum: self.integrity_checksum,
- kind: IntEnum::from_int(self.kind).unwrap(),
- size_in_bytes: self.size_in_bytes.to_string(),
- // encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
- ipfs_id: self.ipfs_id,
- hidden: self.hidden,
- favorite: self.favorite,
- important: self.important,
- has_thumbnail: self.has_thumbnail,
- has_thumbstrip: self.has_thumbstrip,
- has_video_preview: self.has_video_preview,
- comment: self.comment,
- date_created: self.date_created,
- date_modified: self.date_modified,
- date_indexed: self.date_indexed,
- paths: vec![],
- }
- }
+ fn into(self) -> File {
+ File {
+ id: self.id,
+ cas_id: self.cas_id,
+ integrity_checksum: self.integrity_checksum,
+ kind: IntEnum::from_int(self.kind).unwrap(),
+ size_in_bytes: self.size_in_bytes.to_string(),
+ // encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
+ ipfs_id: self.ipfs_id,
+ hidden: self.hidden,
+ favorite: self.favorite,
+ important: self.important,
+ has_thumbnail: self.has_thumbnail,
+ has_thumbstrip: self.has_thumbstrip,
+ has_video_preview: self.has_video_preview,
+ comment: self.comment,
+ date_created: self.date_created,
+ date_modified: self.date_modified,
+ date_indexed: self.date_indexed,
+ paths: vec![],
+ }
+ }
}
impl Into for file_path::Data {
- fn into(self) -> FilePath {
- FilePath {
- id: self.id,
- is_dir: self.is_dir,
- materialized_path: self.materialized_path,
- file_id: self.file_id,
- parent_id: self.parent_id,
- location_id: self.location_id,
- date_indexed: self.date_indexed,
- // permissions: self.permissions,
- has_local_thumbnail: false,
- name: self.name,
- extension: self.extension,
- // temp_cas_id: self.temp_cas_id,
- date_created: self.date_created,
- date_modified: self.date_modified,
- }
- }
+ fn into(self) -> FilePath {
+ FilePath {
+ id: self.id,
+ is_dir: self.is_dir,
+ materialized_path: self.materialized_path,
+ file_id: self.file_id,
+ parent_id: self.parent_id,
+ location_id: self.location_id,
+ date_indexed: self.date_indexed,
+ // permissions: self.permissions,
+ has_local_thumbnail: false,
+ name: self.name,
+ extension: self.extension,
+ // temp_cas_id: self.temp_cas_id,
+ date_created: self.date_created,
+ date_modified: self.date_modified,
+ }
+ }
}
#[derive(Serialize, Deserialize, TS, Debug)]
#[ts(export)]
pub struct DirectoryWithContents {
- pub directory: FilePath,
- pub contents: Vec,
+ pub directory: FilePath,
+ pub contents: Vec,
}
#[derive(Error, Debug)]
pub enum FileError {
- #[error("Directory not found (path: {0:?})")]
- DirectoryNotFound(String),
- #[error("File not found (path: {0:?})")]
- FileNotFound(String),
- #[error("Database error")]
- DatabaseError(#[from] prisma::QueryError),
- #[error("System error")]
- SysError(#[from] SysError),
+ #[error("Directory not found (path: {0:?})")]
+ DirectoryNotFound(String),
+ #[error("File not found (path: {0:?})")]
+ FileNotFound(String),
+ #[error("Database error")]
+ DatabaseError(#[from] prisma::QueryError),
+ #[error("System error")]
+ SysError(#[from] SysError),
}
diff --git a/core/src/file/watcher.rs b/core/src/file/watcher.rs
index 7f74bf721..0b60e86cd 100644
--- a/core/src/file/watcher.rs
+++ b/core/src/file/watcher.rs
@@ -1,25 +1,25 @@
use std::path::Path;
use hotwatch::{
- blocking::{Flow, Hotwatch},
- Event,
+ blocking::{Flow, Hotwatch},
+ Event,
};
pub fn watch_dir(path: &str) {
- let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!");
- hotwatch
- .watch(&path, |event: Event| {
- if let Event::Write(path) = event {
- println!("{:?} changed!", path);
- // Flow::Exit
- Flow::Continue
- } else {
- Flow::Continue
- }
- })
- .expect("failed to watch file!");
+ let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!");
+ hotwatch
+ .watch(&path, |event: Event| {
+ if let Event::Write(path) = event {
+ println!("{:?} changed!", path);
+ // Flow::Exit
+ Flow::Continue
+ } else {
+ Flow::Continue
+ }
+ })
+ .expect("failed to watch file!");
- hotwatch.run();
+ hotwatch.run();
- println!("watching directory {:?}", Path::new(&path));
+ println!("watching directory {:?}", Path::new(&path));
}
diff --git a/core/src/job/jobs.rs b/core/src/job/jobs.rs
index b31264d18..85d0d8969 100644
--- a/core/src/job/jobs.rs
+++ b/core/src/job/jobs.rs
@@ -1,12 +1,12 @@
use super::{
- worker::{Worker, WorkerContext},
- JobError,
+ worker::{Worker, WorkerContext},
+ JobError,
};
use crate::{
- node::state,
- prisma::{job, node},
- sync::{crdt::Replicate, engine::SyncContext},
- CoreContext,
+ node::state,
+ prisma::{job, node},
+ sync::{crdt::Replicate, engine::SyncContext},
+ CoreContext,
};
use anyhow::Result;
use int_enum::IntEnum;
@@ -19,166 +19,164 @@ const MAX_WORKERS: usize = 4;
#[async_trait::async_trait]
pub trait Job: Send + Sync + Debug {
- async fn run(&self, ctx: WorkerContext) -> Result<()>;
- fn name(&self) -> &'static str;
+ async fn run(&self, ctx: WorkerContext) -> Result<()>;
+ fn name(&self) -> &'static str;
}
// jobs struct is maintained by the core
pub struct Jobs {
- job_queue: Vec>,
- // workers are spawned when jobs are picked off the queue
- running_workers: HashMap>>,
+ job_queue: Vec>,
+ // workers are spawned when jobs are picked off the queue
+ running_workers: HashMap>>,
}
impl Jobs {
- pub fn new() -> Self {
- Self {
- job_queue: vec![],
- running_workers: HashMap::new(),
- }
- }
- pub async fn ingest(&mut self, ctx: &CoreContext, job: Box) {
- // create worker to process job
- if self.running_workers.len() < MAX_WORKERS {
- let worker = Worker::new(job);
- let id = worker.id();
+ pub fn new() -> Self {
+ Self {
+ job_queue: vec![],
+ running_workers: HashMap::new(),
+ }
+ }
+ pub async fn ingest(&mut self, ctx: &CoreContext, job: Box) {
+ // create worker to process job
+ if self.running_workers.len() < MAX_WORKERS {
+ let worker = Worker::new(job);
+ let id = worker.id();
- let wrapped_worker = Arc::new(Mutex::new(worker));
+ let wrapped_worker = Arc::new(Mutex::new(worker));
- Worker::spawn(wrapped_worker.clone(), ctx).await;
+ Worker::spawn(wrapped_worker.clone(), ctx).await;
- self.running_workers.insert(id, wrapped_worker);
- } else {
- self.job_queue.push(job);
- }
- }
- pub fn ingest_queue(&mut self, ctx: &CoreContext, job: Box) {
- self.job_queue.push(job);
- }
- pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
- // remove worker from running workers
- self.running_workers.remove(&job_id);
- // continue queue
- let job = self.job_queue.pop();
- if let Some(job) = job {
- self.ingest(ctx, job).await;
- }
- }
- pub async fn get_running(&self) -> Vec {
- let mut ret = vec![];
+ self.running_workers.insert(id, wrapped_worker);
+ } else {
+ self.job_queue.push(job);
+ }
+ }
+ pub fn ingest_queue(&mut self, ctx: &CoreContext, job: Box) {
+ self.job_queue.push(job);
+ }
+ pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
+ // remove worker from running workers
+ self.running_workers.remove(&job_id);
+ // continue queue
+ let job = self.job_queue.pop();
+ if let Some(job) = job {
+ self.ingest(ctx, job).await;
+ }
+ }
+ pub async fn get_running(&self) -> Vec {
+ let mut ret = vec![];
- for worker in self.running_workers.values() {
- let worker = worker.lock().await;
- ret.push(worker.job_report.clone());
- }
- ret
- }
- pub async fn get_history(ctx: &CoreContext) -> Result, JobError> {
- let db = &ctx.database;
- let jobs = db
- .job()
- .find_many(vec![job::status::not(JobStatus::Running.int_value())])
- .exec()
- .await?;
+ for worker in self.running_workers.values() {
+ let worker = worker.lock().await;
+ ret.push(worker.job_report.clone());
+ }
+ ret
+ }
+ pub async fn get_history(ctx: &CoreContext) -> Result, JobError> {
+ let db = &ctx.database;
+ let jobs = db
+ .job()
+ .find_many(vec![job::status::not(JobStatus::Running.int_value())])
+ .exec()
+ .await?;
- Ok(jobs.into_iter().map(|j| j.into()).collect())
- }
+ Ok(jobs.into_iter().map(|j| j.into()).collect())
+ }
}
#[derive(Debug)]
pub enum JobReportUpdate {
- TaskCount(usize),
- CompletedTaskCount(usize),
- Message(String),
- SecondsElapsed(u64),
+ TaskCount(usize),
+ CompletedTaskCount(usize),
+ Message(String),
+ SecondsElapsed(u64),
}
#[derive(Debug, Serialize, Deserialize, TS, Clone)]
#[ts(export)]
pub struct JobReport {
- pub id: String,
- pub name: String,
- // client_id: i32,
- #[ts(type = "string")]
- pub date_created: chrono::DateTime,
- #[ts(type = "string")]
- pub date_modified: chrono::DateTime,
+ pub id: String,
+ pub name: String,
+ // client_id: i32,
+ #[ts(type = "string")]
+ pub date_created: chrono::DateTime,
+ #[ts(type = "string")]
+ pub date_modified: chrono::DateTime,
- pub status: JobStatus,
- pub task_count: i32,
- pub completed_task_count: i32,
+ pub status: JobStatus,
+ pub task_count: i32,
+ pub completed_task_count: i32,
- pub message: String,
- // pub percentage_complete: f64,
- #[ts(type = "string")]
- pub seconds_elapsed: i32,
+ pub message: String,
+ // pub percentage_complete: f64,
+ #[ts(type = "string")]
+ pub seconds_elapsed: i32,
}
// convert database struct into a resource struct
impl Into for job::Data {
- fn into(self) -> JobReport {
- JobReport {
- id: self.id,
- name: self.name,
- // client_id: self.client_id,
- status: JobStatus::from_int(self.status).unwrap(),
- task_count: self.task_count,
- completed_task_count: self.completed_task_count,
- date_created: self.date_created,
- date_modified: self.date_modified,
- message: String::new(),
- seconds_elapsed: self.seconds_elapsed,
- }
- }
+ fn into(self) -> JobReport {
+ JobReport {
+ id: self.id,
+ name: self.name,
+ // client_id: self.client_id,
+ status: JobStatus::from_int(self.status).unwrap(),
+ task_count: self.task_count,
+ completed_task_count: self.completed_task_count,
+ date_created: self.date_created,
+ date_modified: self.date_modified,
+ message: String::new(),
+ seconds_elapsed: self.seconds_elapsed,
+ }
+ }
}
impl JobReport {
- pub fn new(uuid: String, name: String) -> Self {
- Self {
- id: uuid,
- name,
- // client_id: 0,
- date_created: chrono::Utc::now(),
- date_modified: chrono::Utc::now(),
- status: JobStatus::Queued,
- task_count: 0,
- completed_task_count: 0,
- message: String::new(),
- seconds_elapsed: 0,
- }
- }
- pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
- let config = state::get();
- ctx
- .database
- .job()
- .create(
- job::id::set(self.id.clone()),
- job::name::set(self.name.clone()),
- job::action::set(1),
- job::nodes::link(node::id::equals(config.node_id)),
- vec![],
- )
- .exec()
- .await?;
- Ok(())
- }
- pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
- ctx
- .database
- .job()
- .find_unique(job::id::equals(self.id.clone()))
- .update(vec![
- job::status::set(self.status.int_value()),
- job::task_count::set(self.task_count),
- job::completed_task_count::set(self.completed_task_count),
- job::date_modified::set(chrono::Utc::now()),
- job::seconds_elapsed::set(self.seconds_elapsed),
- ])
- .exec()
- .await?;
- Ok(())
- }
+ pub fn new(uuid: String, name: String) -> Self {
+ Self {
+ id: uuid,
+ name,
+ // client_id: 0,
+ date_created: chrono::Utc::now(),
+ date_modified: chrono::Utc::now(),
+ status: JobStatus::Queued,
+ task_count: 0,
+ completed_task_count: 0,
+ message: String::new(),
+ seconds_elapsed: 0,
+ }
+ }
+ pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
+ let config = state::get();
+ ctx.database
+ .job()
+ .create(
+ job::id::set(self.id.clone()),
+ job::name::set(self.name.clone()),
+ job::action::set(1),
+ job::nodes::link(node::id::equals(config.node_id)),
+ vec![],
+ )
+ .exec()
+ .await?;
+ Ok(())
+ }
+ pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
+ ctx.database
+ .job()
+ .find_unique(job::id::equals(self.id.clone()))
+ .update(vec![
+ job::status::set(self.status.int_value()),
+ job::task_count::set(self.task_count),
+ job::completed_task_count::set(self.completed_task_count),
+ job::date_modified::set(chrono::Utc::now()),
+ job::seconds_elapsed::set(self.seconds_elapsed),
+ ])
+ .exec()
+ .await?;
+ Ok(())
+ }
}
#[derive(Clone)]
@@ -186,19 +184,19 @@ pub struct JobReportCreate {}
#[async_trait::async_trait]
impl Replicate for JobReport {
- type Create = JobReportCreate;
+ type Create = JobReportCreate;
- async fn create(_data: Self::Create, _ctx: SyncContext) {}
- async fn delete(_ctx: SyncContext) {}
+ async fn create(_data: Self::Create, _ctx: SyncContext) {}
+ async fn delete(_ctx: SyncContext) {}
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum JobStatus {
- Queued = 0,
- Running = 1,
- Completed = 2,
- Canceled = 3,
- Failed = 4,
+ Queued = 0,
+ Running = 1,
+ Completed = 2,
+ Canceled = 3,
+ Failed = 4,
}
diff --git a/core/src/job/mod.rs b/core/src/job/mod.rs
index 1c93eead2..9f8d6d15f 100644
--- a/core/src/job/mod.rs
+++ b/core/src/job/mod.rs
@@ -8,8 +8,8 @@ pub mod worker;
#[derive(Error, Debug)]
pub enum JobError {
- #[error("Failed to create job (job_id {job_id:?})")]
- CreateFailure { job_id: String },
- #[error("Database error")]
- DatabaseError(#[from] prisma::QueryError),
+ #[error("Failed to create job (job_id {job_id:?})")]
+ CreateFailure { job_id: String },
+ #[error("Database error")]
+ DatabaseError(#[from] prisma::QueryError),
}
diff --git a/core/src/job/worker.rs b/core/src/job/worker.rs
index b21d05f6f..043f6ec39 100644
--- a/core/src/job/worker.rs
+++ b/core/src/job/worker.rs
@@ -2,190 +2,186 @@ use super::jobs::{JobReport, JobReportUpdate, JobStatus};
use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent, Job};
use std::{sync::Arc, time::Duration};
use tokio::{
- sync::{
- mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
- Mutex,
- },
- time::{sleep, Instant},
+ sync::{
+ mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
+ Mutex,
+ },
+ time::{sleep, Instant},
};
// used to update the worker state from inside the worker thread
pub enum WorkerEvent {
- Progressed(Vec),
- Completed,
- Failed,
+ Progressed(Vec),
+ Completed,
+ Failed,
}
enum WorkerState {
- Pending(Box, UnboundedReceiver),
- Running,
+ Pending(Box, UnboundedReceiver),
+ Running,
}
#[derive(Clone)]
pub struct WorkerContext {
- pub uuid: String,
- pub core_ctx: CoreContext,
- pub sender: UnboundedSender,
+ pub uuid: String,
+ pub core_ctx: CoreContext,
+ pub sender: UnboundedSender,
}
impl WorkerContext {
- pub fn progress(&self, updates: Vec) {
- self
- .sender
- .send(WorkerEvent::Progressed(updates))
- .unwrap_or(());
- }
+ pub fn progress(&self, updates: Vec) {
+ self.sender
+ .send(WorkerEvent::Progressed(updates))
+ .unwrap_or(());
+ }
}
// a worker is a dedicated thread that runs a single job
// once the job is complete the worker will exit
pub struct Worker {
- pub job_report: JobReport,
- state: WorkerState,
- worker_sender: UnboundedSender,
+ pub job_report: JobReport,
+ state: WorkerState,
+ worker_sender: UnboundedSender,
}
impl Worker {
- pub fn new(job: Box) -> Self {
- let (worker_sender, worker_receiver) = unbounded_channel();
- let uuid = uuid::Uuid::new_v4().to_string();
- let name = job.name();
+ pub fn new(job: Box) -> Self {
+ let (worker_sender, worker_receiver) = unbounded_channel();
+ let uuid = uuid::Uuid::new_v4().to_string();
+ let name = job.name();
- Self {
- state: WorkerState::Pending(job, worker_receiver),
- job_report: JobReport::new(uuid, name.to_string()),
- worker_sender,
- }
- }
- // spawns a thread and extracts channel sender to communicate with it
- pub async fn spawn(worker: Arc>, ctx: &CoreContext) {
- // we capture the worker receiver channel so state can be updated from inside the worker
- let mut worker_mut = worker.lock().await;
- // extract owned job and receiver from Self
- let (job, worker_receiver) =
- match std::mem::replace(&mut worker_mut.state, WorkerState::Running) {
- WorkerState::Pending(job, worker_receiver) => {
- worker_mut.state = WorkerState::Running;
- (job, worker_receiver)
- }
- WorkerState::Running => unreachable!(),
- };
- let worker_sender = worker_mut.worker_sender.clone();
- let core_ctx = ctx.clone();
+ Self {
+ state: WorkerState::Pending(job, worker_receiver),
+ job_report: JobReport::new(uuid, name.to_string()),
+ worker_sender,
+ }
+ }
+ // spawns a thread and extracts channel sender to communicate with it
+ pub async fn spawn(worker: Arc>, ctx: &CoreContext) {
+ // we capture the worker receiver channel so state can be updated from inside the worker
+ let mut worker_mut = worker.lock().await;
+ // extract owned job and receiver from Self
+ let (job, worker_receiver) =
+ match std::mem::replace(&mut worker_mut.state, WorkerState::Running) {
+ WorkerState::Pending(job, worker_receiver) => {
+ worker_mut.state = WorkerState::Running;
+ (job, worker_receiver)
+ }
+ WorkerState::Running => unreachable!(),
+ };
+ let worker_sender = worker_mut.worker_sender.clone();
+ let core_ctx = ctx.clone();
- worker_mut.job_report.status = JobStatus::Running;
+ worker_mut.job_report.status = JobStatus::Running;
- worker_mut.job_report.create(&ctx).await.unwrap_or(());
+ worker_mut.job_report.create(&ctx).await.unwrap_or(());
- // spawn task to handle receiving events from the worker
- tokio::spawn(Worker::track_progress(
- worker.clone(),
- worker_receiver,
- ctx.clone(),
- ));
+ // spawn task to handle receiving events from the worker
+ tokio::spawn(Worker::track_progress(
+ worker.clone(),
+ worker_receiver,
+ ctx.clone(),
+ ));
- let uuid = worker_mut.job_report.id.clone();
- // spawn task to handle running the job
- tokio::spawn(async move {
- let worker_ctx = WorkerContext {
- uuid,
- core_ctx,
- sender: worker_sender,
- };
- let job_start = Instant::now();
+ let uuid = worker_mut.job_report.id.clone();
+ // spawn task to handle running the job
+ tokio::spawn(async move {
+ let worker_ctx = WorkerContext {
+ uuid,
+ core_ctx,
+ sender: worker_sender,
+ };
+ let job_start = Instant::now();
- // track time
- let sender = worker_ctx.sender.clone();
- tokio::spawn(async move {
- loop {
- let elapsed = job_start.elapsed().as_secs();
- sender
- .send(WorkerEvent::Progressed(vec![
- JobReportUpdate::SecondsElapsed(elapsed),
- ]))
- .unwrap_or(());
- sleep(Duration::from_millis(1000)).await;
- }
- });
+ // track time
+ let sender = worker_ctx.sender.clone();
+ tokio::spawn(async move {
+ loop {
+ let elapsed = job_start.elapsed().as_secs();
+ sender
+ .send(WorkerEvent::Progressed(vec![
+ JobReportUpdate::SecondsElapsed(elapsed),
+ ]))
+ .unwrap_or(());
+ sleep(Duration::from_millis(1000)).await;
+ }
+ });
- let result = job.run(worker_ctx.clone()).await;
+ let result = job.run(worker_ctx.clone()).await;
- if let Err(e) = result {
- println!("job failed {:?}", e);
- worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
- } else {
- // handle completion
- worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
- }
- worker_ctx
- .core_ctx
- .internal_sender
- .send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
- .unwrap_or(());
- });
- }
+ if let Err(e) = result {
+ println!("job failed {:?}", e);
+ worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
+ } else {
+ // handle completion
+ worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
+ }
+ worker_ctx
+ .core_ctx
+ .internal_sender
+ .send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
+ .unwrap_or(());
+ });
+ }
- pub fn id(&self) -> String {
- self.job_report.id.to_owned()
- }
+ pub fn id(&self) -> String {
+ self.job_report.id.to_owned()
+ }
- async fn track_progress(
- worker: Arc>,
- mut channel: UnboundedReceiver,
- ctx: CoreContext,
- ) {
- while let Some(command) = channel.recv().await {
- let mut worker = worker.lock().await;
+ async fn track_progress(
+ worker: Arc>,
+ mut channel: UnboundedReceiver,
+ ctx: CoreContext,
+ ) {
+ while let Some(command) = channel.recv().await {
+ let mut worker = worker.lock().await;
- match command {
- WorkerEvent::Progressed(changes) => {
- // protect against updates if job is not running
- if worker.job_report.status != JobStatus::Running {
- continue;
- };
- for change in changes {
- match change {
- JobReportUpdate::TaskCount(task_count) => {
- worker.job_report.task_count = task_count as i32;
- }
- JobReportUpdate::CompletedTaskCount(completed_task_count) => {
- worker.job_report.completed_task_count = completed_task_count as i32;
- }
- JobReportUpdate::Message(message) => {
- worker.job_report.message = message;
- }
- JobReportUpdate::SecondsElapsed(seconds) => {
- worker.job_report.seconds_elapsed = seconds as i32;
- }
- }
- }
- ctx
- .emit(CoreEvent::InvalidateQueryDebounced(
- ClientQuery::JobGetRunning,
- ))
- .await;
- }
- WorkerEvent::Completed => {
- worker.job_report.status = JobStatus::Completed;
- worker.job_report.update(&ctx).await.unwrap_or(());
+ match command {
+ WorkerEvent::Progressed(changes) => {
+ // protect against updates if job is not running
+ if worker.job_report.status != JobStatus::Running {
+ continue;
+ };
+ for change in changes {
+ match change {
+ JobReportUpdate::TaskCount(task_count) => {
+ worker.job_report.task_count = task_count as i32;
+ }
+ JobReportUpdate::CompletedTaskCount(completed_task_count) => {
+ worker.job_report.completed_task_count =
+ completed_task_count as i32;
+ }
+ JobReportUpdate::Message(message) => {
+ worker.job_report.message = message;
+ }
+ JobReportUpdate::SecondsElapsed(seconds) => {
+ worker.job_report.seconds_elapsed = seconds as i32;
+ }
+ }
+ }
+ ctx.emit(CoreEvent::InvalidateQueryDebounced(
+ ClientQuery::JobGetRunning,
+ ))
+ .await;
+ }
+ WorkerEvent::Completed => {
+ worker.job_report.status = JobStatus::Completed;
+ worker.job_report.update(&ctx).await.unwrap_or(());
- ctx
- .emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
- .await;
- ctx
- .emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
- .await;
- break;
- }
- WorkerEvent::Failed => {
- worker.job_report.status = JobStatus::Failed;
- worker.job_report.update(&ctx).await.unwrap_or(());
+ ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
+ .await;
+ ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
+ .await;
+ break;
+ }
+ WorkerEvent::Failed => {
+ worker.job_report.status = JobStatus::Failed;
+ worker.job_report.update(&ctx).await.unwrap_or(());
- ctx
- .emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
- .await;
- break;
- }
- }
- }
- }
+ ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
+ .await;
+ break;
+ }
+ }
+ }
+ }
}
diff --git a/core/src/lib.rs b/core/src/lib.rs
index 04b1fff8f..4b7489659 100644
--- a/core/src/lib.rs
+++ b/core/src/lib.rs
@@ -1,6 +1,6 @@
use crate::{
- file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
- node::state::NodeState,
+ file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
+ node::state::NodeState,
};
use job::jobs::{Job, JobReport, Jobs};
use prisma::PrismaClient;
@@ -8,8 +8,8 @@ use serde::{Deserialize, Serialize};
use std::{fs, sync::Arc};
use thiserror::Error;
use tokio::sync::{
- mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender},
- oneshot,
+ mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender},
+ oneshot,
};
use ts_rs::TS;
@@ -34,308 +34,310 @@ pub mod util;
// a wrapper around external input with a returning sender channel for core to respond
#[derive(Debug)]
pub struct ReturnableMessage> {
- data: D,
- return_sender: oneshot::Sender,
+ data: D,
+ return_sender: oneshot::Sender,
}
// core controller is passed to the client to communicate with the core which runs in a dedicated thread
pub struct CoreController {
- query_sender: UnboundedSender>,
- command_sender: UnboundedSender>,
+ query_sender: UnboundedSender>,
+ command_sender: UnboundedSender>,
}
impl CoreController {
- pub async fn query(&self, query: ClientQuery) -> Result {
- // a one time use channel to send and await a response
- let (sender, recv) = oneshot::channel();
- self
- .query_sender
- .send(ReturnableMessage {
- data: query,
- return_sender: sender,
- })
- .unwrap_or(());
- // wait for response and return
- recv.await.unwrap_or(Err(CoreError::QueryError))
- }
+ pub async fn query(&self, query: ClientQuery) -> Result {
+ // a one time use channel to send and await a response
+ let (sender, recv) = oneshot::channel();
+ self.query_sender
+ .send(ReturnableMessage {
+ data: query,
+ return_sender: sender,
+ })
+ .unwrap_or(());
+ // wait for response and return
+ recv.await.unwrap_or(Err(CoreError::QueryError))
+ }
- pub async fn command(&self, command: ClientCommand) -> Result {
- let (sender, recv) = oneshot::channel();
- self
- .command_sender
- .send(ReturnableMessage {
- data: command,
- return_sender: sender,
- })
- .unwrap_or(());
+ pub async fn command(&self, command: ClientCommand) -> Result {
+ let (sender, recv) = oneshot::channel();
+ self.command_sender
+ .send(ReturnableMessage {
+ data: command,
+ return_sender: sender,
+ })
+ .unwrap_or(());
- recv.await.unwrap()
- }
+ recv.await.unwrap()
+ }
}
#[derive(Debug)]
pub enum InternalEvent {
- JobIngest(Box),
- JobQueue(Box),
- JobComplete(String),
+ JobIngest(Box),
+ JobQueue(Box),
+ JobComplete(String),
}
#[derive(Clone)]
pub struct CoreContext {
- pub database: Arc,
- pub event_sender: mpsc::Sender,
- pub internal_sender: UnboundedSender,
+ pub database: Arc,
+ pub event_sender: mpsc::Sender,
+ pub internal_sender: UnboundedSender,
}
impl CoreContext {
- pub fn spawn_job(&self, job: Box) {
- self
- .internal_sender
- .send(InternalEvent::JobIngest(job))
- .unwrap_or_else(|e| {
- println!("Failed to spawn job. {:?}", e);
- });
- }
- pub fn queue_job(&self, job: Box) {
- self
- .internal_sender
- .send(InternalEvent::JobIngest(job))
- .unwrap_or_else(|e| {
- println!("Failed to queue job. {:?}", e);
- });
- }
- pub async fn emit(&self, event: CoreEvent) {
- self.event_sender.send(event).await.unwrap_or_else(|e| {
- println!("Failed to emit event. {:?}", e);
- });
- }
+ pub fn spawn_job(&self, job: Box) {
+ self.internal_sender
+ .send(InternalEvent::JobIngest(job))
+ .unwrap_or_else(|e| {
+ println!("Failed to spawn job. {:?}", e);
+ });
+ }
+ pub fn queue_job(&self, job: Box) {
+ self.internal_sender
+ .send(InternalEvent::JobIngest(job))
+ .unwrap_or_else(|e| {
+ println!("Failed to queue job. {:?}", e);
+ });
+ }
+ pub async fn emit(&self, event: CoreEvent) {
+ self.event_sender.send(event).await.unwrap_or_else(|e| {
+ println!("Failed to emit event. {:?}", e);
+ });
+ }
}
pub struct Node {
- state: NodeState,
- jobs: job::jobs::Jobs,
- database: Arc,
- // filetype_registry: library::TypeRegistry,
- // extension_registry: library::ExtensionRegistry,
+ state: NodeState,
+ jobs: job::jobs::Jobs,
+ database: Arc,
+ // filetype_registry: library::TypeRegistry,
+ // extension_registry: library::ExtensionRegistry,
- // global messaging channels
- query_channel: (
- UnboundedSender>,
- UnboundedReceiver>,
- ),
- command_channel: (
- UnboundedSender>,
- UnboundedReceiver>,
- ),
- event_sender: mpsc::Sender,
+ // global messaging channels
+ query_channel: (
+ UnboundedSender>,
+ UnboundedReceiver>,
+ ),
+ command_channel: (
+ UnboundedSender>,
+ UnboundedReceiver>,
+ ),
+ event_sender: mpsc::Sender,
- // a channel for child threads to send events back to the core
- internal_channel: (
- UnboundedSender,
- UnboundedReceiver,
- ),
+ // a channel for child threads to send events back to the core
+ internal_channel: (
+ UnboundedSender,
+ UnboundedReceiver,
+ ),
}
impl Node {
- // create new instance of node, run startup tasks
- pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver) {
- let (event_sender, event_recv) = mpsc::channel(100);
+ // create new instance of node, run startup tasks
+ pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver) {
+ let (event_sender, event_recv) = mpsc::channel(100);
- data_dir = data_dir.join("spacedrive");
- let data_dir = data_dir.to_str().unwrap();
- // create data directory if it doesn't exist
- fs::create_dir_all(&data_dir).unwrap();
- // prepare basic client state
- let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
- // load from disk
- state
- .read_disk()
- .unwrap_or(println!("Error: No node state found, creating new one..."));
+ data_dir = data_dir.join("spacedrive");
+ let data_dir = data_dir.to_str().unwrap();
+ // create data directory if it doesn't exist
+ fs::create_dir_all(&data_dir).unwrap();
+ // prepare basic client state
+ let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
+ // load from disk
+ state
+ .read_disk()
+ .unwrap_or(println!("Error: No node state found, creating new one..."));
- state.save();
+ state.save();
- println!("Node State: {:?}", state);
+ println!("Node State: {:?}", state);
- // connect to default library
- let database = Arc::new(
- db::create_connection(&get_library_path(&data_dir))
- .await
- .unwrap(),
- );
+ // connect to default library
+ let database = Arc::new(
+ db::create_connection(&get_library_path(&data_dir))
+ .await
+ .unwrap(),
+ );
- let internal_channel = unbounded_channel::();
+ let internal_channel = unbounded_channel::();
- let node = Node {
- state,
- query_channel: unbounded_channel(),
- command_channel: unbounded_channel(),
- jobs: Jobs::new(),
- event_sender,
- database,
- internal_channel,
- };
+ let node = Node {
+ state,
+ query_channel: unbounded_channel(),
+ command_channel: unbounded_channel(),
+ jobs: Jobs::new(),
+ event_sender,
+ database,
+ internal_channel,
+ };
- #[cfg(feature = "p2p")]
- tokio::spawn(async move {
- p2p::listener::listen(None).await.unwrap_or(());
- });
+ #[cfg(feature = "p2p")]
+ tokio::spawn(async move {
+ p2p::listener::listen(None).await.unwrap_or(());
+ });
- (node, event_recv)
- }
+ (node, event_recv)
+ }
- pub fn get_context(&self) -> CoreContext {
- CoreContext {
- database: self.database.clone(),
- event_sender: self.event_sender.clone(),
- internal_sender: self.internal_channel.0.clone(),
- }
- }
+ pub fn get_context(&self) -> CoreContext {
+ CoreContext {
+ database: self.database.clone(),
+ event_sender: self.event_sender.clone(),
+ internal_sender: self.internal_channel.0.clone(),
+ }
+ }
- pub fn get_controller(&self) -> CoreController {
- CoreController {
- query_sender: self.query_channel.0.clone(),
- command_sender: self.command_channel.0.clone(),
- }
- }
+ pub fn get_controller(&self) -> CoreController {
+ CoreController {
+ query_sender: self.query_channel.0.clone(),
+ command_sender: self.command_channel.0.clone(),
+ }
+ }
- pub async fn start(&mut self) {
- let ctx = self.get_context();
- loop {
- // listen on global messaging channels for incoming messages
- tokio::select! {
- Some(msg) = self.query_channel.1.recv() => {
- let res = self.exec_query(msg.data).await;
- msg.return_sender.send(res).unwrap_or(());
- }
- Some(msg) = self.command_channel.1.recv() => {
- let res = self.exec_command(msg.data).await;
- msg.return_sender.send(res).unwrap_or(());
- }
- Some(event) = self.internal_channel.1.recv() => {
- match event {
- InternalEvent::JobIngest(job) => {
- self.jobs.ingest(&ctx, job).await;
- },
- InternalEvent::JobQueue(job) => {
- self.jobs.ingest_queue(&ctx, job);
- },
- InternalEvent::JobComplete(id) => {
- self.jobs.complete(&ctx, id).await;
- },
- }
- }
- }
- }
- }
- // load library database + initialize client with db
- pub async fn initializer(&self) {
- println!("Initializing...");
- let ctx = self.get_context();
+ pub async fn start(&mut self) {
+ let ctx = self.get_context();
+ loop {
+ // listen on global messaging channels for incoming messages
+ tokio::select! {
+ Some(msg) = self.query_channel.1.recv() => {
+ let res = self.exec_query(msg.data).await;
+ msg.return_sender.send(res).unwrap_or(());
+ }
+ Some(msg) = self.command_channel.1.recv() => {
+ let res = self.exec_command(msg.data).await;
+ msg.return_sender.send(res).unwrap_or(());
+ }
+ Some(event) = self.internal_channel.1.recv() => {
+ match event {
+ InternalEvent::JobIngest(job) => {
+ self.jobs.ingest(&ctx, job).await;
+ },
+ InternalEvent::JobQueue(job) => {
+ self.jobs.ingest_queue(&ctx, job);
+ },
+ InternalEvent::JobComplete(id) => {
+ self.jobs.complete(&ctx, id).await;
+ },
+ }
+ }
+ }
+ }
+ }
+ // load library database + initialize client with db
+ pub async fn initializer(&self) {
+ println!("Initializing...");
+ let ctx = self.get_context();
- if self.state.libraries.len() == 0 {
- match library::loader::create(&ctx, None).await {
- Ok(library) => println!("Created new library: {:?}", library),
- Err(e) => println!("Error creating library: {:?}", e),
- }
- } else {
- for library in self.state.libraries.iter() {
- // init database for library
- match library::loader::load(&ctx, &library.library_path, &library.library_uuid).await {
- Ok(library) => println!("Loaded library: {:?}", library),
- Err(e) => println!("Error loading library: {:?}", e),
- }
- }
- }
- // init node data within library
- match node::LibraryNode::create(&self).await {
- Ok(_) => println!("Spacedrive online"),
- Err(e) => println!("Error initializing node: {:?}", e),
- };
- }
+ if self.state.libraries.len() == 0 {
+ match library::loader::create(&ctx, None).await {
+ Ok(library) => println!("Created new library: {:?}", library),
+ Err(e) => println!("Error creating library: {:?}", e),
+ }
+ } else {
+ for library in self.state.libraries.iter() {
+ // init database for library
+ match library::loader::load(&ctx, &library.library_path, &library.library_uuid)
+ .await
+ {
+ Ok(library) => println!("Loaded library: {:?}", library),
+ Err(e) => println!("Error loading library: {:?}", e),
+ }
+ }
+ }
+ // init node data within library
+ match node::LibraryNode::create(&self).await {
+ Ok(_) => println!("Spacedrive online"),
+ Err(e) => println!("Error initializing node: {:?}", e),
+ };
+ }
- async fn exec_command(&mut self, cmd: ClientCommand) -> Result {
- println!("Core command: {:?}", cmd);
- let ctx = self.get_context();
- Ok(match cmd {
- // CRUD for locations
- ClientCommand::LocCreate { path } => {
- let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
- ctx.queue_job(Box::new(FileIdentifierJob));
- CoreResponse::LocCreate(loc)
- }
- ClientCommand::LocUpdate { id: _, name: _ } => todo!(),
- ClientCommand::LocDelete { id: _ } => todo!(),
- // CRUD for files
- ClientCommand::FileRead { id: _ } => todo!(),
- // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
- ClientCommand::FileDelete { id: _ } => todo!(),
- // CRUD for tags
- ClientCommand::TagCreate { name: _, color: _ } => todo!(),
- ClientCommand::TagAssign {
- file_id: _,
- tag_id: _,
- } => todo!(),
- ClientCommand::TagDelete { id: _ } => todo!(),
- // CRUD for libraries
- ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
- ClientCommand::LibDelete { id: _ } => todo!(),
- ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
- ClientCommand::GenerateThumbsForLocation { id, path } => {
- ctx.spawn_job(Box::new(ThumbnailJob {
- location_id: id,
- path,
- background: false, // fix
- }));
- CoreResponse::Success(())
- }
- // ClientCommand::PurgeDatabase => {
- // println!("Purging database...");
- // fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
- // CoreResponse::Success(())
- // }
- ClientCommand::IdentifyUniqueFiles => {
- ctx.spawn_job(Box::new(FileIdentifierJob));
- CoreResponse::Success(())
- }
- })
- }
+ async fn exec_command(&mut self, cmd: ClientCommand) -> Result {
+ println!("Core command: {:?}", cmd);
+ let ctx = self.get_context();
+ Ok(match cmd {
+ // CRUD for locations
+ ClientCommand::LocCreate { path } => {
+ let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
+ ctx.queue_job(Box::new(FileIdentifierJob));
+ CoreResponse::LocCreate(loc)
+ }
+ ClientCommand::LocUpdate { id: _, name: _ } => todo!(),
+ ClientCommand::LocDelete { id: _ } => todo!(),
+ // CRUD for files
+ ClientCommand::FileRead { id: _ } => todo!(),
+ // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
+ ClientCommand::FileDelete { id: _ } => todo!(),
+ // CRUD for tags
+ ClientCommand::TagCreate { name: _, color: _ } => todo!(),
+ ClientCommand::TagAssign {
+ file_id: _,
+ tag_id: _,
+ } => todo!(),
+ ClientCommand::TagDelete { id: _ } => todo!(),
+ // CRUD for libraries
+ ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
+ ClientCommand::LibDelete { id: _ } => todo!(),
+ ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
+ ClientCommand::GenerateThumbsForLocation { id, path } => {
+ ctx.spawn_job(Box::new(ThumbnailJob {
+ location_id: id,
+ path,
+ background: false, // fix
+ }));
+ CoreResponse::Success(())
+ }
+ // ClientCommand::PurgeDatabase => {
+ // println!("Purging database...");
+ // fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
+ // CoreResponse::Success(())
+ // }
+ ClientCommand::IdentifyUniqueFiles => {
+ ctx.spawn_job(Box::new(FileIdentifierJob));
+ CoreResponse::Success(())
+ }
+ })
+ }
- // query sources of data
- async fn exec_query(&self, query: ClientQuery) -> Result {
- #[cfg(fdebug_assertions)]
- println!("Core query: {:?}", query);
- let ctx = self.get_context();
- Ok(match query {
- // return the client state from memory
- ClientQuery::ClientGetState => CoreResponse::ClientGetState(self.state.clone()),
- // get system volumes without saving to library
- ClientQuery::SysGetVolumes => {
- CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?)
- }
- ClientQuery::SysGetLocations => {
- CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?)
- }
- // get location from library
- ClientQuery::SysGetLocation { id } => {
- CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?)
- }
- // return contents of a directory for the explorer
- ClientQuery::LibGetExplorerDir {
- path,
- location_id,
- limit: _,
- } => CoreResponse::LibGetExplorerDir(
- file::explorer::open::open_dir(&ctx, &location_id, &path).await?,
- ),
- ClientQuery::LibGetTags => todo!(),
- ClientQuery::JobGetRunning => CoreResponse::JobGetRunning(self.jobs.get_running().await),
- ClientQuery::JobGetHistory => CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?),
- ClientQuery::GetLibraryStatistics => {
- CoreResponse::GetLibraryStatistics(library::statistics::Statistics::calculate(&ctx).await?)
- }
- ClientQuery::GetNodes => todo!(),
- })
- }
+ // query sources of data
+ async fn exec_query(&self, query: ClientQuery) -> Result {
+ #[cfg(debug_assertions)]
+ println!("Core query: {:?}", query);
+ let ctx = self.get_context();
+ Ok(match query {
+ // return the client state from memory
+ ClientQuery::ClientGetState => CoreResponse::ClientGetState(self.state.clone()),
+ // get system volumes without saving to library
+ ClientQuery::SysGetVolumes => {
+ CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?)
+ }
+ ClientQuery::SysGetLocations => {
+ CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?)
+ }
+ // get location from library
+ ClientQuery::SysGetLocation { id } => {
+ CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?)
+ }
+ // return contents of a directory for the explorer
+ ClientQuery::LibGetExplorerDir {
+ path,
+ location_id,
+ limit: _,
+ } => CoreResponse::LibGetExplorerDir(
+ file::explorer::open::open_dir(&ctx, &location_id, &path).await?,
+ ),
+ ClientQuery::LibGetTags => todo!(),
+ ClientQuery::JobGetRunning => {
+ CoreResponse::JobGetRunning(self.jobs.get_running().await)
+ }
+ ClientQuery::JobGetHistory => {
+ CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?)
+ }
+ ClientQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics(
+ library::statistics::Statistics::calculate(&ctx).await?,
+ ),
+ ClientQuery::GetNodes => todo!(),
+ })
+ }
}
// represents an event this library can emit
@@ -343,26 +345,26 @@ impl Node {
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientCommand {
- // Files
- FileRead { id: i32 },
- // FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
- FileDelete { id: i32 },
- // Library
- LibDelete { id: i32 },
- // Tags
- TagCreate { name: String, color: String },
- TagUpdate { name: String, color: String },
- TagAssign { file_id: i32, tag_id: i32 },
- TagDelete { id: i32 },
- // Locations
- LocCreate { path: String },
- LocUpdate { id: i32, name: Option },
- LocDelete { id: i32 },
- // System
- SysVolumeUnmount { id: i32 },
- GenerateThumbsForLocation { id: i32, path: String },
- // PurgeDatabase,
- IdentifyUniqueFiles,
+ // Files
+ FileRead { id: i32 },
+ // FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
+ FileDelete { id: i32 },
+ // Library
+ LibDelete { id: i32 },
+ // Tags
+ TagCreate { name: String, color: String },
+ TagUpdate { name: String, color: String },
+ TagAssign { file_id: i32, tag_id: i32 },
+ TagDelete { id: i32 },
+ // Locations
+ LocCreate { path: String },
+ LocUpdate { id: i32, name: Option },
+ LocDelete { id: i32 },
+ // System
+ SysVolumeUnmount { id: i32 },
+ GenerateThumbsForLocation { id: i32, path: String },
+ // PurgeDatabase,
+ IdentifyUniqueFiles,
}
// represents an event this library can emit
@@ -370,22 +372,22 @@ pub enum ClientCommand {
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientQuery {
- ClientGetState,
- SysGetVolumes,
- LibGetTags,
- JobGetRunning,
- JobGetHistory,
- SysGetLocations,
- SysGetLocation {
- id: i32,
- },
- LibGetExplorerDir {
- location_id: i32,
- path: String,
- limit: i32,
- },
- GetLibraryStatistics,
- GetNodes,
+ ClientGetState,
+ SysGetVolumes,
+ LibGetTags,
+ JobGetRunning,
+ JobGetHistory,
+ SysGetLocations,
+ SysGetLocation {
+ id: i32,
+ },
+ LibGetExplorerDir {
+ location_id: i32,
+ path: String,
+ limit: i32,
+ },
+ GetLibraryStatistics,
+ GetNodes,
}
// represents an event this library can emit
@@ -393,54 +395,54 @@ pub enum ClientQuery {
#[serde(tag = "key", content = "data")]
#[ts(export)]
pub enum CoreEvent {
- // most all events should be once of these two
- InvalidateQuery(ClientQuery),
- InvalidateQueryDebounced(ClientQuery),
- InvalidateResource(CoreResource),
- NewThumbnail { cas_id: String },
- Log { message: String },
- DatabaseDisconnected { reason: Option },
+ // almost all events should be one of these two
+ InvalidateQuery(ClientQuery),
+ InvalidateQueryDebounced(ClientQuery),
+ InvalidateResource(CoreResource),
+ NewThumbnail { cas_id: String },
+ Log { message: String },
+ DatabaseDisconnected { reason: Option },
}
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "data")]
#[ts(export)]
pub enum CoreResponse {
- Success(()),
- SysGetVolumes(Vec),
- SysGetLocation(sys::locations::LocationResource),
- SysGetLocations(Vec),
- LibGetExplorerDir(file::DirectoryWithContents),
- ClientGetState(NodeState),
- LocCreate(sys::locations::LocationResource),
- JobGetRunning(Vec),
- JobGetHistory(Vec),
- GetLibraryStatistics(library::statistics::Statistics),
+ Success(()),
+ SysGetVolumes(Vec),
+ SysGetLocation(sys::locations::LocationResource),
+ SysGetLocations(Vec),
+ LibGetExplorerDir(file::DirectoryWithContents),
+ ClientGetState(NodeState),
+ LocCreate(sys::locations::LocationResource),
+ JobGetRunning(Vec),
+ JobGetHistory(Vec),
+ GetLibraryStatistics(library::statistics::Statistics),
}
#[derive(Error, Debug)]
pub enum CoreError {
- #[error("Query error")]
- QueryError,
- #[error("System error")]
- SysError(#[from] sys::SysError),
- #[error("File error")]
- FileError(#[from] file::FileError),
- #[error("Job error")]
- JobError(#[from] job::JobError),
- #[error("Database error")]
- DatabaseError(#[from] prisma::QueryError),
- #[error("Database error")]
- LibraryError(#[from] library::LibraryError),
+ #[error("Query error")]
+ QueryError,
+ #[error("System error")]
+ SysError(#[from] sys::SysError),
+ #[error("File error")]
+ FileError(#[from] file::FileError),
+ #[error("Job error")]
+ JobError(#[from] job::JobError),
+ #[error("Database error")]
+ DatabaseError(#[from] prisma::QueryError),
+ #[error("Database error")]
+ LibraryError(#[from] library::LibraryError),
}
#[derive(Serialize, Deserialize, Debug, TS)]
#[ts(export)]
pub enum CoreResource {
- Client,
- Library,
- Location(sys::locations::LocationResource),
- File(file::File),
- Job(JobReport),
- Tag,
+ Client,
+ Library,
+ Location(sys::locations::LocationResource),
+ File(file::File),
+ Job(JobReport),
+ Tag,
}
diff --git a/core/src/library/loader.rs b/core/src/library/loader.rs
index 8259d945c..32d3584a3 100644
--- a/core/src/library/loader.rs
+++ b/core/src/library/loader.rs
@@ -11,86 +11,86 @@ pub static LIBRARY_DB_NAME: &str = "library.db";
pub static DEFAULT_NAME: &str = "My Library";
pub fn get_library_path(data_path: &str) -> String {
- let path = data_path.to_owned();
- format!("{}/{}", path, LIBRARY_DB_NAME)
+ let path = data_path.to_owned();
+ format!("{}/{}", path, LIBRARY_DB_NAME)
}
pub async fn get(core: &Node) -> Result {
- let config = state::get();
- let db = &core.database;
+ let config = state::get();
+ let db = &core.database;
- let library_state = config.get_current_library();
+ let library_state = config.get_current_library();
- println!("{:?}", library_state);
+ println!("{:?}", library_state);
- // get library from db
- let library = match db
- .library()
- .find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
- .exec()
- .await?
- {
- Some(library) => Ok(library),
- None => {
- // update config library state to offline
- // config.libraries
+ // get library from db
+ let library = match db
+ .library()
+ .find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
+ .exec()
+ .await?
+ {
+ Some(library) => Ok(library),
+ None => {
+ // update config library state to offline
+ // config.libraries
- Err(anyhow::anyhow!("library_not_found"))
- }
- };
+ Err(anyhow::anyhow!("library_not_found"))
+ }
+ };
- Ok(library.unwrap())
+ Ok(library.unwrap())
}
pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Result<()> {
- let mut config = state::get();
+ let mut config = state::get();
- println!("Initializing library: {} {}", &library_id, library_path);
+ println!("Initializing library: {} {}", &library_id, library_path);
- if config.current_library_uuid != library_id {
- config.current_library_uuid = library_id.to_string();
- config.save();
- }
- // create connection with library database & run migrations
- migrate::run_migrations(&ctx).await?;
- // if doesn't exist, mark as offline
- Ok(())
+ if config.current_library_uuid != library_id {
+ config.current_library_uuid = library_id.to_string();
+ config.save();
+ }
+ // create connection with library database & run migrations
+ migrate::run_migrations(&ctx).await?;
+ // if doesn't exist, mark as offline
+ Ok(())
}
pub async fn create(ctx: &CoreContext, name: Option) -> Result<()> {
- let mut config = state::get();
+ let mut config = state::get();
- let uuid = Uuid::new_v4().to_string();
+ let uuid = Uuid::new_v4().to_string();
- println!("Creating library {:?}, UUID: {:?}", name, uuid);
+ println!("Creating library {:?}, UUID: {:?}", name, uuid);
- let library_state = LibraryState {
- library_uuid: uuid.clone(),
- library_path: get_library_path(&config.data_path),
- ..LibraryState::default()
- };
+ let library_state = LibraryState {
+ library_uuid: uuid.clone(),
+ library_path: get_library_path(&config.data_path),
+ ..LibraryState::default()
+ };
- migrate::run_migrations(&ctx).await?;
+ migrate::run_migrations(&ctx).await?;
- config.libraries.push(library_state);
+ config.libraries.push(library_state);
- config.current_library_uuid = uuid;
+ config.current_library_uuid = uuid;
- config.save();
+ config.save();
- let db = &ctx.database;
+ let db = &ctx.database;
- let _library = db
- .library()
- .create(
- library::pub_id::set(config.current_library_uuid),
- library::name::set(name.unwrap_or(DEFAULT_NAME.into())),
- vec![],
- )
- .exec()
- .await;
+ let _library = db
+ .library()
+ .create(
+ library::pub_id::set(config.current_library_uuid),
+ library::name::set(name.unwrap_or(DEFAULT_NAME.into())),
+ vec![],
+ )
+ .exec()
+ .await;
- println!("library created in database: {:?}", _library);
+ println!("library created in database: {:?}", _library);
- Ok(())
+ Ok(())
}
diff --git a/core/src/library/mod.rs b/core/src/library/mod.rs
index 4d5b8bbfe..0e4ea47bf 100644
--- a/core/src/library/mod.rs
+++ b/core/src/library/mod.rs
@@ -7,10 +7,10 @@ use crate::{prisma, sys::SysError};
#[derive(Error, Debug)]
pub enum LibraryError {
- #[error("Missing library")]
- LibraryNotFound,
- #[error("Database error")]
- DatabaseError(#[from] prisma::QueryError),
- #[error("System error")]
- SysError(#[from] SysError),
+ #[error("Missing library")]
+ LibraryNotFound,
+ #[error("Database error")]
+ DatabaseError(#[from] prisma::QueryError),
+ #[error("System error")]
+ SysError(#[from] SysError),
}
diff --git a/core/src/library/statistics.rs b/core/src/library/statistics.rs
index 9232a7189..3b32d6ae9 100644
--- a/core/src/library/statistics.rs
+++ b/core/src/library/statistics.rs
@@ -1,8 +1,8 @@
use crate::{
- node::state,
- prisma::{library, library_statistics::*},
- sys::{self, volumes::Volume},
- CoreContext,
+ node::state,
+ prisma::{library, library_statistics::*},
+ sys::{self, volumes::Volume},
+ CoreContext,
};
use fs_extra::dir::get_size;
use serde::{Deserialize, Serialize};
@@ -14,144 +14,144 @@ use super::LibraryError;
#[derive(Debug, Serialize, Deserialize, TS, Clone)]
#[ts(export)]
pub struct Statistics {
- pub total_file_count: i32,
- pub total_bytes_used: String,
- pub total_bytes_capacity: String,
- pub total_bytes_free: String,
- pub total_unique_bytes: String,
- pub preview_media_bytes: String,
- pub library_db_size: String,
+ pub total_file_count: i32,
+ pub total_bytes_used: String,
+ pub total_bytes_capacity: String,
+ pub total_bytes_free: String,
+ pub total_unique_bytes: String,
+ pub preview_media_bytes: String,
+ pub library_db_size: String,
}
impl Into for Data {
- fn into(self) -> Statistics {
- Statistics {
- total_file_count: self.total_file_count,
- total_bytes_used: self.total_bytes_used,
- total_bytes_capacity: self.total_bytes_capacity,
- total_bytes_free: self.total_bytes_free,
- total_unique_bytes: self.total_unique_bytes,
- preview_media_bytes: self.preview_media_bytes,
- library_db_size: String::new(),
- }
- }
+ fn into(self) -> Statistics {
+ Statistics {
+ total_file_count: self.total_file_count,
+ total_bytes_used: self.total_bytes_used,
+ total_bytes_capacity: self.total_bytes_capacity,
+ total_bytes_free: self.total_bytes_free,
+ total_unique_bytes: self.total_unique_bytes,
+ preview_media_bytes: self.preview_media_bytes,
+ library_db_size: String::new(),
+ }
+ }
}
impl Default for Statistics {
- fn default() -> Self {
- Self {
- total_file_count: 0,
- total_bytes_used: String::new(),
- total_bytes_capacity: String::new(),
- total_bytes_free: String::new(),
- total_unique_bytes: String::new(),
- preview_media_bytes: String::new(),
- library_db_size: String::new(),
- }
- }
+ fn default() -> Self {
+ Self {
+ total_file_count: 0,
+ total_bytes_used: String::new(),
+ total_bytes_capacity: String::new(),
+ total_bytes_free: String::new(),
+ total_unique_bytes: String::new(),
+ preview_media_bytes: String::new(),
+ library_db_size: String::new(),
+ }
+ }
}
impl Statistics {
- pub async fn retrieve(ctx: &CoreContext) -> Result {
- let config = state::get();
- let db = &ctx.database;
- let library_data = config.get_current_library();
+ pub async fn retrieve(ctx: &CoreContext) -> Result {
+ let config = state::get();
+ let db = &ctx.database;
+ let library_data = config.get_current_library();
- let library_statistics_db = match db
- .library_statistics()
- .find_unique(id::equals(library_data.library_id))
- .exec()
- .await?
- {
- Some(library_statistics_db) => library_statistics_db.into(),
- // create the default values if database has no entry
- None => Statistics::default(),
- };
- Ok(library_statistics_db.into())
- }
- pub async fn calculate(ctx: &CoreContext) -> Result {
- let config = state::get();
- let db = &ctx.database;
- // get library from client state
- let library_data = config.get_current_library();
- println!(
- "Calculating library statistics {:?}",
- library_data.library_uuid
- );
- // get library from db
- let library = db
- .library()
- .find_unique(library::pub_id::equals(
- library_data.library_uuid.to_string(),
- ))
- .exec()
- .await?;
+ let library_statistics_db = match db
+ .library_statistics()
+ .find_unique(id::equals(library_data.library_id))
+ .exec()
+ .await?
+ {
+ Some(library_statistics_db) => library_statistics_db.into(),
+ // create the default values if database has no entry
+ None => Statistics::default(),
+ };
+ Ok(library_statistics_db.into())
+ }
+ pub async fn calculate(ctx: &CoreContext) -> Result {
+ let config = state::get();
+ let db = &ctx.database;
+ // get library from client state
+ let library_data = config.get_current_library();
+ println!(
+ "Calculating library statistics {:?}",
+ library_data.library_uuid
+ );
+ // get library from db
+ let library = db
+ .library()
+ .find_unique(library::pub_id::equals(
+ library_data.library_uuid.to_string(),
+ ))
+ .exec()
+ .await?;
- if library.is_none() {
- return Err(LibraryError::LibraryNotFound);
- }
+ if library.is_none() {
+ return Err(LibraryError::LibraryNotFound);
+ }
- let library_statistics = db
- .library_statistics()
- .find_unique(id::equals(library_data.library_id))
- .exec()
- .await?;
+ let library_statistics = db
+ .library_statistics()
+ .find_unique(id::equals(library_data.library_id))
+ .exec()
+ .await?;
- // TODO: get from database, not sys
- let volumes = Volume::get_volumes();
- Volume::save(&ctx).await?;
+ // TODO: get from database, not sys
+ let volumes = Volume::get_volumes();
+ Volume::save(&ctx).await?;
- // println!("{:?}", volumes);
+ // println!("{:?}", volumes);
- let mut available_capacity: u64 = 0;
- let mut total_capacity: u64 = 0;
- if volumes.is_ok() {
- for volume in volumes.unwrap() {
- total_capacity += volume.total_capacity;
- available_capacity += volume.available_capacity;
- }
- }
+ let mut available_capacity: u64 = 0;
+ let mut total_capacity: u64 = 0;
+ if volumes.is_ok() {
+ for volume in volumes.unwrap() {
+ total_capacity += volume.total_capacity;
+ available_capacity += volume.available_capacity;
+ }
+ }
- let library_db_size = match fs::metadata(library_data.library_path.as_str()) {
- Ok(metadata) => metadata.len(),
- Err(_) => 0,
- };
+ let library_db_size = match fs::metadata(library_data.library_path.as_str()) {
+ Ok(metadata) => metadata.len(),
+ Err(_) => 0,
+ };
- println!("{:?}", library_statistics);
+ println!("{:?}", library_statistics);
- let thumbnail_folder_size = get_size(&format!("{}/{}", config.data_path, "thumbnails"));
+ let thumbnail_folder_size = get_size(&format!("{}/{}", config.data_path, "thumbnails"));
- let statistics = Statistics {
- library_db_size: library_db_size.to_string(),
- total_bytes_free: available_capacity.to_string(),
- total_bytes_capacity: total_capacity.to_string(),
- preview_media_bytes: thumbnail_folder_size.unwrap_or(0).to_string(),
- ..Statistics::default()
- };
+ let statistics = Statistics {
+ library_db_size: library_db_size.to_string(),
+ total_bytes_free: available_capacity.to_string(),
+ total_bytes_capacity: total_capacity.to_string(),
+ preview_media_bytes: thumbnail_folder_size.unwrap_or(0).to_string(),
+ ..Statistics::default()
+ };
- let library_local_id = match library {
- Some(library) => library.id,
- None => library_data.library_id,
- };
+ let library_local_id = match library {
+ Some(library) => library.id,
+ None => library_data.library_id,
+ };
- db.library_statistics()
- .upsert(library_id::equals(library_local_id))
- .create(
- library_id::set(library_local_id),
- vec![library_db_size::set(statistics.library_db_size.clone())],
- )
- .update(vec![
- total_file_count::set(statistics.total_file_count.clone()),
- total_bytes_used::set(statistics.total_bytes_used.clone()),
- total_bytes_capacity::set(statistics.total_bytes_capacity.clone()),
- total_bytes_free::set(statistics.total_bytes_free.clone()),
- total_unique_bytes::set(statistics.total_unique_bytes.clone()),
- preview_media_bytes::set(statistics.preview_media_bytes.clone()),
- library_db_size::set(statistics.library_db_size.clone()),
- ])
- .exec()
- .await?;
+ db.library_statistics()
+ .upsert(library_id::equals(library_local_id))
+ .create(
+ library_id::set(library_local_id),
+ vec![library_db_size::set(statistics.library_db_size.clone())],
+ )
+ .update(vec![
+ total_file_count::set(statistics.total_file_count.clone()),
+ total_bytes_used::set(statistics.total_bytes_used.clone()),
+ total_bytes_capacity::set(statistics.total_bytes_capacity.clone()),
+ total_bytes_free::set(statistics.total_bytes_free.clone()),
+ total_unique_bytes::set(statistics.total_unique_bytes.clone()),
+ preview_media_bytes::set(statistics.preview_media_bytes.clone()),
+ library_db_size::set(statistics.library_db_size.clone()),
+ ])
+ .exec()
+ .await?;
- Ok(statistics)
- }
+ Ok(statistics)
+ }
}
diff --git a/core/src/native/methods.rs b/core/src/native/methods.rs
index 0b441d42a..2a092cddc 100644
--- a/core/src/native/methods.rs
+++ b/core/src/native/methods.rs
@@ -8,26 +8,26 @@ use crate::library::volumes::Volume;
use swift_rs::types::{SRObjectArray, SRString};
pub fn get_file_thumbnail_base64(path: &str) -> SRString {
- #[cfg(target_os = "macos")]
- unsafe {
- swift::get_file_thumbnail_base64_(path.into())
- }
+ #[cfg(target_os = "macos")]
+ unsafe {
+ swift::get_file_thumbnail_base64_(path.into())
+ }
}
pub fn get_mounts() -> SRObjectArray {
- #[cfg(target_os = "macos")]
- unsafe {
- swift::get_mounts_()
- }
- // #[cfg(target_os = "macos")]
+ #[cfg(target_os = "macos")]
+ unsafe {
+ swift::get_mounts_()
+ }
+ // #[cfg(target_os = "macos")]
- // println!("getting mounts..");
- // let mut mounts: Vec = Vec::new();
- // let swift_mounts = unsafe { swift::get_mounts_() };
- // println!("mounts: {:?}", swift_mounts);
+ // println!("getting mounts..");
+ // let mut mounts: Vec = Vec::new();
+ // let swift_mounts = unsafe { swift::get_mounts_() };
+ // println!("mounts: {:?}", swift_mounts);
- // for mount in swift_mounts.iter() {
- // println!("mount: {:?}", *mount);
- // // mounts.push((&**mount).clone());
- // }
+ // for mount in swift_mounts.iter() {
+ // println!("mount: {:?}", *mount);
+ // // mounts.push((&**mount).clone());
+ // }
}
diff --git a/core/src/native/swift.rs b/core/src/native/swift.rs
index bc83ef9a5..5d16d03a2 100644
--- a/core/src/native/swift.rs
+++ b/core/src/native/swift.rs
@@ -2,9 +2,9 @@ use crate::library::volumes::Volume;
pub use swift_rs::types::{SRObjectArray, SRString};
extern "C" {
- #[link_name = "get_file_thumbnail_base64"]
- pub fn get_file_thumbnail_base64_(path: SRString) -> SRString;
+ #[link_name = "get_file_thumbnail_base64"]
+ pub fn get_file_thumbnail_base64_(path: SRString) -> SRString;
- #[link_name = "get_mounts"]
- pub fn get_mounts_() -> SRObjectArray;
+ #[link_name = "get_mounts"]
+ pub fn get_mounts_() -> SRObjectArray;
}
diff --git a/core/src/node/mod.rs b/core/src/node/mod.rs
index 9dbea5b1a..6110f1225 100644
--- a/core/src/node/mod.rs
+++ b/core/src/node/mod.rs
@@ -1,6 +1,6 @@
use crate::{
- prisma::{self, node},
- CoreContext, Node,
+ prisma::{self, node},
+ CoreContext, Node,
};
use chrono::{DateTime, Utc};
use int_enum::IntEnum;
@@ -14,91 +14,91 @@ pub mod state;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct LibraryNode {
- pub uuid: String,
- pub name: String,
- pub platform: Platform,
- pub tcp_address: String,
- #[ts(type = "string")]
- pub last_seen: DateTime,
- #[ts(type = "string")]
- pub last_synchronized: DateTime,
+ pub uuid: String,
+ pub name: String,
+ pub platform: Platform,
+ pub tcp_address: String,
+ #[ts(type = "string")]
+ pub last_seen: DateTime,
+ #[ts(type = "string")]
+ pub last_synchronized: DateTime,
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum Platform {
- Unknown = 0,
- Windows = 1,
- MacOS = 2,
- Linux = 3,
- IOS = 4,
- Android = 5,
+ Unknown = 0,
+ Windows = 1,
+ MacOS = 2,
+ Linux = 3,
+ IOS = 4,
+ Android = 5,
}
impl LibraryNode {
- pub async fn create(node: &Node) -> Result<(), NodeError> {
- println!("Creating node...");
- let mut config = state::get();
+ pub async fn create(node: &Node) -> Result<(), NodeError> {
+ println!("Creating node...");
+ let mut config = state::get();
- let db = &node.database;
+ let db = &node.database;
- let hostname = match hostname::get() {
- Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(),
- Err(_) => "unknown".to_owned(),
- };
+ let hostname = match hostname::get() {
+ Ok(hostname) => hostname.to_str().unwrap_or_default().to_owned(),
+ Err(_) => "unknown".to_owned(),
+ };
- let platform = match env::consts::OS {
- "windows" => Platform::Windows,
- "macos" => Platform::MacOS,
- "linux" => Platform::Linux,
- _ => Platform::Unknown,
- };
+ let platform = match env::consts::OS {
+ "windows" => Platform::Windows,
+ "macos" => Platform::MacOS,
+ "linux" => Platform::Linux,
+ _ => Platform::Unknown,
+ };
- let _node = match db
- .node()
- .find_unique(node::pub_id::equals(config.node_pub_id.clone()))
- .exec()
- .await?
- {
- Some(node) => node,
- None => {
- db.node()
- .create(
- node::pub_id::set(config.node_pub_id.clone()),
- node::name::set(hostname.clone()),
- vec![
- node::platform::set(platform as i32),
- node::online::set(Some(true)),
- ],
- )
- .exec()
- .await?
- }
- };
+ let _node = match db
+ .node()
+ .find_unique(node::pub_id::equals(config.node_pub_id.clone()))
+ .exec()
+ .await?
+ {
+ Some(node) => node,
+ None => {
+ db.node()
+ .create(
+ node::pub_id::set(config.node_pub_id.clone()),
+ node::name::set(hostname.clone()),
+ vec![
+ node::platform::set(platform as i32),
+ node::online::set(Some(true)),
+ ],
+ )
+ .exec()
+ .await?
+ }
+ };
- config.node_name = hostname;
- config.node_id = _node.id;
- config.save();
+ config.node_name = hostname;
+ config.node_id = _node.id;
+ config.save();
- println!("node: {:?}", &_node);
+ println!("node: {:?}", &_node);
- Ok(())
- }
+ Ok(())
+ }
- pub async fn get_nodes(ctx: &CoreContext) -> Result, NodeError> {
- let db = &ctx.database;
+ pub async fn get_nodes(ctx: &CoreContext) -> Result, NodeError> {
+ let db = &ctx.database;
- let _node = db.node().find_many(vec![]).exec().await?;
+ let _node = db.node().find_many(vec![]).exec().await?;
- Ok(_node)
- }
+ Ok(_node)
+ }
}
#[derive(Error, Debug)]
pub enum NodeError {
- #[error("Database error")]
- DatabaseError(#[from] prisma::QueryError),
- #[error("Client not found error")]
- ClientNotFound,
+ #[error("Database error")]
+ DatabaseError(#[from] prisma::QueryError),
+ #[error("Client not found error")]
+ ClientNotFound,
}
diff --git a/core/src/node/state.rs b/core/src/node/state.rs
index 62a64d975..34b500e52 100644
--- a/core/src/node/state.rs
+++ b/core/src/node/state.rs
@@ -10,17 +10,17 @@ use uuid::Uuid;
#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
#[ts(export)]
pub struct NodeState {
- pub node_pub_id: String,
- pub node_id: i32,
- pub node_name: String,
- // config path is stored as struct can exist only in memory during startup and be written to disk later without supplying path
- pub data_path: String,
- // the port this node uses to listen for incoming connections
- pub tcp_port: u32,
- // all the libraries loaded by this node
- pub libraries: Vec,
- // used to quickly find the default library
- pub current_library_uuid: String,
+ pub node_pub_id: String,
+ pub node_id: i32,
+ pub node_name: String,
+ // config path is stored as struct can exist only in memory during startup and be written to disk later without supplying path
+ pub data_path: String,
+ // the port this node uses to listen for incoming connections
+ pub tcp_port: u32,
+ // all the libraries loaded by this node
+ pub libraries: Vec,
+ // used to quickly find the default library
+ pub current_library_uuid: String,
}
pub static NODE_STATE_CONFIG_NAME: &str = "node_state.json";
@@ -28,76 +28,76 @@ pub static NODE_STATE_CONFIG_NAME: &str = "node_state.json";
#[derive(Debug, Serialize, Deserialize, Clone, Default, TS)]
#[ts(export)]
pub struct LibraryState {
- pub library_uuid: String,
- pub library_id: i32,
- pub library_path: String,
- pub offline: bool,
+ pub library_uuid: String,
+ pub library_id: i32,
+ pub library_path: String,
+ pub offline: bool,
}
// global, thread-safe storage for node state
lazy_static! {
- static ref CONFIG: RwLock> = RwLock::new(None);
+ static ref CONFIG: RwLock > = RwLock::new(None);
}
pub fn get() -> NodeState {
- match CONFIG.read() {
- Ok(guard) => guard.clone().unwrap_or(NodeState::default()),
- Err(_) => return NodeState::default(),
- }
+ match CONFIG.read() {
+ Ok(guard) => guard.clone().unwrap_or(NodeState::default()),
+ Err(_) => return NodeState::default(),
+ }
}
impl NodeState {
- pub fn new(data_path: &str, node_name: &str) -> Result {
- let uuid = Uuid::new_v4().to_string();
- // create struct and assign defaults
- let config = Self {
- node_pub_id: uuid,
- data_path: data_path.to_string(),
- node_name: node_name.to_string(),
- ..Default::default()
- };
- Ok(config)
- }
+ pub fn new(data_path: &str, node_name: &str) -> Result {
+ let uuid = Uuid::new_v4().to_string();
+ // create struct and assign defaults
+ let config = Self {
+ node_pub_id: uuid,
+ data_path: data_path.to_string(),
+ node_name: node_name.to_string(),
+ ..Default::default()
+ };
+ Ok(config)
+ }
- pub fn save(&self) {
- self.write_memory();
- // only write to disk if config path is set
- if !&self.data_path.is_empty() {
- let config_path = format!("{}/{}", &self.data_path, NODE_STATE_CONFIG_NAME);
- let mut file = fs::File::create(config_path).unwrap();
- let json = serde_json::to_string(&self).unwrap();
- file.write_all(json.as_bytes()).unwrap();
- }
- }
+ pub fn save(&self) {
+ self.write_memory();
+ // only write to disk if config path is set
+ if !&self.data_path.is_empty() {
+ let config_path = format!("{}/{}", &self.data_path, NODE_STATE_CONFIG_NAME);
+ let mut file = fs::File::create(config_path).unwrap();
+ let json = serde_json::to_string(&self).unwrap();
+ file.write_all(json.as_bytes()).unwrap();
+ }
+ }
- pub fn read_disk(&mut self) -> Result<()> {
- let config_path = format!("{}/{}", &self.data_path, NODE_STATE_CONFIG_NAME);
- // open the file and parse json
- let file = fs::File::open(config_path)?;
- let reader = BufReader::new(file);
- let data = serde_json::from_reader(reader)?;
- // assign to self
- *self = data;
- Ok(())
- }
+ pub fn read_disk(&mut self) -> Result<()> {
+ let config_path = format!("{}/{}", &self.data_path, NODE_STATE_CONFIG_NAME);
+ // open the file and parse json
+ let file = fs::File::open(config_path)?;
+ let reader = BufReader::new(file);
+ let data = serde_json::from_reader(reader)?;
+ // assign to self
+ *self = data;
+ Ok(())
+ }
- fn write_memory(&self) {
- let mut writeable = CONFIG.write().unwrap();
- *writeable = Some(self.clone());
- }
+ fn write_memory(&self) {
+ let mut writeable = CONFIG.write().unwrap();
+ *writeable = Some(self.clone());
+ }
- pub fn get_current_library(&self) -> LibraryState {
- match self
- .libraries
- .iter()
- .find(|lib| lib.library_uuid == self.current_library_uuid)
- {
- Some(lib) => lib.clone(),
- None => LibraryState::default(),
- }
- }
+ pub fn get_current_library(&self) -> LibraryState {
+ match self
+ .libraries
+ .iter()
+ .find(|lib| lib.library_uuid == self.current_library_uuid)
+ {
+ Some(lib) => lib.clone(),
+ None => LibraryState::default(),
+ }
+ }
- pub fn get_current_library_db_path(&self) -> String {
- format!("{}/library.db", &self.get_current_library().library_path)
- }
+ pub fn get_current_library_db_path(&self) -> String {
+ format!("{}/library.db", &self.get_current_library().library_path)
+ }
}
diff --git a/core/src/p2p/discover.rs b/core/src/p2p/discover.rs
index 3a74d4957..874a2eb05 100644
--- a/core/src/p2p/discover.rs
+++ b/core/src/p2p/discover.rs
@@ -5,32 +5,32 @@ use autodiscover_rs::{self, Method};
use env_logger;
fn handle_client(stream: std::io::Result) {
- println!("Got a connection from {:?}", stream.unwrap().peer_addr());
+ println!("Got a connection from {:?}", stream.unwrap().peer_addr());
}
pub fn listen() -> std::io::Result<()> {
- env_logger::init();
- // make sure to bind before announcing ready
- let listener = TcpListener::bind(":::0")?;
- // get the port we were bound too; note that the trailing :0 above gives us a random unused port
- let socket = listener.local_addr()?;
- thread::spawn(move || {
- // this function blocks forever; running it a separate thread
- autodiscover_rs::run(
- &socket,
- Method::Multicast("[ff0e::1]:1337".parse().unwrap()),
- |s| {
- // change this to task::spawn if using async_std or tokio
- thread::spawn(|| handle_client(s));
- },
- )
- .unwrap();
- });
- let mut incoming = listener.incoming();
- while let Some(stream) = incoming.next() {
- // if you are using an async library, such as async_std or tokio, you can convert the stream to the
- // appropriate type before using task::spawn from your library of choice.
- thread::spawn(|| handle_client(stream));
- }
- Ok(())
+ env_logger::init();
+ // make sure to bind before announcing ready
+ let listener = TcpListener::bind(":::0")?;
+ // get the port we were bound too; note that the trailing :0 above gives us a random unused port
+ let socket = listener.local_addr()?;
+ thread::spawn(move || {
+ // this function blocks forever; running it a separate thread
+ autodiscover_rs::run(
+ &socket,
+ Method::Multicast("[ff0e::1]:1337".parse().unwrap()),
+ |s| {
+ // change this to task::spawn if using async_std or tokio
+ thread::spawn(|| handle_client(s));
+ },
+ )
+ .unwrap();
+ });
+ let mut incoming = listener.incoming();
+ while let Some(stream) = incoming.next() {
+ // if you are using an async library, such as async_std or tokio, you can convert the stream to the
+ // appropriate type before using task::spawn from your library of choice.
+ thread::spawn(|| handle_client(stream));
+ }
+ Ok(())
}
diff --git a/core/src/p2p/listener.rs b/core/src/p2p/listener.rs
index 71659b206..e38e0b0bd 100644
--- a/core/src/p2p/listener.rs
+++ b/core/src/p2p/listener.rs
@@ -1,48 +1,48 @@
use futures::StreamExt;
use libp2p::{
- identity, ping,
- swarm::{Swarm, SwarmEvent},
- Multiaddr, PeerId,
+ identity, ping,
+ swarm::{Swarm, SwarmEvent},
+ Multiaddr, PeerId,
};
use std::error::Error;
pub async fn listen(port: Option) -> Result<(), Box> {
- let local_key = identity::Keypair::generate_ed25519();
- let local_peer_id = PeerId::from(local_key.public());
- println!("Local peer id: {:?}", local_peer_id);
+ let local_key = identity::Keypair::generate_ed25519();
+ let local_peer_id = PeerId::from(local_key.public());
+ println!("Local peer id: {:?}", local_peer_id);
- let transport = libp2p::development_transport(local_key).await?;
+ let transport = libp2p::development_transport(local_key).await?;
- // Create a ping network behavior.
- //
- // For illustrative purposes, the ping protocol is configured to
- // keep the connection alive, so a continuous sequence of pings
- // can be observed.
- let behavior = ping::Behaviour::new(ping::Config::new().with_keep_alive(true));
+ // Create a ping network behavior.
+ //
+ // For illustrative purposes, the ping protocol is configured to
+ // keep the connection alive, so a continuous sequence of pings
+ // can be observed.
+ let behavior = ping::Behaviour::new(ping::Config::new().with_keep_alive(true));
- let mut swarm = Swarm::new(transport, behavior, local_peer_id);
+ let mut swarm = Swarm::new(transport, behavior, local_peer_id);
- // Tell the swarm to listen on all interfaces and a random, OS-assigned
- // port.
- swarm.listen_on("/ip4/0.0.0.0/tcp/0".parse()?)?;
+ // Tell the swarm to listen on all interfaces and a random, OS-assigned
+ // port.
+ swarm.listen_on("/ip4/0.0.0.0/tcp/0".parse()?)?;
- // Dial the peer identified by the multi-address given as the second
- // command-line argument, if any.
+ // Dial the peer identified by the multi-address given as the second
+ // command-line argument, if any.
- if port.is_some() {
- let addr = format!("{:?}{:?}", "/ip4/127.0.0.1/tcp/", port);
- let remote: Multiaddr = addr.parse()?;
- swarm.dial(remote)?;
- println!("Dialed {}", addr)
- }
+ if port.is_some() {
+ let addr = format!("{:?}{:?}", "/ip4/127.0.0.1/tcp/", port);
+ let remote: Multiaddr = addr.parse()?;
+ swarm.dial(remote)?;
+ println!("Dialed {}", addr)
+ }
- loop {
- match swarm.select_next_some().await {
- SwarmEvent::NewListenAddr { address, .. } => {
- println!("Listening on {:?}", address)
- }
- SwarmEvent::Behaviour(event) => println!("{:?}", event),
- _ => {}
- }
- }
+ loop {
+ match swarm.select_next_some().await {
+ SwarmEvent::NewListenAddr { address, .. } => {
+ println!("Listening on {:?}", address)
+ }
+ SwarmEvent::Behaviour(event) => println!("{:?}", event),
+ _ => {}
+ }
+ }
}
diff --git a/core/src/p2p/mod.rs b/core/src/p2p/mod.rs
index 2346fea1d..263a27821 100644
--- a/core/src/p2p/mod.rs
+++ b/core/src/p2p/mod.rs
@@ -4,7 +4,7 @@ pub mod listener;
pub mod pool;
pub struct PeerConnection {
- pub client_uuid: String,
- pub tcp_address: String,
- pub message_sender: mpsc::Sender,
+ pub client_uuid: String,
+ pub tcp_address: String,
+ pub message_sender: mpsc::Sender,
}
diff --git a/core/src/p2p/pool.rs b/core/src/p2p/pool.rs
index 4ef2aeae0..4b94c355f 100644
--- a/core/src/p2p/pool.rs
+++ b/core/src/p2p/pool.rs
@@ -1,5 +1,5 @@
use crate::client::Client;
pub struct ClientPool {
- pub clients: Vec,
+ pub clients: Vec,
}
diff --git a/core/src/secret/keygen.rs b/core/src/secret/keygen.rs
index e69de29bb..8b1378917 100644
--- a/core/src/secret/keygen.rs
+++ b/core/src/secret/keygen.rs
@@ -0,0 +1 @@
+
diff --git a/core/src/secret/mod.rs b/core/src/secret/mod.rs
index e69de29bb..8b1378917 100644
--- a/core/src/secret/mod.rs
+++ b/core/src/secret/mod.rs
@@ -0,0 +1 @@
+
diff --git a/core/src/sync/crdt/mod.rs b/core/src/sync/crdt/mod.rs
index 9ecfa8d47..5ec05b631 100644
--- a/core/src/sync/crdt/mod.rs
+++ b/core/src/sync/crdt/mod.rs
@@ -4,15 +4,15 @@ pub mod replicate;
use serde::{Deserialize, Serialize};
pub use self::{
- operation::{PoMethod, PropertyOperation},
- replicate::{Replicate, ReplicateMethod},
+ operation::{PoMethod, PropertyOperation},
+ replicate::{Replicate, ReplicateMethod},
};
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename = "cr")]
pub struct CrdtCtx