format using tabs

This commit is contained in:
maxichrome 2022-05-22 23:07:35 -05:00
parent 198f52af20
commit a0d0938001
No known key found for this signature in database
GPG key ID: DDC459310E98B6AB
150 changed files with 5021 additions and 4855 deletions

View file

@ -1,6 +1,6 @@
name: 🐞 Bug Report name: 🐞 Bug Report
description: Report a bug description: Report a bug
labels: labels:
- kind/bug - kind/bug
- status/needs-triage - status/needs-triage
@ -43,8 +43,8 @@ body:
id: info id: info
attributes: attributes:
label: Platform and versions label: Platform and versions
description: "Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if revelant." description: 'Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if revelant.'
render: shell render: Shell
validations: validations:
required: true required: true
@ -52,8 +52,8 @@ body:
id: logs id: logs
attributes: attributes:
label: Stack trace label: Stack trace
render: shell render: Shell
- type: textarea - type: textarea
id: context id: context
attributes: attributes:

View file

@ -1,3 +1,5 @@
# tell yaml plugin that this is the config file and not a template of its own:
# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
blank_issues_enabled: false blank_issues_enabled: false
contact_links: contact_links:
- name: 📝 Report Typo - name: 📝 Report Typo
@ -11,4 +13,4 @@ contact_links:
about: Suggest any ideas you have using our discussion forums. about: Suggest any ideas you have using our discussion forums.
- name: 💬 Discord Chat - name: 💬 Discord Chat
url: https://discord.gg/gTaF2Z44f5 url: https://discord.gg/gTaF2Z44f5
about: Ask questions and talk to other Spacedrive users and the maintainers about: Ask questions and talk to other Spacedrive users and the maintainers

View file

@ -1,4 +1,4 @@
name: Build Server Image name: Build Server Image
description: Builds and publishes the docker image for the Spacedrive server description: Builds and publishes the docker image for the Spacedrive server
inputs: inputs:
gh_token: gh_token:

View file

@ -3,6 +3,6 @@ const core = require('@actions/core');
const exec = require('@actions/exec'); const exec = require('@actions/exec');
const github = require('@actions/github'); const github = require('@actions/github');
// const folders = // const folders =
exec.exec('brew', ['install', 'ffmpeg']); exec.exec('brew', ['install', 'ffmpeg']);

View file

@ -1,17 +1,17 @@
{ {
"name": "install-ffmpeg-macos", "name": "install-ffmpeg-macos",
"version": "0.0.0", "version": "0.0.0",
"description": "", "description": "",
"main": "index.js", "main": "index.js",
"scripts": { "scripts": {
"test": "echo \"Error: no test specified\" && exit 1" "test": "echo \"Error: no test specified\" && exit 1"
}, },
"keywords": [], "keywords": [],
"author": "Brendan Allan", "author": "Brendan Allan",
"license": "ISC", "license": "ISC",
"dependencies": { "dependencies": {
"@actions/core": "^1.6.0", "@actions/core": "^1.6.0",
"@actions/exec": "^1.1.1", "@actions/exec": "^1.1.1",
"@actions/github": "^5.0.1" "@actions/github": "^5.0.1"
} }
} }

View file

@ -1,10 +1,9 @@
<!-- Put any information about this PR up here --> <!-- Put any information about this PR up here -->
<!-- Which issue does this PR close? --> <!-- Which issue does this PR close? -->
<!-- If this PR does not have a corresponding issue, <!-- If this PR does not have a corresponding issue,
make sure one gets created before you create this PR. make sure one gets created before you create this PR.
You can create a bug report or feature request at You can create a bug report or feature request at
https://github.com/spacedriveapp/spacedrive/issues/new/choose --> https://github.com/spacedriveapp/spacedrive/issues/new/choose -->
Closes #(issue) Closes #(issue)

View file

@ -1,5 +1,4 @@
hard_tabs = true hard_tabs = true
tab_spaces = 4
match_block_trailing_comma = true match_block_trailing_comma = true
max_width = 90 max_width = 90
newline_style = "Unix" newline_style = "Unix"

50
.vscode/settings.json vendored
View file

@ -1,26 +1,28 @@
{ {
"cSpell.words": [ "cSpell.words": [
"actix", "actix",
"bpfrpt", "bpfrpt",
"consts", "consts",
"creationdate", "creationdate",
"ipfs", "ipfs",
"Keepsafe", "Keepsafe",
"pathctx", "pathctx",
"prismjs", "prismjs",
"proptype", "proptype",
"quicktime", "quicktime",
"repr", "repr",
"Roadmap", "Roadmap",
"svgr", "svgr",
"tailwindcss", "tailwindcss",
"trivago", "trivago",
"tsparticles", "tsparticles",
"upsert" "upsert"
], ],
"[rust]": { "[rust]": {
"editor.defaultFormatter": "matklad.rust-analyzer" "editor.defaultFormatter": "matklad.rust-analyzer"
}, },
"rust-analyzer.procMacro.enable": true, "rust-analyzer.procMacro.enable": true,
"rust-analyzer.diagnostics.experimental.enable": false "rust-analyzer.diagnostics.experimental.enable": false,
"rust-analyzer.inlayHints.parameterHints.enable": false,
"rust-analyzer.inlayHints.typeHints.enable": false
} }

View file

@ -1,4 +1,3 @@
# Contributor Covenant Code of Conduct # Contributor Covenant Code of Conduct
## Our Pledge ## Our Pledge
@ -18,23 +17,23 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our Examples of behavior that contributes to a positive environment for our
community include: community include:
* Demonstrating empathy and kindness toward other people - Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences - Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback - Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes, - Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience and learning from the experience
* Focusing on what is best not just for us as individuals, but for the - Focusing on what is best not just for us as individuals, but for the
overall community overall community
Examples of unacceptable behavior include: Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or - The use of sexualized language or imagery, and sexual attention or
advances of any kind advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks - Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment - Public or private harassment
* Publishing others' private information, such as a physical or email - Publishing others' private information, such as a physical or email
address, without their explicit permission address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a - Other conduct which could reasonably be considered inappropriate in a
professional setting professional setting
## Enforcement Responsibilities ## Enforcement Responsibilities
@ -107,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban ### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community **Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals. individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within **Consequence**: A permanent ban from any sort of public interaction within
@ -119,15 +118,15 @@ This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at version 2.0, available at
[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0]. [https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
Community Impact Guidelines were inspired by Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC]. [Mozilla's code of conduct enforcement ladder][mozilla coc].
For answers to common questions about this code of conduct, see the FAQ at For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available [https://www.contributor-covenant.org/faq][faq]. Translations are available
at [https://www.contributor-covenant.org/translations][translations]. at [https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org [homepage]: https://www.contributor-covenant.org
[v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html [v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity [mozilla coc]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq [faq]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations [translations]: https://www.contributor-covenant.org/translations

View file

@ -61,17 +61,18 @@ If you are having issues ensure you are using the following versions of Rust and
### Pull Request ### Pull Request
When you're finished with the changes, create a pull request, also known as a PR. When you're finished with the changes, create a pull request, also known as a PR.
- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
- Don't forget to [link PR to issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) if you are solving one. - Don't forget to [link PR to issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) if you are solving one.
- Enable the checkbox to [allow maintainer edits](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork) so the branch can be updated for a merge. - Enable the checkbox to [allow maintainer edits](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork) so the branch can be updated for a merge.
Once you submit your PR, a team member will review your proposal. We may ask questions or request for additional information. Once you submit your PR, a team member will review your proposal. We may ask questions or request for additional information.
- We may ask for changes to be made before a PR can be merged, either using [suggested changes](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/incorporating-feedback-in-your-pull-request) or pull request comments. You can apply suggested changes directly through the UI. You can make any other changes in your fork, then commit them to your branch. - We may ask for changes to be made before a PR can be merged, either using [suggested changes](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/incorporating-feedback-in-your-pull-request) or pull request comments. You can apply suggested changes directly through the UI. You can make any other changes in your fork, then commit them to your branch.
- As you update your PR and apply changes, mark each conversation as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations). - As you update your PR and apply changes, mark each conversation as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations).
- If you run into any merge issues, checkout this [git tutorial](https://lab.github.com/githubtraining/managing-merge-conflicts) to help you resolve merge conflicts and other issues. - If you run into any merge issues, checkout this [git tutorial](https://lab.github.com/githubtraining/managing-merge-conflicts) to help you resolve merge conflicts and other issues.
### Your PR is merged! ### Your PR is merged!
Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:. Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:.
Once your PR is merged, your contributions will be included in the next release of the application. Once your PR is merged, your contributions will be included in the next release of the application.

View file

@ -38,7 +38,6 @@ Organize files across many devices in one place. From cloud services to offline
For independent creatives, hoarders and those that want to own their digital footprint. Spacedrive provides a file management experience like no other, and it's completely free. For independent creatives, hoarders and those that want to own their digital footprint. Spacedrive provides a file management experience like no other, and it's completely free.
<p align="center"> <p align="center">
<img src="https://raw.githubusercontent.com/spacedriveapp/.github/main/profile/app.png" alt="Logo"> <img src="https://raw.githubusercontent.com/spacedriveapp/.github/main/profile/app.png" alt="Logo">
<br /> <br />

View file

@ -1,41 +1,41 @@
{ {
"name": "@sd/desktop", "name": "@sd/desktop",
"version": "1.0.0", "version": "1.0.0",
"main": "index.js", "main": "index.js",
"license": "MIT", "license": "MIT",
"private": true, "private": true,
"scripts": { "scripts": {
"vite": "vite", "vite": "vite",
"dev": "concurrently \"pnpm tauri dev\" \"vite\"", "dev": "concurrently \"pnpm tauri dev\" \"vite\"",
"tauri": "tauri", "tauri": "tauri",
"build": "vite build" "build": "vite build"
}, },
"dependencies": { "dependencies": {
"@sd/client": "workspace:*", "@sd/client": "workspace:*",
"@sd/core": "workspace:*", "@sd/core": "workspace:*",
"@sd/interface": "workspace:*", "@sd/interface": "workspace:*",
"@sd/ui": "workspace:*", "@sd/ui": "workspace:*",
"@tauri-apps/api": "^1.0.0-rc.3", "@tauri-apps/api": "^1.0.0-rc.3",
"react": "^18.0.0", "react": "^18.0.0",
"react-dom": "^18.0.0" "react-dom": "^18.0.0"
}, },
"devDependencies": { "devDependencies": {
"@tauri-apps/cli": "^1.0.0-rc.8", "@tauri-apps/cli": "^1.0.0-rc.8",
"@tauri-apps/tauricon": "github:tauri-apps/tauricon", "@tauri-apps/tauricon": "github:tauri-apps/tauricon",
"@types/babel-core": "^6.25.7", "@types/babel-core": "^6.25.7",
"@types/byte-size": "^8.1.0", "@types/byte-size": "^8.1.0",
"@types/react": "^18.0.8", "@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0", "@types/react-dom": "^18.0.0",
"@types/react-router-dom": "^5.3.3", "@types/react-router-dom": "^5.3.3",
"@types/react-window": "^1.8.5", "@types/react-window": "^1.8.5",
"@types/tailwindcss": "^3.0.10", "@types/tailwindcss": "^3.0.10",
"@vitejs/plugin-react": "^1.3.1", "@vitejs/plugin-react": "^1.3.1",
"concurrently": "^7.1.0", "concurrently": "^7.1.0",
"prettier": "^2.6.2", "prettier": "^2.6.2",
"sass": "^1.50.0", "sass": "^1.50.0",
"typescript": "^4.6.3", "typescript": "^4.6.3",
"vite": "^2.9.5", "vite": "^2.9.5",
"vite-plugin-filter-replace": "^0.1.9", "vite-plugin-filter-replace": "^0.1.9",
"vite-plugin-svgr": "^1.1.0" "vite-plugin-svgr": "^1.1.0"
} }
} }

View file

@ -1,6 +1,5 @@
max_width = 100 max_width = 100
hard_tabs = false hard_tabs = true
tab_spaces = 2
newline_style = "Auto" newline_style = "Auto"
use_small_heuristics = "Default" use_small_heuristics = "Default"
reorder_imports = true reorder_imports = true

View file

@ -1,11 +1,11 @@
// use swift_rs::build_utils::{link_swift, link_swift_package}; // use swift_rs::build_utils::{link_swift, link_swift_package};
fn main() { fn main() {
// HOTFIX: compile the swift code for arm64 // HOTFIX: compile the swift code for arm64
// std::env::set_var("CARGO_CFG_TARGET_ARCH", "arm64"); // std::env::set_var("CARGO_CFG_TARGET_ARCH", "arm64");
// link_swift(); // link_swift();
// link_swift_package("swift-lib", "../../../packages/macos/"); // link_swift_package("swift-lib", "../../../packages/macos/");
tauri_build::build(); tauri_build::build();
} }

View file

@ -11,106 +11,106 @@ use window::WindowExt;
#[tauri::command(async)] #[tauri::command(async)]
async fn client_query_transport( async fn client_query_transport(
core: tauri::State<'_, CoreController>, core: tauri::State<'_, CoreController>,
data: ClientQuery, data: ClientQuery,
) -> Result<CoreResponse, String> { ) -> Result<CoreResponse, String> {
match core.query(data).await { match core.query(data).await {
Ok(response) => Ok(response), Ok(response) => Ok(response),
Err(err) => { Err(err) => {
println!("query error: {:?}", err); println!("query error: {:?}", err);
Err(err.to_string()) Err(err.to_string())
} }
} }
} }
#[tauri::command(async)] #[tauri::command(async)]
async fn client_command_transport( async fn client_command_transport(
core: tauri::State<'_, CoreController>, core: tauri::State<'_, CoreController>,
data: ClientCommand, data: ClientCommand,
) -> Result<CoreResponse, String> { ) -> Result<CoreResponse, String> {
match core.command(data).await { match core.command(data).await {
Ok(response) => Ok(response), Ok(response) => Ok(response),
Err(err) => { Err(err) => {
println!("command error: {:?}", err); println!("command error: {:?}", err);
Err(err.to_string()) Err(err.to_string())
} }
} }
} }
#[tauri::command(async)] #[tauri::command(async)]
async fn app_ready(app_handle: tauri::AppHandle) { async fn app_ready(app_handle: tauri::AppHandle) {
let window = app_handle.get_window("main").unwrap(); let window = app_handle.get_window("main").unwrap();
window.show().unwrap(); window.show().unwrap();
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
{ {
std::thread::sleep(std::time::Duration::from_millis(1000)); std::thread::sleep(std::time::Duration::from_millis(1000));
println!("fixing shadow for, {:?}", window.ns_window().unwrap()); println!("fixing shadow for, {:?}", window.ns_window().unwrap());
window.fix_shadow(); window.fix_shadow();
} }
} }
#[tokio::main] #[tokio::main]
async fn main() { async fn main() {
let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./")); let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
// create an instance of the core // create an instance of the core
let (mut node, mut event_receiver) = Node::new(data_dir).await; let (mut node, mut event_receiver) = Node::new(data_dir).await;
// run startup tasks // run startup tasks
node.initializer().await; node.initializer().await;
// extract the node controller // extract the node controller
let controller = node.get_controller(); let controller = node.get_controller();
// throw the node into a dedicated thread // throw the node into a dedicated thread
tokio::spawn(async move { tokio::spawn(async move {
node.start().await; node.start().await;
}); });
// create tauri app // create tauri app
tauri::Builder::default() tauri::Builder::default()
// pass controller to the tauri state manager // pass controller to the tauri state manager
.manage(controller) .manage(controller)
.setup(|app| { .setup(|app| {
let app = app.handle(); let app = app.handle();
app.windows().iter().for_each(|(_, window)| { app.windows().iter().for_each(|(_, window)| {
window.hide().unwrap(); window.hide().unwrap();
#[cfg(target_os = "windows")] #[cfg(target_os = "windows")]
window.set_decorations(true).unwrap(); window.set_decorations(true).unwrap();
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
window.set_transparent_titlebar(true, true); window.set_transparent_titlebar(true, true);
}); });
// core event transport // core event transport
tokio::spawn(async move { tokio::spawn(async move {
let mut last = Instant::now(); let mut last = Instant::now();
// handle stream output // handle stream output
while let Some(event) = event_receiver.recv().await { while let Some(event) = event_receiver.recv().await {
match event { match event {
CoreEvent::InvalidateQueryDebounced(_) => { CoreEvent::InvalidateQueryDebounced(_) => {
let current = Instant::now(); let current = Instant::now();
if current.duration_since(last) > Duration::from_millis(1000 / 60) { if current.duration_since(last) > Duration::from_millis(1000 / 60) {
last = current; last = current;
app.emit_all("core_event", &event).unwrap(); app.emit_all("core_event", &event).unwrap();
} }
} }
event => { event => {
app.emit_all("core_event", &event).unwrap(); app.emit_all("core_event", &event).unwrap();
} }
} }
} }
}); });
Ok(()) Ok(())
}) })
.on_menu_event(|event| menu::handle_menu_event(event)) .on_menu_event(|event| menu::handle_menu_event(event))
.on_window_event(|event| window::handle_window_event(event)) .on_window_event(|event| window::handle_window_event(event))
.invoke_handler(tauri::generate_handler![ .invoke_handler(tauri::generate_handler![
client_query_transport, client_query_transport,
client_command_transport, client_command_transport,
app_ready, app_ready,
]) ])
.menu(menu::get_menu()) .menu(menu::get_menu())
.run(tauri::generate_context!()) .run(tauri::generate_context!())
.expect("error while running tauri application"); .expect("error while running tauri application");
} }

View file

@ -3,88 +3,88 @@ use std::env::consts;
use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu, WindowMenuEvent, Wry}; use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu, WindowMenuEvent, Wry};
pub(crate) fn get_menu() -> Menu { pub(crate) fn get_menu() -> Menu {
match consts::OS { match consts::OS {
"linux" => Menu::new(), "linux" => Menu::new(),
"macos" => custom_menu_bar(), "macos" => custom_menu_bar(),
_ => Menu::new(), _ => Menu::new(),
} }
} }
fn custom_menu_bar() -> Menu { fn custom_menu_bar() -> Menu {
// let quit = CustomMenuItem::new("quit".to_string(), "Quit"); // let quit = CustomMenuItem::new("quit".to_string(), "Quit");
// let close = CustomMenuItem::new("close".to_string(), "Close"); // let close = CustomMenuItem::new("close".to_string(), "Close");
// let jeff = CustomMenuItem::new("jeff".to_string(), "Jeff"); // let jeff = CustomMenuItem::new("jeff".to_string(), "Jeff");
// let submenu = Submenu::new( // let submenu = Submenu::new(
// "File", // "File",
// Menu::new().add_item(quit).add_item(close).add_item(jeff), // Menu::new().add_item(quit).add_item(close).add_item(jeff),
// ); // );
let spacedrive = Submenu::new( let spacedrive = Submenu::new(
"Spacedrive", "Spacedrive",
Menu::new() Menu::new()
.add_native_item(MenuItem::About( .add_native_item(MenuItem::About(
"Spacedrive".to_string(), "Spacedrive".to_string(),
AboutMetadata::new(), AboutMetadata::new(),
)) // TODO: fill out about metadata )) // TODO: fill out about metadata
.add_native_item(MenuItem::Separator) .add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Services) .add_native_item(MenuItem::Services)
.add_native_item(MenuItem::Separator) .add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Hide) .add_native_item(MenuItem::Hide)
.add_native_item(MenuItem::HideOthers) .add_native_item(MenuItem::HideOthers)
.add_native_item(MenuItem::ShowAll) .add_native_item(MenuItem::ShowAll)
.add_native_item(MenuItem::Separator) .add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Quit), .add_native_item(MenuItem::Quit),
); );
let file = Submenu::new( let file = Submenu::new(
"File", "File",
Menu::new() Menu::new()
.add_item( .add_item(
CustomMenuItem::new("new_window".to_string(), "New Window") CustomMenuItem::new("new_window".to_string(), "New Window")
.accelerator("CmdOrCtrl+N") .accelerator("CmdOrCtrl+N")
.disabled(), .disabled(),
) )
.add_item( .add_item(
CustomMenuItem::new("close".to_string(), "Close Window").accelerator("CmdOrCtrl+W"), CustomMenuItem::new("close".to_string(), "Close Window").accelerator("CmdOrCtrl+W"),
), ),
); );
let edit = Submenu::new( let edit = Submenu::new(
"Edit", "Edit",
Menu::new() Menu::new()
.add_native_item(MenuItem::Copy) .add_native_item(MenuItem::Copy)
.add_native_item(MenuItem::Paste), .add_native_item(MenuItem::Paste),
); );
let view = Submenu::new( let view = Submenu::new(
"View", "View",
Menu::new() Menu::new()
.add_item( .add_item(
CustomMenuItem::new("command_pallete".to_string(), "Command Pallete") CustomMenuItem::new("command_pallete".to_string(), "Command Pallete")
.accelerator("CmdOrCtrl+P"), .accelerator("CmdOrCtrl+P"),
) )
.add_item(CustomMenuItem::new("layout".to_string(), "Layout").disabled()), .add_item(CustomMenuItem::new("layout".to_string(), "Layout").disabled()),
); );
let window = Submenu::new( let window = Submenu::new(
"Window", "Window",
Menu::new().add_native_item(MenuItem::EnterFullScreen), Menu::new().add_native_item(MenuItem::EnterFullScreen),
); );
let menu = Menu::new() let menu = Menu::new()
.add_submenu(spacedrive) .add_submenu(spacedrive)
.add_submenu(file) .add_submenu(file)
.add_submenu(edit) .add_submenu(edit)
.add_submenu(view) .add_submenu(view)
.add_submenu(window); .add_submenu(window);
menu menu
} }
pub(crate) fn handle_menu_event(event: WindowMenuEvent<Wry>) { pub(crate) fn handle_menu_event(event: WindowMenuEvent<Wry>) {
match event.menu_item_id() { match event.menu_item_id() {
"quit" => { "quit" => {
std::process::exit(0); std::process::exit(0);
} }
"close" => { "close" => {
event.window().close().unwrap(); event.window().close().unwrap();
} }
_ => {} _ => {}
} }
} }

View file

@ -1,93 +1,93 @@
use tauri::{GlobalWindowEvent, Runtime, Window, Wry}; use tauri::{GlobalWindowEvent, Runtime, Window, Wry};
pub(crate) fn handle_window_event(event: GlobalWindowEvent<Wry>) { pub(crate) fn handle_window_event(event: GlobalWindowEvent<Wry>) {
match event.event() { match event.event() {
_ => {} _ => {}
} }
} }
pub trait WindowExt { pub trait WindowExt {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool); fn set_toolbar(&self, shown: bool);
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool); fn set_transparent_titlebar(&self, transparent: bool, large: bool);
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn fix_shadow(&self); fn fix_shadow(&self);
} }
impl<R: Runtime> WindowExt for Window<R> { impl<R: Runtime> WindowExt for Window<R> {
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool) { fn set_toolbar(&self, shown: bool) {
use cocoa::{ use cocoa::{
appkit::{NSToolbar, NSWindow}, appkit::{NSToolbar, NSWindow},
base::{nil, NO}, base::{nil, NO},
foundation::NSString, foundation::NSString,
}; };
unsafe { unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id; let id = self.ns_window().unwrap() as cocoa::base::id;
if shown { if shown {
let toolbar = let toolbar =
NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat")); NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat"));
toolbar.setShowsBaselineSeparator_(NO); toolbar.setShowsBaselineSeparator_(NO);
id.setToolbar_(toolbar); id.setToolbar_(toolbar);
} else { } else {
id.setToolbar_(nil); id.setToolbar_(nil);
} }
} }
} }
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool) { fn set_transparent_titlebar(&self, transparent: bool, large: bool) {
use cocoa::{ use cocoa::{
appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility}, appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility},
base::{NO, YES}, base::{NO, YES},
}; };
unsafe { unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id; let id = self.ns_window().unwrap() as cocoa::base::id;
let mut style_mask = id.styleMask(); let mut style_mask = id.styleMask();
// println!("existing style mask, {:#?}", style_mask); // println!("existing style mask, {:#?}", style_mask);
style_mask.set( style_mask.set(
NSWindowStyleMask::NSFullSizeContentViewWindowMask, NSWindowStyleMask::NSFullSizeContentViewWindowMask,
transparent, transparent,
); );
style_mask.set( style_mask.set(
NSWindowStyleMask::NSTexturedBackgroundWindowMask, NSWindowStyleMask::NSTexturedBackgroundWindowMask,
transparent, transparent,
); );
style_mask.set( style_mask.set(
NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask, NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask,
transparent && large, transparent && large,
); );
id.setStyleMask_(style_mask); id.setStyleMask_(style_mask);
if large { if large {
self.set_toolbar(true); self.set_toolbar(true);
} }
id.setTitleVisibility_(if transparent { id.setTitleVisibility_(if transparent {
NSWindowTitleVisibility::NSWindowTitleHidden NSWindowTitleVisibility::NSWindowTitleHidden
} else { } else {
NSWindowTitleVisibility::NSWindowTitleVisible NSWindowTitleVisibility::NSWindowTitleVisible
}); });
id.setTitlebarAppearsTransparent_(if transparent { YES } else { NO }); id.setTitlebarAppearsTransparent_(if transparent { YES } else { NO });
} }
} }
#[cfg(target_os = "macos")] #[cfg(target_os = "macos")]
fn fix_shadow(&self) { fn fix_shadow(&self) {
use cocoa::appkit::NSWindow; use cocoa::appkit::NSWindow;
unsafe { unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id; let id = self.ns_window().unwrap() as cocoa::base::id;
println!("recomputing shadow for window {:?}", id.title()); println!("recomputing shadow for window {:?}", id.title());
id.invalidateShadow(); id.invalidateShadow();
} }
} }
} }

View file

@ -1,83 +1,83 @@
{ {
"package": { "package": {
"productName": "Spacedrive", "productName": "Spacedrive",
"version": "0.1.0" "version": "0.1.0"
}, },
"build": { "build": {
"distDir": "../dist", "distDir": "../dist",
"devPath": "http://localhost:8001", "devPath": "http://localhost:8001",
"beforeDevCommand": "", "beforeDevCommand": "",
"beforeBuildCommand": "" "beforeBuildCommand": ""
}, },
"tauri": { "tauri": {
"macOSPrivateApi": true, "macOSPrivateApi": true,
"bundle": { "bundle": {
"active": true, "active": true,
"targets": "all", "targets": "all",
"identifier": "app.spacedrive.desktop", "identifier": "app.spacedrive.desktop",
"icon": [ "icon": [
"icons/32x32.png", "icons/32x32.png",
"icons/128x128.png", "icons/128x128.png",
"icons/128x128@2x.png", "icons/128x128@2x.png",
"icons/icon.icns", "icons/icon.icns",
"icons/icon.ico" "icons/icon.ico"
], ],
"resources": [], "resources": [],
"externalBin": [], "externalBin": [],
"copyright": "Jamie Pine", "copyright": "Jamie Pine",
"shortDescription": "The Universal File Explorer", "shortDescription": "The Universal File Explorer",
"longDescription": "A cross-platform file explorer, powered by an open source virtual distributed filesystem.", "longDescription": "A cross-platform file explorer, powered by an open source virtual distributed filesystem.",
"deb": { "deb": {
"depends": [], "depends": [],
"useBootstrapper": false "useBootstrapper": false
}, },
"macOS": { "macOS": {
"frameworks": [], "frameworks": [],
"minimumSystemVersion": "", "minimumSystemVersion": "",
"useBootstrapper": false, "useBootstrapper": false,
"exceptionDomain": "", "exceptionDomain": "",
"signingIdentity": null, "signingIdentity": null,
"entitlements": null "entitlements": null
}, },
"windows": { "windows": {
"certificateThumbprint": null, "certificateThumbprint": null,
"digestAlgorithm": "sha256", "digestAlgorithm": "sha256",
"timestampUrl": "" "timestampUrl": ""
} }
}, },
"updater": { "updater": {
"active": false "active": false
}, },
"allowlist": { "allowlist": {
"all": true, "all": true,
"protocol": { "protocol": {
"assetScope": ["*"] "assetScope": ["*"]
}, },
"dialog": { "dialog": {
"all": true, "all": true,
"open": true, "open": true,
"save": true "save": true
} }
}, },
"windows": [ "windows": [
{ {
"title": "Spacedrive", "title": "Spacedrive",
"width": 1200, "width": 1200,
"height": 725, "height": 725,
"minWidth": 700, "minWidth": 700,
"minHeight": 500, "minHeight": 500,
"resizable": true, "resizable": true,
"fullscreen": false, "fullscreen": false,
"alwaysOnTop": false, "alwaysOnTop": false,
"focus": false, "focus": false,
"fileDropEnabled": false, "fileDropEnabled": false,
"decorations": true, "decorations": true,
"transparent": true, "transparent": true,
"center": true "center": true
} }
], ],
"security": { "security": {
"csp": "default-src asset: https://asset.localhost blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'" "csp": "default-src asset: https://asset.localhost blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
} }
} }
} }

View file

@ -1,74 +1,74 @@
{ {
"package": { "package": {
"productName": "Spacedrive", "productName": "Spacedrive",
"version": "0.1.0" "version": "0.1.0"
}, },
"build": { "build": {
"distDir": "../dist", "distDir": "../dist",
"devPath": "http://localhost:8001", "devPath": "http://localhost:8001",
"beforeDevCommand": "", "beforeDevCommand": "",
"beforeBuildCommand": "" "beforeBuildCommand": ""
}, },
"tauri": { "tauri": {
"bundle": { "bundle": {
"active": true, "active": true,
"targets": "all", "targets": "all",
"identifier": "co.spacedrive.desktop", "identifier": "co.spacedrive.desktop",
"icon": ["icons/icon.icns"], "icon": ["icons/icon.icns"],
"resources": [], "resources": [],
"externalBin": [], "externalBin": [],
"copyright": "Jamie Pine", "copyright": "Jamie Pine",
"shortDescription": "Your personal virtual cloud.", "shortDescription": "Your personal virtual cloud.",
"longDescription": "Spacedrive is an open source virtual filesystem, a personal cloud powered by your everyday devices. Feature-rich benefits of the cloud, only its owned and hosted by you with security, privacy and ownership as a foundation. Spacedrive makes it possible to create a limitless directory of your digital life that will stand the test of time.", "longDescription": "Spacedrive is an open source virtual filesystem, a personal cloud powered by your everyday devices. Feature-rich benefits of the cloud, only its owned and hosted by you with security, privacy and ownership as a foundation. Spacedrive makes it possible to create a limitless directory of your digital life that will stand the test of time.",
"deb": { "deb": {
"depends": [], "depends": [],
"useBootstrapper": false "useBootstrapper": false
}, },
"macOS": { "macOS": {
"frameworks": [], "frameworks": [],
"minimumSystemVersion": "", "minimumSystemVersion": "",
"useBootstrapper": false, "useBootstrapper": false,
"exceptionDomain": "", "exceptionDomain": "",
"signingIdentity": null, "signingIdentity": null,
"entitlements": null "entitlements": null
}, },
"windows": { "windows": {
"certificateThumbprint": null, "certificateThumbprint": null,
"digestAlgorithm": "sha256", "digestAlgorithm": "sha256",
"timestampUrl": "" "timestampUrl": ""
} }
}, },
"updater": { "updater": {
"active": false "active": false
}, },
"allowlist": { "allowlist": {
"all": true, "all": true,
"os": { "os": {
"all": true "all": true
}, },
"dialog": { "dialog": {
"all": true, "all": true,
"open": true, "open": true,
"save": true "save": true
} }
}, },
"windows": [ "windows": [
{ {
"title": "Spacedrive", "title": "Spacedrive",
"width": 1250, "width": 1250,
"height": 625, "height": 625,
"resizable": true, "resizable": true,
"fullscreen": false, "fullscreen": false,
"alwaysOnTop": false, "alwaysOnTop": false,
"focus": true, "focus": true,
"fileDropEnabled": false, "fileDropEnabled": false,
"decorations": true, "decorations": true,
"transparent": false, "transparent": false,
"center": true "center": true
} }
], ],
"security": { "security": {
"csp": "default-src asset: blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'" "csp": "default-src asset: blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
} }
} }
} }

View file

@ -1,13 +1,13 @@
<!DOCTYPE html> <!DOCTYPE html>
<html lang="en" class="dark"> <html lang="en" class="dark">
<head> <head>
<meta charset="UTF-8" /> <meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/src/favicon.svg" /> <link rel="icon" type="image/svg+xml" href="/src/favicon.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Spacedrive</title> <title>Spacedrive</title>
</head> </head>
<body style="overflow: hidden"> <body style="overflow: hidden">
<div id="root"></div> <div id="root"></div>
<script type="module" src="./index.tsx"></script> <script type="module" src="./index.tsx"></script>
</body> </body>
</html> </html>

View file

@ -15,79 +15,79 @@ import { appWindow } from '@tauri-apps/api/window';
// bind state to core via Tauri // bind state to core via Tauri
class Transport extends BaseTransport { class Transport extends BaseTransport {
constructor() { constructor() {
super(); super();
listen('core_event', (e: Event<CoreEvent>) => { listen('core_event', (e: Event<CoreEvent>) => {
this.emit('core_event', e.payload); this.emit('core_event', e.payload);
}); });
} }
async query(query: ClientQuery) { async query(query: ClientQuery) {
return await invoke('client_query_transport', { data: query }); return await invoke('client_query_transport', { data: query });
} }
async command(query: ClientCommand) { async command(query: ClientCommand) {
return await invoke('client_command_transport', { data: query }); return await invoke('client_command_transport', { data: query });
} }
} }
function App() { function App() {
function getPlatform(platform: string): Platform { function getPlatform(platform: string): Platform {
switch (platform) { switch (platform) {
case 'darwin': case 'darwin':
return 'macOS'; return 'macOS';
case 'win32': case 'win32':
return 'windows'; return 'windows';
case 'linux': case 'linux':
return 'linux'; return 'linux';
default: default:
return 'browser'; return 'browser';
} }
} }
const [platform, setPlatform] = useState<Platform>('macOS'); const [platform, setPlatform] = useState<Platform>('macOS');
const [focused, setFocused] = useState(true); const [focused, setFocused] = useState(true);
useEffect(() => { useEffect(() => {
os.platform().then((platform) => setPlatform(getPlatform(platform))); os.platform().then((platform) => setPlatform(getPlatform(platform)));
invoke('app_ready'); invoke('app_ready');
}, []); }, []);
useEffect(() => { useEffect(() => {
const unlistenFocus = listen('tauri://focus', () => setFocused(true)); const unlistenFocus = listen('tauri://focus', () => setFocused(true));
const unlistenBlur = listen('tauri://blur', () => setFocused(false)); const unlistenBlur = listen('tauri://blur', () => setFocused(false));
return () => { return () => {
unlistenFocus.then((unlisten) => unlisten()); unlistenFocus.then((unlisten) => unlisten());
unlistenBlur.then((unlisten) => unlisten()); unlistenBlur.then((unlisten) => unlisten());
}; };
}, []); }, []);
return ( return (
<SpacedriveInterface <SpacedriveInterface
useMemoryRouter useMemoryRouter
transport={new Transport()} transport={new Transport()}
platform={platform} platform={platform}
convertFileSrc={function (url: string): string { convertFileSrc={function (url: string): string {
return convertFileSrc(url); return convertFileSrc(url);
}} }}
openDialog={function (options: { openDialog={function (options: {
directory?: boolean | undefined; directory?: boolean | undefined;
}): Promise<string | string[]> { }): Promise<string | string[]> {
return dialog.open(options); return dialog.open(options);
}} }}
isFocused={focused} isFocused={focused}
onClose={() => appWindow.close()} onClose={() => appWindow.close()}
onFullscreen={() => appWindow.setFullscreen(true)} onFullscreen={() => appWindow.setFullscreen(true)}
onMinimize={() => appWindow.minimize()} onMinimize={() => appWindow.minimize()}
onOpen={(path: string) => shell.open(path)} onOpen={(path: string) => shell.open(path)}
/> />
); );
} }
const root = createRoot(document.getElementById('root')!); const root = createRoot(document.getElementById('root')!);
root.render( root.render(
<React.StrictMode> <React.StrictMode>
<App /> <App />
</React.StrictMode> </React.StrictMode>
); );

View file

@ -1,7 +1,7 @@
/// <reference types="vite/client" /> /// <reference types="vite/client" />
declare interface ImportMetaEnv { declare interface ImportMetaEnv {
VITE_OS: string; VITE_OS: string;
} }
declare module '@babel/core' {} declare module '@babel/core' {}

View file

@ -1,5 +1,5 @@
{ {
"extends": "../../packages/config/interface.tsconfig.json", "extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {}, "compilerOptions": {},
"include": ["src"] "include": ["src"]
} }

View file

@ -1,27 +1,27 @@
import { defineConfig } from 'vite'; import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react'; import react from '@vitejs/plugin-react';
import { name, version } from './package.json'; import { name, version } from './package.json';
import svg from "vite-plugin-svgr" import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/ // https://vitejs.dev/config/
export default defineConfig({ export default defineConfig({
server: { server: {
port: 8001 port: 8001
}, },
plugins: [ plugins: [
//@ts-ignore //@ts-ignore
react({ react({
jsxRuntime: 'classic' jsxRuntime: 'classic'
}), }),
svg({ svgrOptions: { icon: true } }) svg({ svgrOptions: { icon: true } })
], ],
root: 'src', root: 'src',
publicDir: '../../packages/interface/src/assets', publicDir: '../../packages/interface/src/assets',
define: { define: {
pkgJson: { name, version } pkgJson: { name, version }
}, },
build: { build: {
outDir: '../dist', outDir: '../dist',
assetsDir: '.' assetsDir: '.'
} }
}); });

View file

@ -1,23 +1,23 @@
<!DOCTYPE html> <!DOCTYPE html>
<html lang="en" class="dark"> <html lang="en" class="dark">
<head> <head>
<meta charset="UTF-8" /> <meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/favicon.ico" /> <link rel="icon" type="image/svg+xml" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Spacedrive — A file manager from the future.</title> <title>Spacedrive — A file manager from the future.</title>
<meta <meta
name="description" name="description"
content="Combine your drives and clouds into one database that you can organize and explore from any device. Designed for creators, hoarders and the painfully disorganized." content="Combine your drives and clouds into one database that you can organize and explore from any device. Designed for creators, hoarders and the painfully disorganized."
/> />
<meta <meta
name="keywords" name="keywords"
content="files,file manager,spacedrive,file explorer,vdfs,distributed filesystem,cas,content addressable storage,virtual filesystem,photos app, video organizer,video encoder,tags,tag based filesystem" content="files,file manager,spacedrive,file explorer,vdfs,distributed filesystem,cas,content addressable storage,virtual filesystem,photos app, video organizer,video encoder,tags,tag based filesystem"
/> />
<meta name="author" content="Jamie Pine" /> <meta name="author" content="Jamie Pine" />
<meta name="robots" content="index, follow" /> <meta name="robots" content="index, follow" />
</head> </head>
<body> <body>
<div id="root"></div> <div id="root"></div>
<script type="module" src="/src/main.tsx"></script> <script type="module" src="/src/main.tsx"></script>
</body> </body>
</html> </html>

View file

@ -1,59 +1,59 @@
{ {
"name": "@sd/landing", "name": "@sd/landing",
"private": true, "private": true,
"version": "0.0.0", "version": "0.0.0",
"scripts": { "scripts": {
"dev": "vite", "dev": "vite",
"build": "vite build", "build": "vite build",
"serve": "vite preview" "serve": "vite preview"
}, },
"dependencies": { "dependencies": {
"@fontsource/inter": "^4.5.7", "@fontsource/inter": "^4.5.7",
"@headlessui/react": "^1.5.0", "@headlessui/react": "^1.5.0",
"@heroicons/react": "^1.0.6", "@heroicons/react": "^1.0.6",
"@icons-pack/react-simple-icons": "^4.6.1", "@icons-pack/react-simple-icons": "^4.6.1",
"@sd/client": "workspace:*", "@sd/client": "workspace:*",
"@sd/core": "workspace:*", "@sd/core": "workspace:*",
"@sd/interface": "workspace:*", "@sd/interface": "workspace:*",
"@sd/ui": "workspace:*", "@sd/ui": "workspace:*",
"@tailwindcss/typography": "^0.5.2", "@tailwindcss/typography": "^0.5.2",
"@types/compression": "^1.7.2", "@types/compression": "^1.7.2",
"@types/express": "^4.17.13", "@types/express": "^4.17.13",
"clsx": "^1.1.1", "clsx": "^1.1.1",
"compression": "^1.7.4", "compression": "^1.7.4",
"express": "^4.17.3", "express": "^4.17.3",
"phosphor-react": "^1.4.1", "phosphor-react": "^1.4.1",
"prismjs": "^1.28.0", "prismjs": "^1.28.0",
"react": "^18.0.0", "react": "^18.0.0",
"react-device-detect": "^2.2.2", "react-device-detect": "^2.2.2",
"react-dom": "^18.0.0", "react-dom": "^18.0.0",
"react-helmet": "^6.1.0", "react-helmet": "^6.1.0",
"react-router-dom": "6.3.0", "react-router-dom": "6.3.0",
"react-tsparticles": "^2.0.6", "react-tsparticles": "^2.0.6",
"simple-icons": "^6.19.0", "simple-icons": "^6.19.0",
"tsparticles": "^2.0.6" "tsparticles": "^2.0.6"
}, },
"devDependencies": { "devDependencies": {
"@babel/preset-react": "^7.16.7", "@babel/preset-react": "^7.16.7",
"@types/lodash": "^4.14.182", "@types/lodash": "^4.14.182",
"@types/prismjs": "^1.26.0", "@types/prismjs": "^1.26.0",
"@types/react": "^18.0.8", "@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0", "@types/react-dom": "^18.0.0",
"@types/react-helmet": "^6.1.5", "@types/react-helmet": "^6.1.5",
"@vitejs/plugin-react": "^1.3.1", "@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4", "autoprefixer": "^10.4.4",
"nodemon": "^2.0.15", "nodemon": "^2.0.15",
"postcss": "^8.4.12", "postcss": "^8.4.12",
"sass": "^1.50.0", "sass": "^1.50.0",
"tailwind": "^4.0.0", "tailwind": "^4.0.0",
"ts-node": "^10.7.0", "ts-node": "^10.7.0",
"typescript": "^4.6.3", "typescript": "^4.6.3",
"vite": "^2.9.5", "vite": "^2.9.5",
"vite-plugin-markdown": "^2.0.2", "vite-plugin-markdown": "^2.0.2",
"vite-plugin-md": "^0.13.0", "vite-plugin-md": "^0.13.0",
"vite-plugin-pages": "^0.23.0", "vite-plugin-pages": "^0.23.0",
"vite-plugin-pages-sitemap": "^1.2.2", "vite-plugin-pages-sitemap": "^1.2.2",
"vite-plugin-ssr": "^0.3.64", "vite-plugin-ssr": "^0.3.64",
"vite-plugin-svgr": "^1.1.0" "vite-plugin-svgr": "^1.1.0"
} }
} }

View file

@ -28,413 +28,416 @@
* --syntax-cursor-line: hsla(220, 100%, 80%, 0.04); * --syntax-cursor-line: hsla(220, 100%, 80%, 0.04);
*/ */
code[class*="language-"], code[class*='language-'],
pre[class*="language-"] { pre[class*='language-'] {
background: hsl(220, 9%, 6%); background: hsl(220, 9%, 6%);
color: hsl(220, 14%, 71%); color: hsl(220, 14%, 71%);
text-shadow: 0 1px rgba(0, 0, 0, 0.3); text-shadow: 0 1px rgba(0, 0, 0, 0.3);
font-family: "Fira Code", "Fira Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace; font-family: 'Fira Code', 'Fira Mono', Menlo, Consolas, 'DejaVu Sans Mono', monospace;
direction: ltr; direction: ltr;
text-align: left; text-align: left;
white-space: pre; white-space: pre;
word-spacing: normal; word-spacing: normal;
word-break: normal; word-break: normal;
line-height: 1.5; line-height: 1.5;
-moz-tab-size: 2; -moz-tab-size: 2;
-o-tab-size: 2; -o-tab-size: 2;
tab-size: 2; tab-size: 2;
-webkit-hyphens: none; -webkit-hyphens: none;
-moz-hyphens: none; -moz-hyphens: none;
-ms-hyphens: none; -ms-hyphens: none;
hyphens: none; hyphens: none;
} }
/* Selection */ /* Selection */
code[class*="language-"]::-moz-selection, code[class*='language-']::-moz-selection,
code[class*="language-"] *::-moz-selection, code[class*='language-'] *::-moz-selection,
pre[class*="language-"] *::-moz-selection { pre[class*='language-'] *::-moz-selection {
background: hsl(220, 13%, 28%); background: hsl(220, 13%, 28%);
color: inherit; color: inherit;
text-shadow: none; text-shadow: none;
} }
code[class*="language-"]::selection, code[class*='language-']::selection,
code[class*="language-"] *::selection, code[class*='language-'] *::selection,
pre[class*="language-"] *::selection { pre[class*='language-'] *::selection {
background: hsl(220, 13%, 28%); background: hsl(220, 13%, 28%);
color: inherit; color: inherit;
text-shadow: none; text-shadow: none;
} }
/* Code blocks */ /* Code blocks */
pre[class*="language-"] { pre[class*='language-'] {
padding: 1em; padding: 1em;
margin: 0.5em 0; margin: 0.5em 0;
overflow: auto; overflow: auto;
border-radius: 0.3em; border-radius: 0.3em;
} }
/* Inline code */ /* Inline code */
:not(pre) > code[class*="language-"] { :not(pre) > code[class*='language-'] {
padding: 0.2em 0.3em; padding: 0.2em 0.3em;
border-radius: 0.3em; border-radius: 0.3em;
white-space: normal; white-space: normal;
} }
/* Print */ /* Print */
@media print { @media print {
code[class*="language-"], code[class*='language-'],
pre[class*="language-"] { pre[class*='language-'] {
text-shadow: none; text-shadow: none;
} }
} }
.token.comment, .token.comment,
.token.prolog, .token.prolog,
.token.cdata { .token.cdata {
color: hsl(220, 10%, 40%); color: hsl(220, 10%, 40%);
} }
.token.doctype, .token.doctype,
.token.punctuation, .token.punctuation,
.token.entity { .token.entity {
color: hsl(220, 14%, 71%); color: hsl(220, 14%, 71%);
} }
.token.attr-name, .token.attr-name,
.token.class-name, .token.class-name,
.token.boolean, .token.boolean,
.token.constant, .token.constant,
.token.number, .token.number,
.token.atrule { .token.atrule {
color: hsl(29, 54%, 61%); color: hsl(29, 54%, 61%);
} }
.token.keyword { .token.keyword {
color: hsl(286, 60%, 67%); color: hsl(286, 60%, 67%);
} }
.token.property, .token.property,
.token.tag, .token.tag,
.token.symbol, .token.symbol,
.token.deleted, .token.deleted,
.token.important { .token.important {
color: hsl(355, 65%, 65%); color: hsl(355, 65%, 65%);
} }
.token.selector, .token.selector,
.token.string, .token.string,
.token.char, .token.char,
.token.builtin, .token.builtin,
.token.inserted, .token.inserted,
.token.regex, .token.regex,
.token.attr-value, .token.attr-value,
.token.attr-value > .token.punctuation { .token.attr-value > .token.punctuation {
color: hsl(95, 38%, 62%); color: hsl(95, 38%, 62%);
} }
.token.variable, .token.variable,
.token.operator, .token.operator,
.token.function { .token.function {
color: hsl(207, 82%, 66%); color: hsl(207, 82%, 66%);
} }
.token.url { .token.url {
color: hsl(187, 47%, 55%); color: hsl(187, 47%, 55%);
} }
/* HTML overrides */ /* HTML overrides */
.token.attr-value > .token.punctuation.attr-equals, .token.attr-value > .token.punctuation.attr-equals,
.token.special-attr > .token.attr-value > .token.value.css { .token.special-attr > .token.attr-value > .token.value.css {
color: hsl(220, 14%, 71%); color: hsl(220, 14%, 71%);
} }
/* CSS overrides */ /* CSS overrides */
.language-css .token.selector { .language-css .token.selector {
color: hsl(355, 65%, 65%); color: hsl(355, 65%, 65%);
} }
.language-css .token.property { .language-css .token.property {
color: hsl(220, 14%, 71%); color: hsl(220, 14%, 71%);
} }
.language-css .token.function, .language-css .token.function,
.language-css .token.url > .token.function { .language-css .token.url > .token.function {
color: hsl(187, 47%, 55%); color: hsl(187, 47%, 55%);
} }
.language-css .token.url > .token.string.url { .language-css .token.url > .token.string.url {
color: hsl(95, 38%, 62%); color: hsl(95, 38%, 62%);
} }
.language-css .token.important, .language-css .token.important,
.language-css .token.atrule .token.rule { .language-css .token.atrule .token.rule {
color: hsl(286, 60%, 67%); color: hsl(286, 60%, 67%);
} }
/* JS overrides */ /* JS overrides */
.language-javascript .token.operator { .language-javascript .token.operator {
color: hsl(286, 60%, 67%); color: hsl(286, 60%, 67%);
} }
.language-javascript .token.template-string > .token.interpolation > .token.interpolation-punctuation.punctuation { .language-javascript
color: hsl(5, 48%, 51%); .token.template-string
} > .token.interpolation
> .token.interpolation-punctuation.punctuation {
/* JSON overrides */ color: hsl(5, 48%, 51%);
.language-json .token.operator { }
color: hsl(220, 14%, 71%);
} /* JSON overrides */
.language-json .token.operator {
.language-json .token.null.keyword { color: hsl(220, 14%, 71%);
color: hsl(29, 54%, 61%); }
}
.language-json .token.null.keyword {
/* MD overrides */ color: hsl(29, 54%, 61%);
.language-markdown .token.url, }
.language-markdown .token.url > .token.operator,
.language-markdown .token.url-reference.url > .token.string { /* MD overrides */
color: hsl(220, 14%, 71%); .language-markdown .token.url,
} .language-markdown .token.url > .token.operator,
.language-markdown .token.url-reference.url > .token.string {
.language-markdown .token.url > .token.content { color: hsl(220, 14%, 71%);
color: hsl(207, 82%, 66%); }
}
.language-markdown .token.url > .token.content {
.language-markdown .token.url > .token.url, color: hsl(207, 82%, 66%);
.language-markdown .token.url-reference.url { }
color: hsl(187, 47%, 55%);
} .language-markdown .token.url > .token.url,
.language-markdown .token.url-reference.url {
.language-markdown .token.blockquote.punctuation, color: hsl(187, 47%, 55%);
.language-markdown .token.hr.punctuation { }
color: hsl(220, 10%, 40%);
font-style: italic; .language-markdown .token.blockquote.punctuation,
} .language-markdown .token.hr.punctuation {
color: hsl(220, 10%, 40%);
.language-markdown .token.code-snippet { font-style: italic;
color: hsl(95, 38%, 62%); }
}
.language-markdown .token.code-snippet {
.language-markdown .token.bold .token.content { color: hsl(95, 38%, 62%);
color: hsl(29, 54%, 61%); }
}
.language-markdown .token.bold .token.content {
.language-markdown .token.italic .token.content { color: hsl(29, 54%, 61%);
color: hsl(286, 60%, 67%); }
}
.language-markdown .token.italic .token.content {
.language-markdown .token.strike .token.content, color: hsl(286, 60%, 67%);
.language-markdown .token.strike .token.punctuation, }
.language-markdown .token.list.punctuation,
.language-markdown .token.title.important > .token.punctuation { .language-markdown .token.strike .token.content,
color: hsl(355, 65%, 65%); .language-markdown .token.strike .token.punctuation,
} .language-markdown .token.list.punctuation,
.language-markdown .token.title.important > .token.punctuation {
/* General */ color: hsl(355, 65%, 65%);
.token.bold { }
font-weight: bold;
} /* General */
.token.bold {
.token.comment, font-weight: bold;
.token.italic { }
font-style: italic;
} .token.comment,
.token.italic {
.token.entity { font-style: italic;
cursor: help; }
}
.token.entity {
.token.namespace { cursor: help;
opacity: 0.8; }
}
.token.namespace {
/* Plugin overrides */ opacity: 0.8;
/* Selectors should have higher specificity than those in the plugins' default stylesheets */ }
/* Show Invisibles plugin overrides */ /* Plugin overrides */
.token.token.tab:not(:empty):before, /* Selectors should have higher specificity than those in the plugins' default stylesheets */
.token.token.cr:before,
.token.token.lf:before, /* Show Invisibles plugin overrides */
.token.token.space:before { .token.token.tab:not(:empty):before,
color: hsla(220, 14%, 71%, 0.15); .token.token.cr:before,
text-shadow: none; .token.token.lf:before,
} .token.token.space:before {
color: hsla(220, 14%, 71%, 0.15);
/* Toolbar plugin overrides */ text-shadow: none;
/* Space out all buttons and move them away from the right edge of the code block */ }
div.code-toolbar > .toolbar.toolbar > .toolbar-item {
margin-right: 0.4em; /* Toolbar plugin overrides */
} /* Space out all buttons and move them away from the right edge of the code block */
div.code-toolbar > .toolbar.toolbar > .toolbar-item {
/* Styling the buttons */ margin-right: 0.4em;
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button, }
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span { /* Styling the buttons */
background: hsl(220, 13%, 26%); div.code-toolbar > .toolbar.toolbar > .toolbar-item > button,
color: hsl(220, 9%, 55%); div.code-toolbar > .toolbar.toolbar > .toolbar-item > a,
padding: 0.1em 0.4em; div.code-toolbar > .toolbar.toolbar > .toolbar-item > span {
border-radius: 0.3em; background: hsl(220, 13%, 26%);
} color: hsl(220, 9%, 55%);
padding: 0.1em 0.4em;
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:hover, border-radius: 0.3em;
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:focus, }
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:focus, div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:hover, div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:focus,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:focus { div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:hover,
background: hsl(220, 13%, 28%); div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:focus,
color: hsl(220, 14%, 71%); div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:hover,
} div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:focus {
background: hsl(220, 13%, 28%);
/* Line Highlight plugin overrides */ color: hsl(220, 14%, 71%);
/* The highlighted line itself */ }
.line-highlight.line-highlight {
background: hsla(220, 100%, 80%, 0.04); /* Line Highlight plugin overrides */
} /* The highlighted line itself */
.line-highlight.line-highlight {
/* Default line numbers in Line Highlight plugin */ background: hsla(220, 100%, 80%, 0.04);
.line-highlight.line-highlight:before, }
.line-highlight.line-highlight[data-end]:after {
background: hsl(220, 13%, 26%); /* Default line numbers in Line Highlight plugin */
color: hsl(220, 14%, 71%); .line-highlight.line-highlight:before,
padding: 0.1em 0.6em; .line-highlight.line-highlight[data-end]:after {
border-radius: 0.3em; background: hsl(220, 13%, 26%);
box-shadow: 0 2px 0 0 rgba(0, 0, 0, 0.2); /* same as Toolbar plugin default */ color: hsl(220, 14%, 71%);
} padding: 0.1em 0.6em;
border-radius: 0.3em;
/* Hovering over a linkable line number (in the gutter area) */ box-shadow: 0 2px 0 0 rgba(0, 0, 0, 0.2); /* same as Toolbar plugin default */
/* Requires Line Numbers plugin as well */ }
pre[id].linkable-line-numbers.linkable-line-numbers span.line-numbers-rows > span:hover:before {
background-color: hsla(220, 100%, 80%, 0.04); /* Hovering over a linkable line number (in the gutter area) */
} /* Requires Line Numbers plugin as well */
pre[id].linkable-line-numbers.linkable-line-numbers span.line-numbers-rows > span:hover:before {
/* Line Numbers and Command Line plugins overrides */ background-color: hsla(220, 100%, 80%, 0.04);
/* Line separating gutter from coding area */ }
.line-numbers.line-numbers .line-numbers-rows,
.command-line .command-line-prompt { /* Line Numbers and Command Line plugins overrides */
border-right-color: hsla(220, 14%, 71%, 0.15); /* Line separating gutter from coding area */
} .line-numbers.line-numbers .line-numbers-rows,
.command-line .command-line-prompt {
/* Stuff in the gutter */ border-right-color: hsla(220, 14%, 71%, 0.15);
.line-numbers .line-numbers-rows > span:before, }
.command-line .command-line-prompt > span:before {
color: hsl(220, 14%, 45%); /* Stuff in the gutter */
} .line-numbers .line-numbers-rows > span:before,
.command-line .command-line-prompt > span:before {
/* Match Braces plugin overrides */ color: hsl(220, 14%, 45%);
/* Note: Outline colour is inherited from the braces */ }
.rainbow-braces .token.token.punctuation.brace-level-1,
.rainbow-braces .token.token.punctuation.brace-level-5, /* Match Braces plugin overrides */
.rainbow-braces .token.token.punctuation.brace-level-9 { /* Note: Outline colour is inherited from the braces */
color: hsl(355, 65%, 65%); .rainbow-braces .token.token.punctuation.brace-level-1,
} .rainbow-braces .token.token.punctuation.brace-level-5,
.rainbow-braces .token.token.punctuation.brace-level-9 {
.rainbow-braces .token.token.punctuation.brace-level-2, color: hsl(355, 65%, 65%);
.rainbow-braces .token.token.punctuation.brace-level-6, }
.rainbow-braces .token.token.punctuation.brace-level-10 {
color: hsl(95, 38%, 62%); .rainbow-braces .token.token.punctuation.brace-level-2,
} .rainbow-braces .token.token.punctuation.brace-level-6,
.rainbow-braces .token.token.punctuation.brace-level-10 {
.rainbow-braces .token.token.punctuation.brace-level-3, color: hsl(95, 38%, 62%);
.rainbow-braces .token.token.punctuation.brace-level-7, }
.rainbow-braces .token.token.punctuation.brace-level-11 {
color: hsl(207, 82%, 66%); .rainbow-braces .token.token.punctuation.brace-level-3,
} .rainbow-braces .token.token.punctuation.brace-level-7,
.rainbow-braces .token.token.punctuation.brace-level-11 {
.rainbow-braces .token.token.punctuation.brace-level-4, color: hsl(207, 82%, 66%);
.rainbow-braces .token.token.punctuation.brace-level-8, }
.rainbow-braces .token.token.punctuation.brace-level-12 {
color: hsl(286, 60%, 67%); .rainbow-braces .token.token.punctuation.brace-level-4,
} .rainbow-braces .token.token.punctuation.brace-level-8,
.rainbow-braces .token.token.punctuation.brace-level-12 {
/* Diff Highlight plugin overrides */ color: hsl(286, 60%, 67%);
/* Taken from https://github.com/atom/github/blob/master/styles/variables.less */ }
pre.diff-highlight > code .token.token.deleted:not(.prefix),
pre > code.diff-highlight .token.token.deleted:not(.prefix) { /* Diff Highlight plugin overrides */
background-color: hsla(353, 100%, 66%, 0.15); /* Taken from https://github.com/atom/github/blob/master/styles/variables.less */
} pre.diff-highlight > code .token.token.deleted:not(.prefix),
pre > code.diff-highlight .token.token.deleted:not(.prefix) {
pre.diff-highlight > code .token.token.deleted:not(.prefix)::-moz-selection, background-color: hsla(353, 100%, 66%, 0.15);
pre.diff-highlight > code .token.token.deleted:not(.prefix) *::-moz-selection, }
pre > code.diff-highlight .token.token.deleted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::-moz-selection { pre.diff-highlight > code .token.token.deleted:not(.prefix)::-moz-selection,
background-color: hsla(353, 95%, 66%, 0.25); pre.diff-highlight > code .token.token.deleted:not(.prefix) *::-moz-selection,
} pre > code.diff-highlight .token.token.deleted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::-moz-selection {
pre.diff-highlight > code .token.token.deleted:not(.prefix)::selection, background-color: hsla(353, 95%, 66%, 0.25);
pre.diff-highlight > code .token.token.deleted:not(.prefix) *::selection, }
pre > code.diff-highlight .token.token.deleted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::selection { pre.diff-highlight > code .token.token.deleted:not(.prefix)::selection,
background-color: hsla(353, 95%, 66%, 0.25); pre.diff-highlight > code .token.token.deleted:not(.prefix) *::selection,
} pre > code.diff-highlight .token.token.deleted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::selection {
pre.diff-highlight > code .token.token.inserted:not(.prefix), background-color: hsla(353, 95%, 66%, 0.25);
pre > code.diff-highlight .token.token.inserted:not(.prefix) { }
background-color: hsla(137, 100%, 55%, 0.15);
} pre.diff-highlight > code .token.token.inserted:not(.prefix),
pre > code.diff-highlight .token.token.inserted:not(.prefix) {
pre.diff-highlight > code .token.token.inserted:not(.prefix)::-moz-selection, background-color: hsla(137, 100%, 55%, 0.15);
pre.diff-highlight > code .token.token.inserted:not(.prefix) *::-moz-selection, }
pre > code.diff-highlight .token.token.inserted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::-moz-selection { pre.diff-highlight > code .token.token.inserted:not(.prefix)::-moz-selection,
background-color: hsla(135, 73%, 55%, 0.25); pre.diff-highlight > code .token.token.inserted:not(.prefix) *::-moz-selection,
} pre > code.diff-highlight .token.token.inserted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::-moz-selection {
pre.diff-highlight > code .token.token.inserted:not(.prefix)::selection, background-color: hsla(135, 73%, 55%, 0.25);
pre.diff-highlight > code .token.token.inserted:not(.prefix) *::selection, }
pre > code.diff-highlight .token.token.inserted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::selection { pre.diff-highlight > code .token.token.inserted:not(.prefix)::selection,
background-color: hsla(135, 73%, 55%, 0.25); pre.diff-highlight > code .token.token.inserted:not(.prefix) *::selection,
} pre > code.diff-highlight .token.token.inserted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::selection {
/* Previewers plugin overrides */ background-color: hsla(135, 73%, 55%, 0.25);
/* Based on https://github.com/atom-community/atom-ide-datatip/blob/master/styles/atom-ide-datatips.less and https://github.com/atom/atom/blob/master/packages/one-dark-ui */ }
/* Border around popup */
.prism-previewer.prism-previewer:before, /* Previewers plugin overrides */
.prism-previewer-gradient.prism-previewer-gradient div { /* Based on https://github.com/atom-community/atom-ide-datatip/blob/master/styles/atom-ide-datatips.less and https://github.com/atom/atom/blob/master/packages/one-dark-ui */
border-color: hsl(224, 13%, 17%); /* Border around popup */
} .prism-previewer.prism-previewer:before,
.prism-previewer-gradient.prism-previewer-gradient div {
/* Angle and time should remain as circles and are hence not included */ border-color: hsl(224, 13%, 17%);
.prism-previewer-color.prism-previewer-color:before, }
.prism-previewer-gradient.prism-previewer-gradient div,
.prism-previewer-easing.prism-previewer-easing:before { /* Angle and time should remain as circles and are hence not included */
border-radius: 0.3em; .prism-previewer-color.prism-previewer-color:before,
} .prism-previewer-gradient.prism-previewer-gradient div,
.prism-previewer-easing.prism-previewer-easing:before {
/* Triangles pointing to the code */ border-radius: 0.3em;
.prism-previewer.prism-previewer:after { }
border-top-color: hsl(224, 13%, 17%);
} /* Triangles pointing to the code */
.prism-previewer.prism-previewer:after {
.prism-previewer-flipped.prism-previewer-flipped.after { border-top-color: hsl(224, 13%, 17%);
border-bottom-color: hsl(224, 13%, 17%); }
}
.prism-previewer-flipped.prism-previewer-flipped.after {
/* Background colour within the popup */ border-bottom-color: hsl(224, 13%, 17%);
.prism-previewer-angle.prism-previewer-angle:before, }
.prism-previewer-time.prism-previewer-time:before,
.prism-previewer-easing.prism-previewer-easing { /* Background colour within the popup */
background: hsl(219, 13%, 22%); .prism-previewer-angle.prism-previewer-angle:before,
} .prism-previewer-time.prism-previewer-time:before,
.prism-previewer-easing.prism-previewer-easing {
/* For angle, this is the positive area (eg. 90deg will display one quadrant in this colour) */ background: hsl(219, 13%, 22%);
/* For time, this is the alternate colour */ }
.prism-previewer-angle.prism-previewer-angle circle,
.prism-previewer-time.prism-previewer-time circle { /* For angle, this is the positive area (eg. 90deg will display one quadrant in this colour) */
stroke: hsl(220, 14%, 71%); /* For time, this is the alternate colour */
stroke-opacity: 1; .prism-previewer-angle.prism-previewer-angle circle,
} .prism-previewer-time.prism-previewer-time circle {
stroke: hsl(220, 14%, 71%);
/* Stroke colours of the handle, direction point, and vector itself */ stroke-opacity: 1;
.prism-previewer-easing.prism-previewer-easing circle, }
.prism-previewer-easing.prism-previewer-easing path,
.prism-previewer-easing.prism-previewer-easing line { /* Stroke colours of the handle, direction point, and vector itself */
stroke: hsl(220, 14%, 71%); .prism-previewer-easing.prism-previewer-easing circle,
} .prism-previewer-easing.prism-previewer-easing path,
.prism-previewer-easing.prism-previewer-easing line {
/* Fill colour of the handle */ stroke: hsl(220, 14%, 71%);
.prism-previewer-easing.prism-previewer-easing circle { }
fill: transparent;
} /* Fill colour of the handle */
.prism-previewer-easing.prism-previewer-easing circle {
fill: transparent;
}

View file

@ -4,93 +4,93 @@ import { useEffect } from 'react';
import { isMobile } from 'react-device-detect'; import { isMobile } from 'react-device-detect';
export default function AppEmbed() { export default function AppEmbed() {
const [showApp, setShowApp] = useState(false); const [showApp, setShowApp] = useState(false);
const [iFrameAppReady, setIframeAppReady] = useState(false); const [iFrameAppReady, setIframeAppReady] = useState(false);
const [forceImg, setForceImg] = useState(false); const [forceImg, setForceImg] = useState(false);
const [imgFallback, setImageFallback] = useState(false); const [imgFallback, setImageFallback] = useState(false);
const iFrame = useRef<HTMLIFrameElement>(null); const iFrame = useRef<HTMLIFrameElement>(null);
function handleResize() { function handleResize() {
if (window.innerWidth < 1000) { if (window.innerWidth < 1000) {
setForceImg(true); setForceImg(true);
} else if (forceImg) { } else if (forceImg) {
setForceImg(false); setForceImg(false);
} }
} }
useEffect(() => { useEffect(() => {
window.addEventListener('resize', handleResize); window.addEventListener('resize', handleResize);
handleResize(); handleResize();
return () => window.removeEventListener('resize', handleResize); return () => window.removeEventListener('resize', handleResize);
}, []); }, []);
function handleEvent(e: any) { function handleEvent(e: any) {
if (e.data === 'spacedrive-hello') { if (e.data === 'spacedrive-hello') {
if (!iFrameAppReady) setIframeAppReady(true); if (!iFrameAppReady) setIframeAppReady(true);
} }
} }
// after five minutes kill the live demo // after five minutes kill the live demo
useEffect(() => { useEffect(() => {
const timer = setTimeout(() => { const timer = setTimeout(() => {
setIframeAppReady(false); setIframeAppReady(false);
}, 300000); }, 300000);
return () => clearTimeout(timer); return () => clearTimeout(timer);
}, []); }, []);
useEffect(() => { useEffect(() => {
window.addEventListener('message', handleEvent, false); window.addEventListener('message', handleEvent, false);
setShowApp(true); setShowApp(true);
return () => window.removeEventListener('message', handleEvent); return () => window.removeEventListener('message', handleEvent);
}, []); }, []);
useEffect(() => { useEffect(() => {
setTimeout(() => { setTimeout(() => {
if (!iFrameAppReady) setImageFallback(true); if (!iFrameAppReady) setImageFallback(true);
}, 1500); }, 1500);
}, []); }, []);
const renderImage = (imgFallback && !iFrameAppReady) || forceImg; const renderImage = (imgFallback && !iFrameAppReady) || forceImg;
const renderBloom = renderImage || iFrameAppReady; const renderBloom = renderImage || iFrameAppReady;
return ( return (
<div className="w-screen"> <div className="w-screen">
{renderBloom && ( {renderBloom && (
<div className="relative max-w-full sm:w-full sm:max-w-[1200px] mx-auto"> <div className="relative max-w-full sm:w-full sm:max-w-[1200px] mx-auto">
<div className="absolute w-full overflow-visible top-[100px] h-32"> <div className="absolute w-full overflow-visible top-[100px] h-32">
<div className="left-0 mt-22 bloom bloom-one" /> <div className="left-0 mt-22 bloom bloom-one" />
<div className="left-[34%] -mt-32 bloom bloom-three " /> <div className="left-[34%] -mt-32 bloom bloom-three " />
<div className="right-0 invisible sm:visible bloom bloom-two" /> <div className="right-0 invisible sm:visible bloom bloom-two" />
</div> </div>
</div> </div>
)} )}
<div className="relative z-30 h-[228px] px-5 sm:h-[428px] md:h-[428px] lg:h-[628px] mt-8 sm:mt-16"> <div className="relative z-30 h-[228px] px-5 sm:h-[428px] md:h-[428px] lg:h-[628px] mt-8 sm:mt-16">
<div <div
className={clsx( className={clsx(
'relative h-full m-auto border rounded-lg max-w-7xl transition-opacity bg-gray-850 border-gray-550 opacity-0', 'relative h-full m-auto border rounded-lg max-w-7xl transition-opacity bg-gray-850 border-gray-550 opacity-0',
renderBloom && '!opacity-100', renderBloom && '!opacity-100',
renderImage && 'bg-transparent border-none' renderImage && 'bg-transparent border-none'
)} )}
> >
{showApp && !forceImg && ( {showApp && !forceImg && (
<iframe <iframe
ref={iFrame} ref={iFrame}
referrerPolicy="origin-when-cross-origin" referrerPolicy="origin-when-cross-origin"
className={clsx( className={clsx(
'w-full h-full z-30 rounded-lg shadow-iframe inset-center bg-gray-850', 'w-full h-full z-30 rounded-lg shadow-iframe inset-center bg-gray-850',
iFrameAppReady ? 'fade-in-app-embed opacity-100' : 'opacity-0 -ml-[10000px]' iFrameAppReady ? 'fade-in-app-embed opacity-100' : 'opacity-0 -ml-[10000px]'
)} )}
src={`${ src={`${
import.meta.env.VITE_SDWEB_BASE_URL || 'http://localhost:8002' import.meta.env.VITE_SDWEB_BASE_URL || 'http://localhost:8002'
}?library_id=9068c6ec-cf90-451b-bb30-4174781e7bc6`} }?library_id=9068c6ec-cf90-451b-bb30-4174781e7bc6`}
/> />
)} )}
{renderImage && <div className="z-40 h-full fade-in-app-embed landing-img " />} {renderImage && <div className="z-40 h-full fade-in-app-embed landing-img " />}
</div> </div>
</div> </div>
</div> </div>
); );
} }

View file

@ -3,70 +3,70 @@ import Particles from 'react-tsparticles';
import { loadFull } from 'tsparticles'; import { loadFull } from 'tsparticles';
export const Bubbles = () => { export const Bubbles = () => {
const particlesInit = async (main: any) => { const particlesInit = async (main: any) => {
console.log(main); console.log(main);
await loadFull(main); await loadFull(main);
}; };
const particlesLoaded = (container: any) => { const particlesLoaded = (container: any) => {
console.log(container); console.log(container);
}; };
return ( return (
//@ts-ignore //@ts-ignore
<Particles <Particles
id="tsparticles" id="tsparticles"
className="absolute z-0" className="absolute z-0"
init={particlesInit} init={particlesInit}
//@ts-ignore //@ts-ignore
loaded={particlesLoaded} loaded={particlesLoaded}
options={{ options={{
fpsLimit: 120, fpsLimit: 120,
interactivity: { interactivity: {
events: { events: {
onClick: { onClick: {
enable: true, enable: true,
mode: 'push' mode: 'push'
}, },
resize: true resize: true
} }
}, },
particles: { particles: {
color: { color: {
value: '#ffffff' value: '#ffffff'
}, },
collisions: { collisions: {
enable: true enable: true
}, },
move: { move: {
direction: 'top', direction: 'top',
enable: true, enable: true,
outModes: { outModes: {
default: 'destroy' default: 'destroy'
}, },
random: false, random: false,
speed: 0.2, speed: 0.2,
straight: true straight: true
}, },
number: { number: {
density: { density: {
enable: true, enable: true,
area: 900 area: 900
}, },
value: 100 value: 100
}, },
opacity: { opacity: {
value: 0.1 value: 0.1
}, },
shape: { shape: {
type: 'circle' type: 'circle'
}, },
size: { size: {
value: { min: 0.5, max: 3 } value: { min: 0.5, max: 3 }
} }
}, },
detectRetina: true detectRetina: true
}} }}
/> />
); );
}; };

View file

@ -1,103 +1,103 @@
import React from 'react'; import React from 'react';
import { ReactComponent as AppLogo } from '../assets/app-logo.svg'; import { ReactComponent as AppLogo } from '../assets/app-logo.svg';
import { import {
Twitter, Twitter,
Discord, Discord,
Instagram, Instagram,
Github, Github,
Opencollective, Opencollective,
Twitch Twitch
} from '@icons-pack/react-simple-icons'; } from '@icons-pack/react-simple-icons';
function FooterLink(props: { children: string | JSX.Element; link: string }) { function FooterLink(props: { children: string | JSX.Element; link: string }) {
return ( return (
<a href={props.link} target="_blank" className="text-gray-300 hover:text-white"> <a href={props.link} target="_blank" className="text-gray-300 hover:text-white">
{props.children} {props.children}
</a> </a>
); );
} }
export function Footer() { export function Footer() {
return ( return (
<footer id="footer" className="z-50 w-screen pt-3 border-t border-gray-550 bg-gray-850"> <footer id="footer" className="z-50 w-screen pt-3 border-t border-gray-550 bg-gray-850">
<div className="container grid grid-cols-2 gap-6 p-8 pt-10 pb-20 m-auto text-white min-h-64 sm:grid-cols-2 lg:grid-cols-6"> <div className="container grid grid-cols-2 gap-6 p-8 pt-10 pb-20 m-auto text-white min-h-64 sm:grid-cols-2 lg:grid-cols-6">
<div className="col-span-2"> <div className="col-span-2">
<AppLogo className="w-10 h-10 mb-5" /> <AppLogo className="w-10 h-10 mb-5" />
<h3 className="mb-1 text-xl font-bold">Spacedrive</h3> <h3 className="mb-1 text-xl font-bold">Spacedrive</h3>
<p className="text-sm text-gray-350">&copy; Copyright 2022 Jamie Pine</p> <p className="text-sm text-gray-350">&copy; Copyright 2022 Jamie Pine</p>
<div className="flex flex-row mt-6 mb-10 space-x-3"> <div className="flex flex-row mt-6 mb-10 space-x-3">
<FooterLink link="https://twitter.com/spacedriveapp"> <FooterLink link="https://twitter.com/spacedriveapp">
<Twitter /> <Twitter />
</FooterLink> </FooterLink>
<FooterLink link="https://discord.gg/gTaF2Z44f5"> <FooterLink link="https://discord.gg/gTaF2Z44f5">
<Discord /> <Discord />
</FooterLink> </FooterLink>
<FooterLink link="https://instagram.com/spacedriveapp"> <FooterLink link="https://instagram.com/spacedriveapp">
<Instagram /> <Instagram />
</FooterLink> </FooterLink>
<FooterLink link="https://github.com/spacedriveapp"> <FooterLink link="https://github.com/spacedriveapp">
<Github /> <Github />
</FooterLink> </FooterLink>
<FooterLink link="https://opencollective.com/spacedrive"> <FooterLink link="https://opencollective.com/spacedrive">
<Opencollective /> <Opencollective />
</FooterLink> </FooterLink>
<FooterLink link="https://twitch.tv/jamiepinelive"> <FooterLink link="https://twitch.tv/jamiepinelive">
<Twitch /> <Twitch />
</FooterLink> </FooterLink>
</div> </div>
</div> </div>
<div className="flex flex-col col-span-1 space-y-2"> <div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">About</h3> <h3 className="mb-1 text-xs font-bold uppercase ">About</h3>
<FooterLink link="/team">Team</FooterLink> <FooterLink link="/team">Team</FooterLink>
<FooterLink link="/faq">FAQ</FooterLink> <FooterLink link="/faq">FAQ</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive#motivation"> <FooterLink link="https://github.com/spacedriveapp/spacedrive#motivation">
Mission Mission
</FooterLink> </FooterLink>
<FooterLink link="/changelog">Changelog</FooterLink> <FooterLink link="/changelog">Changelog</FooterLink>
<div className="opacity-50 pointer-events-none"> <div className="opacity-50 pointer-events-none">
<FooterLink link="#">Blog</FooterLink> <FooterLink link="#">Blog</FooterLink>
</div> </div>
</div> </div>
<div className="flex flex-col col-span-1 space-y-2 pointer-events-none"> <div className="flex flex-col col-span-1 space-y-2 pointer-events-none">
<h3 className="mb-1 text-xs font-bold uppercase">Downloads</h3> <h3 className="mb-1 text-xs font-bold uppercase">Downloads</h3>
<div className="flex flex-col col-span-1 space-y-2 opacity-50"> <div className="flex flex-col col-span-1 space-y-2 opacity-50">
<FooterLink link="#">macOS</FooterLink> <FooterLink link="#">macOS</FooterLink>
<FooterLink link="#">Windows</FooterLink> <FooterLink link="#">Windows</FooterLink>
<FooterLink link="#">Linux</FooterLink> <FooterLink link="#">Linux</FooterLink>
</div> </div>
</div> </div>
<div className="flex flex-col col-span-1 space-y-2"> <div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Developers</h3> <h3 className="mb-1 text-xs font-bold uppercase ">Developers</h3>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs"> <FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs">
Documentation Documentation
</FooterLink> </FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs/developer/contributing.md"> <FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs/developer/contributing.md">
Contribute Contribute
</FooterLink> </FooterLink>
<div className="opacity-50 pointer-events-none"> <div className="opacity-50 pointer-events-none">
<FooterLink link="#">Extensions</FooterLink> <FooterLink link="#">Extensions</FooterLink>
</div> </div>
<div className="opacity-50 pointer-events-none"> <div className="opacity-50 pointer-events-none">
<FooterLink link="#">Self Host</FooterLink> <FooterLink link="#">Self Host</FooterLink>
</div> </div>
</div> </div>
<div className="flex flex-col col-span-1 space-y-2"> <div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Org</h3> <h3 className="mb-1 text-xs font-bold uppercase ">Org</h3>
<FooterLink link="https://opencollective.com/spacedrive">Open Collective</FooterLink> <FooterLink link="https://opencollective.com/spacedrive">Open Collective</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/blob/main/LICENSE"> <FooterLink link="https://github.com/spacedriveapp/spacedrive/blob/main/LICENSE">
License License
</FooterLink> </FooterLink>
<div className="opacity-50 pointer-events-none"> <div className="opacity-50 pointer-events-none">
<FooterLink link="#">Privacy</FooterLink> <FooterLink link="#">Privacy</FooterLink>
</div> </div>
<div className="opacity-50 pointer-events-none"> <div className="opacity-50 pointer-events-none">
<FooterLink link="#">Terms</FooterLink> <FooterLink link="#">Terms</FooterLink>
</div> </div>
</div> </div>
</div> </div>
</footer> </footer>
); );
} }

View file

@ -5,20 +5,20 @@ import 'prismjs/components/prism-rust';
import '../atom-one.css'; import '../atom-one.css';
interface MarkdownPageProps { interface MarkdownPageProps {
children: React.ReactNode; children: React.ReactNode;
} }
function MarkdownPage(props: MarkdownPageProps) { function MarkdownPage(props: MarkdownPageProps) {
useEffect(() => { useEffect(() => {
Prism.highlightAll(); Prism.highlightAll();
}, []); }, []);
return ( return (
<div className="container max-w-4xl p-4 mt-32 mb-20"> <div className="container max-w-4xl p-4 mt-32 mb-20">
<article id="content" className="m-auto prose lg:prose-xs dark:prose-invert"> <article id="content" className="m-auto prose lg:prose-xs dark:prose-invert">
{props.children} {props.children}
</article> </article>
</div> </div>
); );
} }
export default MarkdownPage; export default MarkdownPage;

View file

@ -5,125 +5,125 @@ import { Link, List, MapPin, Question } from 'phosphor-react';
import { ReactComponent as AppLogo } from '../assets/app-logo.svg'; import { ReactComponent as AppLogo } from '../assets/app-logo.svg';
import { Discord, Github } from '@icons-pack/react-simple-icons'; import { Discord, Github } from '@icons-pack/react-simple-icons';
import { import {
ClockIcon, ClockIcon,
CogIcon, CogIcon,
HeartIcon, HeartIcon,
LockClosedIcon, LockClosedIcon,
MapIcon, MapIcon,
QuestionMarkCircleIcon QuestionMarkCircleIcon
} from '@heroicons/react/solid'; } from '@heroicons/react/solid';
function NavLink(props: { link?: string; children: string }) { function NavLink(props: { link?: string; children: string }) {
return ( return (
<a <a
href={props.link ?? '#'} href={props.link ?? '#'}
target={props.link?.startsWith('http') ? '_blank' : undefined} target={props.link?.startsWith('http') ? '_blank' : undefined}
className="p-4 text-gray-300 no-underline transition cursor-pointer hover:text-gray-50" className="p-4 text-gray-300 no-underline transition cursor-pointer hover:text-gray-50"
> >
{props.children} {props.children}
</a> </a>
); );
} }
export default function NavBar() { export default function NavBar() {
const [isAtTop, setIsAtTop] = useState(window.pageYOffset < 20); const [isAtTop, setIsAtTop] = useState(window.pageYOffset < 20);
function onScroll(event: Event) { function onScroll(event: Event) {
if (window.pageYOffset < 20) setIsAtTop(true); if (window.pageYOffset < 20) setIsAtTop(true);
else if (isAtTop) setIsAtTop(false); else if (isAtTop) setIsAtTop(false);
} }
useEffect(() => { useEffect(() => {
window.addEventListener('scroll', onScroll); window.addEventListener('scroll', onScroll);
return () => window.removeEventListener('scroll', onScroll); return () => window.removeEventListener('scroll', onScroll);
}, []); }, []);
return ( return (
<div <div
className={clsx( className={clsx(
'fixed transition z-40 w-full h-16 border-b ', 'fixed transition z-40 w-full h-16 border-b ',
isAtTop isAtTop
? 'bg-transparent border-transparent' ? 'bg-transparent border-transparent'
: 'border-gray-550 bg-gray-750 bg-opacity-80 backdrop-blur' : 'border-gray-550 bg-gray-750 bg-opacity-80 backdrop-blur'
)} )}
> >
<div className="container relative flex items-center h-full px-5 m-auto"> <div className="container relative flex items-center h-full px-5 m-auto">
<a href="/" className="absolute flex flex-row items-center"> <a href="/" className="absolute flex flex-row items-center">
<AppLogo className="z-30 w-8 h-8 mr-3" /> <AppLogo className="z-30 w-8 h-8 mr-3" />
<h3 className="text-xl font-bold text-white"> <h3 className="text-xl font-bold text-white">
Spacedrive Spacedrive
{/* <span className="ml-2 text-xs text-gray-400 uppercase">ALPHA</span> */} {/* <span className="ml-2 text-xs text-gray-400 uppercase">ALPHA</span> */}
</h3> </h3>
</a> </a>
<div className="hidden m-auto space-x-4 text-white lg:block "> <div className="hidden m-auto space-x-4 text-white lg:block ">
<NavLink link="/roadmap">Roadmap</NavLink> <NavLink link="/roadmap">Roadmap</NavLink>
<NavLink link="/faq">FAQ</NavLink> <NavLink link="/faq">FAQ</NavLink>
<NavLink link="/team">Team</NavLink> <NavLink link="/team">Team</NavLink>
{/* <NavLink link="/change-log">Changelog</NavLink> {/* <NavLink link="/change-log">Changelog</NavLink>
<NavLink link="/privacy">Privacy</NavLink> */} <NavLink link="/privacy">Privacy</NavLink> */}
<NavLink link="https://opencollective.com/spacedrive">Sponsor us</NavLink> <NavLink link="https://opencollective.com/spacedrive">Sponsor us</NavLink>
</div> </div>
<Dropdown <Dropdown
className="absolute block h-6 w-44 top-2 right-4 lg:hidden" className="absolute block h-6 w-44 top-2 right-4 lg:hidden"
items={[ items={[
[ [
{ {
name: 'Repository', name: 'Repository',
icon: Github, icon: Github,
onPress: () => onPress: () =>
(window.location.href = 'https://github.com/spacedriveapp/spacedrive') (window.location.href = 'https://github.com/spacedriveapp/spacedrive')
}, },
{ {
name: 'Join Discord', name: 'Join Discord',
icon: Discord, icon: Discord,
onPress: () => (window.location.href = 'https://discord.gg/gTaF2Z44f5') onPress: () => (window.location.href = 'https://discord.gg/gTaF2Z44f5')
} }
], ],
[ [
{ {
name: 'Roadmap', name: 'Roadmap',
icon: MapIcon, icon: MapIcon,
onPress: () => (window.location.href = '/roadmap'), onPress: () => (window.location.href = '/roadmap'),
selected: window.location.href.includes('/roadmap') selected: window.location.href.includes('/roadmap')
}, },
{ {
name: 'FAQ', name: 'FAQ',
icon: QuestionMarkCircleIcon, icon: QuestionMarkCircleIcon,
onPress: () => (window.location.href = '/faq'), onPress: () => (window.location.href = '/faq'),
selected: window.location.href.includes('/faq') selected: window.location.href.includes('/faq')
}, },
// { // {
// name: 'Changelog', // name: 'Changelog',
// icon: ClockIcon, // icon: ClockIcon,
// onPress: () => (window.location.href = '/changelog'), // onPress: () => (window.location.href = '/changelog'),
// selected: window.location.href.includes('/changelog') // selected: window.location.href.includes('/changelog')
// }, // },
// { // {
// name: 'Privacy', // name: 'Privacy',
// icon: LockClosedIcon, // icon: LockClosedIcon,
// onPress: () => (window.location.href = '/privacy'), // onPress: () => (window.location.href = '/privacy'),
// selected: window.location.href.includes('/privacy') // selected: window.location.href.includes('/privacy')
// }, // },
{ {
name: 'Sponsor us', name: 'Sponsor us',
icon: HeartIcon, icon: HeartIcon,
onPress: () => (window.location.href = 'https://opencollective.com/spacedrive') onPress: () => (window.location.href = 'https://opencollective.com/spacedrive')
} }
] ]
]} ]}
buttonIcon={<List weight="bold" className="w-6 h-6" />} buttonIcon={<List weight="bold" className="w-6 h-6" />}
buttonProps={{ className: '!p-1 ml-[140px]' }} buttonProps={{ className: '!p-1 ml-[140px]' }}
/> />
<div className="absolute flex-row hidden space-x-5 right-3 lg:flex"> <div className="absolute flex-row hidden space-x-5 right-3 lg:flex">
<a href="https://discord.gg/gTaF2Z44f5" target="_blank"> <a href="https://discord.gg/gTaF2Z44f5" target="_blank">
<Discord className="text-white" /> <Discord className="text-white" />
</a> </a>
<a href="https://github.com/spacedriveapp/spacedrive" target="_blank"> <a href="https://github.com/spacedriveapp/spacedrive" target="_blank">
<Github className="text-white" /> <Github className="text-white" />
</a> </a>
</div> </div>
</div> </div>
</div> </div>
); );
} }

View file

@ -11,33 +11,33 @@ import './style.scss';
import { Button } from '@sd/ui'; import { Button } from '@sd/ui';
function App() { function App() {
return ( return (
<Suspense fallback={<p>Loading...</p>}> <Suspense fallback={<p>Loading...</p>}>
<div className="dark:bg-black dark:text-white "> <div className="dark:bg-black dark:text-white ">
<Button <Button
href="#content" href="#content"
className="fixed left-0 z-50 mt-3 ml-8 duration-200 -translate-y-16 cursor-pointer focus:translate-y-0" className="fixed left-0 z-50 mt-3 ml-8 duration-200 -translate-y-16 cursor-pointer focus:translate-y-0"
variant="gray" variant="gray"
> >
Skip to content Skip to content
</Button> </Button>
<NavBar /> <NavBar />
<div className="container z-10 flex flex-col items-center px-4 mx-auto overflow-x-hidden sm:overflow-x-visible "> <div className="container z-10 flex flex-col items-center px-4 mx-auto overflow-x-hidden sm:overflow-x-visible ">
{useRoutes(routes)} {useRoutes(routes)}
<Footer /> <Footer />
</div> </div>
</div> </div>
</Suspense> </Suspense>
); );
} }
const root = createRoot(document.getElementById('root')!); const root = createRoot(document.getElementById('root')!);
root.render( root.render(
<React.StrictMode> <React.StrictMode>
<Router> <Router>
<App /> <App />
</Router> </Router>
</React.StrictMode> </React.StrictMode>
); );

View file

@ -5,31 +5,31 @@ import { Button } from '@sd/ui';
import { SmileyXEyes } from 'phosphor-react'; import { SmileyXEyes } from 'phosphor-react';
function Page() { function Page() {
return ( return (
<Markdown> <Markdown>
<Helmet> <Helmet>
<title>Not Found - Spacedrive</title> <title>Not Found - Spacedrive</title>
</Helmet> </Helmet>
<div className="flex flex-col items-center"> <div className="flex flex-col items-center">
<SmileyXEyes className="mb-3 w-44 h-44" /> <SmileyXEyes className="mb-3 w-44 h-44" />
<h1 className="mb-2 text-center">In the quantum realm this page potentially exists.</h1> <h1 className="mb-2 text-center">In the quantum realm this page potentially exists.</h1>
<p>In other words, thats a 404.</p> <p>In other words, thats a 404.</p>
<div className="flex flex-wrap justify-center"> <div className="flex flex-wrap justify-center">
<Button <Button
href={document.referrer || 'javascript:history.back()'} href={document.referrer || 'javascript:history.back()'}
className="mt-2 mr-3 cursor-pointer " className="mt-2 mr-3 cursor-pointer "
variant="gray" variant="gray"
> >
Back Back
</Button> </Button>
<Button href="/" className="mt-2 cursor-pointer" variant="primary"> <Button href="/" className="mt-2 cursor-pointer" variant="primary">
Discover Spacedrive Discover Spacedrive
</Button> </Button>
</div> </div>
</div> </div>
<div className="h-96" /> <div className="h-96" />
</Markdown> </Markdown>
); );
} }
export default Page; export default Page;

View file

@ -4,15 +4,15 @@ import { ReactComponent as Content } from '~/docs/changelog/index.md';
import { Helmet } from 'react-helmet'; import { Helmet } from 'react-helmet';
function Page() { function Page() {
return ( return (
<Markdown> <Markdown>
<Helmet> <Helmet>
<title>Changelog - Spacedrive</title> <title>Changelog - Spacedrive</title>
<meta name="description" content="Updates and release builds of the Spacedrive app." /> <meta name="description" content="Updates and release builds of the Spacedrive app." />
</Helmet> </Helmet>
<Content /> <Content />
</Markdown> </Markdown>
); );
} }
export default Page; export default Page;

View file

@ -4,18 +4,18 @@ import { ReactComponent as Content } from '~/docs/architecture/distributed-data-
import { Helmet } from 'react-helmet'; import { Helmet } from 'react-helmet';
function Page() { function Page() {
return ( return (
<Markdown> <Markdown>
<Helmet> <Helmet>
<title>Distributed Data Sync - Spacedrive Documentation</title> <title>Distributed Data Sync - Spacedrive Documentation</title>
<meta <meta
name="description" name="description"
content="How we handle data sync with SQLite in a distributed network." content="How we handle data sync with SQLite in a distributed network."
/> />
</Helmet> </Helmet>
<Content /> <Content />
</Markdown> </Markdown>
); );
} }
export default Page; export default Page;

View file

@ -4,15 +4,15 @@ import { ReactComponent as Content } from '~/docs/product/faq.md';
import { Helmet } from 'react-helmet'; import { Helmet } from 'react-helmet';
function Page() { function Page() {
return ( return (
<Markdown> <Markdown>
<Helmet> <Helmet>
<title>FAQ - Spacedrive</title> <title>FAQ - Spacedrive</title>
<meta name="description" content="Updates and release builds of the Spacedrive app." /> <meta name="description" content="Updates and release builds of the Spacedrive app." />
</Helmet> </Helmet>
<Content /> <Content />
</Markdown> </Markdown>
); );
} }
export default Page; export default Page;

View file

@ -6,98 +6,98 @@ import clsx from 'clsx';
import AppEmbed from '../components/AppEmbed'; import AppEmbed from '../components/AppEmbed';
interface SectionProps { interface SectionProps {
orientation: 'left' | 'right'; orientation: 'left' | 'right';
heading?: string; heading?: string;
description?: string | React.ReactNode; description?: string | React.ReactNode;
children?: React.ReactNode; children?: React.ReactNode;
className?: string; className?: string;
} }
function Section(props: SectionProps = { orientation: 'left' }) { function Section(props: SectionProps = { orientation: 'left' }) {
let info = ( let info = (
<div className="p-10"> <div className="p-10">
{props.heading && <h1 className="text-4xl font-black">{props.heading}</h1>} {props.heading && <h1 className="text-4xl font-black">{props.heading}</h1>}
{props.description && <p className="mt-5 text-xl text-gray-450">{props.description}</p>} {props.description && <p className="mt-5 text-xl text-gray-450">{props.description}</p>}
</div> </div>
); );
return ( return (
<div className={clsx('grid grid-cols-1 my-10 lg:grid-cols-2 lg:my-44', props.className)}> <div className={clsx('grid grid-cols-1 my-10 lg:grid-cols-2 lg:my-44', props.className)}>
{props.orientation === 'right' ? ( {props.orientation === 'right' ? (
<> <>
{info} {info}
{props.children} {props.children}
</> </>
) : ( ) : (
<> <>
{props.children} {props.children}
{info} {info}
</> </>
)} )}
</div> </div>
); );
} }
function Page() { function Page() {
return ( return (
<> <>
<div className="mt-28 lg:mt-36" /> <div className="mt-28 lg:mt-36" />
<h1 <h1
id="content" id="content"
className="z-30 px-2 mb-3 text-4xl font-black leading-tight text-center md:text-6xl" className="z-30 px-2 mb-3 text-4xl font-black leading-tight text-center md:text-6xl"
> >
A file explorer from the future. A file explorer from the future.
</h1> </h1>
<p className="z-30 max-w-4xl mt-1 mb-8 text-center text-md lg:text-lg leading-2 lg:leading-8 text-gray-450"> <p className="z-30 max-w-4xl mt-1 mb-8 text-center text-md lg:text-lg leading-2 lg:leading-8 text-gray-450">
Combine your drives and clouds into one database that you can organize and explore from any Combine your drives and clouds into one database that you can organize and explore from any
device. device.
<br /> <br />
<span className="hidden sm:block"> <span className="hidden sm:block">
Designed for creators, hoarders and the painfully disorganized. Designed for creators, hoarders and the painfully disorganized.
</span> </span>
</p> </p>
<div className="flex flex-row space-x-4 delay-3 "> <div className="flex flex-row space-x-4 delay-3 ">
<Button <Button
href="https://github.com/spacedriveapp/spacedrive" href="https://github.com/spacedriveapp/spacedrive"
target="_blank" target="_blank"
className="z-30 cursor-pointer" className="z-30 cursor-pointer"
variant="gray" variant="gray"
> >
<Github className="inline w-5 h-5 -mt-[4px] -ml-1 mr-2" fill="white" /> <Github className="inline w-5 h-5 -mt-[4px] -ml-1 mr-2" fill="white" />
Star on GitHub Star on GitHub
</Button> </Button>
</div> </div>
<p className="z-30 px-6 mt-3 text-sm text-center text-gray-450 "> <p className="z-30 px-6 mt-3 text-sm text-center text-gray-450 ">
Coming soon on macOS, Windows and Linux. Coming soon on macOS, Windows and Linux.
<br /> <br />
Shortly after to iOS & Android. Shortly after to iOS & Android.
</p> </p>
<AppEmbed /> <AppEmbed />
<Section <Section
orientation="right" orientation="right"
heading="Never leave a file behind." heading="Never leave a file behind."
className="z-30" className="z-30"
description={ description={
<> <>
Spacedrive accounts for every file you own, uniquely fingerprinting and extracting Spacedrive accounts for every file you own, uniquely fingerprinting and extracting
metadata so you can sort, tag, backup and share files without limitations of any one metadata so you can sort, tag, backup and share files without limitations of any one
cloud provider. cloud provider.
<br /> <br />
<br /> <br />
<a <a
className="transition text-primary-600 hover:text-primary-500" className="transition text-primary-600 hover:text-primary-500"
href="https://github.com/spacedriveapp" href="https://github.com/spacedriveapp"
target="_blank" target="_blank"
> >
Find out more Find out more
</a> </a>
</> </>
} }
/> />
<Bubbles /> <Bubbles />
</> </>
); );
} }
export default Page; export default Page;

View file

@ -5,18 +5,18 @@ import { Helmet } from 'react-helmet';
import { ReactComponent as Folder } from '../../../../packages/interface/src/assets/svg/folder.svg'; import { ReactComponent as Folder } from '../../../../packages/interface/src/assets/svg/folder.svg';
function Page() { function Page() {
return ( return (
<Markdown> <Markdown>
<Helmet> <Helmet>
<title>Roadmap - Spacedrive</title> <title>Roadmap - Spacedrive</title>
<meta name="description" content="What can Spacedrive do?" /> <meta name="description" content="What can Spacedrive do?" />
</Helmet> </Helmet>
<div className="w-24 mb-10"> <div className="w-24 mb-10">
<Folder className="" /> <Folder className="" />
</div> </div>
<Content /> <Content />
</Markdown> </Markdown>
); );
} }
export default Page; export default Page;

View file

@ -4,17 +4,17 @@ import { ReactComponent as Content } from '~/docs/product/credits.md';
import { Helmet } from 'react-helmet'; import { Helmet } from 'react-helmet';
function Page() { function Page() {
return ( return (
<Markdown> <Markdown>
<Helmet> <Helmet>
<title>Our Team - Spacedrive</title> <title>Our Team - Spacedrive</title>
<meta name="description" content="Who's behind Spacedrive?" /> <meta name="description" content="Who's behind Spacedrive?" />
</Helmet> </Helmet>
<div className="team-page"> <div className="team-page">
<Content /> <Content />
</div> </div>
</Markdown> </Markdown>
); );
} }
export default Page; export default Page;

View file

@ -1,88 +1,84 @@
html { html {
@apply bg-black; @apply bg-black;
-ms-overflow-style: none; /* IE and Edge */ -ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */ scrollbar-width: none; /* Firefox */
&::-webkit-scrollbar { &::-webkit-scrollbar {
display: none; display: none;
} }
} }
.landing-img { .landing-img {
background-image: url('/app.png'); background-image: url('/app.png');
background-size: contain; background-size: contain;
background-repeat: no-repeat; background-repeat: no-repeat;
background-position: center top; background-position: center top;
} }
.fade-in-app-embed { .fade-in-app-embed {
animation: fadeInUp 3s; animation: fadeInUp 3s;
-webkit-animation: fadeInUp 3s; -webkit-animation: fadeInUp 3s;
-moz-animation: fadeInUp 3s; -moz-animation: fadeInUp 3s;
-o-animation: fadeInUp 3s; -o-animation: fadeInUp 3s;
-ms-animation: fadeInUp 3s; -ms-animation: fadeInUp 3s;
} }
.fade-in-heading { .fade-in-heading {
animation: fadeInUp 1s; animation: fadeInUp 1s;
} }
@keyframes fadeInUp { @keyframes fadeInUp {
0% { 0% {
opacity:0; opacity: 0;
// transform: translateY(10px); // transform: translateY(10px);
} }
100% { 100% {
opacity:1; opacity: 1;
// transform: translateY(0px); // transform: translateY(0px);
} }
} }
.bloom { .bloom {
@apply absolute w-96 h-96; @apply absolute w-96 h-96;
will-change: opacity; will-change: opacity;
opacity: 0; opacity: 0;
filter: blur(160px); filter: blur(160px);
border-radius: 50%; border-radius: 50%;
transform: scale(1.5); transform: scale(1.5);
animation-name: bloomBurst; animation-name: bloomBurst;
animation-duration: 1s; animation-duration: 1s;
animation-timing-function: ease-in-out; animation-timing-function: ease-in-out;
animation-fill-mode: forwards; animation-fill-mode: forwards;
animation-iteration-count: 1; animation-iteration-count: 1;
animation-direction: forwards; animation-direction: forwards;
&.bloom-one { &.bloom-one {
background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca); background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca);
animation-delay: 500ms; animation-delay: 500ms;
} }
&.bloom-two { &.bloom-two {
background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1D054B); background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1d054b);
animation-delay: 300ms; animation-delay: 300ms;
} }
&.bloom-three { &.bloom-three {
background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1D054B); background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1d054b);
animation-delay: 1100ms; animation-delay: 1100ms;
} }
} }
@keyframes bloomBurst { @keyframes bloomBurst {
from { from {
opacity: 0; opacity: 0;
} }
40% { 40% {
opacity: 1; opacity: 1;
} }
to { to {
opacity: 0.6; opacity: 0.6;
} }
} }
.shadow-iframe { .shadow-iframe {
box-shadow: 0px 0px 100px 0px rgba(0,0,0,0.5); box-shadow: 0px 0px 100px 0px rgba(0, 0, 0, 0.5);
} }
// Gradient colors // Gradient colors
@ -90,4 +86,4 @@ html {
// #7A1D77 // #7A1D77
// #8E4CAB // #8E4CAB
// #1D054B // #1D054B
// #9A3F8C // #9A3F8C

View file

@ -2,24 +2,24 @@
/// <reference types="vite-plugin-pages/client-react" /> /// <reference types="vite-plugin-pages/client-react" />
interface ImportMetaEnv { interface ImportMetaEnv {
readonly VITE_SDWEB_BASE_URL: string; readonly VITE_SDWEB_BASE_URL: string;
} }
interface ImportMeta { interface ImportMeta {
readonly env: ImportMetaEnv; readonly env: ImportMetaEnv;
} }
declare module '*.md' { declare module '*.md' {
// "unknown" would be more detailed depends on how you structure frontmatter // "unknown" would be more detailed depends on how you structure frontmatter
const attributes: Record<string, unknown>; const attributes: Record<string, unknown>;
// When "Mode.TOC" is requested // When "Mode.TOC" is requested
const toc: { level: string; content: string }[]; const toc: { level: string; content: string }[];
// When "Mode.HTML" is requested // When "Mode.HTML" is requested
const html: string; const html: string;
// When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }> // When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
import React from 'react'; import React from 'react';
const ReactComponent: React.VFC; const ReactComponent: React.VFC;
} }

View file

@ -1,5 +1,5 @@
{ {
"extends": "../../packages/config/interface.tsconfig.json", "extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {}, "compilerOptions": {},
"include": ["src"] "include": ["src"]
} }

View file

@ -1,3 +1,3 @@
{ {
"rewrites": [{ "source": "/(.*)", "destination": "/" }] "rewrites": [{ "source": "/(.*)", "destination": "/" }]
} }

View file

@ -6,23 +6,23 @@ import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/ // https://vitejs.dev/config/
export default defineConfig({ export default defineConfig({
// @ts-ignore // @ts-ignore
plugins: [ plugins: [
react(), react(),
pages({ pages({
dirs: 'src/pages' dirs: 'src/pages'
// onRoutesGenerated: (routes) => generateSitemap({ routes }) // onRoutesGenerated: (routes) => generateSitemap({ routes })
}), }),
svg(), svg(),
md({ mode: [Mode.REACT] }) md({ mode: [Mode.REACT] })
], ],
resolve: { resolve: {
alias: { alias: {
'~/docs': __dirname + '../../../docs' '~/docs': __dirname + '../../../docs'
} }
}, },
server: { server: {
port: 8003 port: 8003
}, },
publicDir: 'public' publicDir: 'public'
}); });

View file

@ -1,6 +1,6 @@
{ {
"name": "mobile", "name": "mobile",
"version": "0.0.0", "version": "0.0.0",
"main": "index.js", "main": "index.js",
"license": "MIT" "license": "MIT"
} }

View file

@ -1,6 +1,6 @@
{ {
"name": "@sd/server", "name": "@sd/server",
"version": "0.0.0", "version": "0.0.0",
"main": "index.js", "main": "index.js",
"license": "MIT" "license": "MIT"
} }

View file

@ -1,31 +1,31 @@
{ {
"name": "@sd/web", "name": "@sd/web",
"private": true, "private": true,
"version": "0.0.0", "version": "0.0.0",
"scripts": { "scripts": {
"dev": "vite", "dev": "vite",
"build": "vite build", "build": "vite build",
"preview": "vite preview" "preview": "vite preview"
}, },
"dependencies": { "dependencies": {
"@fontsource/inter": "^4.5.7", "@fontsource/inter": "^4.5.7",
"@sd/client": "*", "@sd/client": "*",
"@sd/core": "*", "@sd/core": "*",
"@sd/interface": "*", "@sd/interface": "*",
"@sd/ui": "*", "@sd/ui": "*",
"react": "^18.0.0", "react": "^18.0.0",
"react-dom": "^18.0.0" "react-dom": "^18.0.0"
}, },
"devDependencies": { "devDependencies": {
"@types/react": "^18.0.8", "@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0", "@types/react-dom": "^18.0.0",
"@vitejs/plugin-react": "^1.3.1", "@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4", "autoprefixer": "^10.4.4",
"postcss": "^8.4.12", "postcss": "^8.4.12",
"tailwind": "^4.0.0", "tailwind": "^4.0.0",
"typescript": "^4.6.3", "typescript": "^4.6.3",
"vite": "^2.9.5", "vite": "^2.9.5",
"vite-plugin-svgr": "^1.1.0", "vite-plugin-svgr": "^1.1.0",
"vite-plugin-tsconfig-paths": "^1.0.5" "vite-plugin-tsconfig-paths": "^1.0.5"
} }
} }

View file

@ -1,25 +1,25 @@
{ {
"short_name": "Spacedrive", "short_name": "Spacedrive",
"name": "Spacedrive", "name": "Spacedrive",
"icons": [ "icons": [
{ {
"src": "favicon.ico", "src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16", "sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon" "type": "image/x-icon"
}, },
{ {
"src": "logo192.png", "src": "logo192.png",
"type": "image/png", "type": "image/png",
"sizes": "192x192" "sizes": "192x192"
}, },
{ {
"src": "logo512.png", "src": "logo512.png",
"type": "image/png", "type": "image/png",
"sizes": "512x512" "sizes": "512x512"
} }
], ],
"start_url": ".", "start_url": ".",
"display": "standalone", "display": "standalone",
"theme_color": "#000000", "theme_color": "#000000",
"background_color": "#ffffff" "background_color": "#ffffff"
} }

View file

@ -10,83 +10,83 @@ const randomId = () => Math.random().toString(36).slice(2);
// bind state to core via Tauri // bind state to core via Tauri
class Transport extends BaseTransport { class Transport extends BaseTransport {
requestMap = new Map<string, (data: any) => void>(); requestMap = new Map<string, (data: any) => void>();
constructor() { constructor() {
super(); super();
websocket.addEventListener('message', (event) => { websocket.addEventListener('message', (event) => {
if (!event.data) return; if (!event.data) return;
const { id, payload } = JSON.parse(event.data); const { id, payload } = JSON.parse(event.data);
const { type, data } = payload; const { type, data } = payload;
if (type === 'event') { if (type === 'event') {
this.emit('core_event', data); this.emit('core_event', data);
} else if (type === 'query' || type === 'command') { } else if (type === 'query' || type === 'command') {
if (this.requestMap.has(id)) { if (this.requestMap.has(id)) {
this.requestMap.get(id)?.(data); this.requestMap.get(id)?.(data);
this.requestMap.delete(id); this.requestMap.delete(id);
} }
} }
}); });
} }
async query(query: ClientQuery) { async query(query: ClientQuery) {
const id = randomId(); const id = randomId();
let resolve: (data: any) => void; let resolve: (data: any) => void;
const promise = new Promise((res) => { const promise = new Promise((res) => {
resolve = res; resolve = res;
}); });
// @ts-ignore // @ts-ignore
this.requestMap.set(id, resolve); this.requestMap.set(id, resolve);
websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } })); websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
return await promise; return await promise;
} }
async command(command: ClientCommand) { async command(command: ClientCommand) {
const id = randomId(); const id = randomId();
let resolve: (data: any) => void; let resolve: (data: any) => void;
const promise = new Promise((res) => { const promise = new Promise((res) => {
resolve = res; resolve = res;
}); });
// @ts-ignore // @ts-ignore
this.requestMap.set(id, resolve); this.requestMap.set(id, resolve);
websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } })); websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
return await promise; return await promise;
} }
} }
function App() { function App() {
useEffect(() => { useEffect(() => {
window.parent.postMessage('spacedrive-hello', '*'); window.parent.postMessage('spacedrive-hello', '*');
}, []); }, []);
return ( return (
<div className="App"> <div className="App">
{/* <header className="App-header"></header> */} {/* <header className="App-header"></header> */}
<SpacedriveInterface <SpacedriveInterface
demoMode demoMode
useMemoryRouter={true} useMemoryRouter={true}
transport={new Transport()} transport={new Transport()}
platform={'browser'} platform={'browser'}
convertFileSrc={function (url: string): string { convertFileSrc={function (url: string): string {
return url; return url;
}} }}
openDialog={function (options: { openDialog={function (options: {
directory?: boolean | undefined; directory?: boolean | undefined;
}): Promise<string | string[]> { }): Promise<string | string[]> {
return Promise.resolve([]); return Promise.resolve([]);
}} }}
/> />
</div> </div>
); );
} }
export default App; export default App;

View file

@ -1,9 +1,9 @@
/// <reference types="vite/client" /> /// <reference types="vite/client" />
interface ImportMetaEnv { interface ImportMetaEnv {
readonly VITE_SDSERVER_BASE_URL: string; readonly VITE_SDSERVER_BASE_URL: string;
} }
interface ImportMeta { interface ImportMeta {
readonly env: ImportMetaEnv; readonly env: ImportMetaEnv;
} }

View file

@ -1,12 +1,12 @@
<!DOCTYPE html> <!DOCTYPE html>
<html class="dark"> <html class="dark">
<head> <head>
<meta charset="utf-8" /> <meta charset="utf-8" />
<title>Spacedrive</title> <title>Spacedrive</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head> </head>
<body> <body>
<div id="root"></div> <div id="root"></div>
<script type="module" src="./index.tsx"></script> <script type="module" src="./index.tsx"></script>
</body> </body>
</html> </html>

View file

@ -5,7 +5,7 @@ import '@sd/ui/style';
const root = ReactDOM.createRoot(document.getElementById('root') as HTMLElement); const root = ReactDOM.createRoot(document.getElementById('root') as HTMLElement);
root.render( root.render(
<React.StrictMode> <React.StrictMode>
<App /> <App />
</React.StrictMode> </React.StrictMode>
); );

View file

@ -1,5 +1,5 @@
{ {
"extends": "../../packages/config/interface.tsconfig.json", "extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {}, "compilerOptions": {},
"include": ["src"] "include": ["src"]
} }

View file

@ -1,3 +1,3 @@
{ {
"rewrites": [{ "source": "/(.*)", "destination": "/" }] "rewrites": [{ "source": "/(.*)", "destination": "/" }]
} }

View file

@ -7,24 +7,24 @@ import { name, version } from './package.json';
// https://vitejs.dev/config/ // https://vitejs.dev/config/
export default defineConfig({ export default defineConfig({
server: { server: {
port: 8002 port: 8002
}, },
plugins: [ plugins: [
// @ts-ignore // @ts-ignore
react({ react({
jsxRuntime: 'classic' jsxRuntime: 'classic'
}), }),
svg({ svgrOptions: { icon: true } }), svg({ svgrOptions: { icon: true } }),
tsconfigPaths() tsconfigPaths()
], ],
root: 'src', root: 'src',
publicDir: '../../packages/interface/src/assets', publicDir: '../../packages/interface/src/assets',
define: { define: {
pkgJson: { name, version } pkgJson: { name, version }
}, },
build: { build: {
outDir: '../dist', outDir: '../dist',
assetsDir: '.' assetsDir: '.'
} }
}); });

View file

@ -1,6 +1,5 @@
max_width = 100 max_width = 100
hard_tabs = false hard_tabs = true
tab_spaces = 2
newline_style = "Unix" newline_style = "Unix"
use_small_heuristics = "Default" use_small_heuristics = "Default"
reorder_imports = true reorder_imports = true

View file

@ -1,3 +1,10 @@
import type { Platform } from "./Platform"; import type { Platform } from './Platform';
export interface Client { uuid: string, name: string, platform: Platform, tcp_address: string, last_seen: string, last_synchronized: string, } export interface Client {
uuid: string;
name: string;
platform: Platform;
tcp_address: string;
last_seen: string;
last_synchronized: string;
}

View file

@ -1,2 +1,14 @@
export type ClientCommand =
export type ClientCommand = { key: "FileRead", params: { id: number, } } | { key: "FileDelete", params: { id: number, } } | { key: "LibDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles" }; | { key: 'FileRead'; params: { id: number } }
| { key: 'FileDelete'; params: { id: number } }
| { key: 'LibDelete'; params: { id: number } }
| { key: 'TagCreate'; params: { name: string; color: string } }
| { key: 'TagUpdate'; params: { name: string; color: string } }
| { key: 'TagAssign'; params: { file_id: number; tag_id: number } }
| { key: 'TagDelete'; params: { id: number } }
| { key: 'LocCreate'; params: { path: string } }
| { key: 'LocUpdate'; params: { id: number; name: string | null } }
| { key: 'LocDelete'; params: { id: number } }
| { key: 'SysVolumeUnmount'; params: { id: number } }
| { key: 'GenerateThumbsForLocation'; params: { id: number; path: string } }
| { key: 'IdentifyUniqueFiles' };

View file

@ -1,2 +1,10 @@
export type ClientQuery =
export type ClientQuery = { key: "ClientGetState" } | { key: "SysGetVolumes" } | { key: "LibGetTags" } | { key: "JobGetRunning" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" }; | { key: 'ClientGetState' }
| { key: 'SysGetVolumes' }
| { key: 'LibGetTags' }
| { key: 'JobGetRunning' }
| { key: 'JobGetHistory' }
| { key: 'SysGetLocations' }
| { key: 'SysGetLocation'; params: { id: number } }
| { key: 'LibGetExplorerDir'; params: { location_id: number; path: string; limit: number } }
| { key: 'GetLibraryStatistics' };

View file

@ -1,3 +1,11 @@
import type { LibraryState } from "./LibraryState"; import type { LibraryState } from './LibraryState';
export interface ClientState { client_uuid: string, client_id: number, client_name: string, data_path: string, tcp_port: number, libraries: Array<LibraryState>, current_library_uuid: string, } export interface ClientState {
client_uuid: string;
client_id: number;
client_name: string;
data_path: string;
tcp_port: number;
libraries: Array<LibraryState>;
current_library_uuid: string;
}

View file

@ -1,4 +1,10 @@
import type { ClientQuery } from "./ClientQuery"; import type { ClientQuery } from './ClientQuery';
import type { CoreResource } from "./CoreResource"; import type { CoreResource } from './CoreResource';
export type CoreEvent = { key: "InvalidateQuery", data: ClientQuery } | { key: "InvalidateQueryDebounced", data: ClientQuery } | { key: "InvalidateResource", data: CoreResource } | { key: "NewThumbnail", data: { cas_id: string, } } | { key: "Log", data: { message: string, } } | { key: "DatabaseDisconnected", data: { reason: string | null, } }; export type CoreEvent =
| { key: 'InvalidateQuery'; data: ClientQuery }
| { key: 'InvalidateQueryDebounced'; data: ClientQuery }
| { key: 'InvalidateResource'; data: CoreResource }
| { key: 'NewThumbnail'; data: { cas_id: string } }
| { key: 'Log'; data: { message: string } }
| { key: 'DatabaseDisconnected'; data: { reason: string | null } };

View file

@ -1,5 +1,11 @@
import type { File } from "./File"; import type { File } from './File';
import type { JobReport } from "./JobReport"; import type { JobReport } from './JobReport';
import type { LocationResource } from "./LocationResource"; import type { LocationResource } from './LocationResource';
export type CoreResource = "Client" | "Library" | { Location: LocationResource } | { File: File } | { Job: JobReport } | "Tag"; export type CoreResource =
| 'Client'
| 'Library'
| { Location: LocationResource }
| { File: File }
| { Job: JobReport }
| 'Tag';

View file

@ -1,8 +1,18 @@
import type { ClientState } from "./ClientState"; import type { ClientState } from './ClientState';
import type { DirectoryWithContents } from "./DirectoryWithContents"; import type { DirectoryWithContents } from './DirectoryWithContents';
import type { JobReport } from "./JobReport"; import type { JobReport } from './JobReport';
import type { LocationResource } from "./LocationResource"; import type { LocationResource } from './LocationResource';
import type { Statistics } from "./Statistics"; import type { Statistics } from './Statistics';
import type { Volume } from "./Volume"; import type { Volume } from './Volume';
export type CoreResponse = { key: "Success", data: null } | { key: "SysGetVolumes", data: Array<Volume> } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array<LocationResource> } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "ClientGetState", data: ClientState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array<JobReport> } | { key: "JobGetHistory", data: Array<JobReport> } | { key: "GetLibraryStatistics", data: Statistics }; export type CoreResponse =
| { key: 'Success'; data: null }
| { key: 'SysGetVolumes'; data: Array<Volume> }
| { key: 'SysGetLocation'; data: LocationResource }
| { key: 'SysGetLocations'; data: Array<LocationResource> }
| { key: 'LibGetExplorerDir'; data: DirectoryWithContents }
| { key: 'ClientGetState'; data: ClientState }
| { key: 'LocCreate'; data: LocationResource }
| { key: 'JobGetRunning'; data: Array<JobReport> }
| { key: 'JobGetHistory'; data: Array<JobReport> }
| { key: 'GetLibraryStatistics'; data: Statistics };

View file

@ -1,3 +1,6 @@
import type { FilePath } from "./FilePath"; import type { FilePath } from './FilePath';
export interface DirectoryWithContents { directory: FilePath, contents: Array<FilePath>, } export interface DirectoryWithContents {
directory: FilePath;
contents: Array<FilePath>;
}

View file

@ -1,2 +1 @@
export type EncryptionAlgorithm = 'None' | 'AES128' | 'AES192' | 'AES256';
export type EncryptionAlgorithm = "None" | "AES128" | "AES192" | "AES256";

View file

@ -1,5 +1,24 @@
import type { EncryptionAlgorithm } from "./EncryptionAlgorithm"; import type { EncryptionAlgorithm } from './EncryptionAlgorithm';
import type { FileKind } from "./FileKind"; import type { FileKind } from './FileKind';
import type { FilePath } from "./FilePath"; import type { FilePath } from './FilePath';
export interface File { id: number, cas_id: string, integrity_checksum: string | null, size_in_bytes: string, kind: FileKind, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, encryption: EncryptionAlgorithm, ipfs_id: string | null, comment: string | null, date_created: string, date_modified: string, date_indexed: string, paths: Array<FilePath>, } export interface File {
id: number;
cas_id: string;
integrity_checksum: string | null;
size_in_bytes: string;
kind: FileKind;
hidden: boolean;
favorite: boolean;
important: boolean;
has_thumbnail: boolean;
has_thumbstrip: boolean;
has_video_preview: boolean;
encryption: EncryptionAlgorithm;
ipfs_id: string | null;
comment: string | null;
date_created: string;
date_modified: string;
date_indexed: string;
paths: Array<FilePath>;
}

View file

@ -1,2 +1,10 @@
export type FileKind =
export type FileKind = "Unknown" | "Directory" | "Package" | "Archive" | "Image" | "Video" | "Audio" | "Plaintext" | "Alias"; | 'Unknown'
| 'Directory'
| 'Package'
| 'Archive'
| 'Image'
| 'Video'
| 'Audio'
| 'Plaintext'
| 'Alias';

View file

@ -1,2 +1,16 @@
export interface FilePath {
export interface FilePath { id: number, is_dir: boolean, location_id: number, materialized_path: string, name: string, extension: string | null, file_id: number | null, parent_id: number | null, temp_cas_id: string | null, has_local_thumbnail: boolean, date_created: string, date_modified: string, date_indexed: string, permissions: string | null, } id: number;
is_dir: boolean;
location_id: number;
materialized_path: string;
name: string;
extension: string | null;
file_id: number | null;
parent_id: number | null;
temp_cas_id: string | null;
has_local_thumbnail: boolean;
date_created: string;
date_modified: string;
date_indexed: string;
permissions: string | null;
}

View file

@ -1,3 +1,12 @@
import type { JobStatus } from "./JobStatus"; import type { JobStatus } from './JobStatus';
export interface JobReport { id: string, date_created: string, date_modified: string, status: JobStatus, task_count: number, completed_task_count: number, message: string, seconds_elapsed: string, } export interface JobReport {
id: string;
date_created: string;
date_modified: string;
status: JobStatus;
task_count: number;
completed_task_count: number;
message: string;
seconds_elapsed: string;
}

View file

@ -1,2 +1 @@
export type JobStatus = 'Queued' | 'Running' | 'Completed' | 'Canceled' | 'Failed';
export type JobStatus = "Queued" | "Running" | "Completed" | "Canceled" | "Failed";

View file

@ -1,2 +1,6 @@
export interface LibraryState {
export interface LibraryState { library_uuid: string, library_id: number, library_path: string, offline: boolean, } library_uuid: string;
library_id: number;
library_path: string;
offline: boolean;
}

View file

@ -1,2 +1,10 @@
export interface LocationResource {
export interface LocationResource { id: number, name: string | null, path: string | null, total_capacity: number | null, available_capacity: number | null, is_removable: boolean | null, is_online: boolean, date_created: string, } id: number;
name: string | null;
path: string | null;
total_capacity: number | null;
available_capacity: number | null;
is_removable: boolean | null;
is_online: boolean;
date_created: string;
}

View file

@ -1,2 +1 @@
export type Platform = 'Unknown' | 'Windows' | 'MacOS' | 'Linux' | 'IOS' | 'Android';
export type Platform = "Unknown" | "Windows" | "MacOS" | "Linux" | "IOS" | "Android";

View file

@ -1,2 +1,9 @@
export interface Statistics {
export interface Statistics { total_file_count: number, total_bytes_used: string, total_bytes_capacity: string, total_bytes_free: string, total_unique_bytes: string, preview_media_bytes: string, library_db_size: string, } total_file_count: number;
total_bytes_used: string;
total_bytes_capacity: string;
total_bytes_free: string;
total_unique_bytes: string;
preview_media_bytes: string;
library_db_size: string;
}

View file

@ -1,2 +1,10 @@
export interface Volume {
export interface Volume { name: string, mount_point: string, total_capacity: bigint, available_capacity: bigint, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean, } name: string;
mount_point: string;
total_capacity: bigint;
available_capacity: bigint;
is_removable: boolean;
disk_type: string | null;
file_system: string | null;
is_root_filesystem: boolean;
}

View file

@ -15,28 +15,28 @@ use syn::{parse_macro_input, Data, DeriveInput};
/// ``` /// ```
#[proc_macro_derive(PropertyOperationApply)] #[proc_macro_derive(PropertyOperationApply)]
pub fn property_operation_apply(input: TokenStream) -> TokenStream { pub fn property_operation_apply(input: TokenStream) -> TokenStream {
let DeriveInput { ident, data, .. } = parse_macro_input!(input); let DeriveInput { ident, data, .. } = parse_macro_input!(input);
if let Data::Enum(data) = data { if let Data::Enum(data) = data {
let impls = data.variants.iter().map(|variant| { let impls = data.variants.iter().map(|variant| {
let variant_ident = &variant.ident; let variant_ident = &variant.ident;
quote! { quote! {
#ident::#variant_ident(method) => method.apply(ctx), #ident::#variant_ident(method) => method.apply(ctx),
} }
}); });
let expanded = quote! { let expanded = quote! {
impl #ident { impl #ident {
fn apply(operation: CrdtCtx<PropertyOperation>, ctx: self::engine::SyncContext) { fn apply(operation: CrdtCtx<PropertyOperation>, ctx: self::engine::SyncContext) {
match operation.resource { match operation.resource {
#(#impls)* #(#impls)*
}; };
} }
} }
}; };
TokenStream::from(expanded) TokenStream::from(expanded)
} else { } else {
panic!("The 'PropertyOperationApply' macro can only be used on enums!"); panic!("The 'PropertyOperationApply' macro can only be used on enums!");
} }
} }

View file

@ -1,18 +1,18 @@
{ {
"name": "@sd/core", "name": "@sd/core",
"version": "0.0.0", "version": "0.0.0",
"main": "index.js", "main": "index.js",
"license": "MIT", "license": "MIT",
"scripts": { "scripts": {
"codegen": "cargo test && ts-node ./scripts/bindingsIndex.ts", "codegen": "cargo test && ts-node ./scripts/bindingsIndex.ts",
"build": "cargo build", "build": "cargo build",
"test": "cargo test", "test": "cargo test",
"test:log": "cargo test -- --nocapture", "test:log": "cargo test -- --nocapture",
"prisma": "cargo prisma" "prisma": "cargo prisma"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^17.0.23", "@types/node": "^17.0.23",
"ts-node": "^10.7.0", "ts-node": "^10.7.0",
"typescript": "^4.6.3" "typescript": "^4.6.3"
} }
} }

View file

@ -1,3 +1,3 @@
fn main() { fn main() {
prisma_client_rust_cli::run(); prisma_client_rust_cli::run();
} }

View file

@ -2,29 +2,29 @@ import * as fs from 'fs/promises';
import * as path from 'path'; import * as path from 'path';
(async function main() { (async function main() {
async function exists(path: string) { async function exists(path: string) {
try { try {
await fs.access(path); await fs.access(path);
return true; return true;
} catch { } catch {
return false; return false;
} }
} }
const files = await fs.readdir(path.join(__dirname, '../bindings')); const files = await fs.readdir(path.join(__dirname, '../bindings'));
const bindings = files.filter((f) => f.endsWith('.ts')); const bindings = files.filter((f) => f.endsWith('.ts'));
let str = ''; let str = '';
// str += `export * from './types';\n`; // str += `export * from './types';\n`;
for (let binding of bindings) { for (let binding of bindings) {
str += `export * from './bindings/${binding.split('.')[0]}';\n`; str += `export * from './bindings/${binding.split('.')[0]}';\n`;
} }
let indexExists = await exists(path.join(__dirname, '../index.ts')); let indexExists = await exists(path.join(__dirname, '../index.ts'));
if (indexExists) { if (indexExists) {
await fs.rm(path.join(__dirname, '../index.ts')); await fs.rm(path.join(__dirname, '../index.ts'));
} }
await fs.writeFile(path.join(__dirname, '../index.ts'), str); await fs.writeFile(path.join(__dirname, '../index.ts'), str);
})(); })();

View file

@ -6,8 +6,8 @@ use ts_rs::TS;
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)] #[ts(export)]
pub enum EncryptionAlgorithm { pub enum EncryptionAlgorithm {
None = 0, None = 0,
AES128 = 1, AES128 = 1,
AES192 = 2, AES192 = 2,
AES256 = 3, AES256 = 3,
} }

View file

@ -11,140 +11,142 @@ const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_tab
static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations"); static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations");
pub fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest> { pub fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest> {
let mut context = Context::new(&SHA256); let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024]; let mut buffer = [0; 1024];
loop { loop {
let count = reader.read(&mut buffer)?; let count = reader.read(&mut buffer)?;
if count == 0 { if count == 0 {
break; break;
} }
context.update(&buffer[..count]); context.update(&buffer[..count]);
} }
Ok(context.finish()) Ok(context.finish())
} }
pub async fn run_migrations(ctx: &CoreContext) -> Result<()> { pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
let client = &ctx.database; let client = &ctx.database;
match client match client
._query_raw::<serde_json::Value>( ._query_raw::<serde_json::Value>(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'", "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
) )
.await .await
{ {
Ok(data) => { Ok(data) => {
if data.len() == 0 { if data.len() == 0 {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Migration table does not exist"); println!("Migration table does not exist");
// execute migration // execute migration
match client._execute_raw(INIT_MIGRATION).await { match client._execute_raw(INIT_MIGRATION).await {
Ok(_) => {} Ok(_) => {}
Err(e) => { Err(e) => {
println!("Failed to create migration table: {}", e); println!("Failed to create migration table: {}", e);
} }
}; };
let value: Vec<serde_json::Value> = client let value: Vec<serde_json::Value> = client
._query_raw("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'") ._query_raw(
.await "SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
.unwrap(); )
.await
.unwrap();
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Migration table created: {:?}", value); println!("Migration table created: {:?}", value);
} else { } else {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Migration table exists: {:?}", data); println!("Migration table exists: {:?}", data);
} }
let mut migration_subdirs = MIGRATIONS_DIR let mut migration_subdirs = MIGRATIONS_DIR
.dirs() .dirs()
.filter(|subdir| { .filter(|subdir| {
subdir subdir
.path() .path()
.file_name() .file_name()
.map(|name| name != OsStr::new("migration_table")) .map(|name| name != OsStr::new("migration_table"))
.unwrap_or(false) .unwrap_or(false)
}) })
.collect::<Vec<_>>(); .collect::<Vec<_>>();
migration_subdirs.sort_by(|a, b| { migration_subdirs.sort_by(|a, b| {
let a_name = a.path().file_name().unwrap().to_str().unwrap(); let a_name = a.path().file_name().unwrap().to_str().unwrap();
let b_name = b.path().file_name().unwrap().to_str().unwrap(); let b_name = b.path().file_name().unwrap().to_str().unwrap();
let a_time = a_name[..14].parse::<i64>().unwrap(); let a_time = a_name[..14].parse::<i64>().unwrap();
let b_time = b_name[..14].parse::<i64>().unwrap(); let b_time = b_name[..14].parse::<i64>().unwrap();
a_time.cmp(&b_time) a_time.cmp(&b_time)
}); });
for subdir in migration_subdirs { for subdir in migration_subdirs {
println!("{:?}", subdir.path()); println!("{:?}", subdir.path());
let migration_file = subdir let migration_file = subdir
.get_file(subdir.path().join("./migration.sql")) .get_file(subdir.path().join("./migration.sql"))
.unwrap(); .unwrap();
let migration_sql = migration_file.contents_utf8().unwrap(); let migration_sql = migration_file.contents_utf8().unwrap();
let digest = sha256_digest(BufReader::new(migration_file.contents()))?; let digest = sha256_digest(BufReader::new(migration_file.contents()))?;
// create a lowercase hash from // create a lowercase hash from
let checksum = HEXLOWER.encode(digest.as_ref()); let checksum = HEXLOWER.encode(digest.as_ref());
let name = subdir.path().file_name().unwrap().to_str().unwrap(); let name = subdir.path().file_name().unwrap().to_str().unwrap();
// get existing migration by checksum, if it doesn't exist run the migration // get existing migration by checksum, if it doesn't exist run the migration
let existing_migration = client let existing_migration = client
.migration() .migration()
.find_unique(migration::checksum::equals(checksum.clone())) .find_unique(migration::checksum::equals(checksum.clone()))
.exec() .exec()
.await?; .await?;
if existing_migration.is_none() { if existing_migration.is_none() {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Running migration: {}", name); println!("Running migration: {}", name);
let steps = migration_sql.split(";").collect::<Vec<&str>>(); let steps = migration_sql.split(";").collect::<Vec<&str>>();
let steps = &steps[0..steps.len() - 1]; let steps = &steps[0..steps.len() - 1];
client client
.migration() .migration()
.create( .create(
migration::name::set(name.to_string()), migration::name::set(name.to_string()),
migration::checksum::set(checksum.clone()), migration::checksum::set(checksum.clone()),
vec![], vec![],
) )
.exec() .exec()
.await?; .await?;
for (i, step) in steps.iter().enumerate() { for (i, step) in steps.iter().enumerate() {
match client._execute_raw(&format!("{};", step)).await { match client._execute_raw(&format!("{};", step)).await {
Ok(_) => { Ok(_) => {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Step {} ran successfully", i); println!("Step {} ran successfully", i);
client client
.migration() .migration()
.find_unique(migration::checksum::equals(checksum.clone())) .find_unique(migration::checksum::equals(checksum.clone()))
.update(vec![migration::steps_applied::set(i as i32 + 1)]) .update(vec![migration::steps_applied::set(i as i32 + 1)])
.exec() .exec()
.await?; .await?;
} }
Err(e) => { Err(e) => {
println!("Error running migration: {}", name); println!("Error running migration: {}", name);
println!("{}", e); println!("{}", e);
break; break;
} }
} }
} }
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Migration {} recorded successfully", name); println!("Migration {} recorded successfully", name);
} else { } else {
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
println!("Migration {} already exists", name); println!("Migration {} already exists", name);
} }
} }
} }
Err(err) => { Err(err) => {
panic!("Failed to check migration table existence: {:?}", err); panic!("Failed to check migration table existence: {:?}", err);
} }
} }
Ok(()) Ok(())
} }

View file

@ -4,17 +4,17 @@ pub mod migrate;
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum DatabaseError { pub enum DatabaseError {
#[error("Failed to connect to database")] #[error("Failed to connect to database")]
MissingConnection, MissingConnection,
#[error("Unable find current_library in the client config")] #[error("Unable find current_library in the client config")]
MalformedConfig, MalformedConfig,
#[error("Unable to initialize the Prisma client")] #[error("Unable to initialize the Prisma client")]
ClientError(#[from] prisma::NewClientError), ClientError(#[from] prisma::NewClientError),
} }
/// Open a Prisma client against the SQLite database file at `path`.
pub async fn create_connection(path: &str) -> Result<PrismaClient, DatabaseError> {
	println!("Creating database connection: {:?}", path);
	let url = format!("file:{}", path);
	let client = prisma::new_client_with_url(&url).await?;
	Ok(client)
}

View file

@ -5,132 +5,132 @@ use std::{ffi::OsStr, path::Path};
#[derive(Default, Debug)] #[derive(Default, Debug)]
pub struct MediaItem { pub struct MediaItem {
pub created_at: Option<String>, pub created_at: Option<String>,
pub brand: Option<String>, pub brand: Option<String>,
pub model: Option<String>, pub model: Option<String>,
pub duration_seconds: f64, pub duration_seconds: f64,
pub best_video_stream_index: usize, pub best_video_stream_index: usize,
pub best_audio_stream_index: usize, pub best_audio_stream_index: usize,
pub best_subtitle_stream_index: usize, pub best_subtitle_stream_index: usize,
pub steams: Vec<Stream>, pub steams: Vec<Stream>,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct Stream { pub struct Stream {
pub codec: String, pub codec: String,
pub frames: f64, pub frames: f64,
pub duration_seconds: f64, pub duration_seconds: f64,
pub kind: Option<StreamKind>, pub kind: Option<StreamKind>,
} }
#[derive(Debug)] #[derive(Debug)]
pub enum StreamKind { pub enum StreamKind {
Video(VideoStream), Video(VideoStream),
Audio(AudioStream), Audio(AudioStream),
} }
#[derive(Debug)] #[derive(Debug)]
pub struct VideoStream { pub struct VideoStream {
pub width: u32, pub width: u32,
pub height: u32, pub height: u32,
pub aspect_ratio: String, pub aspect_ratio: String,
pub format: format::Pixel, pub format: format::Pixel,
pub bitrate: usize, pub bitrate: usize,
} }
#[derive(Debug)] #[derive(Debug)]
pub struct AudioStream { pub struct AudioStream {
pub channels: u16, pub channels: u16,
pub format: format::Sample, pub format: format::Sample,
pub bitrate: usize, pub bitrate: usize,
pub rate: u32, pub rate: u32,
} }
fn extract(iter: &mut Iter, key: &str) -> Option<String> { fn extract(iter: &mut Iter, key: &str) -> Option<String> {
iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string()) iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
} }
pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> { pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> {
ffmpeg::init().unwrap(); ffmpeg::init().unwrap();
let mut name = Path::new(path) let mut name = Path::new(path)
.file_name() .file_name()
.and_then(OsStr::to_str) .and_then(OsStr::to_str)
.map(ToString::to_string) .map(ToString::to_string)
.unwrap_or(String::new()); .unwrap_or(String::new());
// strip to exact potential date length and attempt to parse // strip to exact potential date length and attempt to parse
name = name.chars().take(19).collect(); name = name.chars().take(19).collect();
// specifically OBS uses this format for time, other checks could be added // specifically OBS uses this format for time, other checks could be added
let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S"); let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
match ffmpeg::format::input(&path) { match ffmpeg::format::input(&path) {
Ok(context) => { Ok(context) => {
let mut media_item = MediaItem::default(); let mut media_item = MediaItem::default();
let metadata = context.metadata(); let metadata = context.metadata();
let mut iter = metadata.iter(); let mut iter = metadata.iter();
// creation_time is usually the creation date of the file // creation_time is usually the creation date of the file
media_item.created_at = extract(&mut iter, "creation_time"); media_item.created_at = extract(&mut iter, "creation_time");
// apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time // apple photos use "com.apple.quicktime.creationdate", which we care more about than the creation_time
media_item.created_at = extract(&mut iter, "creationdate"); media_item.created_at = extract(&mut iter, "creationdate");
// fallback to potential time if exists // fallback to potential time if exists
if media_item.created_at.is_none() { if media_item.created_at.is_none() {
media_item.created_at = potential_date.map(|d| d.to_string()).ok(); media_item.created_at = potential_date.map(|d| d.to_string()).ok();
} }
// origin metadata // origin metadata
media_item.brand = extract(&mut iter, "major_brand"); media_item.brand = extract(&mut iter, "major_brand");
media_item.brand = extract(&mut iter, "make"); media_item.brand = extract(&mut iter, "make");
media_item.model = extract(&mut iter, "model"); media_item.model = extract(&mut iter, "model");
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) { if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
media_item.best_video_stream_index = stream.index(); media_item.best_video_stream_index = stream.index();
} }
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) { if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
media_item.best_audio_stream_index = stream.index(); media_item.best_audio_stream_index = stream.index();
} }
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) { if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
media_item.best_subtitle_stream_index = stream.index(); media_item.best_subtitle_stream_index = stream.index();
} }
media_item.duration_seconds = media_item.duration_seconds =
context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE); context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
for stream in context.streams() { for stream in context.streams() {
let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?; let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
let mut stream_item = Stream { let mut stream_item = Stream {
codec: codec.id().name().to_string(), codec: codec.id().name().to_string(),
frames: stream.frames() as f64, frames: stream.frames() as f64,
duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()), duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
kind: None, kind: None,
}; };
if codec.medium() == ffmpeg::media::Type::Video { if codec.medium() == ffmpeg::media::Type::Video {
if let Ok(video) = codec.decoder().video() { if let Ok(video) = codec.decoder().video() {
stream_item.kind = Some(StreamKind::Video(VideoStream { stream_item.kind = Some(StreamKind::Video(VideoStream {
bitrate: video.bit_rate(), bitrate: video.bit_rate(),
format: video.format(), format: video.format(),
width: video.width(), width: video.width(),
height: video.height(), height: video.height(),
aspect_ratio: video.aspect_ratio().to_string(), aspect_ratio: video.aspect_ratio().to_string(),
})); }));
} }
} else if codec.medium() == ffmpeg::media::Type::Audio { } else if codec.medium() == ffmpeg::media::Type::Audio {
if let Ok(audio) = codec.decoder().audio() { if let Ok(audio) = codec.decoder().audio() {
stream_item.kind = Some(StreamKind::Audio(AudioStream { stream_item.kind = Some(StreamKind::Audio(AudioStream {
channels: audio.channels(), channels: audio.channels(),
bitrate: audio.bit_rate(), bitrate: audio.bit_rate(),
rate: audio.rate(), rate: audio.rate(),
format: audio.format(), format: audio.format(),
})); }));
} }
} }
media_item.steams.push(stream_item); media_item.steams.push(stream_item);
} }
println!("{:#?}", media_item); println!("{:#?}", media_item);
} }
Err(error) => println!("error: {}", error), Err(error) => println!("error: {}", error),
} }
Ok(()) Ok(())
} }

View file

@ -1,9 +1,9 @@
use crate::job::jobs::JobReportUpdate; use crate::job::jobs::JobReportUpdate;
use crate::node::state; use crate::node::state;
use crate::{ use crate::{
job::{jobs::Job, worker::WorkerContext}, job::{jobs::Job, worker::WorkerContext},
prisma::file_path, prisma::file_path,
CoreContext, CoreContext,
}; };
use crate::{sys, CoreEvent}; use crate::{sys, CoreEvent};
use anyhow::Result; use anyhow::Result;
@ -15,9 +15,9 @@ use webp::*;
/// Background job that generates WebP thumbnails for every image file
/// under `path` within the given location.
#[derive(Debug, Clone)]
pub struct ThumbnailJob {
	pub location_id: i32,
	pub path: String,
	// When true, NewThumbnail events are not emitted as thumbnails complete.
	pub background: bool,
}
static THUMBNAIL_SIZE_FACTOR: f32 = 0.2; static THUMBNAIL_SIZE_FACTOR: f32 = 0.2;
@ -26,133 +26,136 @@ pub static THUMBNAIL_CACHE_DIR_NAME: &str = "thumbnails";
#[async_trait::async_trait] #[async_trait::async_trait]
impl Job for ThumbnailJob { impl Job for ThumbnailJob {
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
"file_identifier" "file_identifier"
} }
async fn run(&self, ctx: WorkerContext) -> Result<()> { async fn run(&self, ctx: WorkerContext) -> Result<()> {
let config = state::get(); let config = state::get();
let core_ctx = ctx.core_ctx.clone(); let core_ctx = ctx.core_ctx.clone();
let location = sys::locations::get_location(&core_ctx, self.location_id).await?; let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
fs::create_dir_all( fs::create_dir_all(
Path::new(&config.data_path) Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME) .join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", self.location_id)), .join(format!("{}", self.location_id)),
)?; )?;
let root_path = location.path.unwrap(); let root_path = location.path.unwrap();
let image_files = get_images(&core_ctx, self.location_id, &self.path).await?; let image_files = get_images(&core_ctx, self.location_id, &self.path).await?;
let location_id = location.id.clone(); let location_id = location.id.clone();
println!("Found {:?} files", image_files.len()); println!("Found {:?} files", image_files.len());
let is_background = self.background.clone(); let is_background = self.background.clone();
tokio::task::spawn_blocking(move || { tokio::task::spawn_blocking(move || {
ctx.progress(vec![ ctx.progress(vec![
JobReportUpdate::TaskCount(image_files.len()), JobReportUpdate::TaskCount(image_files.len()),
JobReportUpdate::Message(format!("Preparing to process {} files", image_files.len())), JobReportUpdate::Message(format!(
]); "Preparing to process {} files",
image_files.len()
)),
]);
for (i, image_file) in image_files.iter().enumerate() { for (i, image_file) in image_files.iter().enumerate() {
ctx.progress(vec![JobReportUpdate::Message(format!( ctx.progress(vec![JobReportUpdate::Message(format!(
"Processing {}", "Processing {}",
image_file.materialized_path.clone() image_file.materialized_path.clone()
))]); ))]);
let path = format!("{}{}", root_path, image_file.materialized_path); let path = format!("{}{}", root_path, image_file.materialized_path);
println!("image_file {:?}", image_file); println!("image_file {:?}", image_file);
let cas_id = match image_file.file() { let cas_id = match image_file.file() {
Ok(i) => i.unwrap().cas_id.clone(), Ok(i) => i.unwrap().cas_id.clone(),
Err(_) => todo!(), Err(_) => todo!(),
}; };
// Define and write the WebP-encoded file to a given path // Define and write the WebP-encoded file to a given path
let output_path = Path::new(&config.data_path) let output_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME) .join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location_id)) .join(format!("{}", location_id))
.join(&cas_id) .join(&cas_id)
.with_extension("webp"); .with_extension("webp");
// check if file exists at output path // check if file exists at output path
if !output_path.exists() { if !output_path.exists() {
println!("writing {:?} to {}", output_path, path); println!("writing {:?} to {}", output_path, path);
generate_thumbnail(&path, &output_path) generate_thumbnail(&path, &output_path)
.map_err(|e| { .map_err(|e| {
println!("error generating thumb {:?}", e); println!("error generating thumb {:?}", e);
}) })
.unwrap_or(()); .unwrap_or(());
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]); ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
if !is_background { if !is_background {
block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id })); block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }));
}; };
} else { } else {
println!("Thumb exists, skipping... {}", output_path.display()); println!("Thumb exists, skipping... {}", output_path.display());
} }
} }
}) })
.await?; .await?;
Ok(()) Ok(())
} }
} }
pub fn generate_thumbnail(file_path: &str, output_path: &PathBuf) -> Result<()> { pub fn generate_thumbnail(file_path: &str, output_path: &PathBuf) -> Result<()> {
// Using `image` crate, open the included .jpg file // Using `image` crate, open the included .jpg file
let img = image::open(file_path)?; let img = image::open(file_path)?;
let (w, h) = img.dimensions(); let (w, h) = img.dimensions();
// Optionally, resize the existing photo and convert back into DynamicImage // Optionally, resize the existing photo and convert back into DynamicImage
let img: DynamicImage = image::DynamicImage::ImageRgba8(imageops::resize( let img: DynamicImage = image::DynamicImage::ImageRgba8(imageops::resize(
&img, &img,
(w as f32 * THUMBNAIL_SIZE_FACTOR) as u32, (w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
(h as f32 * THUMBNAIL_SIZE_FACTOR) as u32, (h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
imageops::FilterType::Triangle, imageops::FilterType::Triangle,
)); ));
// Create the WebP encoder for the above image // Create the WebP encoder for the above image
let encoder: Encoder = Encoder::from_image(&img).map_err(|_| anyhow::anyhow!("jeff"))?; let encoder: Encoder = Encoder::from_image(&img).map_err(|_| anyhow::anyhow!("jeff"))?;
// Encode the image at a specified quality 0-100 // Encode the image at a specified quality 0-100
let webp: WebPMemory = encoder.encode(THUMBNAIL_QUALITY); let webp: WebPMemory = encoder.encode(THUMBNAIL_QUALITY);
println!("Writing to {}", output_path.display()); println!("Writing to {}", output_path.display());
std::fs::write(&output_path, &*webp)?; std::fs::write(&output_path, &*webp)?;
Ok(()) Ok(())
} }
pub async fn get_images( pub async fn get_images(
ctx: &CoreContext, ctx: &CoreContext,
location_id: i32, location_id: i32,
path: &str, path: &str,
) -> Result<Vec<file_path::Data>> { ) -> Result<Vec<file_path::Data>> {
let mut params = vec![ let mut params = vec![
file_path::location_id::equals(location_id), file_path::location_id::equals(location_id),
file_path::extension::in_vec(vec![ file_path::extension::in_vec(vec![
"png".to_string(), "png".to_string(),
"jpeg".to_string(), "jpeg".to_string(),
"jpg".to_string(), "jpg".to_string(),
"gif".to_string(), "gif".to_string(),
"webp".to_string(), "webp".to_string(),
]), ]),
]; ];
if !path.is_empty() { if !path.is_empty() {
params.push(file_path::materialized_path::starts_with(path.to_string())) params.push(file_path::materialized_path::starts_with(path.to_string()))
} }
let image_files = ctx let image_files = ctx
.database .database
.file_path() .file_path()
.find_many(params) .find_many(params)
.with(file_path::file::fetch()) .with(file_path::file::fetch())
.exec() .exec()
.await?; .await?;
Ok(image_files) Ok(image_files)
} }

View file

@ -0,0 +1 @@

View file

@ -0,0 +1 @@

View file

@ -16,62 +16,62 @@ static SAMPLE_COUNT: u64 = 4;
static SAMPLE_SIZE: u64 = 10000; static SAMPLE_SIZE: u64 = 10000;
/// Read `size` bytes from `file` starting at absolute `offset`.
/// On Unix this uses `read_exact_at` (positioned read, fills the buffer or
/// errors). On Windows it uses `seek_read`.
// NOTE(review): `seek_read` can return a short read; the tail of `buf`
// would stay zeroed in that case — confirm that is acceptable here.
fn read_at(file: &File, offset: u64, size: u64) -> Result<Vec<u8>> {
	let mut buf = vec![0u8; size as usize];

	#[cfg(target_family = "unix")]
	file.read_exact_at(&mut buf, offset)?;

	#[cfg(target_family = "windows")]
	file.seek_read(&mut buf, offset)?;

	Ok(buf)
}
/// Produce a sampled content hash ("cas id") for the file at `path`.
/// The hash covers the file size plus either the whole file (when smaller
/// than SAMPLE_COUNT * SAMPLE_SIZE) or SAMPLE_COUNT evenly spaced samples
/// followed by the file's tail.
pub fn generate_cas_id(path: &str, size: u64) -> Result<String> {
	// open file reference
	let file = File::open(path)?;
	let mut context = Context::new(&SHA256);
	// include the file size in the checksum
	context.update(&size.to_le_bytes());

	if SAMPLE_COUNT * SAMPLE_SIZE > size {
		// Small file: hash the entire contents.
		context.update(&read_at(&file, 0, size)?);
	} else {
		// Hash evenly spaced samples across the file...
		for i in 0..SAMPLE_COUNT {
			context.update(&read_at(&file, (size / SAMPLE_COUNT) * i, SAMPLE_SIZE)?);
		}
		// ...and the final SAMPLE_SIZE bytes.
		context.update(&read_at(&file, size - SAMPLE_SIZE, SAMPLE_SIZE)?);
	}

	let digest = context.finish();
	Ok(HEXLOWER.encode(digest.as_ref()))
}
pub fn full_checksum(path: &str) -> Result<String> { pub fn full_checksum(path: &str) -> Result<String> {
// read file as buffer and convert to digest // read file as buffer and convert to digest
let mut reader = BufReader::new(File::open(path).unwrap()); let mut reader = BufReader::new(File::open(path).unwrap());
let mut context = Context::new(&SHA256); let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024]; let mut buffer = [0; 1024];
loop { loop {
let count = reader.read(&mut buffer)?; let count = reader.read(&mut buffer)?;
if count == 0 { if count == 0 {
break; break;
} }
context.update(&buffer[..count]); context.update(&buffer[..count]);
} }
let digest = context.finish(); let digest = context.finish();
// create a lowercase hash from // create a lowercase hash from
let hex = HEXLOWER.encode(digest.as_ref()); let hex = HEXLOWER.encode(digest.as_ref());
Ok(hex) Ok(hex)
} }

View file

@ -2,22 +2,22 @@ use std::fs;
use crate::job::jobs::JobReportUpdate; use crate::job::jobs::JobReportUpdate;
use crate::{ use crate::{
file::FileError, file::FileError,
job::{jobs::Job, worker::WorkerContext}, job::{jobs::Job, worker::WorkerContext},
prisma::{file_path}, prisma::file_path,
CoreContext, CoreContext,
}; };
use anyhow::Result; use anyhow::Result;
use futures::executor::block_on; use futures::executor::block_on;
use serde::{Deserialize, Serialize};
use prisma_client_rust::Direction; use prisma_client_rust::Direction;
use serde::{Deserialize, Serialize};
use super::checksum::generate_cas_id; use super::checksum::generate_cas_id;
/// Result row describing a newly created file record and its cas_id.
#[derive(Deserialize, Serialize, Debug)]
pub struct FileCreated {
	pub id: i32,
	pub cas_id: String,
}
#[derive(Debug)] #[derive(Debug)]
@ -25,24 +25,24 @@ pub struct FileIdentifierJob;
#[async_trait::async_trait] #[async_trait::async_trait]
impl Job for FileIdentifierJob { impl Job for FileIdentifierJob {
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
"file_identifier" "file_identifier"
} }
async fn run(&self, ctx: WorkerContext) -> Result<()> { async fn run(&self, ctx: WorkerContext) -> Result<()> {
println!("Identifying files"); println!("Identifying files");
let total_count = count_orphan_file_paths(&ctx.core_ctx).await?; let total_count = count_orphan_file_paths(&ctx.core_ctx).await?;
println!("Found {} orphan file paths", total_count); println!("Found {} orphan file paths", total_count);
let task_count = (total_count as f64 / 100f64).ceil() as usize; let task_count = (total_count as f64 / 100f64).ceil() as usize;
println!("Will process {} tasks", task_count); println!("Will process {} tasks", task_count);
// update job with total task count based on orphan file_paths count // update job with total task count based on orphan file_paths count
ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]); ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
let db = ctx.core_ctx.database.clone(); let db = ctx.core_ctx.database.clone();
let ctx = tokio::task::spawn_blocking(move || { let ctx = tokio::task::spawn_blocking(move || {
let mut completed: usize = 0; let mut completed: usize = 0;
let mut cursor: i32 = 1; let mut cursor: i32 = 1;
@ -102,69 +102,68 @@ impl Job for FileIdentifierJob {
ctx ctx
}).await?; }).await?;
let remaining = count_orphan_file_paths(&ctx.core_ctx).await?; let remaining = count_orphan_file_paths(&ctx.core_ctx).await?;
println!("Finished with {} files remaining because your code is bad.", remaining); println!(
"Finished with {} files remaining because your code is bad.",
remaining
);
// if remaining > 0 { // if remaining > 0 {
// ctx.core_ctx.spawn_job(Box::new(FileIdentifierJob)); // ctx.core_ctx.spawn_job(Box::new(FileIdentifierJob));
// } // }
Ok(()) Ok(())
} }
} }
/// Shape of the raw `SELECT COUNT(*) AS count ...` query result.
#[derive(Deserialize, Serialize, Debug)]
struct CountRes {
	count: Option<usize>,
}
pub async fn count_orphan_file_paths(ctx: &CoreContext) -> Result<usize, FileError> { pub async fn count_orphan_file_paths(ctx: &CoreContext) -> Result<usize, FileError> {
let db = &ctx.database; let db = &ctx.database;
let files_count = db let files_count = db
._query_raw::<CountRes>( ._query_raw::<CountRes>(
r#"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE"#, r#"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE"#,
) )
.await?; .await?;
Ok(files_count[0].count.unwrap_or(0)) Ok(files_count[0].count.unwrap_or(0))
} }
pub async fn get_orphan_file_paths( pub async fn get_orphan_file_paths(
ctx: &CoreContext, ctx: &CoreContext,
cursor: i32, cursor: i32,
) -> Result<Vec<file_path::Data>, FileError> { ) -> Result<Vec<file_path::Data>, FileError> {
let db = &ctx.database; let db = &ctx.database;
println!("cursor: {:?}", cursor); println!("cursor: {:?}", cursor);
let files = db let files = db
.file_path() .file_path()
.find_many(vec![ .find_many(vec![
file_path::file_id::equals(None), file_path::file_id::equals(None),
file_path::is_dir::equals(false), file_path::is_dir::equals(false),
]) ])
.order_by(file_path::id::order(Direction::Asc)) .order_by(file_path::id::order(Direction::Asc))
.cursor(file_path::id::cursor(cursor)) .cursor(file_path::id::cursor(cursor))
.take(100) .take(100)
.exec() .exec()
.await?; .await?;
Ok(files) Ok(files)
} }
/// Build the SQL values tuple for a file record: a truncated cas_id (empty
/// for directories) plus a placeholder size column.
///
/// # Errors
/// Returns an error if the path's metadata cannot be read or the cas_id
/// cannot be generated.
pub fn prepare_file_values(file_path: &file_path::Data) -> Result<String> {
	let metadata = fs::metadata(&file_path.materialized_path)?;
	let cas_id = if file_path.is_dir {
		// Directories have no content hash.
		String::new()
	} else {
		// Fixed: resolves the `// TODO: remove unwrap` — propagate the
		// generate_cas_id error with `?` instead of panicking.
		let mut id = generate_cas_id(&file_path.materialized_path, metadata.len())?;
		id.truncate(16);
		id
	};
	// TODO: add all metadata
	Ok(format!("(\"{}\",\"{}\")", cas_id, "0"))
}

View file

@ -1,62 +1,62 @@
use crate::{ use crate::{
encode::thumb::THUMBNAIL_CACHE_DIR_NAME, encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, File, FileError}, file::{DirectoryWithContents, File, FileError},
node::state, node::state,
prisma::{file, file_path}, prisma::{file, file_path},
sys::locations::get_location, sys::locations::get_location,
CoreContext, CoreContext,
}; };
use std::path::Path; use std::path::Path;
pub async fn open_dir( pub async fn open_dir(
ctx: &CoreContext, ctx: &CoreContext,
location_id: &i32, location_id: &i32,
path: &str, path: &str,
) -> Result<DirectoryWithContents, FileError> { ) -> Result<DirectoryWithContents, FileError> {
let db = &ctx.database; let db = &ctx.database;
let config = state::get(); let config = state::get();
// get location // get location
let location = get_location(ctx, location_id.clone()).await?; let location = get_location(ctx, location_id.clone()).await?;
let directory = db let directory = db
.file_path() .file_path()
.find_first(vec![ .find_first(vec![
file_path::location_id::equals(location.id), file_path::location_id::equals(location.id),
file_path::materialized_path::equals(path.into()), file_path::materialized_path::equals(path.into()),
file_path::is_dir::equals(true), file_path::is_dir::equals(true),
]) ])
.exec() .exec()
.await? .await?
.ok_or(FileError::DirectoryNotFound(path.to_string()))?; .ok_or(FileError::DirectoryNotFound(path.to_string()))?;
// TODO: this is incorrect, we need to query on file paths // TODO: this is incorrect, we need to query on file paths
let files: Vec<File> = db let files: Vec<File> = db
.file() .file()
.find_many(vec![file::paths::some(vec![file_path::parent_id::equals( .find_many(vec![file::paths::some(vec![file_path::parent_id::equals(
Some(directory.id), Some(directory.id),
)])]) )])])
.exec() .exec()
.await? .await?
.into_iter() .into_iter()
.map(Into::into) .map(Into::into)
.collect(); .collect();
let mut contents: Vec<File> = vec![]; let mut contents: Vec<File> = vec![];
for mut file in files { for mut file in files {
let thumb_path = Path::new(&config.data_path) let thumb_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME) .join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location.id)) .join(format!("{}", location.id))
.join(file.cas_id.clone()) .join(file.cas_id.clone())
.with_extension("webp"); .with_extension("webp");
file.has_thumbnail = thumb_path.exists(); file.has_thumbnail = thumb_path.exists();
contents.push(file); contents.push(file);
} }
Ok(DirectoryWithContents { Ok(DirectoryWithContents {
directory: directory.into(), directory: directory.into(),
contents, contents,
}) })
} }

View file

@ -1,6 +1,6 @@
use crate::job::{ use crate::job::{
jobs::{Job, JobReportUpdate}, jobs::{Job, JobReportUpdate},
worker::WorkerContext, worker::WorkerContext,
}; };
use anyhow::Result; use anyhow::Result;
@ -12,28 +12,28 @@ pub use {pathctx::PathContext, scan::scan_path};
/// Job that walks the directory tree rooted at `path` and indexes it.
#[derive(Debug)]
pub struct IndexerJob {
	pub path: String,
}
#[async_trait::async_trait] #[async_trait::async_trait]
impl Job for IndexerJob { impl Job for IndexerJob {
fn name(&self) -> &'static str { fn name(&self) -> &'static str {
"indexer" "indexer"
} }
async fn run(&self, ctx: WorkerContext) -> Result<()> { async fn run(&self, ctx: WorkerContext) -> Result<()> {
let core_ctx = ctx.core_ctx.clone(); let core_ctx = ctx.core_ctx.clone();
scan_path(&core_ctx, self.path.as_str(), move |p| { scan_path(&core_ctx, self.path.as_str(), move |p| {
ctx.progress( ctx.progress(
p.iter() p.iter()
.map(|p| match p.clone() { .map(|p| match p.clone() {
ScanProgress::ChunkCount(c) => JobReportUpdate::TaskCount(c), ScanProgress::ChunkCount(c) => JobReportUpdate::TaskCount(c),
ScanProgress::SavedChunks(p) => JobReportUpdate::CompletedTaskCount(p), ScanProgress::SavedChunks(p) => JobReportUpdate::CompletedTaskCount(p),
ScanProgress::Message(m) => JobReportUpdate::Message(m), ScanProgress::Message(m) => JobReportUpdate::Message(m),
}) })
.collect(), .collect(),
) )
}) })
.await?; .await?;
Ok(()) Ok(())
} }
} }

View file

@ -1,13 +1,13 @@
// PathContext provides the indexer with instruction to handle particular directory structures and identify rich context. // PathContext provides the indexer with instruction to handle particular directory structures and identify rich context.
pub struct PathContext { pub struct PathContext {
// an app specific key "com.github.repo" // an app specific key "com.github.repo"
pub key: String, pub key: String,
pub name: String, pub name: String,
pub is_dir: bool, pub is_dir: bool,
// possible file extensions for this path // possible file extensions for this path
pub extensions: Vec<String>, pub extensions: Vec<String>,
// sub-paths that must be found // sub-paths that must be found
pub must_contain_sub_paths: Vec<String>, pub must_contain_sub_paths: Vec<String>,
// sub-paths that are ignored // sub-paths that are ignored
pub always_ignored_sub_paths: Option<String>, pub always_ignored_sub_paths: Option<String>,
} }

View file

@ -10,283 +10,283 @@ use walkdir::{DirEntry, WalkDir};
#[derive(Clone)] #[derive(Clone)]
pub enum ScanProgress { pub enum ScanProgress {
ChunkCount(usize), ChunkCount(usize),
SavedChunks(usize), SavedChunks(usize),
Message(String), Message(String),
} }
static BATCH_SIZE: usize = 100; static BATCH_SIZE: usize = 100;
// creates a vector of valid path buffers from a directory // creates a vector of valid path buffers from a directory
pub async fn scan_path( pub async fn scan_path(
ctx: &CoreContext, ctx: &CoreContext,
path: &str, path: &str,
on_progress: impl Fn(Vec<ScanProgress>) + Send + Sync + 'static, on_progress: impl Fn(Vec<ScanProgress>) + Send + Sync + 'static,
) -> Result<()> { ) -> Result<()> {
let db = &ctx.database; let db = &ctx.database;
let path = path.to_string(); let path = path.to_string();
let location = create_location(&ctx, &path).await?; let location = create_location(&ctx, &path).await?;
// query db to highers id, so we can increment it for the new files indexed // query db to highers id, so we can increment it for the new files indexed
#[derive(Deserialize, Serialize, Debug)] #[derive(Deserialize, Serialize, Debug)]
struct QueryRes { struct QueryRes {
id: Option<i32>, id: Option<i32>,
} }
// grab the next id so we can increment in memory for batch inserting // grab the next id so we can increment in memory for batch inserting
let first_file_id = match db let first_file_id = match db
._query_raw::<QueryRes>(r#"SELECT MAX(id) id FROM file_paths"#) ._query_raw::<QueryRes>(r#"SELECT MAX(id) id FROM file_paths"#)
.await .await
{ {
Ok(rows) => rows[0].id.unwrap_or(0), Ok(rows) => rows[0].id.unwrap_or(0),
Err(e) => Err(anyhow!("Error querying for next file id: {}", e))?, Err(e) => Err(anyhow!("Error querying for next file id: {}", e))?,
}; };
//check is path is a directory //check is path is a directory
if !PathBuf::from(&path).is_dir() { if !PathBuf::from(&path).is_dir() {
return Err(anyhow::anyhow!("{} is not a directory", &path)); return Err(anyhow::anyhow!("{} is not a directory", &path));
} }
let dir_path = path.clone(); let dir_path = path.clone();
// spawn a dedicated thread to scan the directory for performance // spawn a dedicated thread to scan the directory for performance
let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || { let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
// store every valid path discovered // store every valid path discovered
let mut paths: Vec<(PathBuf, i32, Option<i32>, bool)> = Vec::new(); let mut paths: Vec<(PathBuf, i32, Option<i32>, bool)> = Vec::new();
// store a hashmap of directories to their file ids for fast lookup // store a hashmap of directories to their file ids for fast lookup
let mut dirs: HashMap<String, i32> = HashMap::new(); let mut dirs: HashMap<String, i32> = HashMap::new();
// begin timer for logging purposes // begin timer for logging purposes
let scan_start = Instant::now(); let scan_start = Instant::now();
let mut next_file_id = first_file_id; let mut next_file_id = first_file_id;
let mut get_id = || { let mut get_id = || {
next_file_id += 1; next_file_id += 1;
next_file_id next_file_id
}; };
// walk through directory recursively // walk through directory recursively
for entry in WalkDir::new(&dir_path).into_iter().filter_entry(|dir| { for entry in WalkDir::new(&dir_path).into_iter().filter_entry(|dir| {
let approved = let approved =
!is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir); !is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
approved approved
}) { }) {
// extract directory entry or log and continue if failed // extract directory entry or log and continue if failed
let entry = match entry { let entry = match entry {
Ok(entry) => entry, Ok(entry) => entry,
Err(e) => { Err(e) => {
println!("Error reading file {}", e); println!("Error reading file {}", e);
continue; continue;
} }
}; };
let path = entry.path(); let path = entry.path();
println!("found: {:?}", path); println!("found: {:?}", path);
let parent_path = path let parent_path = path
.parent() .parent()
.unwrap_or(Path::new("")) .unwrap_or(Path::new(""))
.to_str() .to_str()
.unwrap_or(""); .unwrap_or("");
let parent_dir_id = dirs.get(&*parent_path); let parent_dir_id = dirs.get(&*parent_path);
let str = match path.as_os_str().to_str() { let str = match path.as_os_str().to_str() {
Some(str) => str, Some(str) => str,
None => { None => {
println!("Error reading file {}", &path.display()); println!("Error reading file {}", &path.display());
continue; continue;
} }
}; };
on_progress(vec![ on_progress(vec![
ScanProgress::Message(format!("{}", str)), ScanProgress::Message(format!("{}", str)),
ScanProgress::ChunkCount(paths.len() / BATCH_SIZE), ScanProgress::ChunkCount(paths.len() / BATCH_SIZE),
]); ]);
let file_id = get_id(); let file_id = get_id();
let file_type = entry.file_type(); let file_type = entry.file_type();
let is_dir = file_type.is_dir(); let is_dir = file_type.is_dir();
if is_dir || file_type.is_file() { if is_dir || file_type.is_file() {
paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir)); paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir));
} }
if is_dir { if is_dir {
let _path = match path.to_str() { let _path = match path.to_str() {
Some(path) => path.to_owned(), Some(path) => path.to_owned(),
None => continue, None => continue,
}; };
dirs.insert(_path, file_id); dirs.insert(_path, file_id);
} }
} }
(paths, scan_start, on_progress) (paths, scan_start, on_progress)
}) })
.await .await
.unwrap(); .unwrap();
let db_write_start = Instant::now(); let db_write_start = Instant::now();
let scan_read_time = scan_start.elapsed(); let scan_read_time = scan_start.elapsed();
for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() { for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() {
on_progress(vec![ on_progress(vec![
ScanProgress::SavedChunks(i as usize), ScanProgress::SavedChunks(i as usize),
ScanProgress::Message(format!( ScanProgress::Message(format!(
"Writing {} of {} to library", "Writing {} of {} to library",
i * chunk.len(), i * chunk.len(),
paths.len(), paths.len(),
)), )),
]); ]);
// vector to store active models // vector to store active models
let mut files: Vec<String> = Vec::new(); let mut files: Vec<String> = Vec::new();
for (file_path, file_id, parent_dir_id, is_dir) in chunk { for (file_path, file_id, parent_dir_id, is_dir) in chunk {
files.push( files.push(
match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) { match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) {
Ok(file) => file, Ok(file) => file,
Err(e) => { Err(e) => {
println!("Error creating file model from path {:?}: {}", file_path, e); println!("Error creating file model from path {:?}: {}", file_path, e);
continue; continue;
} }
}, },
); );
} }
let raw_sql = format!( let raw_sql = format!(
r#" r#"
INSERT INTO file_paths (id, is_dir, location_id, materialized_path, name, extension, parent_id) INSERT INTO file_paths (id, is_dir, location_id, materialized_path, name, extension, parent_id)
VALUES {} VALUES {}
"#, "#,
files.join(", ") files.join(", ")
); );
// println!("{}", raw_sql); // println!("{}", raw_sql);
let count = db._execute_raw(&raw_sql).await; let count = db._execute_raw(&raw_sql).await;
println!("Inserted {:?} records", count); println!("Inserted {:?} records", count);
} }
println!( println!(
"scan of {:?} completed in {:?}. {:?} files found. db write completed in {:?}", "scan of {:?} completed in {:?}. {:?} files found. db write completed in {:?}",
&path, &path,
scan_read_time, scan_read_time,
paths.len(), paths.len(),
db_write_start.elapsed() db_write_start.elapsed()
); );
Ok(()) Ok(())
} }
// reads a file at a path and creates an ActiveModel with metadata // reads a file at a path and creates an ActiveModel with metadata
fn prepare_values( fn prepare_values(
file_path: &PathBuf, file_path: &PathBuf,
id: i32, id: i32,
location: &LocationResource, location: &LocationResource,
parent_id: &Option<i32>, parent_id: &Option<i32>,
is_dir: bool, is_dir: bool,
) -> Result<String> { ) -> Result<String> {
// let metadata = fs::metadata(&file_path)?; // let metadata = fs::metadata(&file_path)?;
let location_path = location.path.as_ref().unwrap().as_str(); let location_path = location.path.as_ref().unwrap().as_str();
// let size = metadata.len(); // let size = metadata.len();
let name; let name;
let extension; let extension;
// if the 'file_path' is not a directory, then get the extension and name. // if the 'file_path' is not a directory, then get the extension and name.
// if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names // if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names
// - being interpreted as file extensions // - being interpreted as file extensions
if is_dir { if is_dir {
extension = "".to_string(); extension = "".to_string();
name = extract_name(file_path.file_name()); name = extract_name(file_path.file_name());
} else { } else {
extension = extract_name(file_path.extension()); extension = extract_name(file_path.extension());
name = extract_name(file_path.file_stem()); name = extract_name(file_path.file_stem());
} }
let materialized_path = match file_path.to_str() { let materialized_path = match file_path.to_str() {
Some(p) => p Some(p) => p
.clone() .clone()
.strip_prefix(&location_path) .strip_prefix(&location_path)
// .and_then(|p| p.strip_suffix(format!("{}{}", name, extension).as_str())) // .and_then(|p| p.strip_suffix(format!("{}{}", name, extension).as_str()))
.unwrap_or_default(), .unwrap_or_default(),
None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())), None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())),
}; };
// let cas_id = { // let cas_id = {
// if !metadata.is_dir() { // if !metadata.is_dir() {
// // TODO: remove unwrap, skip and make sure to continue loop // // TODO: remove unwrap, skip and make sure to continue loop
// let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap(); // let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
// x.truncate(16); // x.truncate(16);
// x // x
// } else { // } else {
// "".to_string() // "".to_string()
// } // }
// }; // };
// let date_created: DateTime<Utc> = metadata.created().unwrap().into(); // let date_created: DateTime<Utc> = metadata.created().unwrap().into();
// let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true); // let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
let values = format!( let values = format!(
"({}, {}, {}, \"{}\", \"{}\", \"{}\", {})", "({}, {}, {}, \"{}\", \"{}\", \"{}\", {})",
id, id,
is_dir, is_dir,
location.id, location.id,
materialized_path, materialized_path,
name, name,
extension.to_lowercase(), extension.to_lowercase(),
parent_id parent_id
.clone() .clone()
.map(|id| format!("\"{}\"", &id)) .map(|id| format!("\"{}\"", &id))
.unwrap_or("NULL".to_string()), .unwrap_or("NULL".to_string()),
// parsed_date_created, // parsed_date_created,
// cas_id // cas_id
); );
println!("{}", values); println!("{}", values);
Ok(values) Ok(values)
} }
// extract name from OsStr returned by PathBuff // extract name from OsStr returned by PathBuff
fn extract_name(os_string: Option<&OsStr>) -> String { fn extract_name(os_string: Option<&OsStr>) -> String {
os_string os_string
.unwrap_or_default() .unwrap_or_default()
.to_str() .to_str()
.unwrap_or_default() .unwrap_or_default()
.to_owned() .to_owned()
} }
fn is_hidden(entry: &DirEntry) -> bool { fn is_hidden(entry: &DirEntry) -> bool {
entry entry
.file_name() .file_name()
.to_str() .to_str()
.map(|s| s.starts_with(".")) .map(|s| s.starts_with("."))
.unwrap_or(false) .unwrap_or(false)
} }
fn is_library(entry: &DirEntry) -> bool { fn is_library(entry: &DirEntry) -> bool {
entry entry
.path() .path()
.to_str() .to_str()
// make better this is shit // make better this is shit
.map(|s| s.contains("/Library/")) .map(|s| s.contains("/Library/"))
.unwrap_or(false) .unwrap_or(false)
} }
fn is_node_modules(entry: &DirEntry) -> bool { fn is_node_modules(entry: &DirEntry) -> bool {
entry entry
.file_name() .file_name()
.to_str() .to_str()
.map(|s| s.contains("node_modules")) .map(|s| s.contains("node_modules"))
.unwrap_or(false) .unwrap_or(false)
} }
fn is_app_bundle(entry: &DirEntry) -> bool { fn is_app_bundle(entry: &DirEntry) -> bool {
let is_dir = entry.metadata().unwrap().is_dir(); let is_dir = entry.metadata().unwrap().is_dir();
let contains_dot = entry let contains_dot = entry
.file_name() .file_name()
.to_str() .to_str()
.map(|s| s.contains(".app") | s.contains(".bundle")) .map(|s| s.contains(".app") | s.contains(".bundle"))
.unwrap_or(false); .unwrap_or(false);
let is_app_bundle = is_dir && contains_dot; let is_app_bundle = is_dir && contains_dot;
// if is_app_bundle { // if is_app_bundle {
// let path_buff = entry.path(); // let path_buff = entry.path();
// let path = path_buff.to_str().unwrap(); // let path = path_buff.to_str().unwrap();
// self::path(&path, ); // self::path(&path, );
// } // }
is_app_bundle is_app_bundle
} }

View file

@ -4,9 +4,9 @@ use thiserror::Error;
use ts_rs::TS; use ts_rs::TS;
use crate::{ use crate::{
crypto::encryption::EncryptionAlgorithm, crypto::encryption::EncryptionAlgorithm,
prisma::{self, file, file_path}, prisma::{self, file, file_path},
sys::SysError, sys::SysError,
}; };
pub mod cas; pub mod cas;
pub mod explorer; pub mod explorer;
@ -17,133 +17,133 @@ pub mod watcher;
#[derive(Debug, Clone, Serialize, Deserialize, TS)] #[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)] #[ts(export)]
pub struct File { pub struct File {
pub id: i32, pub id: i32,
pub cas_id: String, pub cas_id: String,
pub integrity_checksum: Option<String>, pub integrity_checksum: Option<String>,
pub size_in_bytes: String, pub size_in_bytes: String,
pub kind: FileKind, pub kind: FileKind,
pub hidden: bool, pub hidden: bool,
pub favorite: bool, pub favorite: bool,
pub important: bool, pub important: bool,
pub has_thumbnail: bool, pub has_thumbnail: bool,
pub has_thumbstrip: bool, pub has_thumbstrip: bool,
pub has_video_preview: bool, pub has_video_preview: bool,
// pub encryption: EncryptionAlgorithm, // pub encryption: EncryptionAlgorithm,
pub ipfs_id: Option<String>, pub ipfs_id: Option<String>,
pub comment: Option<String>, pub comment: Option<String>,
#[ts(type = "string")] #[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>, pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")] #[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>, pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")] #[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>, pub date_indexed: chrono::DateTime<chrono::Utc>,
pub paths: Vec<FilePath>, pub paths: Vec<FilePath>,
// pub media_data: Option<MediaData>, // pub media_data: Option<MediaData>,
// pub tags: Vec<Tag>, // pub tags: Vec<Tag>,
// pub label: Vec<Label>, // pub label: Vec<Label>,
} }
// A physical file path // A physical file path
#[derive(Debug, Clone, Serialize, Deserialize, TS)] #[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)] #[ts(export)]
pub struct FilePath { pub struct FilePath {
pub id: i32, pub id: i32,
pub is_dir: bool, pub is_dir: bool,
pub location_id: i32, pub location_id: i32,
pub materialized_path: String, pub materialized_path: String,
pub name: String, pub name: String,
pub extension: Option<String>, pub extension: Option<String>,
pub file_id: Option<i32>, pub file_id: Option<i32>,
pub parent_id: Option<i32>, pub parent_id: Option<i32>,
// pub temp_cas_id: Option<String>, // pub temp_cas_id: Option<String>,
pub has_local_thumbnail: bool, pub has_local_thumbnail: bool,
#[ts(type = "string")] #[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>, pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")] #[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>, pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")] #[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>, pub date_indexed: chrono::DateTime<chrono::Utc>,
} }
#[repr(i32)] #[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)] #[ts(export)]
pub enum FileKind { pub enum FileKind {
Unknown = 0, Unknown = 0,
Directory = 1, Directory = 1,
Package = 2, Package = 2,
Archive = 3, Archive = 3,
Image = 4, Image = 4,
Video = 5, Video = 5,
Audio = 6, Audio = 6,
Plaintext = 7, Plaintext = 7,
Alias = 8, Alias = 8,
} }
impl Into<File> for file::Data { impl Into<File> for file::Data {
fn into(self) -> File { fn into(self) -> File {
File { File {
id: self.id, id: self.id,
cas_id: self.cas_id, cas_id: self.cas_id,
integrity_checksum: self.integrity_checksum, integrity_checksum: self.integrity_checksum,
kind: IntEnum::from_int(self.kind).unwrap(), kind: IntEnum::from_int(self.kind).unwrap(),
size_in_bytes: self.size_in_bytes.to_string(), size_in_bytes: self.size_in_bytes.to_string(),
// encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(), // encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
ipfs_id: self.ipfs_id, ipfs_id: self.ipfs_id,
hidden: self.hidden, hidden: self.hidden,
favorite: self.favorite, favorite: self.favorite,
important: self.important, important: self.important,
has_thumbnail: self.has_thumbnail, has_thumbnail: self.has_thumbnail,
has_thumbstrip: self.has_thumbstrip, has_thumbstrip: self.has_thumbstrip,
has_video_preview: self.has_video_preview, has_video_preview: self.has_video_preview,
comment: self.comment, comment: self.comment,
date_created: self.date_created, date_created: self.date_created,
date_modified: self.date_modified, date_modified: self.date_modified,
date_indexed: self.date_indexed, date_indexed: self.date_indexed,
paths: vec![], paths: vec![],
} }
} }
} }
impl Into<FilePath> for file_path::Data { impl Into<FilePath> for file_path::Data {
fn into(self) -> FilePath { fn into(self) -> FilePath {
FilePath { FilePath {
id: self.id, id: self.id,
is_dir: self.is_dir, is_dir: self.is_dir,
materialized_path: self.materialized_path, materialized_path: self.materialized_path,
file_id: self.file_id, file_id: self.file_id,
parent_id: self.parent_id, parent_id: self.parent_id,
location_id: self.location_id, location_id: self.location_id,
date_indexed: self.date_indexed, date_indexed: self.date_indexed,
// permissions: self.permissions, // permissions: self.permissions,
has_local_thumbnail: false, has_local_thumbnail: false,
name: self.name, name: self.name,
extension: self.extension, extension: self.extension,
// temp_cas_id: self.temp_cas_id, // temp_cas_id: self.temp_cas_id,
date_created: self.date_created, date_created: self.date_created,
date_modified: self.date_modified, date_modified: self.date_modified,
} }
} }
} }
#[derive(Serialize, Deserialize, TS, Debug)] #[derive(Serialize, Deserialize, TS, Debug)]
#[ts(export)] #[ts(export)]
pub struct DirectoryWithContents { pub struct DirectoryWithContents {
pub directory: FilePath, pub directory: FilePath,
pub contents: Vec<File>, pub contents: Vec<File>,
} }
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum FileError { pub enum FileError {
#[error("Directory not found (path: {0:?})")] #[error("Directory not found (path: {0:?})")]
DirectoryNotFound(String), DirectoryNotFound(String),
#[error("File not found (path: {0:?})")] #[error("File not found (path: {0:?})")]
FileNotFound(String), FileNotFound(String),
#[error("Database error")] #[error("Database error")]
DatabaseError(#[from] prisma::QueryError), DatabaseError(#[from] prisma::QueryError),
#[error("System error")] #[error("System error")]
SysError(#[from] SysError), SysError(#[from] SysError),
} }

View file

@ -1,25 +1,25 @@
use std::path::Path; use std::path::Path;
use hotwatch::{ use hotwatch::{
blocking::{Flow, Hotwatch}, blocking::{Flow, Hotwatch},
Event, Event,
}; };
pub fn watch_dir(path: &str) { pub fn watch_dir(path: &str) {
let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!"); let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!");
hotwatch hotwatch
.watch(&path, |event: Event| { .watch(&path, |event: Event| {
if let Event::Write(path) = event { if let Event::Write(path) = event {
println!("{:?} changed!", path); println!("{:?} changed!", path);
// Flow::Exit // Flow::Exit
Flow::Continue Flow::Continue
} else { } else {
Flow::Continue Flow::Continue
} }
}) })
.expect("failed to watch file!"); .expect("failed to watch file!");
hotwatch.run(); hotwatch.run();
println!("watching directory {:?}", Path::new(&path)); println!("watching directory {:?}", Path::new(&path));
} }

View file

@ -1,12 +1,12 @@
use super::{ use super::{
worker::{Worker, WorkerContext}, worker::{Worker, WorkerContext},
JobError, JobError,
}; };
use crate::{ use crate::{
node::state, node::state,
prisma::{job, node}, prisma::{job, node},
sync::{crdt::Replicate, engine::SyncContext}, sync::{crdt::Replicate, engine::SyncContext},
CoreContext, CoreContext,
}; };
use anyhow::Result; use anyhow::Result;
use int_enum::IntEnum; use int_enum::IntEnum;
@ -19,166 +19,164 @@ const MAX_WORKERS: usize = 4;
#[async_trait::async_trait] #[async_trait::async_trait]
pub trait Job: Send + Sync + Debug { pub trait Job: Send + Sync + Debug {
async fn run(&self, ctx: WorkerContext) -> Result<()>; async fn run(&self, ctx: WorkerContext) -> Result<()>;
fn name(&self) -> &'static str; fn name(&self) -> &'static str;
} }
// jobs struct is maintained by the core // jobs struct is maintained by the core
pub struct Jobs { pub struct Jobs {
job_queue: Vec<Box<dyn Job>>, job_queue: Vec<Box<dyn Job>>,
// workers are spawned when jobs are picked off the queue // workers are spawned when jobs are picked off the queue
running_workers: HashMap<String, Arc<Mutex<Worker>>>, running_workers: HashMap<String, Arc<Mutex<Worker>>>,
} }
impl Jobs { impl Jobs {
pub fn new() -> Self { pub fn new() -> Self {
Self { Self {
job_queue: vec![], job_queue: vec![],
running_workers: HashMap::new(), running_workers: HashMap::new(),
} }
} }
pub async fn ingest(&mut self, ctx: &CoreContext, job: Box<dyn Job>) { pub async fn ingest(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
// create worker to process job // create worker to process job
if self.running_workers.len() < MAX_WORKERS { if self.running_workers.len() < MAX_WORKERS {
let worker = Worker::new(job); let worker = Worker::new(job);
let id = worker.id(); let id = worker.id();
let wrapped_worker = Arc::new(Mutex::new(worker)); let wrapped_worker = Arc::new(Mutex::new(worker));
Worker::spawn(wrapped_worker.clone(), ctx).await; Worker::spawn(wrapped_worker.clone(), ctx).await;
self.running_workers.insert(id, wrapped_worker); self.running_workers.insert(id, wrapped_worker);
} else { } else {
self.job_queue.push(job); self.job_queue.push(job);
} }
} }
pub fn ingest_queue(&mut self, ctx: &CoreContext, job: Box<dyn Job>) { pub fn ingest_queue(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
self.job_queue.push(job); self.job_queue.push(job);
} }
pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) { pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
// remove worker from running workers // remove worker from running workers
self.running_workers.remove(&job_id); self.running_workers.remove(&job_id);
// continue queue // continue queue
let job = self.job_queue.pop(); let job = self.job_queue.pop();
if let Some(job) = job { if let Some(job) = job {
self.ingest(ctx, job).await; self.ingest(ctx, job).await;
} }
} }
pub async fn get_running(&self) -> Vec<JobReport> { pub async fn get_running(&self) -> Vec<JobReport> {
let mut ret = vec![]; let mut ret = vec![];
for worker in self.running_workers.values() { for worker in self.running_workers.values() {
let worker = worker.lock().await; let worker = worker.lock().await;
ret.push(worker.job_report.clone()); ret.push(worker.job_report.clone());
} }
ret ret
} }
pub async fn get_history(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> { pub async fn get_history(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> {
let db = &ctx.database; let db = &ctx.database;
let jobs = db let jobs = db
.job() .job()
.find_many(vec![job::status::not(JobStatus::Running.int_value())]) .find_many(vec![job::status::not(JobStatus::Running.int_value())])
.exec() .exec()
.await?; .await?;
Ok(jobs.into_iter().map(|j| j.into()).collect()) Ok(jobs.into_iter().map(|j| j.into()).collect())
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub enum JobReportUpdate { pub enum JobReportUpdate {
TaskCount(usize), TaskCount(usize),
CompletedTaskCount(usize), CompletedTaskCount(usize),
Message(String), Message(String),
SecondsElapsed(u64), SecondsElapsed(u64),
} }
#[derive(Debug, Serialize, Deserialize, TS, Clone)] #[derive(Debug, Serialize, Deserialize, TS, Clone)]
#[ts(export)] #[ts(export)]
pub struct JobReport { pub struct JobReport {
pub id: String, pub id: String,
pub name: String, pub name: String,
// client_id: i32, // client_id: i32,
#[ts(type = "string")] #[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>, pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")] #[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>, pub date_modified: chrono::DateTime<chrono::Utc>,
pub status: JobStatus, pub status: JobStatus,
pub task_count: i32, pub task_count: i32,
pub completed_task_count: i32, pub completed_task_count: i32,
pub message: String, pub message: String,
// pub percentage_complete: f64, // pub percentage_complete: f64,
#[ts(type = "string")] #[ts(type = "string")]
pub seconds_elapsed: i32, pub seconds_elapsed: i32,
} }
// convert database struct into a resource struct // convert database struct into a resource struct
impl Into<JobReport> for job::Data { impl Into<JobReport> for job::Data {
fn into(self) -> JobReport { fn into(self) -> JobReport {
JobReport { JobReport {
id: self.id, id: self.id,
name: self.name, name: self.name,
// client_id: self.client_id, // client_id: self.client_id,
status: JobStatus::from_int(self.status).unwrap(), status: JobStatus::from_int(self.status).unwrap(),
task_count: self.task_count, task_count: self.task_count,
completed_task_count: self.completed_task_count, completed_task_count: self.completed_task_count,
date_created: self.date_created, date_created: self.date_created,
date_modified: self.date_modified, date_modified: self.date_modified,
message: String::new(), message: String::new(),
seconds_elapsed: self.seconds_elapsed, seconds_elapsed: self.seconds_elapsed,
} }
} }
} }
impl JobReport { impl JobReport {
pub fn new(uuid: String, name: String) -> Self { pub fn new(uuid: String, name: String) -> Self {
Self { Self {
id: uuid, id: uuid,
name, name,
// client_id: 0, // client_id: 0,
date_created: chrono::Utc::now(), date_created: chrono::Utc::now(),
date_modified: chrono::Utc::now(), date_modified: chrono::Utc::now(),
status: JobStatus::Queued, status: JobStatus::Queued,
task_count: 0, task_count: 0,
completed_task_count: 0, completed_task_count: 0,
message: String::new(), message: String::new(),
seconds_elapsed: 0, seconds_elapsed: 0,
} }
} }
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> { pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
let config = state::get(); let config = state::get();
ctx ctx.database
.database .job()
.job() .create(
.create( job::id::set(self.id.clone()),
job::id::set(self.id.clone()), job::name::set(self.name.clone()),
job::name::set(self.name.clone()), job::action::set(1),
job::action::set(1), job::nodes::link(node::id::equals(config.node_id)),
job::nodes::link(node::id::equals(config.node_id)), vec![],
vec![], )
) .exec()
.exec() .await?;
.await?; Ok(())
Ok(()) }
} pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> { ctx.database
ctx .job()
.database .find_unique(job::id::equals(self.id.clone()))
.job() .update(vec![
.find_unique(job::id::equals(self.id.clone())) job::status::set(self.status.int_value()),
.update(vec![ job::task_count::set(self.task_count),
job::status::set(self.status.int_value()), job::completed_task_count::set(self.completed_task_count),
job::task_count::set(self.task_count), job::date_modified::set(chrono::Utc::now()),
job::completed_task_count::set(self.completed_task_count), job::seconds_elapsed::set(self.seconds_elapsed),
job::date_modified::set(chrono::Utc::now()), ])
job::seconds_elapsed::set(self.seconds_elapsed), .exec()
]) .await?;
.exec() Ok(())
.await?; }
Ok(())
}
} }
#[derive(Clone)] #[derive(Clone)]
@ -186,19 +184,19 @@ pub struct JobReportCreate {}
#[async_trait::async_trait] #[async_trait::async_trait]
impl Replicate for JobReport { impl Replicate for JobReport {
type Create = JobReportCreate; type Create = JobReportCreate;
async fn create(_data: Self::Create, _ctx: SyncContext) {} async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {} async fn delete(_ctx: SyncContext) {}
} }
#[repr(i32)] #[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)] #[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)] #[ts(export)]
pub enum JobStatus { pub enum JobStatus {
Queued = 0, Queued = 0,
Running = 1, Running = 1,
Completed = 2, Completed = 2,
Canceled = 3, Canceled = 3,
Failed = 4, Failed = 4,
} }

View file

@ -8,8 +8,8 @@ pub mod worker;
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum JobError { pub enum JobError {
#[error("Failed to create job (job_id {job_id:?})")] #[error("Failed to create job (job_id {job_id:?})")]
CreateFailure { job_id: String }, CreateFailure { job_id: String },
#[error("Database error")] #[error("Database error")]
DatabaseError(#[from] prisma::QueryError), DatabaseError(#[from] prisma::QueryError),
} }

View file

@ -2,190 +2,186 @@ use super::jobs::{JobReport, JobReportUpdate, JobStatus};
use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent, Job}; use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent, Job};
use std::{sync::Arc, time::Duration}; use std::{sync::Arc, time::Duration};
use tokio::{ use tokio::{
sync::{ sync::{
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender}, mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
Mutex, Mutex,
}, },
time::{sleep, Instant}, time::{sleep, Instant},
}; };
// used to update the worker state from inside the worker thread // used to update the worker state from inside the worker thread
pub enum WorkerEvent { pub enum WorkerEvent {
Progressed(Vec<JobReportUpdate>), Progressed(Vec<JobReportUpdate>),
Completed, Completed,
Failed, Failed,
} }
enum WorkerState { enum WorkerState {
Pending(Box<dyn Job>, UnboundedReceiver<WorkerEvent>), Pending(Box<dyn Job>, UnboundedReceiver<WorkerEvent>),
Running, Running,
} }
#[derive(Clone)] #[derive(Clone)]
pub struct WorkerContext { pub struct WorkerContext {
pub uuid: String, pub uuid: String,
pub core_ctx: CoreContext, pub core_ctx: CoreContext,
pub sender: UnboundedSender<WorkerEvent>, pub sender: UnboundedSender<WorkerEvent>,
} }
impl WorkerContext { impl WorkerContext {
pub fn progress(&self, updates: Vec<JobReportUpdate>) { pub fn progress(&self, updates: Vec<JobReportUpdate>) {
self self.sender
.sender .send(WorkerEvent::Progressed(updates))
.send(WorkerEvent::Progressed(updates)) .unwrap_or(());
.unwrap_or(()); }
}
} }
// a worker is a dedicated thread that runs a single job // a worker is a dedicated thread that runs a single job
// once the job is complete the worker will exit // once the job is complete the worker will exit
pub struct Worker { pub struct Worker {
pub job_report: JobReport, pub job_report: JobReport,
state: WorkerState, state: WorkerState,
worker_sender: UnboundedSender<WorkerEvent>, worker_sender: UnboundedSender<WorkerEvent>,
} }
impl Worker { impl Worker {
pub fn new(job: Box<dyn Job>) -> Self { pub fn new(job: Box<dyn Job>) -> Self {
let (worker_sender, worker_receiver) = unbounded_channel(); let (worker_sender, worker_receiver) = unbounded_channel();
let uuid = uuid::Uuid::new_v4().to_string(); let uuid = uuid::Uuid::new_v4().to_string();
let name = job.name(); let name = job.name();
Self { Self {
state: WorkerState::Pending(job, worker_receiver), state: WorkerState::Pending(job, worker_receiver),
job_report: JobReport::new(uuid, name.to_string()), job_report: JobReport::new(uuid, name.to_string()),
worker_sender, worker_sender,
} }
} }
// spawns a thread and extracts channel sender to communicate with it // spawns a thread and extracts channel sender to communicate with it
pub async fn spawn(worker: Arc<Mutex<Self>>, ctx: &CoreContext) { pub async fn spawn(worker: Arc<Mutex<Self>>, ctx: &CoreContext) {
// we capture the worker receiver channel so state can be updated from inside the worker // we capture the worker receiver channel so state can be updated from inside the worker
let mut worker_mut = worker.lock().await; let mut worker_mut = worker.lock().await;
// extract owned job and receiver from Self // extract owned job and receiver from Self
let (job, worker_receiver) = let (job, worker_receiver) =
match std::mem::replace(&mut worker_mut.state, WorkerState::Running) { match std::mem::replace(&mut worker_mut.state, WorkerState::Running) {
WorkerState::Pending(job, worker_receiver) => { WorkerState::Pending(job, worker_receiver) => {
worker_mut.state = WorkerState::Running; worker_mut.state = WorkerState::Running;
(job, worker_receiver) (job, worker_receiver)
} }
WorkerState::Running => unreachable!(), WorkerState::Running => unreachable!(),
}; };
let worker_sender = worker_mut.worker_sender.clone(); let worker_sender = worker_mut.worker_sender.clone();
let core_ctx = ctx.clone(); let core_ctx = ctx.clone();
worker_mut.job_report.status = JobStatus::Running; worker_mut.job_report.status = JobStatus::Running;
worker_mut.job_report.create(&ctx).await.unwrap_or(()); worker_mut.job_report.create(&ctx).await.unwrap_or(());
// spawn task to handle receiving events from the worker // spawn task to handle receiving events from the worker
tokio::spawn(Worker::track_progress( tokio::spawn(Worker::track_progress(
worker.clone(), worker.clone(),
worker_receiver, worker_receiver,
ctx.clone(), ctx.clone(),
)); ));
let uuid = worker_mut.job_report.id.clone(); let uuid = worker_mut.job_report.id.clone();
// spawn task to handle running the job // spawn task to handle running the job
tokio::spawn(async move { tokio::spawn(async move {
let worker_ctx = WorkerContext { let worker_ctx = WorkerContext {
uuid, uuid,
core_ctx, core_ctx,
sender: worker_sender, sender: worker_sender,
}; };
let job_start = Instant::now(); let job_start = Instant::now();
// track time // track time
let sender = worker_ctx.sender.clone(); let sender = worker_ctx.sender.clone();
tokio::spawn(async move { tokio::spawn(async move {
loop { loop {
let elapsed = job_start.elapsed().as_secs(); let elapsed = job_start.elapsed().as_secs();
sender sender
.send(WorkerEvent::Progressed(vec![ .send(WorkerEvent::Progressed(vec![
JobReportUpdate::SecondsElapsed(elapsed), JobReportUpdate::SecondsElapsed(elapsed),
])) ]))
.unwrap_or(()); .unwrap_or(());
sleep(Duration::from_millis(1000)).await; sleep(Duration::from_millis(1000)).await;
} }
}); });
let result = job.run(worker_ctx.clone()).await; let result = job.run(worker_ctx.clone()).await;
if let Err(e) = result { if let Err(e) = result {
println!("job failed {:?}", e); println!("job failed {:?}", e);
worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(()); worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
} else { } else {
// handle completion // handle completion
worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(()); worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
} }
worker_ctx worker_ctx
.core_ctx .core_ctx
.internal_sender .internal_sender
.send(InternalEvent::JobComplete(worker_ctx.uuid.clone())) .send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
.unwrap_or(()); .unwrap_or(());
}); });
} }
pub fn id(&self) -> String { pub fn id(&self) -> String {
self.job_report.id.to_owned() self.job_report.id.to_owned()
} }
async fn track_progress( async fn track_progress(
worker: Arc<Mutex<Self>>, worker: Arc<Mutex<Self>>,
mut channel: UnboundedReceiver<WorkerEvent>, mut channel: UnboundedReceiver<WorkerEvent>,
ctx: CoreContext, ctx: CoreContext,
) { ) {
while let Some(command) = channel.recv().await { while let Some(command) = channel.recv().await {
let mut worker = worker.lock().await; let mut worker = worker.lock().await;
match command { match command {
WorkerEvent::Progressed(changes) => { WorkerEvent::Progressed(changes) => {
// protect against updates if job is not running // protect against updates if job is not running
if worker.job_report.status != JobStatus::Running { if worker.job_report.status != JobStatus::Running {
continue; continue;
}; };
for change in changes { for change in changes {
match change { match change {
JobReportUpdate::TaskCount(task_count) => { JobReportUpdate::TaskCount(task_count) => {
worker.job_report.task_count = task_count as i32; worker.job_report.task_count = task_count as i32;
} }
JobReportUpdate::CompletedTaskCount(completed_task_count) => { JobReportUpdate::CompletedTaskCount(completed_task_count) => {
worker.job_report.completed_task_count = completed_task_count as i32; worker.job_report.completed_task_count =
} completed_task_count as i32;
JobReportUpdate::Message(message) => { }
worker.job_report.message = message; JobReportUpdate::Message(message) => {
} worker.job_report.message = message;
JobReportUpdate::SecondsElapsed(seconds) => { }
worker.job_report.seconds_elapsed = seconds as i32; JobReportUpdate::SecondsElapsed(seconds) => {
} worker.job_report.seconds_elapsed = seconds as i32;
} }
} }
ctx }
.emit(CoreEvent::InvalidateQueryDebounced( ctx.emit(CoreEvent::InvalidateQueryDebounced(
ClientQuery::JobGetRunning, ClientQuery::JobGetRunning,
)) ))
.await; .await;
} }
WorkerEvent::Completed => { WorkerEvent::Completed => {
worker.job_report.status = JobStatus::Completed; worker.job_report.status = JobStatus::Completed;
worker.job_report.update(&ctx).await.unwrap_or(()); worker.job_report.update(&ctx).await.unwrap_or(());
ctx ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning)) .await;
.await; ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
ctx .await;
.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory)) break;
.await; }
break; WorkerEvent::Failed => {
} worker.job_report.status = JobStatus::Failed;
WorkerEvent::Failed => { worker.job_report.update(&ctx).await.unwrap_or(());
worker.job_report.status = JobStatus::Failed;
worker.job_report.update(&ctx).await.unwrap_or(());
ctx ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory)) .await;
.await; break;
break; }
} }
} }
} }
}
} }

View file

@ -1,6 +1,6 @@
use crate::{ use crate::{
file::cas::identifier::FileIdentifierJob, library::loader::get_library_path, file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
node::state::NodeState, node::state::NodeState,
}; };
use job::jobs::{Job, JobReport, Jobs}; use job::jobs::{Job, JobReport, Jobs};
use prisma::PrismaClient; use prisma::PrismaClient;
@ -8,8 +8,8 @@ use serde::{Deserialize, Serialize};
use std::{fs, sync::Arc}; use std::{fs, sync::Arc};
use thiserror::Error; use thiserror::Error;
use tokio::sync::{ use tokio::sync::{
mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender}, mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender},
oneshot, oneshot,
}; };
use ts_rs::TS; use ts_rs::TS;
@ -34,308 +34,310 @@ pub mod util;
// a wrapper around external input with a returning sender channel for core to respond // a wrapper around external input with a returning sender channel for core to respond
#[derive(Debug)] #[derive(Debug)]
pub struct ReturnableMessage<D, R = Result<CoreResponse, CoreError>> { pub struct ReturnableMessage<D, R = Result<CoreResponse, CoreError>> {
data: D, data: D,
return_sender: oneshot::Sender<R>, return_sender: oneshot::Sender<R>,
} }
// core controller is passed to the client to communicate with the core which runs in a dedicated thread // core controller is passed to the client to communicate with the core which runs in a dedicated thread
pub struct CoreController { pub struct CoreController {
query_sender: UnboundedSender<ReturnableMessage<ClientQuery>>, query_sender: UnboundedSender<ReturnableMessage<ClientQuery>>,
command_sender: UnboundedSender<ReturnableMessage<ClientCommand>>, command_sender: UnboundedSender<ReturnableMessage<ClientCommand>>,
} }
impl CoreController { impl CoreController {
pub async fn query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> { pub async fn query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
// a one time use channel to send and await a response // a one time use channel to send and await a response
let (sender, recv) = oneshot::channel(); let (sender, recv) = oneshot::channel();
self self.query_sender
.query_sender .send(ReturnableMessage {
.send(ReturnableMessage { data: query,
data: query, return_sender: sender,
return_sender: sender, })
}) .unwrap_or(());
.unwrap_or(()); // wait for response and return
// wait for response and return recv.await.unwrap_or(Err(CoreError::QueryError))
recv.await.unwrap_or(Err(CoreError::QueryError)) }
}
pub async fn command(&self, command: ClientCommand) -> Result<CoreResponse, CoreError> { pub async fn command(&self, command: ClientCommand) -> Result<CoreResponse, CoreError> {
let (sender, recv) = oneshot::channel(); let (sender, recv) = oneshot::channel();
self self.command_sender
.command_sender .send(ReturnableMessage {
.send(ReturnableMessage { data: command,
data: command, return_sender: sender,
return_sender: sender, })
}) .unwrap_or(());
.unwrap_or(());
recv.await.unwrap() recv.await.unwrap()
} }
} }
#[derive(Debug)] #[derive(Debug)]
pub enum InternalEvent { pub enum InternalEvent {
JobIngest(Box<dyn Job>), JobIngest(Box<dyn Job>),
JobQueue(Box<dyn Job>), JobQueue(Box<dyn Job>),
JobComplete(String), JobComplete(String),
} }
#[derive(Clone)] #[derive(Clone)]
pub struct CoreContext { pub struct CoreContext {
pub database: Arc<PrismaClient>, pub database: Arc<PrismaClient>,
pub event_sender: mpsc::Sender<CoreEvent>, pub event_sender: mpsc::Sender<CoreEvent>,
pub internal_sender: UnboundedSender<InternalEvent>, pub internal_sender: UnboundedSender<InternalEvent>,
} }
impl CoreContext { impl CoreContext {
pub fn spawn_job(&self, job: Box<dyn Job>) { pub fn spawn_job(&self, job: Box<dyn Job>) {
self self.internal_sender
.internal_sender .send(InternalEvent::JobIngest(job))
.send(InternalEvent::JobIngest(job)) .unwrap_or_else(|e| {
.unwrap_or_else(|e| { println!("Failed to spawn job. {:?}", e);
println!("Failed to spawn job. {:?}", e); });
}); }
} pub fn queue_job(&self, job: Box<dyn Job>) {
pub fn queue_job(&self, job: Box<dyn Job>) { self.internal_sender
self .send(InternalEvent::JobIngest(job))
.internal_sender .unwrap_or_else(|e| {
.send(InternalEvent::JobIngest(job)) println!("Failed to queue job. {:?}", e);
.unwrap_or_else(|e| { });
println!("Failed to queue job. {:?}", e); }
}); pub async fn emit(&self, event: CoreEvent) {
} self.event_sender.send(event).await.unwrap_or_else(|e| {
pub async fn emit(&self, event: CoreEvent) { println!("Failed to emit event. {:?}", e);
self.event_sender.send(event).await.unwrap_or_else(|e| { });
println!("Failed to emit event. {:?}", e); }
});
}
} }
pub struct Node { pub struct Node {
state: NodeState, state: NodeState,
jobs: job::jobs::Jobs, jobs: job::jobs::Jobs,
database: Arc<PrismaClient>, database: Arc<PrismaClient>,
// filetype_registry: library::TypeRegistry, // filetype_registry: library::TypeRegistry,
// extension_registry: library::ExtensionRegistry, // extension_registry: library::ExtensionRegistry,
// global messaging channels // global messaging channels
query_channel: ( query_channel: (
UnboundedSender<ReturnableMessage<ClientQuery>>, UnboundedSender<ReturnableMessage<ClientQuery>>,
UnboundedReceiver<ReturnableMessage<ClientQuery>>, UnboundedReceiver<ReturnableMessage<ClientQuery>>,
), ),
command_channel: ( command_channel: (
UnboundedSender<ReturnableMessage<ClientCommand>>, UnboundedSender<ReturnableMessage<ClientCommand>>,
UnboundedReceiver<ReturnableMessage<ClientCommand>>, UnboundedReceiver<ReturnableMessage<ClientCommand>>,
), ),
event_sender: mpsc::Sender<CoreEvent>, event_sender: mpsc::Sender<CoreEvent>,
// a channel for child threads to send events back to the core // a channel for child threads to send events back to the core
internal_channel: ( internal_channel: (
UnboundedSender<InternalEvent>, UnboundedSender<InternalEvent>,
UnboundedReceiver<InternalEvent>, UnboundedReceiver<InternalEvent>,
), ),
} }
impl Node { impl Node {
// create new instance of node, run startup tasks // create new instance of node, run startup tasks
pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver<CoreEvent>) { pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver<CoreEvent>) {
let (event_sender, event_recv) = mpsc::channel(100); let (event_sender, event_recv) = mpsc::channel(100);
data_dir = data_dir.join("spacedrive"); data_dir = data_dir.join("spacedrive");
let data_dir = data_dir.to_str().unwrap(); let data_dir = data_dir.to_str().unwrap();
// create data directory if it doesn't exist // create data directory if it doesn't exist
fs::create_dir_all(&data_dir).unwrap(); fs::create_dir_all(&data_dir).unwrap();
// prepare basic client state // prepare basic client state
let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap(); let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
// load from disk // load from disk
state state
.read_disk() .read_disk()
.unwrap_or(println!("Error: No node state found, creating new one...")); .unwrap_or(println!("Error: No node state found, creating new one..."));
state.save(); state.save();
println!("Node State: {:?}", state); println!("Node State: {:?}", state);
// connect to default library // connect to default library
let database = Arc::new( let database = Arc::new(
db::create_connection(&get_library_path(&data_dir)) db::create_connection(&get_library_path(&data_dir))
.await .await
.unwrap(), .unwrap(),
); );
let internal_channel = unbounded_channel::<InternalEvent>(); let internal_channel = unbounded_channel::<InternalEvent>();
let node = Node { let node = Node {
state, state,
query_channel: unbounded_channel(), query_channel: unbounded_channel(),
command_channel: unbounded_channel(), command_channel: unbounded_channel(),
jobs: Jobs::new(), jobs: Jobs::new(),
event_sender, event_sender,
database, database,
internal_channel, internal_channel,
}; };
#[cfg(feature = "p2p")] #[cfg(feature = "p2p")]
tokio::spawn(async move { tokio::spawn(async move {
p2p::listener::listen(None).await.unwrap_or(()); p2p::listener::listen(None).await.unwrap_or(());
}); });
(node, event_recv) (node, event_recv)
} }
pub fn get_context(&self) -> CoreContext { pub fn get_context(&self) -> CoreContext {
CoreContext { CoreContext {
database: self.database.clone(), database: self.database.clone(),
event_sender: self.event_sender.clone(), event_sender: self.event_sender.clone(),
internal_sender: self.internal_channel.0.clone(), internal_sender: self.internal_channel.0.clone(),
} }
} }
pub fn get_controller(&self) -> CoreController { pub fn get_controller(&self) -> CoreController {
CoreController { CoreController {
query_sender: self.query_channel.0.clone(), query_sender: self.query_channel.0.clone(),
command_sender: self.command_channel.0.clone(), command_sender: self.command_channel.0.clone(),
} }
} }
pub async fn start(&mut self) { pub async fn start(&mut self) {
let ctx = self.get_context(); let ctx = self.get_context();
loop { loop {
// listen on global messaging channels for incoming messages // listen on global messaging channels for incoming messages
tokio::select! { tokio::select! {
Some(msg) = self.query_channel.1.recv() => { Some(msg) = self.query_channel.1.recv() => {
let res = self.exec_query(msg.data).await; let res = self.exec_query(msg.data).await;
msg.return_sender.send(res).unwrap_or(()); msg.return_sender.send(res).unwrap_or(());
} }
Some(msg) = self.command_channel.1.recv() => { Some(msg) = self.command_channel.1.recv() => {
let res = self.exec_command(msg.data).await; let res = self.exec_command(msg.data).await;
msg.return_sender.send(res).unwrap_or(()); msg.return_sender.send(res).unwrap_or(());
} }
Some(event) = self.internal_channel.1.recv() => { Some(event) = self.internal_channel.1.recv() => {
match event { match event {
InternalEvent::JobIngest(job) => { InternalEvent::JobIngest(job) => {
self.jobs.ingest(&ctx, job).await; self.jobs.ingest(&ctx, job).await;
}, },
InternalEvent::JobQueue(job) => { InternalEvent::JobQueue(job) => {
self.jobs.ingest_queue(&ctx, job); self.jobs.ingest_queue(&ctx, job);
}, },
InternalEvent::JobComplete(id) => { InternalEvent::JobComplete(id) => {
self.jobs.complete(&ctx, id).await; self.jobs.complete(&ctx, id).await;
}, },
} }
} }
} }
} }
} }
// load library database + initialize client with db // load library database + initialize client with db
pub async fn initializer(&self) { pub async fn initializer(&self) {
println!("Initializing..."); println!("Initializing...");
let ctx = self.get_context(); let ctx = self.get_context();
if self.state.libraries.len() == 0 { if self.state.libraries.len() == 0 {
match library::loader::create(&ctx, None).await { match library::loader::create(&ctx, None).await {
Ok(library) => println!("Created new library: {:?}", library), Ok(library) => println!("Created new library: {:?}", library),
Err(e) => println!("Error creating library: {:?}", e), Err(e) => println!("Error creating library: {:?}", e),
} }
} else { } else {
for library in self.state.libraries.iter() { for library in self.state.libraries.iter() {
// init database for library // init database for library
match library::loader::load(&ctx, &library.library_path, &library.library_uuid).await { match library::loader::load(&ctx, &library.library_path, &library.library_uuid)
Ok(library) => println!("Loaded library: {:?}", library), .await
Err(e) => println!("Error loading library: {:?}", e), {
} Ok(library) => println!("Loaded library: {:?}", library),
} Err(e) => println!("Error loading library: {:?}", e),
} }
// init node data within library }
match node::LibraryNode::create(&self).await { }
Ok(_) => println!("Spacedrive online"), // init node data within library
Err(e) => println!("Error initializing node: {:?}", e), match node::LibraryNode::create(&self).await {
}; Ok(_) => println!("Spacedrive online"),
} Err(e) => println!("Error initializing node: {:?}", e),
};
}
async fn exec_command(&mut self, cmd: ClientCommand) -> Result<CoreResponse, CoreError> { async fn exec_command(&mut self, cmd: ClientCommand) -> Result<CoreResponse, CoreError> {
println!("Core command: {:?}", cmd); println!("Core command: {:?}", cmd);
let ctx = self.get_context(); let ctx = self.get_context();
Ok(match cmd { Ok(match cmd {
// CRUD for locations // CRUD for locations
ClientCommand::LocCreate { path } => { ClientCommand::LocCreate { path } => {
let loc = sys::locations::new_location_and_scan(&ctx, &path).await?; let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
ctx.queue_job(Box::new(FileIdentifierJob)); ctx.queue_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc) CoreResponse::LocCreate(loc)
} }
ClientCommand::LocUpdate { id: _, name: _ } => todo!(), ClientCommand::LocUpdate { id: _, name: _ } => todo!(),
ClientCommand::LocDelete { id: _ } => todo!(), ClientCommand::LocDelete { id: _ } => todo!(),
// CRUD for files // CRUD for files
ClientCommand::FileRead { id: _ } => todo!(), ClientCommand::FileRead { id: _ } => todo!(),
// ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(), // ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
ClientCommand::FileDelete { id: _ } => todo!(), ClientCommand::FileDelete { id: _ } => todo!(),
// CRUD for tags // CRUD for tags
ClientCommand::TagCreate { name: _, color: _ } => todo!(), ClientCommand::TagCreate { name: _, color: _ } => todo!(),
ClientCommand::TagAssign { ClientCommand::TagAssign {
file_id: _, file_id: _,
tag_id: _, tag_id: _,
} => todo!(), } => todo!(),
ClientCommand::TagDelete { id: _ } => todo!(), ClientCommand::TagDelete { id: _ } => todo!(),
// CRUD for libraries // CRUD for libraries
ClientCommand::SysVolumeUnmount { id: _ } => todo!(), ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
ClientCommand::LibDelete { id: _ } => todo!(), ClientCommand::LibDelete { id: _ } => todo!(),
ClientCommand::TagUpdate { name: _, color: _ } => todo!(), ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
ClientCommand::GenerateThumbsForLocation { id, path } => { ClientCommand::GenerateThumbsForLocation { id, path } => {
ctx.spawn_job(Box::new(ThumbnailJob { ctx.spawn_job(Box::new(ThumbnailJob {
location_id: id, location_id: id,
path, path,
background: false, // fix background: false, // fix
})); }));
CoreResponse::Success(()) CoreResponse::Success(())
} }
// ClientCommand::PurgeDatabase => { // ClientCommand::PurgeDatabase => {
// println!("Purging database..."); // println!("Purging database...");
// fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap(); // fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
// CoreResponse::Success(()) // CoreResponse::Success(())
// } // }
ClientCommand::IdentifyUniqueFiles => { ClientCommand::IdentifyUniqueFiles => {
ctx.spawn_job(Box::new(FileIdentifierJob)); ctx.spawn_job(Box::new(FileIdentifierJob));
CoreResponse::Success(()) CoreResponse::Success(())
} }
}) })
} }
// query sources of data // query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> { async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
#[cfg(fdebug_assertions)] #[cfg(fdebug_assertions)]
println!("Core query: {:?}", query); println!("Core query: {:?}", query);
let ctx = self.get_context(); let ctx = self.get_context();
Ok(match query { Ok(match query {
// return the client state from memory // return the client state from memory
ClientQuery::ClientGetState => CoreResponse::ClientGetState(self.state.clone()), ClientQuery::ClientGetState => CoreResponse::ClientGetState(self.state.clone()),
// get system volumes without saving to library // get system volumes without saving to library
ClientQuery::SysGetVolumes => { ClientQuery::SysGetVolumes => {
CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?) CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?)
} }
ClientQuery::SysGetLocations => { ClientQuery::SysGetLocations => {
CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?) CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?)
} }
// get location from library // get location from library
ClientQuery::SysGetLocation { id } => { ClientQuery::SysGetLocation { id } => {
CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?) CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?)
} }
// return contents of a directory for the explorer // return contents of a directory for the explorer
ClientQuery::LibGetExplorerDir { ClientQuery::LibGetExplorerDir {
path, path,
location_id, location_id,
limit: _, limit: _,
} => CoreResponse::LibGetExplorerDir( } => CoreResponse::LibGetExplorerDir(
file::explorer::open::open_dir(&ctx, &location_id, &path).await?, file::explorer::open::open_dir(&ctx, &location_id, &path).await?,
), ),
ClientQuery::LibGetTags => todo!(), ClientQuery::LibGetTags => todo!(),
ClientQuery::JobGetRunning => CoreResponse::JobGetRunning(self.jobs.get_running().await), ClientQuery::JobGetRunning => {
ClientQuery::JobGetHistory => CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?), CoreResponse::JobGetRunning(self.jobs.get_running().await)
ClientQuery::GetLibraryStatistics => { }
CoreResponse::GetLibraryStatistics(library::statistics::Statistics::calculate(&ctx).await?) ClientQuery::JobGetHistory => {
} CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?)
ClientQuery::GetNodes => todo!(), }
}) ClientQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics(
} library::statistics::Statistics::calculate(&ctx).await?,
),
ClientQuery::GetNodes => todo!(),
})
}
} }
// represents an event this library can emit // represents an event this library can emit
@ -343,26 +345,26 @@ impl Node {
#[serde(tag = "key", content = "params")] #[serde(tag = "key", content = "params")]
#[ts(export)] #[ts(export)]
pub enum ClientCommand { pub enum ClientCommand {
// Files // Files
FileRead { id: i32 }, FileRead { id: i32 },
// FileEncrypt { id: i32, algorithm: EncryptionAlgorithm }, // FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
FileDelete { id: i32 }, FileDelete { id: i32 },
// Library // Library
LibDelete { id: i32 }, LibDelete { id: i32 },
// Tags // Tags
TagCreate { name: String, color: String }, TagCreate { name: String, color: String },
TagUpdate { name: String, color: String }, TagUpdate { name: String, color: String },
TagAssign { file_id: i32, tag_id: i32 }, TagAssign { file_id: i32, tag_id: i32 },
TagDelete { id: i32 }, TagDelete { id: i32 },
// Locations // Locations
LocCreate { path: String }, LocCreate { path: String },
LocUpdate { id: i32, name: Option<String> }, LocUpdate { id: i32, name: Option<String> },
LocDelete { id: i32 }, LocDelete { id: i32 },
// System // System
SysVolumeUnmount { id: i32 }, SysVolumeUnmount { id: i32 },
GenerateThumbsForLocation { id: i32, path: String }, GenerateThumbsForLocation { id: i32, path: String },
// PurgeDatabase, // PurgeDatabase,
IdentifyUniqueFiles, IdentifyUniqueFiles,
} }
// represents an event this library can emit // represents an event this library can emit
@ -370,22 +372,22 @@ pub enum ClientCommand {
#[serde(tag = "key", content = "params")] #[serde(tag = "key", content = "params")]
#[ts(export)] #[ts(export)]
pub enum ClientQuery { pub enum ClientQuery {
ClientGetState, ClientGetState,
SysGetVolumes, SysGetVolumes,
LibGetTags, LibGetTags,
JobGetRunning, JobGetRunning,
JobGetHistory, JobGetHistory,
SysGetLocations, SysGetLocations,
SysGetLocation { SysGetLocation {
id: i32, id: i32,
}, },
LibGetExplorerDir { LibGetExplorerDir {
location_id: i32, location_id: i32,
path: String, path: String,
limit: i32, limit: i32,
}, },
GetLibraryStatistics, GetLibraryStatistics,
GetNodes, GetNodes,
} }
// represents an event this library can emit // represents an event this library can emit
@ -393,54 +395,54 @@ pub enum ClientQuery {
#[serde(tag = "key", content = "data")] #[serde(tag = "key", content = "data")]
#[ts(export)] #[ts(export)]
pub enum CoreEvent { pub enum CoreEvent {
// most all events should be once of these two // most all events should be once of these two
InvalidateQuery(ClientQuery), InvalidateQuery(ClientQuery),
InvalidateQueryDebounced(ClientQuery), InvalidateQueryDebounced(ClientQuery),
InvalidateResource(CoreResource), InvalidateResource(CoreResource),
NewThumbnail { cas_id: String }, NewThumbnail { cas_id: String },
Log { message: String }, Log { message: String },
DatabaseDisconnected { reason: Option<String> }, DatabaseDisconnected { reason: Option<String> },
} }
#[derive(Serialize, Deserialize, Debug, TS)] #[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "data")] #[serde(tag = "key", content = "data")]
#[ts(export)] #[ts(export)]
pub enum CoreResponse { pub enum CoreResponse {
Success(()), Success(()),
SysGetVolumes(Vec<sys::volumes::Volume>), SysGetVolumes(Vec<sys::volumes::Volume>),
SysGetLocation(sys::locations::LocationResource), SysGetLocation(sys::locations::LocationResource),
SysGetLocations(Vec<sys::locations::LocationResource>), SysGetLocations(Vec<sys::locations::LocationResource>),
LibGetExplorerDir(file::DirectoryWithContents), LibGetExplorerDir(file::DirectoryWithContents),
ClientGetState(NodeState), ClientGetState(NodeState),
LocCreate(sys::locations::LocationResource), LocCreate(sys::locations::LocationResource),
JobGetRunning(Vec<JobReport>), JobGetRunning(Vec<JobReport>),
JobGetHistory(Vec<JobReport>), JobGetHistory(Vec<JobReport>),
GetLibraryStatistics(library::statistics::Statistics), GetLibraryStatistics(library::statistics::Statistics),
} }
#[derive(Error, Debug)] #[derive(Error, Debug)]
pub enum CoreError { pub enum CoreError {
#[error("Query error")] #[error("Query error")]
QueryError, QueryError,
#[error("System error")] #[error("System error")]
SysError(#[from] sys::SysError), SysError(#[from] sys::SysError),
#[error("File error")] #[error("File error")]
FileError(#[from] file::FileError), FileError(#[from] file::FileError),
#[error("Job error")] #[error("Job error")]
JobError(#[from] job::JobError), JobError(#[from] job::JobError),
#[error("Database error")] #[error("Database error")]
DatabaseError(#[from] prisma::QueryError), DatabaseError(#[from] prisma::QueryError),
#[error("Database error")] #[error("Database error")]
LibraryError(#[from] library::LibraryError), LibraryError(#[from] library::LibraryError),
} }
#[derive(Serialize, Deserialize, Debug, TS)] #[derive(Serialize, Deserialize, Debug, TS)]
#[ts(export)] #[ts(export)]
pub enum CoreResource { pub enum CoreResource {
Client, Client,
Library, Library,
Location(sys::locations::LocationResource), Location(sys::locations::LocationResource),
File(file::File), File(file::File),
Job(JobReport), Job(JobReport),
Tag, Tag,
} }

View file

@ -11,86 +11,86 @@ pub static LIBRARY_DB_NAME: &str = "library.db";
pub static DEFAULT_NAME: &str = "My Library"; pub static DEFAULT_NAME: &str = "My Library";
pub fn get_library_path(data_path: &str) -> String { pub fn get_library_path(data_path: &str) -> String {
let path = data_path.to_owned(); let path = data_path.to_owned();
format!("{}/{}", path, LIBRARY_DB_NAME) format!("{}/{}", path, LIBRARY_DB_NAME)
} }
pub async fn get(core: &Node) -> Result<library::Data, LibraryError> { pub async fn get(core: &Node) -> Result<library::Data, LibraryError> {
let config = state::get(); let config = state::get();
let db = &core.database; let db = &core.database;
let library_state = config.get_current_library(); let library_state = config.get_current_library();
println!("{:?}", library_state); println!("{:?}", library_state);
// get library from db // get library from db
let library = match db let library = match db
.library() .library()
.find_unique(library::pub_id::equals(library_state.library_uuid.clone())) .find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
.exec() .exec()
.await? .await?
{ {
Some(library) => Ok(library), Some(library) => Ok(library),
None => { None => {
// update config library state to offline // update config library state to offline
// config.libraries // config.libraries
Err(anyhow::anyhow!("library_not_found")) Err(anyhow::anyhow!("library_not_found"))
} }
}; };
Ok(library.unwrap()) Ok(library.unwrap())
} }
pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Result<()> { pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Result<()> {
let mut config = state::get(); let mut config = state::get();
println!("Initializing library: {} {}", &library_id, library_path); println!("Initializing library: {} {}", &library_id, library_path);
if config.current_library_uuid != library_id { if config.current_library_uuid != library_id {
config.current_library_uuid = library_id.to_string(); config.current_library_uuid = library_id.to_string();
config.save(); config.save();
} }
// create connection with library database & run migrations // create connection with library database & run migrations
migrate::run_migrations(&ctx).await?; migrate::run_migrations(&ctx).await?;
// if doesn't exist, mark as offline // if doesn't exist, mark as offline
Ok(()) Ok(())
} }
pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<()> { pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<()> {
let mut config = state::get(); let mut config = state::get();
let uuid = Uuid::new_v4().to_string(); let uuid = Uuid::new_v4().to_string();
println!("Creating library {:?}, UUID: {:?}", name, uuid); println!("Creating library {:?}, UUID: {:?}", name, uuid);
let library_state = LibraryState { let library_state = LibraryState {
library_uuid: uuid.clone(), library_uuid: uuid.clone(),
library_path: get_library_path(&config.data_path), library_path: get_library_path(&config.data_path),
..LibraryState::default() ..LibraryState::default()
}; };
migrate::run_migrations(&ctx).await?; migrate::run_migrations(&ctx).await?;
config.libraries.push(library_state); config.libraries.push(library_state);
config.current_library_uuid = uuid; config.current_library_uuid = uuid;
config.save(); config.save();
let db = &ctx.database; let db = &ctx.database;
let _library = db let _library = db
.library() .library()
.create( .create(
library::pub_id::set(config.current_library_uuid), library::pub_id::set(config.current_library_uuid),
library::name::set(name.unwrap_or(DEFAULT_NAME.into())), library::name::set(name.unwrap_or(DEFAULT_NAME.into())),
vec![], vec![],
) )
.exec() .exec()
.await; .await;
println!("library created in database: {:?}", _library); println!("library created in database: {:?}", _library);
Ok(()) Ok(())
} }

Some files were not shown because too many files have changed in this diff Show more