format using tabs

maxichrome 2022-05-22 23:07:35 -05:00
parent 198f52af20
commit a0d0938001
No known key found for this signature in database
GPG key ID: DDC459310E98B6AB
150 changed files with 5021 additions and 4855 deletions


@ -1,6 +1,6 @@
name: 🐞 Bug Report
description: Report a bug
labels:
labels:
- kind/bug
- status/needs-triage
@ -43,8 +43,8 @@ body:
id: info
attributes:
label: Platform and versions
description: "Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if revelant."
render: shell
description: 'Please include the output of `pnpm --version && cargo --version && rustc --version` along with information about your Operating System such as version and/or specific distribution if relevant.'
render: Shell
validations:
required: true
@ -52,8 +52,8 @@ body:
id: logs
attributes:
label: Stack trace
render: shell
render: Shell
- type: textarea
id: context
attributes:


@ -1,3 +1,5 @@
# tell yaml plugin that this is the config file and not a template of its own:
# yaml-language-server: $schema=https://json.schemastore.org/github-issue-config.json
blank_issues_enabled: false
contact_links:
- name: 📝 Report Typo
@ -11,4 +13,4 @@ contact_links:
about: Suggest any ideas you have using our discussion forums.
- name: 💬 Discord Chat
url: https://discord.gg/gTaF2Z44f5
about: Ask questions and talk to other Spacedrive users and the maintainers
about: Ask questions and talk to other Spacedrive users and the maintainers


@ -1,4 +1,4 @@
name: Build Server Image
name: Build Server Image
description: Builds and publishes the docker image for the Spacedrive server
inputs:
gh_token:


@ -3,6 +3,6 @@ const core = require('@actions/core');
const exec = require('@actions/exec');
const github = require('@actions/github');
// const folders =
// const folders =
exec.exec('brew', ['install', 'ffmpeg']);


@ -1,17 +1,17 @@
{
"name": "install-ffmpeg-macos",
"version": "0.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Brendan Allan",
"license": "ISC",
"dependencies": {
"@actions/core": "^1.6.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.0.1"
}
"name": "install-ffmpeg-macos",
"version": "0.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "Brendan Allan",
"license": "ISC",
"dependencies": {
"@actions/core": "^1.6.0",
"@actions/exec": "^1.1.1",
"@actions/github": "^5.0.1"
}
}


@ -1,10 +1,9 @@
<!-- Put any information about this PR up here -->
<!-- Which issue does this PR close? -->
<!-- If this PR does not have a corresponding issue,
make sure one gets created before you create this PR.
You can create a bug report or feature request at
https://github.com/spacedriveapp/spacedrive/issues/new/choose -->
Closes #(issue)


@ -1,5 +1,4 @@
hard_tabs = true
tab_spaces = 4
match_block_trailing_comma = true
max_width = 90
newline_style = "Unix"

.vscode/settings.json vendored

@ -1,26 +1,28 @@
{
"cSpell.words": [
"actix",
"bpfrpt",
"consts",
"creationdate",
"ipfs",
"Keepsafe",
"pathctx",
"prismjs",
"proptype",
"quicktime",
"repr",
"Roadmap",
"svgr",
"tailwindcss",
"trivago",
"tsparticles",
"upsert"
],
"[rust]": {
"editor.defaultFormatter": "matklad.rust-analyzer"
},
"rust-analyzer.procMacro.enable": true,
"rust-analyzer.diagnostics.experimental.enable": false
"cSpell.words": [
"actix",
"bpfrpt",
"consts",
"creationdate",
"ipfs",
"Keepsafe",
"pathctx",
"prismjs",
"proptype",
"quicktime",
"repr",
"Roadmap",
"svgr",
"tailwindcss",
"trivago",
"tsparticles",
"upsert"
],
"[rust]": {
"editor.defaultFormatter": "matklad.rust-analyzer"
},
"rust-analyzer.procMacro.enable": true,
"rust-analyzer.diagnostics.experimental.enable": false,
"rust-analyzer.inlayHints.parameterHints.enable": false,
"rust-analyzer.inlayHints.typeHints.enable": false
}


@ -1,4 +1,3 @@
# Contributor Covenant Code of Conduct
## Our Pledge
@ -18,23 +17,23 @@ diverse, inclusive, and healthy community.
Examples of behavior that contributes to a positive environment for our
community include:
* Demonstrating empathy and kindness toward other people
* Being respectful of differing opinions, viewpoints, and experiences
* Giving and gracefully accepting constructive feedback
* Accepting responsibility and apologizing to those affected by our mistakes,
- Demonstrating empathy and kindness toward other people
- Being respectful of differing opinions, viewpoints, and experiences
- Giving and gracefully accepting constructive feedback
- Accepting responsibility and apologizing to those affected by our mistakes,
and learning from the experience
* Focusing on what is best not just for us as individuals, but for the
- Focusing on what is best not just for us as individuals, but for the
overall community
Examples of unacceptable behavior include:
* The use of sexualized language or imagery, and sexual attention or
- The use of sexualized language or imagery, and sexual attention or
advances of any kind
* Trolling, insulting or derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or email
- Trolling, insulting or derogatory comments, and personal or political attacks
- Public or private harassment
- Publishing others' private information, such as a physical or email
address, without their explicit permission
* Other conduct which could reasonably be considered inappropriate in a
- Other conduct which could reasonably be considered inappropriate in a
professional setting
## Enforcement Responsibilities
@ -107,7 +106,7 @@ Violating these terms may lead to a permanent ban.
### 4. Permanent Ban
**Community Impact**: Demonstrating a pattern of violation of community
standards, including sustained inappropriate behavior, harassment of an
standards, including sustained inappropriate behavior, harassment of an
individual, or aggression toward or disparagement of classes of individuals.
**Consequence**: A permanent ban from any sort of public interaction within
@ -119,15 +118,15 @@ This Code of Conduct is adapted from the [Contributor Covenant][homepage],
version 2.0, available at
[https://www.contributor-covenant.org/version/2/0/code_of_conduct.html][v2.0].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][Mozilla CoC].
Community Impact Guidelines were inspired by
[Mozilla's code of conduct enforcement ladder][mozilla coc].
For answers to common questions about this code of conduct, see the FAQ at
[https://www.contributor-covenant.org/faq][FAQ]. Translations are available
[https://www.contributor-covenant.org/faq][faq]. Translations are available
at [https://www.contributor-covenant.org/translations][translations].
[homepage]: https://www.contributor-covenant.org
[v2.0]: https://www.contributor-covenant.org/version/2/0/code_of_conduct.html
[Mozilla CoC]: https://github.com/mozilla/diversity
[FAQ]: https://www.contributor-covenant.org/faq
[mozilla coc]: https://github.com/mozilla/diversity
[faq]: https://www.contributor-covenant.org/faq
[translations]: https://www.contributor-covenant.org/translations


@ -61,17 +61,18 @@ If you are having issues ensure you are using the following versions of Rust and
### Pull Request
When you're finished with the changes, create a pull request, also known as a PR.
- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
- Fill the "Ready for review" template so that we can review your PR. This template helps reviewers understand your changes as well as the purpose of your pull request.
- Don't forget to [link PR to issue](https://docs.github.com/en/issues/tracking-your-work-with-issues/linking-a-pull-request-to-an-issue) if you are solving one.
- Enable the checkbox to [allow maintainer edits](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/allowing-changes-to-a-pull-request-branch-created-from-a-fork) so the branch can be updated for a merge.
Once you submit your PR, a team member will review your proposal. We may ask questions or request additional information.
Once you submit your PR, a team member will review your proposal. We may ask questions or request additional information.
- We may ask for changes to be made before a PR can be merged, either using [suggested changes](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/incorporating-feedback-in-your-pull-request) or pull request comments. You can apply suggested changes directly through the UI. You can make any other changes in your fork, then commit them to your branch.
- As you update your PR and apply changes, mark each conversation as [resolved](https://docs.github.com/en/github/collaborating-with-issues-and-pull-requests/commenting-on-a-pull-request#resolving-conversations).
- If you run into any merge issues, check out this [git tutorial](https://lab.github.com/githubtraining/managing-merge-conflicts) to help you resolve merge conflicts and other issues.
### Your PR is merged!
Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:.
Congratulations :tada::tada: The Spacedrive team thanks you :sparkles:.
Once your PR is merged, your contributions will be included in the next release of the application.


@ -38,7 +38,6 @@ Organize files across many devices in one place. From cloud services to offline
For independent creatives, hoarders and those that want to own their digital footprint. Spacedrive provides a file management experience like no other, and it's completely free.
<p align="center">
<img src="https://raw.githubusercontent.com/spacedriveapp/.github/main/profile/app.png" alt="Logo">
<br />


@ -1,41 +1,41 @@
{
"name": "@sd/desktop",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"private": true,
"scripts": {
"vite": "vite",
"dev": "concurrently \"pnpm tauri dev\" \"vite\"",
"tauri": "tauri",
"build": "vite build"
},
"dependencies": {
"@sd/client": "workspace:*",
"@sd/core": "workspace:*",
"@sd/interface": "workspace:*",
"@sd/ui": "workspace:*",
"@tauri-apps/api": "^1.0.0-rc.3",
"react": "^18.0.0",
"react-dom": "^18.0.0"
},
"devDependencies": {
"@tauri-apps/cli": "^1.0.0-rc.8",
"@tauri-apps/tauricon": "github:tauri-apps/tauricon",
"@types/babel-core": "^6.25.7",
"@types/byte-size": "^8.1.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-router-dom": "^5.3.3",
"@types/react-window": "^1.8.5",
"@types/tailwindcss": "^3.0.10",
"@vitejs/plugin-react": "^1.3.1",
"concurrently": "^7.1.0",
"prettier": "^2.6.2",
"sass": "^1.50.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-filter-replace": "^0.1.9",
"vite-plugin-svgr": "^1.1.0"
}
"name": "@sd/desktop",
"version": "1.0.0",
"main": "index.js",
"license": "MIT",
"private": true,
"scripts": {
"vite": "vite",
"dev": "concurrently \"pnpm tauri dev\" \"vite\"",
"tauri": "tauri",
"build": "vite build"
},
"dependencies": {
"@sd/client": "workspace:*",
"@sd/core": "workspace:*",
"@sd/interface": "workspace:*",
"@sd/ui": "workspace:*",
"@tauri-apps/api": "^1.0.0-rc.3",
"react": "^18.0.0",
"react-dom": "^18.0.0"
},
"devDependencies": {
"@tauri-apps/cli": "^1.0.0-rc.8",
"@tauri-apps/tauricon": "github:tauri-apps/tauricon",
"@types/babel-core": "^6.25.7",
"@types/byte-size": "^8.1.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-router-dom": "^5.3.3",
"@types/react-window": "^1.8.5",
"@types/tailwindcss": "^3.0.10",
"@vitejs/plugin-react": "^1.3.1",
"concurrently": "^7.1.0",
"prettier": "^2.6.2",
"sass": "^1.50.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-filter-replace": "^0.1.9",
"vite-plugin-svgr": "^1.1.0"
}
}


@ -1,6 +1,5 @@
max_width = 100
hard_tabs = false
tab_spaces = 2
hard_tabs = true
newline_style = "Auto"
use_small_heuristics = "Default"
reorder_imports = true


@ -1,11 +1,11 @@
// use swift_rs::build_utils::{link_swift, link_swift_package};
fn main() {
// HOTFIX: compile the swift code for arm64
// std::env::set_var("CARGO_CFG_TARGET_ARCH", "arm64");
// HOTFIX: compile the swift code for arm64
// std::env::set_var("CARGO_CFG_TARGET_ARCH", "arm64");
// link_swift();
// link_swift_package("swift-lib", "../../../packages/macos/");
// link_swift();
// link_swift_package("swift-lib", "../../../packages/macos/");
tauri_build::build();
tauri_build::build();
}


@ -11,106 +11,106 @@ use window::WindowExt;
#[tauri::command(async)]
async fn client_query_transport(
core: tauri::State<'_, CoreController>,
data: ClientQuery,
core: tauri::State<'_, CoreController>,
data: ClientQuery,
) -> Result<CoreResponse, String> {
match core.query(data).await {
Ok(response) => Ok(response),
Err(err) => {
println!("query error: {:?}", err);
Err(err.to_string())
}
}
match core.query(data).await {
Ok(response) => Ok(response),
Err(err) => {
println!("query error: {:?}", err);
Err(err.to_string())
}
}
}
#[tauri::command(async)]
async fn client_command_transport(
core: tauri::State<'_, CoreController>,
data: ClientCommand,
core: tauri::State<'_, CoreController>,
data: ClientCommand,
) -> Result<CoreResponse, String> {
match core.command(data).await {
Ok(response) => Ok(response),
Err(err) => {
println!("command error: {:?}", err);
Err(err.to_string())
}
}
match core.command(data).await {
Ok(response) => Ok(response),
Err(err) => {
println!("command error: {:?}", err);
Err(err.to_string())
}
}
}
#[tauri::command(async)]
async fn app_ready(app_handle: tauri::AppHandle) {
let window = app_handle.get_window("main").unwrap();
let window = app_handle.get_window("main").unwrap();
window.show().unwrap();
window.show().unwrap();
#[cfg(target_os = "macos")]
{
std::thread::sleep(std::time::Duration::from_millis(1000));
println!("fixing shadow for, {:?}", window.ns_window().unwrap());
window.fix_shadow();
}
#[cfg(target_os = "macos")]
{
std::thread::sleep(std::time::Duration::from_millis(1000));
println!("fixing shadow for, {:?}", window.ns_window().unwrap());
window.fix_shadow();
}
}
#[tokio::main]
async fn main() {
let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
// create an instance of the core
let (mut node, mut event_receiver) = Node::new(data_dir).await;
// run startup tasks
node.initializer().await;
// extract the node controller
let controller = node.get_controller();
// throw the node into a dedicated thread
tokio::spawn(async move {
node.start().await;
});
// create tauri app
tauri::Builder::default()
// pass controller to the tauri state manager
.manage(controller)
.setup(|app| {
let app = app.handle();
let data_dir = path::data_dir().unwrap_or(std::path::PathBuf::from("./"));
// create an instance of the core
let (mut node, mut event_receiver) = Node::new(data_dir).await;
// run startup tasks
node.initializer().await;
// extract the node controller
let controller = node.get_controller();
// throw the node into a dedicated thread
tokio::spawn(async move {
node.start().await;
});
// create tauri app
tauri::Builder::default()
// pass controller to the tauri state manager
.manage(controller)
.setup(|app| {
let app = app.handle();
app.windows().iter().for_each(|(_, window)| {
window.hide().unwrap();
app.windows().iter().for_each(|(_, window)| {
window.hide().unwrap();
#[cfg(target_os = "windows")]
window.set_decorations(true).unwrap();
#[cfg(target_os = "windows")]
window.set_decorations(true).unwrap();
#[cfg(target_os = "macos")]
window.set_transparent_titlebar(true, true);
});
#[cfg(target_os = "macos")]
window.set_transparent_titlebar(true, true);
});
// core event transport
tokio::spawn(async move {
let mut last = Instant::now();
// handle stream output
while let Some(event) = event_receiver.recv().await {
match event {
CoreEvent::InvalidateQueryDebounced(_) => {
let current = Instant::now();
if current.duration_since(last) > Duration::from_millis(1000 / 60) {
last = current;
app.emit_all("core_event", &event).unwrap();
}
}
event => {
app.emit_all("core_event", &event).unwrap();
}
}
}
});
// core event transport
tokio::spawn(async move {
let mut last = Instant::now();
// handle stream output
while let Some(event) = event_receiver.recv().await {
match event {
CoreEvent::InvalidateQueryDebounced(_) => {
let current = Instant::now();
if current.duration_since(last) > Duration::from_millis(1000 / 60) {
last = current;
app.emit_all("core_event", &event).unwrap();
}
}
event => {
app.emit_all("core_event", &event).unwrap();
}
}
}
});
Ok(())
})
.on_menu_event(|event| menu::handle_menu_event(event))
.on_window_event(|event| window::handle_window_event(event))
.invoke_handler(tauri::generate_handler![
client_query_transport,
client_command_transport,
app_ready,
])
.menu(menu::get_menu())
.run(tauri::generate_context!())
.expect("error while running tauri application");
Ok(())
})
.on_menu_event(|event| menu::handle_menu_event(event))
.on_window_event(|event| window::handle_window_event(event))
.invoke_handler(tauri::generate_handler![
client_query_transport,
client_command_transport,
app_ready,
])
.menu(menu::get_menu())
.run(tauri::generate_context!())
.expect("error while running tauri application");
}


@ -3,88 +3,88 @@ use std::env::consts;
use tauri::{AboutMetadata, CustomMenuItem, Menu, MenuItem, Submenu, WindowMenuEvent, Wry};
pub(crate) fn get_menu() -> Menu {
match consts::OS {
"linux" => Menu::new(),
"macos" => custom_menu_bar(),
_ => Menu::new(),
}
match consts::OS {
"linux" => Menu::new(),
"macos" => custom_menu_bar(),
_ => Menu::new(),
}
}
fn custom_menu_bar() -> Menu {
// let quit = CustomMenuItem::new("quit".to_string(), "Quit");
// let close = CustomMenuItem::new("close".to_string(), "Close");
// let jeff = CustomMenuItem::new("jeff".to_string(), "Jeff");
// let submenu = Submenu::new(
// "File",
// Menu::new().add_item(quit).add_item(close).add_item(jeff),
// );
let spacedrive = Submenu::new(
"Spacedrive",
Menu::new()
.add_native_item(MenuItem::About(
"Spacedrive".to_string(),
AboutMetadata::new(),
)) // TODO: fill out about metadata
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Services)
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Hide)
.add_native_item(MenuItem::HideOthers)
.add_native_item(MenuItem::ShowAll)
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Quit),
);
// let quit = CustomMenuItem::new("quit".to_string(), "Quit");
// let close = CustomMenuItem::new("close".to_string(), "Close");
// let jeff = CustomMenuItem::new("jeff".to_string(), "Jeff");
// let submenu = Submenu::new(
// "File",
// Menu::new().add_item(quit).add_item(close).add_item(jeff),
// );
let spacedrive = Submenu::new(
"Spacedrive",
Menu::new()
.add_native_item(MenuItem::About(
"Spacedrive".to_string(),
AboutMetadata::new(),
)) // TODO: fill out about metadata
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Services)
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Hide)
.add_native_item(MenuItem::HideOthers)
.add_native_item(MenuItem::ShowAll)
.add_native_item(MenuItem::Separator)
.add_native_item(MenuItem::Quit),
);
let file = Submenu::new(
"File",
Menu::new()
.add_item(
CustomMenuItem::new("new_window".to_string(), "New Window")
.accelerator("CmdOrCtrl+N")
.disabled(),
)
.add_item(
CustomMenuItem::new("close".to_string(), "Close Window").accelerator("CmdOrCtrl+W"),
),
);
let edit = Submenu::new(
"Edit",
Menu::new()
.add_native_item(MenuItem::Copy)
.add_native_item(MenuItem::Paste),
);
let view = Submenu::new(
"View",
Menu::new()
.add_item(
CustomMenuItem::new("command_pallete".to_string(), "Command Pallete")
.accelerator("CmdOrCtrl+P"),
)
.add_item(CustomMenuItem::new("layout".to_string(), "Layout").disabled()),
);
let window = Submenu::new(
"Window",
Menu::new().add_native_item(MenuItem::EnterFullScreen),
);
let file = Submenu::new(
"File",
Menu::new()
.add_item(
CustomMenuItem::new("new_window".to_string(), "New Window")
.accelerator("CmdOrCtrl+N")
.disabled(),
)
.add_item(
CustomMenuItem::new("close".to_string(), "Close Window").accelerator("CmdOrCtrl+W"),
),
);
let edit = Submenu::new(
"Edit",
Menu::new()
.add_native_item(MenuItem::Copy)
.add_native_item(MenuItem::Paste),
);
let view = Submenu::new(
"View",
Menu::new()
.add_item(
CustomMenuItem::new("command_pallete".to_string(), "Command Pallete")
.accelerator("CmdOrCtrl+P"),
)
.add_item(CustomMenuItem::new("layout".to_string(), "Layout").disabled()),
);
let window = Submenu::new(
"Window",
Menu::new().add_native_item(MenuItem::EnterFullScreen),
);
let menu = Menu::new()
.add_submenu(spacedrive)
.add_submenu(file)
.add_submenu(edit)
.add_submenu(view)
.add_submenu(window);
let menu = Menu::new()
.add_submenu(spacedrive)
.add_submenu(file)
.add_submenu(edit)
.add_submenu(view)
.add_submenu(window);
menu
menu
}
pub(crate) fn handle_menu_event(event: WindowMenuEvent<Wry>) {
match event.menu_item_id() {
"quit" => {
std::process::exit(0);
}
"close" => {
event.window().close().unwrap();
}
_ => {}
}
match event.menu_item_id() {
"quit" => {
std::process::exit(0);
}
"close" => {
event.window().close().unwrap();
}
_ => {}
}
}


@ -1,93 +1,93 @@
use tauri::{GlobalWindowEvent, Runtime, Window, Wry};
pub(crate) fn handle_window_event(event: GlobalWindowEvent<Wry>) {
match event.event() {
_ => {}
}
match event.event() {
_ => {}
}
}
pub trait WindowExt {
#[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool);
#[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool);
#[cfg(target_os = "macos")]
fn fix_shadow(&self);
#[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool);
#[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool);
#[cfg(target_os = "macos")]
fn fix_shadow(&self);
}
impl<R: Runtime> WindowExt for Window<R> {
#[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool) {
use cocoa::{
appkit::{NSToolbar, NSWindow},
base::{nil, NO},
foundation::NSString,
};
#[cfg(target_os = "macos")]
fn set_toolbar(&self, shown: bool) {
use cocoa::{
appkit::{NSToolbar, NSWindow},
base::{nil, NO},
foundation::NSString,
};
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
if shown {
let toolbar =
NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat"));
toolbar.setShowsBaselineSeparator_(NO);
id.setToolbar_(toolbar);
} else {
id.setToolbar_(nil);
}
}
}
if shown {
let toolbar =
NSToolbar::alloc(nil).initWithIdentifier_(NSString::alloc(nil).init_str("wat"));
toolbar.setShowsBaselineSeparator_(NO);
id.setToolbar_(toolbar);
} else {
id.setToolbar_(nil);
}
}
}
#[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool) {
use cocoa::{
appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility},
base::{NO, YES},
};
#[cfg(target_os = "macos")]
fn set_transparent_titlebar(&self, transparent: bool, large: bool) {
use cocoa::{
appkit::{NSWindow, NSWindowStyleMask, NSWindowTitleVisibility},
base::{NO, YES},
};
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
let mut style_mask = id.styleMask();
// println!("existing style mask, {:#?}", style_mask);
style_mask.set(
NSWindowStyleMask::NSFullSizeContentViewWindowMask,
transparent,
);
style_mask.set(
NSWindowStyleMask::NSTexturedBackgroundWindowMask,
transparent,
);
style_mask.set(
NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask,
transparent && large,
);
id.setStyleMask_(style_mask);
let mut style_mask = id.styleMask();
// println!("existing style mask, {:#?}", style_mask);
style_mask.set(
NSWindowStyleMask::NSFullSizeContentViewWindowMask,
transparent,
);
style_mask.set(
NSWindowStyleMask::NSTexturedBackgroundWindowMask,
transparent,
);
style_mask.set(
NSWindowStyleMask::NSUnifiedTitleAndToolbarWindowMask,
transparent && large,
);
id.setStyleMask_(style_mask);
if large {
self.set_toolbar(true);
}
if large {
self.set_toolbar(true);
}
id.setTitleVisibility_(if transparent {
NSWindowTitleVisibility::NSWindowTitleHidden
} else {
NSWindowTitleVisibility::NSWindowTitleVisible
});
id.setTitleVisibility_(if transparent {
NSWindowTitleVisibility::NSWindowTitleHidden
} else {
NSWindowTitleVisibility::NSWindowTitleVisible
});
id.setTitlebarAppearsTransparent_(if transparent { YES } else { NO });
}
}
id.setTitlebarAppearsTransparent_(if transparent { YES } else { NO });
}
}
#[cfg(target_os = "macos")]
fn fix_shadow(&self) {
use cocoa::appkit::NSWindow;
#[cfg(target_os = "macos")]
fn fix_shadow(&self) {
use cocoa::appkit::NSWindow;
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
unsafe {
let id = self.ns_window().unwrap() as cocoa::base::id;
println!("recomputing shadow for window {:?}", id.title());
println!("recomputing shadow for window {:?}", id.title());
id.invalidateShadow();
}
}
id.invalidateShadow();
}
}
}


@ -1,83 +1,83 @@
{
"package": {
"productName": "Spacedrive",
"version": "0.1.0"
},
"build": {
"distDir": "../dist",
"devPath": "http://localhost:8001",
"beforeDevCommand": "",
"beforeBuildCommand": ""
},
"tauri": {
"macOSPrivateApi": true,
"bundle": {
"active": true,
"targets": "all",
"identifier": "app.spacedrive.desktop",
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
],
"resources": [],
"externalBin": [],
"copyright": "Jamie Pine",
"shortDescription": "The Universal File Explorer",
"longDescription": "A cross-platform file explorer, powered by an open source virtual distributed filesystem.",
"deb": {
"depends": [],
"useBootstrapper": false
},
"macOS": {
"frameworks": [],
"minimumSystemVersion": "",
"useBootstrapper": false,
"exceptionDomain": "",
"signingIdentity": null,
"entitlements": null
},
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
}
},
"updater": {
"active": false
},
"allowlist": {
"all": true,
"protocol": {
"assetScope": ["*"]
},
"dialog": {
"all": true,
"open": true,
"save": true
}
},
"windows": [
{
"title": "Spacedrive",
"width": 1200,
"height": 725,
"minWidth": 700,
"minHeight": 500,
"resizable": true,
"fullscreen": false,
"alwaysOnTop": false,
"focus": false,
"fileDropEnabled": false,
"decorations": true,
"transparent": true,
"center": true
}
],
"security": {
"csp": "default-src asset: https://asset.localhost blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
}
}
"package": {
"productName": "Spacedrive",
"version": "0.1.0"
},
"build": {
"distDir": "../dist",
"devPath": "http://localhost:8001",
"beforeDevCommand": "",
"beforeBuildCommand": ""
},
"tauri": {
"macOSPrivateApi": true,
"bundle": {
"active": true,
"targets": "all",
"identifier": "app.spacedrive.desktop",
"icon": [
"icons/32x32.png",
"icons/128x128.png",
"icons/128x128@2x.png",
"icons/icon.icns",
"icons/icon.ico"
],
"resources": [],
"externalBin": [],
"copyright": "Jamie Pine",
"shortDescription": "The Universal File Explorer",
"longDescription": "A cross-platform file explorer, powered by an open source virtual distributed filesystem.",
"deb": {
"depends": [],
"useBootstrapper": false
},
"macOS": {
"frameworks": [],
"minimumSystemVersion": "",
"useBootstrapper": false,
"exceptionDomain": "",
"signingIdentity": null,
"entitlements": null
},
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
}
},
"updater": {
"active": false
},
"allowlist": {
"all": true,
"protocol": {
"assetScope": ["*"]
},
"dialog": {
"all": true,
"open": true,
"save": true
}
},
"windows": [
{
"title": "Spacedrive",
"width": 1200,
"height": 725,
"minWidth": 700,
"minHeight": 500,
"resizable": true,
"fullscreen": false,
"alwaysOnTop": false,
"focus": false,
"fileDropEnabled": false,
"decorations": true,
"transparent": true,
"center": true
}
],
"security": {
"csp": "default-src asset: https://asset.localhost blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
}
}
}


@ -1,74 +1,74 @@
{
"package": {
"productName": "Spacedrive",
"version": "0.1.0"
},
"build": {
"distDir": "../dist",
"devPath": "http://localhost:8001",
"beforeDevCommand": "",
"beforeBuildCommand": ""
},
"tauri": {
"bundle": {
"active": true,
"targets": "all",
"identifier": "co.spacedrive.desktop",
"icon": ["icons/icon.icns"],
"resources": [],
"externalBin": [],
"copyright": "Jamie Pine",
"shortDescription": "Your personal virtual cloud.",
"longDescription": "Spacedrive is an open source virtual filesystem, a personal cloud powered by your everyday devices. Feature-rich benefits of the cloud, only its owned and hosted by you with security, privacy and ownership as a foundation. Spacedrive makes it possible to create a limitless directory of your digital life that will stand the test of time.",
"deb": {
"depends": [],
"useBootstrapper": false
},
"macOS": {
"frameworks": [],
"minimumSystemVersion": "",
"useBootstrapper": false,
"exceptionDomain": "",
"signingIdentity": null,
"entitlements": null
},
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
}
},
"updater": {
"active": false
},
"allowlist": {
"all": true,
"os": {
"all": true
},
"dialog": {
"all": true,
"open": true,
"save": true
}
},
"windows": [
{
"title": "Spacedrive",
"width": 1250,
"height": 625,
"resizable": true,
"fullscreen": false,
"alwaysOnTop": false,
"focus": true,
"fileDropEnabled": false,
"decorations": true,
"transparent": false,
"center": true
}
],
"security": {
"csp": "default-src asset: blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
}
}
"package": {
"productName": "Spacedrive",
"version": "0.1.0"
},
"build": {
"distDir": "../dist",
"devPath": "http://localhost:8001",
"beforeDevCommand": "",
"beforeBuildCommand": ""
},
"tauri": {
"bundle": {
"active": true,
"targets": "all",
"identifier": "co.spacedrive.desktop",
"icon": ["icons/icon.icns"],
"resources": [],
"externalBin": [],
"copyright": "Jamie Pine",
"shortDescription": "Your personal virtual cloud.",
"longDescription": "Spacedrive is an open source virtual filesystem, a personal cloud powered by your everyday devices. Feature-rich benefits of the cloud, only its owned and hosted by you with security, privacy and ownership as a foundation. Spacedrive makes it possible to create a limitless directory of your digital life that will stand the test of time.",
"deb": {
"depends": [],
"useBootstrapper": false
},
"macOS": {
"frameworks": [],
"minimumSystemVersion": "",
"useBootstrapper": false,
"exceptionDomain": "",
"signingIdentity": null,
"entitlements": null
},
"windows": {
"certificateThumbprint": null,
"digestAlgorithm": "sha256",
"timestampUrl": ""
}
},
"updater": {
"active": false
},
"allowlist": {
"all": true,
"os": {
"all": true
},
"dialog": {
"all": true,
"open": true,
"save": true
}
},
"windows": [
{
"title": "Spacedrive",
"width": 1250,
"height": 625,
"resizable": true,
"fullscreen": false,
"alwaysOnTop": false,
"focus": true,
"fileDropEnabled": false,
"decorations": true,
"transparent": false,
"center": true
}
],
"security": {
"csp": "default-src asset: blob: data: filesystem: ws: wss: http: https: tauri: 'unsafe-eval' 'unsafe-inline' 'self' img-src: 'self'"
}
}
}


@ -1,13 +1,13 @@
<!DOCTYPE html>
<html lang="en" class="dark">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/src/favicon.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Spacedrive</title>
</head>
<body style="overflow: hidden">
<div id="root"></div>
<script type="module" src="./index.tsx"></script>
</body>
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/src/favicon.svg" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Spacedrive</title>
</head>
<body style="overflow: hidden">
<div id="root"></div>
<script type="module" src="./index.tsx"></script>
</body>
</html>


@ -15,79 +15,79 @@ import { appWindow } from '@tauri-apps/api/window';
// bind state to core via Tauri
class Transport extends BaseTransport {
constructor() {
super();
constructor() {
super();
listen('core_event', (e: Event<CoreEvent>) => {
this.emit('core_event', e.payload);
});
}
async query(query: ClientQuery) {
return await invoke('client_query_transport', { data: query });
}
async command(query: ClientCommand) {
return await invoke('client_command_transport', { data: query });
}
listen('core_event', (e: Event<CoreEvent>) => {
this.emit('core_event', e.payload);
});
}
async query(query: ClientQuery) {
return await invoke('client_query_transport', { data: query });
}
async command(query: ClientCommand) {
return await invoke('client_command_transport', { data: query });
}
}
function App() {
function getPlatform(platform: string): Platform {
switch (platform) {
case 'darwin':
return 'macOS';
case 'win32':
return 'windows';
case 'linux':
return 'linux';
default:
return 'browser';
}
}
function getPlatform(platform: string): Platform {
switch (platform) {
case 'darwin':
return 'macOS';
case 'win32':
return 'windows';
case 'linux':
return 'linux';
default:
return 'browser';
}
}
const [platform, setPlatform] = useState<Platform>('macOS');
const [focused, setFocused] = useState(true);
const [platform, setPlatform] = useState<Platform>('macOS');
const [focused, setFocused] = useState(true);
useEffect(() => {
os.platform().then((platform) => setPlatform(getPlatform(platform)));
invoke('app_ready');
}, []);
useEffect(() => {
os.platform().then((platform) => setPlatform(getPlatform(platform)));
invoke('app_ready');
}, []);
useEffect(() => {
const unlistenFocus = listen('tauri://focus', () => setFocused(true));
const unlistenBlur = listen('tauri://blur', () => setFocused(false));
useEffect(() => {
const unlistenFocus = listen('tauri://focus', () => setFocused(true));
const unlistenBlur = listen('tauri://blur', () => setFocused(false));
return () => {
unlistenFocus.then((unlisten) => unlisten());
unlistenBlur.then((unlisten) => unlisten());
};
}, []);
return () => {
unlistenFocus.then((unlisten) => unlisten());
unlistenBlur.then((unlisten) => unlisten());
};
}, []);
return (
<SpacedriveInterface
useMemoryRouter
transport={new Transport()}
platform={platform}
convertFileSrc={function (url: string): string {
return convertFileSrc(url);
}}
openDialog={function (options: {
directory?: boolean | undefined;
}): Promise<string | string[]> {
return dialog.open(options);
}}
isFocused={focused}
onClose={() => appWindow.close()}
onFullscreen={() => appWindow.setFullscreen(true)}
onMinimize={() => appWindow.minimize()}
onOpen={(path: string) => shell.open(path)}
/>
);
return (
<SpacedriveInterface
useMemoryRouter
transport={new Transport()}
platform={platform}
convertFileSrc={function (url: string): string {
return convertFileSrc(url);
}}
openDialog={function (options: {
directory?: boolean | undefined;
}): Promise<string | string[]> {
return dialog.open(options);
}}
isFocused={focused}
onClose={() => appWindow.close()}
onFullscreen={() => appWindow.setFullscreen(true)}
onMinimize={() => appWindow.minimize()}
onOpen={(path: string) => shell.open(path)}
/>
);
}
const root = createRoot(document.getElementById('root')!);
root.render(
<React.StrictMode>
<App />
</React.StrictMode>
<React.StrictMode>
<App />
</React.StrictMode>
);


@ -1,7 +1,7 @@
/// <reference types="vite/client" />
declare interface ImportMetaEnv {
VITE_OS: string;
VITE_OS: string;
}
declare module '@babel/core' {}


@ -1,5 +1,5 @@
{
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
}


@ -1,27 +1,27 @@
import { defineConfig } from 'vite';
import react from '@vitejs/plugin-react';
import { name, version } from './package.json';
import svg from "vite-plugin-svgr"
import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/
export default defineConfig({
server: {
port: 8001
},
plugins: [
//@ts-ignore
react({
jsxRuntime: 'classic'
}),
svg({ svgrOptions: { icon: true } })
],
root: 'src',
publicDir: '../../packages/interface/src/assets',
define: {
pkgJson: { name, version }
},
build: {
outDir: '../dist',
assetsDir: '.'
}
server: {
port: 8001
},
plugins: [
//@ts-ignore
react({
jsxRuntime: 'classic'
}),
svg({ svgrOptions: { icon: true } })
],
root: 'src',
publicDir: '../../packages/interface/src/assets',
define: {
pkgJson: { name, version }
},
build: {
outDir: '../dist',
assetsDir: '.'
}
});


@ -1,23 +1,23 @@
<!DOCTYPE html>
<html lang="en" class="dark">
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Spacedrive — A file manager from the future.</title>
<meta
name="description"
content="Combine your drives and clouds into one database that you can organize and explore from any device. Designed for creators, hoarders and the painfully disorganized."
/>
<meta
name="keywords"
content="files,file manager,spacedrive,file explorer,vdfs,distributed filesystem,cas,content addressable storage,virtual filesystem,photos app, video organizer,video encoder,tags,tag based filesystem"
/>
<meta name="author" content="Jamie Pine" />
<meta name="robots" content="index, follow" />
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
<head>
<meta charset="UTF-8" />
<link rel="icon" type="image/svg+xml" href="/favicon.ico" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Spacedrive — A file manager from the future.</title>
<meta
name="description"
content="Combine your drives and clouds into one database that you can organize and explore from any device. Designed for creators, hoarders and the painfully disorganized."
/>
<meta
name="keywords"
content="files,file manager,spacedrive,file explorer,vdfs,distributed filesystem,cas,content addressable storage,virtual filesystem,photos app, video organizer,video encoder,tags,tag based filesystem"
/>
<meta name="author" content="Jamie Pine" />
<meta name="robots" content="index, follow" />
</head>
<body>
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>


@ -1,59 +1,59 @@
{
"name": "@sd/landing",
"private": true,
"version": "0.0.0",
"scripts": {
"dev": "vite",
"build": "vite build",
"serve": "vite preview"
},
"dependencies": {
"@fontsource/inter": "^4.5.7",
"@headlessui/react": "^1.5.0",
"@heroicons/react": "^1.0.6",
"@icons-pack/react-simple-icons": "^4.6.1",
"@sd/client": "workspace:*",
"@sd/core": "workspace:*",
"@sd/interface": "workspace:*",
"@sd/ui": "workspace:*",
"@tailwindcss/typography": "^0.5.2",
"@types/compression": "^1.7.2",
"@types/express": "^4.17.13",
"clsx": "^1.1.1",
"compression": "^1.7.4",
"express": "^4.17.3",
"phosphor-react": "^1.4.1",
"prismjs": "^1.28.0",
"react": "^18.0.0",
"react-device-detect": "^2.2.2",
"react-dom": "^18.0.0",
"react-helmet": "^6.1.0",
"react-router-dom": "6.3.0",
"react-tsparticles": "^2.0.6",
"simple-icons": "^6.19.0",
"tsparticles": "^2.0.6"
},
"devDependencies": {
"@babel/preset-react": "^7.16.7",
"@types/lodash": "^4.14.182",
"@types/prismjs": "^1.26.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-helmet": "^6.1.5",
"@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4",
"nodemon": "^2.0.15",
"postcss": "^8.4.12",
"sass": "^1.50.0",
"tailwind": "^4.0.0",
"ts-node": "^10.7.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-markdown": "^2.0.2",
"vite-plugin-md": "^0.13.0",
"vite-plugin-pages": "^0.23.0",
"vite-plugin-pages-sitemap": "^1.2.2",
"vite-plugin-ssr": "^0.3.64",
"vite-plugin-svgr": "^1.1.0"
}
"name": "@sd/landing",
"private": true,
"version": "0.0.0",
"scripts": {
"dev": "vite",
"build": "vite build",
"serve": "vite preview"
},
"dependencies": {
"@fontsource/inter": "^4.5.7",
"@headlessui/react": "^1.5.0",
"@heroicons/react": "^1.0.6",
"@icons-pack/react-simple-icons": "^4.6.1",
"@sd/client": "workspace:*",
"@sd/core": "workspace:*",
"@sd/interface": "workspace:*",
"@sd/ui": "workspace:*",
"@tailwindcss/typography": "^0.5.2",
"@types/compression": "^1.7.2",
"@types/express": "^4.17.13",
"clsx": "^1.1.1",
"compression": "^1.7.4",
"express": "^4.17.3",
"phosphor-react": "^1.4.1",
"prismjs": "^1.28.0",
"react": "^18.0.0",
"react-device-detect": "^2.2.2",
"react-dom": "^18.0.0",
"react-helmet": "^6.1.0",
"react-router-dom": "6.3.0",
"react-tsparticles": "^2.0.6",
"simple-icons": "^6.19.0",
"tsparticles": "^2.0.6"
},
"devDependencies": {
"@babel/preset-react": "^7.16.7",
"@types/lodash": "^4.14.182",
"@types/prismjs": "^1.26.0",
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@types/react-helmet": "^6.1.5",
"@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4",
"nodemon": "^2.0.15",
"postcss": "^8.4.12",
"sass": "^1.50.0",
"tailwind": "^4.0.0",
"ts-node": "^10.7.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-markdown": "^2.0.2",
"vite-plugin-md": "^0.13.0",
"vite-plugin-pages": "^0.23.0",
"vite-plugin-pages-sitemap": "^1.2.2",
"vite-plugin-ssr": "^0.3.64",
"vite-plugin-svgr": "^1.1.0"
}
}


@ -28,413 +28,416 @@
* --syntax-cursor-line: hsla(220, 100%, 80%, 0.04);
*/
code[class*="language-"],
pre[class*="language-"] {
background: hsl(220, 9%, 6%);
color: hsl(220, 14%, 71%);
text-shadow: 0 1px rgba(0, 0, 0, 0.3);
font-family: "Fira Code", "Fira Mono", Menlo, Consolas, "DejaVu Sans Mono", monospace;
direction: ltr;
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
line-height: 1.5;
-moz-tab-size: 2;
-o-tab-size: 2;
tab-size: 2;
-webkit-hyphens: none;
-moz-hyphens: none;
-ms-hyphens: none;
hyphens: none;
}
/* Selection */
code[class*="language-"]::-moz-selection,
code[class*="language-"] *::-moz-selection,
pre[class*="language-"] *::-moz-selection {
background: hsl(220, 13%, 28%);
color: inherit;
text-shadow: none;
}
code[class*="language-"]::selection,
code[class*="language-"] *::selection,
pre[class*="language-"] *::selection {
background: hsl(220, 13%, 28%);
color: inherit;
text-shadow: none;
}
/* Code blocks */
pre[class*="language-"] {
padding: 1em;
margin: 0.5em 0;
overflow: auto;
border-radius: 0.3em;
}
/* Inline code */
:not(pre) > code[class*="language-"] {
padding: 0.2em 0.3em;
border-radius: 0.3em;
white-space: normal;
}
/* Print */
@media print {
code[class*="language-"],
pre[class*="language-"] {
text-shadow: none;
}
}
.token.comment,
.token.prolog,
.token.cdata {
color: hsl(220, 10%, 40%);
}
.token.doctype,
.token.punctuation,
.token.entity {
color: hsl(220, 14%, 71%);
}
.token.attr-name,
.token.class-name,
.token.boolean,
.token.constant,
.token.number,
.token.atrule {
color: hsl(29, 54%, 61%);
}
.token.keyword {
color: hsl(286, 60%, 67%);
}
.token.property,
.token.tag,
.token.symbol,
.token.deleted,
.token.important {
color: hsl(355, 65%, 65%);
}
.token.selector,
.token.string,
.token.char,
.token.builtin,
.token.inserted,
.token.regex,
.token.attr-value,
.token.attr-value > .token.punctuation {
color: hsl(95, 38%, 62%);
}
.token.variable,
.token.operator,
.token.function {
color: hsl(207, 82%, 66%);
}
.token.url {
color: hsl(187, 47%, 55%);
}
/* HTML overrides */
.token.attr-value > .token.punctuation.attr-equals,
.token.special-attr > .token.attr-value > .token.value.css {
color: hsl(220, 14%, 71%);
}
/* CSS overrides */
.language-css .token.selector {
color: hsl(355, 65%, 65%);
}
.language-css .token.property {
color: hsl(220, 14%, 71%);
}
.language-css .token.function,
.language-css .token.url > .token.function {
color: hsl(187, 47%, 55%);
}
.language-css .token.url > .token.string.url {
color: hsl(95, 38%, 62%);
}
.language-css .token.important,
.language-css .token.atrule .token.rule {
color: hsl(286, 60%, 67%);
}
/* JS overrides */
.language-javascript .token.operator {
color: hsl(286, 60%, 67%);
}
.language-javascript .token.template-string > .token.interpolation > .token.interpolation-punctuation.punctuation {
color: hsl(5, 48%, 51%);
}
/* JSON overrides */
.language-json .token.operator {
color: hsl(220, 14%, 71%);
}
.language-json .token.null.keyword {
color: hsl(29, 54%, 61%);
}
/* MD overrides */
.language-markdown .token.url,
.language-markdown .token.url > .token.operator,
.language-markdown .token.url-reference.url > .token.string {
color: hsl(220, 14%, 71%);
}
.language-markdown .token.url > .token.content {
color: hsl(207, 82%, 66%);
}
.language-markdown .token.url > .token.url,
.language-markdown .token.url-reference.url {
color: hsl(187, 47%, 55%);
}
.language-markdown .token.blockquote.punctuation,
.language-markdown .token.hr.punctuation {
color: hsl(220, 10%, 40%);
font-style: italic;
}
.language-markdown .token.code-snippet {
color: hsl(95, 38%, 62%);
}
.language-markdown .token.bold .token.content {
color: hsl(29, 54%, 61%);
}
.language-markdown .token.italic .token.content {
color: hsl(286, 60%, 67%);
}
.language-markdown .token.strike .token.content,
.language-markdown .token.strike .token.punctuation,
.language-markdown .token.list.punctuation,
.language-markdown .token.title.important > .token.punctuation {
color: hsl(355, 65%, 65%);
}
/* General */
.token.bold {
font-weight: bold;
}
.token.comment,
.token.italic {
font-style: italic;
}
.token.entity {
cursor: help;
}
.token.namespace {
opacity: 0.8;
}
/* Plugin overrides */
/* Selectors should have higher specificity than those in the plugins' default stylesheets */
/* Show Invisibles plugin overrides */
.token.token.tab:not(:empty):before,
.token.token.cr:before,
.token.token.lf:before,
.token.token.space:before {
color: hsla(220, 14%, 71%, 0.15);
text-shadow: none;
}
/* Toolbar plugin overrides */
/* Space out all buttons and move them away from the right edge of the code block */
div.code-toolbar > .toolbar.toolbar > .toolbar-item {
margin-right: 0.4em;
}
/* Styling the buttons */
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span {
background: hsl(220, 13%, 26%);
color: hsl(220, 9%, 55%);
padding: 0.1em 0.4em;
border-radius: 0.3em;
}
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:focus,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:focus,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:focus {
background: hsl(220, 13%, 28%);
color: hsl(220, 14%, 71%);
}
/* Line Highlight plugin overrides */
/* The highlighted line itself */
.line-highlight.line-highlight {
background: hsla(220, 100%, 80%, 0.04);
}
/* Default line numbers in Line Highlight plugin */
.line-highlight.line-highlight:before,
.line-highlight.line-highlight[data-end]:after {
background: hsl(220, 13%, 26%);
color: hsl(220, 14%, 71%);
padding: 0.1em 0.6em;
border-radius: 0.3em;
box-shadow: 0 2px 0 0 rgba(0, 0, 0, 0.2); /* same as Toolbar plugin default */
}
/* Hovering over a linkable line number (in the gutter area) */
/* Requires Line Numbers plugin as well */
pre[id].linkable-line-numbers.linkable-line-numbers span.line-numbers-rows > span:hover:before {
background-color: hsla(220, 100%, 80%, 0.04);
}
/* Line Numbers and Command Line plugins overrides */
/* Line separating gutter from coding area */
.line-numbers.line-numbers .line-numbers-rows,
.command-line .command-line-prompt {
border-right-color: hsla(220, 14%, 71%, 0.15);
}
/* Stuff in the gutter */
.line-numbers .line-numbers-rows > span:before,
.command-line .command-line-prompt > span:before {
color: hsl(220, 14%, 45%);
}
/* Match Braces plugin overrides */
/* Note: Outline colour is inherited from the braces */
.rainbow-braces .token.token.punctuation.brace-level-1,
.rainbow-braces .token.token.punctuation.brace-level-5,
.rainbow-braces .token.token.punctuation.brace-level-9 {
color: hsl(355, 65%, 65%);
}
.rainbow-braces .token.token.punctuation.brace-level-2,
.rainbow-braces .token.token.punctuation.brace-level-6,
.rainbow-braces .token.token.punctuation.brace-level-10 {
color: hsl(95, 38%, 62%);
}
.rainbow-braces .token.token.punctuation.brace-level-3,
.rainbow-braces .token.token.punctuation.brace-level-7,
.rainbow-braces .token.token.punctuation.brace-level-11 {
color: hsl(207, 82%, 66%);
}
.rainbow-braces .token.token.punctuation.brace-level-4,
.rainbow-braces .token.token.punctuation.brace-level-8,
.rainbow-braces .token.token.punctuation.brace-level-12 {
color: hsl(286, 60%, 67%);
}
/* Diff Highlight plugin overrides */
/* Taken from https://github.com/atom/github/blob/master/styles/variables.less */
pre.diff-highlight > code .token.token.deleted:not(.prefix),
pre > code.diff-highlight .token.token.deleted:not(.prefix) {
background-color: hsla(353, 100%, 66%, 0.15);
}
pre.diff-highlight > code .token.token.deleted:not(.prefix)::-moz-selection,
pre.diff-highlight > code .token.token.deleted:not(.prefix) *::-moz-selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::-moz-selection {
background-color: hsla(353, 95%, 66%, 0.25);
}
pre.diff-highlight > code .token.token.deleted:not(.prefix)::selection,
pre.diff-highlight > code .token.token.deleted:not(.prefix) *::selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::selection {
background-color: hsla(353, 95%, 66%, 0.25);
}
pre.diff-highlight > code .token.token.inserted:not(.prefix),
pre > code.diff-highlight .token.token.inserted:not(.prefix) {
background-color: hsla(137, 100%, 55%, 0.15);
}
pre.diff-highlight > code .token.token.inserted:not(.prefix)::-moz-selection,
pre.diff-highlight > code .token.token.inserted:not(.prefix) *::-moz-selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::-moz-selection {
background-color: hsla(135, 73%, 55%, 0.25);
}
pre.diff-highlight > code .token.token.inserted:not(.prefix)::selection,
pre.diff-highlight > code .token.token.inserted:not(.prefix) *::selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::selection {
background-color: hsla(135, 73%, 55%, 0.25);
}
/* Previewers plugin overrides */
/* Based on https://github.com/atom-community/atom-ide-datatip/blob/master/styles/atom-ide-datatips.less and https://github.com/atom/atom/blob/master/packages/one-dark-ui */
/* Border around popup */
.prism-previewer.prism-previewer:before,
.prism-previewer-gradient.prism-previewer-gradient div {
border-color: hsl(224, 13%, 17%);
}
/* Angle and time should remain as circles and are hence not included */
.prism-previewer-color.prism-previewer-color:before,
.prism-previewer-gradient.prism-previewer-gradient div,
.prism-previewer-easing.prism-previewer-easing:before {
border-radius: 0.3em;
}
/* Triangles pointing to the code */
.prism-previewer.prism-previewer:after {
border-top-color: hsl(224, 13%, 17%);
}
.prism-previewer-flipped.prism-previewer-flipped.after {
border-bottom-color: hsl(224, 13%, 17%);
}
/* Background colour within the popup */
.prism-previewer-angle.prism-previewer-angle:before,
.prism-previewer-time.prism-previewer-time:before,
.prism-previewer-easing.prism-previewer-easing {
background: hsl(219, 13%, 22%);
}
/* For angle, this is the positive area (eg. 90deg will display one quadrant in this colour) */
/* For time, this is the alternate colour */
.prism-previewer-angle.prism-previewer-angle circle,
.prism-previewer-time.prism-previewer-time circle {
stroke: hsl(220, 14%, 71%);
stroke-opacity: 1;
}
/* Stroke colours of the handle, direction point, and vector itself */
.prism-previewer-easing.prism-previewer-easing circle,
.prism-previewer-easing.prism-previewer-easing path,
.prism-previewer-easing.prism-previewer-easing line {
stroke: hsl(220, 14%, 71%);
}
/* Fill colour of the handle */
.prism-previewer-easing.prism-previewer-easing circle {
fill: transparent;
}
code[class*='language-'],
pre[class*='language-'] {
background: hsl(220, 9%, 6%);
color: hsl(220, 14%, 71%);
text-shadow: 0 1px rgba(0, 0, 0, 0.3);
font-family: 'Fira Code', 'Fira Mono', Menlo, Consolas, 'DejaVu Sans Mono', monospace;
direction: ltr;
text-align: left;
white-space: pre;
word-spacing: normal;
word-break: normal;
line-height: 1.5;
-moz-tab-size: 2;
-o-tab-size: 2;
tab-size: 2;
-webkit-hyphens: none;
-moz-hyphens: none;
-ms-hyphens: none;
hyphens: none;
}
/* Selection */
code[class*='language-']::-moz-selection,
code[class*='language-'] *::-moz-selection,
pre[class*='language-'] *::-moz-selection {
background: hsl(220, 13%, 28%);
color: inherit;
text-shadow: none;
}
code[class*='language-']::selection,
code[class*='language-'] *::selection,
pre[class*='language-'] *::selection {
background: hsl(220, 13%, 28%);
color: inherit;
text-shadow: none;
}
/* Code blocks */
pre[class*='language-'] {
padding: 1em;
margin: 0.5em 0;
overflow: auto;
border-radius: 0.3em;
}
/* Inline code */
:not(pre) > code[class*='language-'] {
padding: 0.2em 0.3em;
border-radius: 0.3em;
white-space: normal;
}
/* Print */
@media print {
code[class*='language-'],
pre[class*='language-'] {
text-shadow: none;
}
}
.token.comment,
.token.prolog,
.token.cdata {
color: hsl(220, 10%, 40%);
}
.token.doctype,
.token.punctuation,
.token.entity {
color: hsl(220, 14%, 71%);
}
.token.attr-name,
.token.class-name,
.token.boolean,
.token.constant,
.token.number,
.token.atrule {
color: hsl(29, 54%, 61%);
}
.token.keyword {
color: hsl(286, 60%, 67%);
}
.token.property,
.token.tag,
.token.symbol,
.token.deleted,
.token.important {
color: hsl(355, 65%, 65%);
}
.token.selector,
.token.string,
.token.char,
.token.builtin,
.token.inserted,
.token.regex,
.token.attr-value,
.token.attr-value > .token.punctuation {
color: hsl(95, 38%, 62%);
}
.token.variable,
.token.operator,
.token.function {
color: hsl(207, 82%, 66%);
}
.token.url {
color: hsl(187, 47%, 55%);
}
/* HTML overrides */
.token.attr-value > .token.punctuation.attr-equals,
.token.special-attr > .token.attr-value > .token.value.css {
color: hsl(220, 14%, 71%);
}
/* CSS overrides */
.language-css .token.selector {
color: hsl(355, 65%, 65%);
}
.language-css .token.property {
color: hsl(220, 14%, 71%);
}
.language-css .token.function,
.language-css .token.url > .token.function {
color: hsl(187, 47%, 55%);
}
.language-css .token.url > .token.string.url {
color: hsl(95, 38%, 62%);
}
.language-css .token.important,
.language-css .token.atrule .token.rule {
color: hsl(286, 60%, 67%);
}
/* JS overrides */
.language-javascript .token.operator {
color: hsl(286, 60%, 67%);
}
.language-javascript
.token.template-string
> .token.interpolation
> .token.interpolation-punctuation.punctuation {
color: hsl(5, 48%, 51%);
}
/* JSON overrides */
.language-json .token.operator {
color: hsl(220, 14%, 71%);
}
.language-json .token.null.keyword {
color: hsl(29, 54%, 61%);
}
/* MD overrides */
.language-markdown .token.url,
.language-markdown .token.url > .token.operator,
.language-markdown .token.url-reference.url > .token.string {
color: hsl(220, 14%, 71%);
}
.language-markdown .token.url > .token.content {
color: hsl(207, 82%, 66%);
}
.language-markdown .token.url > .token.url,
.language-markdown .token.url-reference.url {
color: hsl(187, 47%, 55%);
}
.language-markdown .token.blockquote.punctuation,
.language-markdown .token.hr.punctuation {
color: hsl(220, 10%, 40%);
font-style: italic;
}
.language-markdown .token.code-snippet {
color: hsl(95, 38%, 62%);
}
.language-markdown .token.bold .token.content {
color: hsl(29, 54%, 61%);
}
.language-markdown .token.italic .token.content {
color: hsl(286, 60%, 67%);
}
.language-markdown .token.strike .token.content,
.language-markdown .token.strike .token.punctuation,
.language-markdown .token.list.punctuation,
.language-markdown .token.title.important > .token.punctuation {
color: hsl(355, 65%, 65%);
}
/* General */
.token.bold {
font-weight: bold;
}
.token.comment,
.token.italic {
font-style: italic;
}
.token.entity {
cursor: help;
}
.token.namespace {
opacity: 0.8;
}
/* Plugin overrides */
/* Selectors should have higher specificity than those in the plugins' default stylesheets */
/* Show Invisibles plugin overrides */
.token.token.tab:not(:empty):before,
.token.token.cr:before,
.token.token.lf:before,
.token.token.space:before {
color: hsla(220, 14%, 71%, 0.15);
text-shadow: none;
}
/* Toolbar plugin overrides */
/* Space out all buttons and move them away from the right edge of the code block */
div.code-toolbar > .toolbar.toolbar > .toolbar-item {
margin-right: 0.4em;
}
/* Styling the buttons */
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span {
background: hsl(220, 13%, 26%);
color: hsl(220, 9%, 55%);
padding: 0.1em 0.4em;
border-radius: 0.3em;
}
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > button:focus,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > a:focus,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:hover,
div.code-toolbar > .toolbar.toolbar > .toolbar-item > span:focus {
background: hsl(220, 13%, 28%);
color: hsl(220, 14%, 71%);
}
/* Line Highlight plugin overrides */
/* The highlighted line itself */
.line-highlight.line-highlight {
background: hsla(220, 100%, 80%, 0.04);
}
/* Default line numbers in Line Highlight plugin */
.line-highlight.line-highlight:before,
.line-highlight.line-highlight[data-end]:after {
background: hsl(220, 13%, 26%);
color: hsl(220, 14%, 71%);
padding: 0.1em 0.6em;
border-radius: 0.3em;
box-shadow: 0 2px 0 0 rgba(0, 0, 0, 0.2); /* same as Toolbar plugin default */
}
/* Hovering over a linkable line number (in the gutter area) */
/* Requires Line Numbers plugin as well */
pre[id].linkable-line-numbers.linkable-line-numbers span.line-numbers-rows > span:hover:before {
background-color: hsla(220, 100%, 80%, 0.04);
}
/* Line Numbers and Command Line plugins overrides */
/* Line separating gutter from coding area */
.line-numbers.line-numbers .line-numbers-rows,
.command-line .command-line-prompt {
border-right-color: hsla(220, 14%, 71%, 0.15);
}
/* Stuff in the gutter */
.line-numbers .line-numbers-rows > span:before,
.command-line .command-line-prompt > span:before {
color: hsl(220, 14%, 45%);
}
/* Match Braces plugin overrides */
/* Note: Outline colour is inherited from the braces */
.rainbow-braces .token.token.punctuation.brace-level-1,
.rainbow-braces .token.token.punctuation.brace-level-5,
.rainbow-braces .token.token.punctuation.brace-level-9 {
color: hsl(355, 65%, 65%);
}
.rainbow-braces .token.token.punctuation.brace-level-2,
.rainbow-braces .token.token.punctuation.brace-level-6,
.rainbow-braces .token.token.punctuation.brace-level-10 {
color: hsl(95, 38%, 62%);
}
.rainbow-braces .token.token.punctuation.brace-level-3,
.rainbow-braces .token.token.punctuation.brace-level-7,
.rainbow-braces .token.token.punctuation.brace-level-11 {
color: hsl(207, 82%, 66%);
}
.rainbow-braces .token.token.punctuation.brace-level-4,
.rainbow-braces .token.token.punctuation.brace-level-8,
.rainbow-braces .token.token.punctuation.brace-level-12 {
color: hsl(286, 60%, 67%);
}
/* Diff Highlight plugin overrides */
/* Taken from https://github.com/atom/github/blob/master/styles/variables.less */
pre.diff-highlight > code .token.token.deleted:not(.prefix),
pre > code.diff-highlight .token.token.deleted:not(.prefix) {
background-color: hsla(353, 100%, 66%, 0.15);
}
pre.diff-highlight > code .token.token.deleted:not(.prefix)::-moz-selection,
pre.diff-highlight > code .token.token.deleted:not(.prefix) *::-moz-selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::-moz-selection {
background-color: hsla(353, 95%, 66%, 0.25);
}
pre.diff-highlight > code .token.token.deleted:not(.prefix)::selection,
pre.diff-highlight > code .token.token.deleted:not(.prefix) *::selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.deleted:not(.prefix) *::selection {
background-color: hsla(353, 95%, 66%, 0.25);
}
pre.diff-highlight > code .token.token.inserted:not(.prefix),
pre > code.diff-highlight .token.token.inserted:not(.prefix) {
background-color: hsla(137, 100%, 55%, 0.15);
}
pre.diff-highlight > code .token.token.inserted:not(.prefix)::-moz-selection,
pre.diff-highlight > code .token.token.inserted:not(.prefix) *::-moz-selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix)::-moz-selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::-moz-selection {
background-color: hsla(135, 73%, 55%, 0.25);
}
pre.diff-highlight > code .token.token.inserted:not(.prefix)::selection,
pre.diff-highlight > code .token.token.inserted:not(.prefix) *::selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix)::selection,
pre > code.diff-highlight .token.token.inserted:not(.prefix) *::selection {
background-color: hsla(135, 73%, 55%, 0.25);
}
/* Previewers plugin overrides */
/* Based on https://github.com/atom-community/atom-ide-datatip/blob/master/styles/atom-ide-datatips.less and https://github.com/atom/atom/blob/master/packages/one-dark-ui */
/* Border around popup */
.prism-previewer.prism-previewer:before,
.prism-previewer-gradient.prism-previewer-gradient div {
border-color: hsl(224, 13%, 17%);
}
/* Angle and time should remain as circles and are hence not included */
.prism-previewer-color.prism-previewer-color:before,
.prism-previewer-gradient.prism-previewer-gradient div,
.prism-previewer-easing.prism-previewer-easing:before {
border-radius: 0.3em;
}
/* Triangles pointing to the code */
.prism-previewer.prism-previewer:after {
border-top-color: hsl(224, 13%, 17%);
}
.prism-previewer-flipped.prism-previewer-flipped.after {
border-bottom-color: hsl(224, 13%, 17%);
}
/* Background colour within the popup */
.prism-previewer-angle.prism-previewer-angle:before,
.prism-previewer-time.prism-previewer-time:before,
.prism-previewer-easing.prism-previewer-easing {
background: hsl(219, 13%, 22%);
}
/* For angle, this is the positive area (eg. 90deg will display one quadrant in this colour) */
/* For time, this is the alternate colour */
.prism-previewer-angle.prism-previewer-angle circle,
.prism-previewer-time.prism-previewer-time circle {
stroke: hsl(220, 14%, 71%);
stroke-opacity: 1;
}
/* Stroke colours of the handle, direction point, and vector itself */
.prism-previewer-easing.prism-previewer-easing circle,
.prism-previewer-easing.prism-previewer-easing path,
.prism-previewer-easing.prism-previewer-easing line {
stroke: hsl(220, 14%, 71%);
}
/* Fill colour of the handle */
.prism-previewer-easing.prism-previewer-easing circle {
fill: transparent;
}

View file

@@ -4,93 +4,93 @@ import { useEffect } from 'react';
import { isMobile } from 'react-device-detect';
export default function AppEmbed() {
const [showApp, setShowApp] = useState(false);
const [iFrameAppReady, setIframeAppReady] = useState(false);
const [forceImg, setForceImg] = useState(false);
const [imgFallback, setImageFallback] = useState(false);
const iFrame = useRef<HTMLIFrameElement>(null);
const [showApp, setShowApp] = useState(false);
const [iFrameAppReady, setIframeAppReady] = useState(false);
const [forceImg, setForceImg] = useState(false);
const [imgFallback, setImageFallback] = useState(false);
const iFrame = useRef<HTMLIFrameElement>(null);
function handleResize() {
if (window.innerWidth < 1000) {
setForceImg(true);
} else if (forceImg) {
setForceImg(false);
}
}
function handleResize() {
if (window.innerWidth < 1000) {
setForceImg(true);
} else if (forceImg) {
setForceImg(false);
}
}
useEffect(() => {
window.addEventListener('resize', handleResize);
handleResize();
return () => window.removeEventListener('resize', handleResize);
}, []);
useEffect(() => {
window.addEventListener('resize', handleResize);
handleResize();
return () => window.removeEventListener('resize', handleResize);
}, []);
function handleEvent(e: any) {
if (e.data === 'spacedrive-hello') {
if (!iFrameAppReady) setIframeAppReady(true);
}
}
function handleEvent(e: any) {
if (e.data === 'spacedrive-hello') {
if (!iFrameAppReady) setIframeAppReady(true);
}
}
// after five minutes kill the live demo
useEffect(() => {
const timer = setTimeout(() => {
setIframeAppReady(false);
}, 300000);
return () => clearTimeout(timer);
}, []);
// after five minutes kill the live demo
useEffect(() => {
const timer = setTimeout(() => {
setIframeAppReady(false);
}, 300000);
return () => clearTimeout(timer);
}, []);
useEffect(() => {
window.addEventListener('message', handleEvent, false);
setShowApp(true);
useEffect(() => {
window.addEventListener('message', handleEvent, false);
setShowApp(true);
return () => window.removeEventListener('message', handleEvent);
}, []);
return () => window.removeEventListener('message', handleEvent);
}, []);
useEffect(() => {
setTimeout(() => {
if (!iFrameAppReady) setImageFallback(true);
}, 1500);
}, []);
useEffect(() => {
setTimeout(() => {
if (!iFrameAppReady) setImageFallback(true);
}, 1500);
}, []);
const renderImage = (imgFallback && !iFrameAppReady) || forceImg;
const renderImage = (imgFallback && !iFrameAppReady) || forceImg;
const renderBloom = renderImage || iFrameAppReady;
const renderBloom = renderImage || iFrameAppReady;
return (
<div className="w-screen">
{renderBloom && (
<div className="relative max-w-full sm:w-full sm:max-w-[1200px] mx-auto">
<div className="absolute w-full overflow-visible top-[100px] h-32">
<div className="left-0 mt-22 bloom bloom-one" />
<div className="left-[34%] -mt-32 bloom bloom-three " />
<div className="right-0 invisible sm:visible bloom bloom-two" />
</div>
</div>
)}
<div className="relative z-30 h-[228px] px-5 sm:h-[428px] md:h-[428px] lg:h-[628px] mt-8 sm:mt-16">
<div
className={clsx(
'relative h-full m-auto border rounded-lg max-w-7xl transition-opacity bg-gray-850 border-gray-550 opacity-0',
renderBloom && '!opacity-100',
renderImage && 'bg-transparent border-none'
)}
>
{showApp && !forceImg && (
<iframe
ref={iFrame}
referrerPolicy="origin-when-cross-origin"
className={clsx(
'w-full h-full z-30 rounded-lg shadow-iframe inset-center bg-gray-850',
iFrameAppReady ? 'fade-in-app-embed opacity-100' : 'opacity-0 -ml-[10000px]'
)}
src={`${
import.meta.env.VITE_SDWEB_BASE_URL || 'http://localhost:8002'
}?library_id=9068c6ec-cf90-451b-bb30-4174781e7bc6`}
/>
)}
return (
<div className="w-screen">
{renderBloom && (
<div className="relative max-w-full sm:w-full sm:max-w-[1200px] mx-auto">
<div className="absolute w-full overflow-visible top-[100px] h-32">
<div className="left-0 mt-22 bloom bloom-one" />
<div className="left-[34%] -mt-32 bloom bloom-three " />
<div className="right-0 invisible sm:visible bloom bloom-two" />
</div>
</div>
)}
<div className="relative z-30 h-[228px] px-5 sm:h-[428px] md:h-[428px] lg:h-[628px] mt-8 sm:mt-16">
<div
className={clsx(
'relative h-full m-auto border rounded-lg max-w-7xl transition-opacity bg-gray-850 border-gray-550 opacity-0',
renderBloom && '!opacity-100',
renderImage && 'bg-transparent border-none'
)}
>
{showApp && !forceImg && (
<iframe
ref={iFrame}
referrerPolicy="origin-when-cross-origin"
className={clsx(
'w-full h-full z-30 rounded-lg shadow-iframe inset-center bg-gray-850',
iFrameAppReady ? 'fade-in-app-embed opacity-100' : 'opacity-0 -ml-[10000px]'
)}
src={`${
import.meta.env.VITE_SDWEB_BASE_URL || 'http://localhost:8002'
}?library_id=9068c6ec-cf90-451b-bb30-4174781e7bc6`}
/>
)}
{renderImage && <div className="z-40 h-full fade-in-app-embed landing-img " />}
</div>
</div>
</div>
);
{renderImage && <div className="z-40 h-full fade-in-app-embed landing-img " />}
</div>
</div>
</div>
);
}
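For context on the 'spacedrive-hello' string checked in handleEvent above: the embedded demo announces itself from inside the iframe with a single postMessage, whose counterpart appears later in this diff in the web app's App component. A minimal sketch of the iframe side:
// Iframe side: announce readiness; AppEmbed's handleEvent above listens for
// exactly this string and flips iFrameAppReady when it arrives.
window.parent.postMessage('spacedrive-hello', '*');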

View file

@@ -3,70 +3,70 @@ import Particles from 'react-tsparticles';
import { loadFull } from 'tsparticles';
export const Bubbles = () => {
const particlesInit = async (main: any) => {
console.log(main);
await loadFull(main);
};
const particlesInit = async (main: any) => {
console.log(main);
await loadFull(main);
};
const particlesLoaded = (container: any) => {
console.log(container);
};
const particlesLoaded = (container: any) => {
console.log(container);
};
return (
//@ts-ignore
<Particles
id="tsparticles"
className="absolute z-0"
init={particlesInit}
//@ts-ignore
loaded={particlesLoaded}
options={{
fpsLimit: 120,
interactivity: {
events: {
onClick: {
enable: true,
mode: 'push'
},
resize: true
}
},
particles: {
color: {
value: '#ffffff'
},
collisions: {
enable: true
},
move: {
direction: 'top',
enable: true,
outModes: {
default: 'destroy'
},
random: false,
speed: 0.2,
straight: true
},
number: {
density: {
enable: true,
area: 900
},
value: 100
},
opacity: {
value: 0.1
},
shape: {
type: 'circle'
},
size: {
value: { min: 0.5, max: 3 }
}
},
detectRetina: true
}}
/>
);
return (
//@ts-ignore
<Particles
id="tsparticles"
className="absolute z-0"
init={particlesInit}
//@ts-ignore
loaded={particlesLoaded}
options={{
fpsLimit: 120,
interactivity: {
events: {
onClick: {
enable: true,
mode: 'push'
},
resize: true
}
},
particles: {
color: {
value: '#ffffff'
},
collisions: {
enable: true
},
move: {
direction: 'top',
enable: true,
outModes: {
default: 'destroy'
},
random: false,
speed: 0.2,
straight: true
},
number: {
density: {
enable: true,
area: 900
},
value: 100
},
opacity: {
value: 0.1
},
shape: {
type: 'circle'
},
size: {
value: { min: 0.5, max: 3 }
}
},
detectRetina: true
}}
/>
);
};

View file

@@ -1,103 +1,103 @@
import React from 'react';
import { ReactComponent as AppLogo } from '../assets/app-logo.svg';
import {
Twitter,
Discord,
Instagram,
Github,
Opencollective,
Twitch
Twitter,
Discord,
Instagram,
Github,
Opencollective,
Twitch
} from '@icons-pack/react-simple-icons';
function FooterLink(props: { children: string | JSX.Element; link: string }) {
return (
<a href={props.link} target="_blank" className="text-gray-300 hover:text-white">
{props.children}
</a>
);
return (
<a href={props.link} target="_blank" className="text-gray-300 hover:text-white">
{props.children}
</a>
);
}
export function Footer() {
return (
<footer id="footer" className="z-50 w-screen pt-3 border-t border-gray-550 bg-gray-850">
<div className="container grid grid-cols-2 gap-6 p-8 pt-10 pb-20 m-auto text-white min-h-64 sm:grid-cols-2 lg:grid-cols-6">
<div className="col-span-2">
<AppLogo className="w-10 h-10 mb-5" />
return (
<footer id="footer" className="z-50 w-screen pt-3 border-t border-gray-550 bg-gray-850">
<div className="container grid grid-cols-2 gap-6 p-8 pt-10 pb-20 m-auto text-white min-h-64 sm:grid-cols-2 lg:grid-cols-6">
<div className="col-span-2">
<AppLogo className="w-10 h-10 mb-5" />
<h3 className="mb-1 text-xl font-bold">Spacedrive</h3>
<p className="text-sm text-gray-350">&copy; Copyright 2022 Jamie Pine</p>
<div className="flex flex-row mt-6 mb-10 space-x-3">
<FooterLink link="https://twitter.com/spacedriveapp">
<Twitter />
</FooterLink>
<FooterLink link="https://discord.gg/gTaF2Z44f5">
<Discord />
</FooterLink>
<FooterLink link="https://instagram.com/spacedriveapp">
<Instagram />
</FooterLink>
<FooterLink link="https://github.com/spacedriveapp">
<Github />
</FooterLink>
<FooterLink link="https://opencollective.com/spacedrive">
<Opencollective />
</FooterLink>
<FooterLink link="https://twitch.tv/jamiepinelive">
<Twitch />
</FooterLink>
</div>
</div>
<h3 className="mb-1 text-xl font-bold">Spacedrive</h3>
<p className="text-sm text-gray-350">&copy; Copyright 2022 Jamie Pine</p>
<div className="flex flex-row mt-6 mb-10 space-x-3">
<FooterLink link="https://twitter.com/spacedriveapp">
<Twitter />
</FooterLink>
<FooterLink link="https://discord.gg/gTaF2Z44f5">
<Discord />
</FooterLink>
<FooterLink link="https://instagram.com/spacedriveapp">
<Instagram />
</FooterLink>
<FooterLink link="https://github.com/spacedriveapp">
<Github />
</FooterLink>
<FooterLink link="https://opencollective.com/spacedrive">
<Opencollective />
</FooterLink>
<FooterLink link="https://twitch.tv/jamiepinelive">
<Twitch />
</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">About</h3>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">About</h3>
<FooterLink link="/team">Team</FooterLink>
<FooterLink link="/faq">FAQ</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive#motivation">
Mission
</FooterLink>
<FooterLink link="/changelog">Changelog</FooterLink>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Blog</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2 pointer-events-none">
<h3 className="mb-1 text-xs font-bold uppercase">Downloads</h3>
<div className="flex flex-col col-span-1 space-y-2 opacity-50">
<FooterLink link="#">macOS</FooterLink>
<FooterLink link="#">Windows</FooterLink>
<FooterLink link="#">Linux</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Developers</h3>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs">
Documentation
</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs/developer/contributing.md">
Contribute
</FooterLink>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Extensions</FooterLink>
</div>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Self Host</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Org</h3>
<FooterLink link="https://opencollective.com/spacedrive">Open Collective</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/blob/main/LICENSE">
License
</FooterLink>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Privacy</FooterLink>
</div>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Terms</FooterLink>
</div>
</div>
</div>
</footer>
);
<FooterLink link="/team">Team</FooterLink>
<FooterLink link="/faq">FAQ</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive#motivation">
Mission
</FooterLink>
<FooterLink link="/changelog">Changelog</FooterLink>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Blog</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2 pointer-events-none">
<h3 className="mb-1 text-xs font-bold uppercase">Downloads</h3>
<div className="flex flex-col col-span-1 space-y-2 opacity-50">
<FooterLink link="#">macOS</FooterLink>
<FooterLink link="#">Windows</FooterLink>
<FooterLink link="#">Linux</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Developers</h3>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs">
Documentation
</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/tree/main/docs/developer/contributing.md">
Contribute
</FooterLink>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Extensions</FooterLink>
</div>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Self Host</FooterLink>
</div>
</div>
<div className="flex flex-col col-span-1 space-y-2">
<h3 className="mb-1 text-xs font-bold uppercase ">Org</h3>
<FooterLink link="https://opencollective.com/spacedrive">Open Collective</FooterLink>
<FooterLink link="https://github.com/spacedriveapp/spacedrive/blob/main/LICENSE">
License
</FooterLink>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Privacy</FooterLink>
</div>
<div className="opacity-50 pointer-events-none">
<FooterLink link="#">Terms</FooterLink>
</div>
</div>
</div>
</footer>
);
}

View file

@@ -5,20 +5,20 @@ import 'prismjs/components/prism-rust';
import '../atom-one.css';
interface MarkdownPageProps {
children: React.ReactNode;
children: React.ReactNode;
}
function MarkdownPage(props: MarkdownPageProps) {
useEffect(() => {
Prism.highlightAll();
}, []);
return (
<div className="container max-w-4xl p-4 mt-32 mb-20">
<article id="content" className="m-auto prose lg:prose-xs dark:prose-invert">
{props.children}
</article>
</div>
);
useEffect(() => {
Prism.highlightAll();
}, []);
return (
<div className="container max-w-4xl p-4 mt-32 mb-20">
<article id="content" className="m-auto prose lg:prose-xs dark:prose-invert">
{props.children}
</article>
</div>
);
}
export default MarkdownPage;

View file

@@ -5,125 +5,125 @@ import { Link, List, MapPin, Question } from 'phosphor-react';
import { ReactComponent as AppLogo } from '../assets/app-logo.svg';
import { Discord, Github } from '@icons-pack/react-simple-icons';
import {
ClockIcon,
CogIcon,
HeartIcon,
LockClosedIcon,
MapIcon,
QuestionMarkCircleIcon
ClockIcon,
CogIcon,
HeartIcon,
LockClosedIcon,
MapIcon,
QuestionMarkCircleIcon
} from '@heroicons/react/solid';
function NavLink(props: { link?: string; children: string }) {
return (
<a
href={props.link ?? '#'}
target={props.link?.startsWith('http') ? '_blank' : undefined}
className="p-4 text-gray-300 no-underline transition cursor-pointer hover:text-gray-50"
>
{props.children}
</a>
);
return (
<a
href={props.link ?? '#'}
target={props.link?.startsWith('http') ? '_blank' : undefined}
className="p-4 text-gray-300 no-underline transition cursor-pointer hover:text-gray-50"
>
{props.children}
</a>
);
}
export default function NavBar() {
const [isAtTop, setIsAtTop] = useState(window.pageYOffset < 20);
const [isAtTop, setIsAtTop] = useState(window.pageYOffset < 20);
function onScroll(event: Event) {
if (window.pageYOffset < 20) setIsAtTop(true);
else if (isAtTop) setIsAtTop(false);
}
function onScroll(event: Event) {
if (window.pageYOffset < 20) setIsAtTop(true);
else if (isAtTop) setIsAtTop(false);
}
useEffect(() => {
window.addEventListener('scroll', onScroll);
return () => window.removeEventListener('scroll', onScroll);
}, []);
useEffect(() => {
window.addEventListener('scroll', onScroll);
return () => window.removeEventListener('scroll', onScroll);
}, []);
return (
<div
className={clsx(
'fixed transition z-40 w-full h-16 border-b ',
isAtTop
? 'bg-transparent border-transparent'
: 'border-gray-550 bg-gray-750 bg-opacity-80 backdrop-blur'
)}
>
<div className="container relative flex items-center h-full px-5 m-auto">
<a href="/" className="absolute flex flex-row items-center">
<AppLogo className="z-30 w-8 h-8 mr-3" />
<h3 className="text-xl font-bold text-white">
Spacedrive
{/* <span className="ml-2 text-xs text-gray-400 uppercase">ALPHA</span> */}
</h3>
</a>
return (
<div
className={clsx(
'fixed transition z-40 w-full h-16 border-b ',
isAtTop
? 'bg-transparent border-transparent'
: 'border-gray-550 bg-gray-750 bg-opacity-80 backdrop-blur'
)}
>
<div className="container relative flex items-center h-full px-5 m-auto">
<a href="/" className="absolute flex flex-row items-center">
<AppLogo className="z-30 w-8 h-8 mr-3" />
<h3 className="text-xl font-bold text-white">
Spacedrive
{/* <span className="ml-2 text-xs text-gray-400 uppercase">ALPHA</span> */}
</h3>
</a>
<div className="hidden m-auto space-x-4 text-white lg:block ">
<NavLink link="/roadmap">Roadmap</NavLink>
<NavLink link="/faq">FAQ</NavLink>
<NavLink link="/team">Team</NavLink>
{/* <NavLink link="/change-log">Changelog</NavLink>
<div className="hidden m-auto space-x-4 text-white lg:block ">
<NavLink link="/roadmap">Roadmap</NavLink>
<NavLink link="/faq">FAQ</NavLink>
<NavLink link="/team">Team</NavLink>
{/* <NavLink link="/change-log">Changelog</NavLink>
<NavLink link="/privacy">Privacy</NavLink> */}
<NavLink link="https://opencollective.com/spacedrive">Sponsor us</NavLink>
</div>
<Dropdown
className="absolute block h-6 w-44 top-2 right-4 lg:hidden"
items={[
[
{
name: 'Repository',
icon: Github,
onPress: () =>
(window.location.href = 'https://github.com/spacedriveapp/spacedrive')
},
{
name: 'Join Discord',
icon: Discord,
onPress: () => (window.location.href = 'https://discord.gg/gTaF2Z44f5')
}
],
[
{
name: 'Roadmap',
icon: MapIcon,
onPress: () => (window.location.href = '/roadmap'),
selected: window.location.href.includes('/roadmap')
},
{
name: 'FAQ',
icon: QuestionMarkCircleIcon,
onPress: () => (window.location.href = '/faq'),
selected: window.location.href.includes('/faq')
},
// {
// name: 'Changelog',
// icon: ClockIcon,
// onPress: () => (window.location.href = '/changelog'),
// selected: window.location.href.includes('/changelog')
// },
// {
// name: 'Privacy',
// icon: LockClosedIcon,
// onPress: () => (window.location.href = '/privacy'),
// selected: window.location.href.includes('/privacy')
// },
{
name: 'Sponsor us',
icon: HeartIcon,
onPress: () => (window.location.href = 'https://opencollective.com/spacedrive')
}
]
]}
buttonIcon={<List weight="bold" className="w-6 h-6" />}
buttonProps={{ className: '!p-1 ml-[140px]' }}
/>
<div className="absolute flex-row hidden space-x-5 right-3 lg:flex">
<a href="https://discord.gg/gTaF2Z44f5" target="_blank">
<Discord className="text-white" />
</a>
<a href="https://github.com/spacedriveapp/spacedrive" target="_blank">
<Github className="text-white" />
</a>
</div>
</div>
</div>
);
<NavLink link="https://opencollective.com/spacedrive">Sponsor us</NavLink>
</div>
<Dropdown
className="absolute block h-6 w-44 top-2 right-4 lg:hidden"
items={[
[
{
name: 'Repository',
icon: Github,
onPress: () =>
(window.location.href = 'https://github.com/spacedriveapp/spacedrive')
},
{
name: 'Join Discord',
icon: Discord,
onPress: () => (window.location.href = 'https://discord.gg/gTaF2Z44f5')
}
],
[
{
name: 'Roadmap',
icon: MapIcon,
onPress: () => (window.location.href = '/roadmap'),
selected: window.location.href.includes('/roadmap')
},
{
name: 'FAQ',
icon: QuestionMarkCircleIcon,
onPress: () => (window.location.href = '/faq'),
selected: window.location.href.includes('/faq')
},
// {
// name: 'Changelog',
// icon: ClockIcon,
// onPress: () => (window.location.href = '/changelog'),
// selected: window.location.href.includes('/changelog')
// },
// {
// name: 'Privacy',
// icon: LockClosedIcon,
// onPress: () => (window.location.href = '/privacy'),
// selected: window.location.href.includes('/privacy')
// },
{
name: 'Sponsor us',
icon: HeartIcon,
onPress: () => (window.location.href = 'https://opencollective.com/spacedrive')
}
]
]}
buttonIcon={<List weight="bold" className="w-6 h-6" />}
buttonProps={{ className: '!p-1 ml-[140px]' }}
/>
<div className="absolute flex-row hidden space-x-5 right-3 lg:flex">
<a href="https://discord.gg/gTaF2Z44f5" target="_blank">
<Discord className="text-white" />
</a>
<a href="https://github.com/spacedriveapp/spacedrive" target="_blank">
<Github className="text-white" />
</a>
</div>
</div>
</div>
);
}

View file

@@ -11,33 +11,33 @@ import './style.scss';
import { Button } from '@sd/ui';
function App() {
return (
<Suspense fallback={<p>Loading...</p>}>
<div className="dark:bg-black dark:text-white ">
<Button
href="#content"
className="fixed left-0 z-50 mt-3 ml-8 duration-200 -translate-y-16 cursor-pointer focus:translate-y-0"
variant="gray"
>
Skip to content
</Button>
return (
<Suspense fallback={<p>Loading...</p>}>
<div className="dark:bg-black dark:text-white ">
<Button
href="#content"
className="fixed left-0 z-50 mt-3 ml-8 duration-200 -translate-y-16 cursor-pointer focus:translate-y-0"
variant="gray"
>
Skip to content
</Button>
<NavBar />
<div className="container z-10 flex flex-col items-center px-4 mx-auto overflow-x-hidden sm:overflow-x-visible ">
{useRoutes(routes)}
<Footer />
</div>
</div>
</Suspense>
);
<NavBar />
<div className="container z-10 flex flex-col items-center px-4 mx-auto overflow-x-hidden sm:overflow-x-visible ">
{useRoutes(routes)}
<Footer />
</div>
</div>
</Suspense>
);
}
const root = createRoot(document.getElementById('root')!);
root.render(
<React.StrictMode>
<Router>
<App />
</Router>
</React.StrictMode>
<React.StrictMode>
<Router>
<App />
</Router>
</React.StrictMode>
);

View file

@@ -5,31 +5,31 @@ import { Button } from '@sd/ui';
import { SmileyXEyes } from 'phosphor-react';
function Page() {
return (
<Markdown>
<Helmet>
<title>Not Found - Spacedrive</title>
</Helmet>
<div className="flex flex-col items-center">
<SmileyXEyes className="mb-3 w-44 h-44" />
<h1 className="mb-2 text-center">In the quantum realm this page potentially exists.</h1>
<p>In other words, thats a 404.</p>
<div className="flex flex-wrap justify-center">
<Button
href={document.referrer || 'javascript:history.back()'}
className="mt-2 mr-3 cursor-pointer "
variant="gray"
>
Back
</Button>
<Button href="/" className="mt-2 cursor-pointer" variant="primary">
Discover Spacedrive
</Button>
</div>
</div>
<div className="h-96" />
</Markdown>
);
return (
<Markdown>
<Helmet>
<title>Not Found - Spacedrive</title>
</Helmet>
<div className="flex flex-col items-center">
<SmileyXEyes className="mb-3 w-44 h-44" />
<h1 className="mb-2 text-center">In the quantum realm this page potentially exists.</h1>
<p>In other words, thats a 404.</p>
<div className="flex flex-wrap justify-center">
<Button
href={document.referrer || 'javascript:history.back()'}
className="mt-2 mr-3 cursor-pointer "
variant="gray"
>
Back
</Button>
<Button href="/" className="mt-2 cursor-pointer" variant="primary">
Discover Spacedrive
</Button>
</div>
</div>
<div className="h-96" />
</Markdown>
);
}
export default Page;

View file

@@ -4,15 +4,15 @@ import { ReactComponent as Content } from '~/docs/changelog/index.md';
import { Helmet } from 'react-helmet';
function Page() {
return (
<Markdown>
<Helmet>
<title>Changelog - Spacedrive</title>
<meta name="description" content="Updates and release builds of the Spacedrive app." />
</Helmet>
<Content />
</Markdown>
);
return (
<Markdown>
<Helmet>
<title>Changelog - Spacedrive</title>
<meta name="description" content="Updates and release builds of the Spacedrive app." />
</Helmet>
<Content />
</Markdown>
);
}
export default Page;

View file

@@ -4,18 +4,18 @@ import { ReactComponent as Content } from '~/docs/architecture/distributed-data-
import { Helmet } from 'react-helmet';
function Page() {
return (
<Markdown>
<Helmet>
<title>Distributed Data Sync - Spacedrive Documentation</title>
<meta
name="description"
content="How we handle data sync with SQLite in a distributed network."
/>
</Helmet>
<Content />
</Markdown>
);
return (
<Markdown>
<Helmet>
<title>Distributed Data Sync - Spacedrive Documentation</title>
<meta
name="description"
content="How we handle data sync with SQLite in a distributed network."
/>
</Helmet>
<Content />
</Markdown>
);
}
export default Page;

View file

@@ -4,15 +4,15 @@ import { ReactComponent as Content } from '~/docs/product/faq.md';
import { Helmet } from 'react-helmet';
function Page() {
return (
<Markdown>
<Helmet>
<title>FAQ - Spacedrive</title>
<meta name="description" content="Updates and release builds of the Spacedrive app." />
</Helmet>
<Content />
</Markdown>
);
return (
<Markdown>
<Helmet>
<title>FAQ - Spacedrive</title>
<meta name="description" content="Updates and release builds of the Spacedrive app." />
</Helmet>
<Content />
</Markdown>
);
}
export default Page;

View file

@@ -6,98 +6,98 @@ import clsx from 'clsx';
import AppEmbed from '../components/AppEmbed';
interface SectionProps {
orientation: 'left' | 'right';
heading?: string;
description?: string | React.ReactNode;
children?: React.ReactNode;
className?: string;
orientation: 'left' | 'right';
heading?: string;
description?: string | React.ReactNode;
children?: React.ReactNode;
className?: string;
}
function Section(props: SectionProps = { orientation: 'left' }) {
let info = (
<div className="p-10">
{props.heading && <h1 className="text-4xl font-black">{props.heading}</h1>}
{props.description && <p className="mt-5 text-xl text-gray-450">{props.description}</p>}
</div>
);
return (
<div className={clsx('grid grid-cols-1 my-10 lg:grid-cols-2 lg:my-44', props.className)}>
{props.orientation === 'right' ? (
<>
{info}
{props.children}
</>
) : (
<>
{props.children}
{info}
</>
)}
</div>
);
let info = (
<div className="p-10">
{props.heading && <h1 className="text-4xl font-black">{props.heading}</h1>}
{props.description && <p className="mt-5 text-xl text-gray-450">{props.description}</p>}
</div>
);
return (
<div className={clsx('grid grid-cols-1 my-10 lg:grid-cols-2 lg:my-44', props.className)}>
{props.orientation === 'right' ? (
<>
{info}
{props.children}
</>
) : (
<>
{props.children}
{info}
</>
)}
</div>
);
}
function Page() {
return (
<>
<div className="mt-28 lg:mt-36" />
return (
<>
<div className="mt-28 lg:mt-36" />
<h1
id="content"
className="z-30 px-2 mb-3 text-4xl font-black leading-tight text-center md:text-6xl"
>
A file explorer from the future.
</h1>
<p className="z-30 max-w-4xl mt-1 mb-8 text-center text-md lg:text-lg leading-2 lg:leading-8 text-gray-450">
Combine your drives and clouds into one database that you can organize and explore from any
device.
<br />
<span className="hidden sm:block">
Designed for creators, hoarders and the painfully disorganized.
</span>
</p>
<div className="flex flex-row space-x-4 delay-3 ">
<Button
href="https://github.com/spacedriveapp/spacedrive"
target="_blank"
className="z-30 cursor-pointer"
variant="gray"
>
<Github className="inline w-5 h-5 -mt-[4px] -ml-1 mr-2" fill="white" />
Star on GitHub
</Button>
</div>
<p className="z-30 px-6 mt-3 text-sm text-center text-gray-450 ">
Coming soon on macOS, Windows and Linux.
<br />
Shortly after to iOS & Android.
</p>
<h1
id="content"
className="z-30 px-2 mb-3 text-4xl font-black leading-tight text-center md:text-6xl"
>
A file explorer from the future.
</h1>
<p className="z-30 max-w-4xl mt-1 mb-8 text-center text-md lg:text-lg leading-2 lg:leading-8 text-gray-450">
Combine your drives and clouds into one database that you can organize and explore from any
device.
<br />
<span className="hidden sm:block">
Designed for creators, hoarders and the painfully disorganized.
</span>
</p>
<div className="flex flex-row space-x-4 delay-3 ">
<Button
href="https://github.com/spacedriveapp/spacedrive"
target="_blank"
className="z-30 cursor-pointer"
variant="gray"
>
<Github className="inline w-5 h-5 -mt-[4px] -ml-1 mr-2" fill="white" />
Star on GitHub
</Button>
</div>
<p className="z-30 px-6 mt-3 text-sm text-center text-gray-450 ">
Coming soon on macOS, Windows and Linux.
<br />
Shortly after to iOS & Android.
</p>
<AppEmbed />
<Section
orientation="right"
heading="Never leave a file behind."
className="z-30"
description={
<>
Spacedrive accounts for every file you own, uniquely fingerprinting and extracting
metadata so you can sort, tag, backup and share files without limitations of any one
cloud provider.
<br />
<br />
<a
className="transition text-primary-600 hover:text-primary-500"
href="https://github.com/spacedriveapp"
target="_blank"
>
Find out more
</a>
</>
}
/>
<Bubbles />
</>
);
<AppEmbed />
<Section
orientation="right"
heading="Never leave a file behind."
className="z-30"
description={
<>
Spacedrive accounts for every file you own, uniquely fingerprinting and extracting
metadata so you can sort, tag, backup and share files without limitations of any one
cloud provider.
<br />
<br />
<a
className="transition text-primary-600 hover:text-primary-500"
href="https://github.com/spacedriveapp"
target="_blank"
>
Find out more
</a>
</>
}
/>
<Bubbles />
</>
);
}
export default Page;

View file

@@ -5,18 +5,18 @@ import { Helmet } from 'react-helmet';
import { ReactComponent as Folder } from '../../../../packages/interface/src/assets/svg/folder.svg';
function Page() {
return (
<Markdown>
<Helmet>
<title>Roadmap - Spacedrive</title>
<meta name="description" content="What can Spacedrive do?" />
</Helmet>
<div className="w-24 mb-10">
<Folder className="" />
</div>
<Content />
</Markdown>
);
return (
<Markdown>
<Helmet>
<title>Roadmap - Spacedrive</title>
<meta name="description" content="What can Spacedrive do?" />
</Helmet>
<div className="w-24 mb-10">
<Folder className="" />
</div>
<Content />
</Markdown>
);
}
export default Page;

View file

@@ -4,17 +4,17 @@ import { ReactComponent as Content } from '~/docs/product/credits.md';
import { Helmet } from 'react-helmet';
function Page() {
return (
<Markdown>
<Helmet>
<title>Our Team - Spacedrive</title>
<meta name="description" content="Who's behind Spacedrive?" />
</Helmet>
<div className="team-page">
<Content />
</div>
</Markdown>
);
return (
<Markdown>
<Helmet>
<title>Our Team - Spacedrive</title>
<meta name="description" content="Who's behind Spacedrive?" />
</Helmet>
<div className="team-page">
<Content />
</div>
</Markdown>
);
}
export default Page;

View file

@@ -1,88 +1,84 @@
html {
@apply bg-black;
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
&::-webkit-scrollbar {
display: none;
}
@apply bg-black;
-ms-overflow-style: none; /* IE and Edge */
scrollbar-width: none; /* Firefox */
&::-webkit-scrollbar {
display: none;
}
}
.landing-img {
background-image: url('/app.png');
background-size: contain;
background-repeat: no-repeat;
background-position: center top;
background-image: url('/app.png');
background-size: contain;
background-repeat: no-repeat;
background-position: center top;
}
.fade-in-app-embed {
animation: fadeInUp 3s;
-webkit-animation: fadeInUp 3s;
-moz-animation: fadeInUp 3s;
-o-animation: fadeInUp 3s;
-ms-animation: fadeInUp 3s;
animation: fadeInUp 3s;
-webkit-animation: fadeInUp 3s;
-moz-animation: fadeInUp 3s;
-o-animation: fadeInUp 3s;
-ms-animation: fadeInUp 3s;
}
.fade-in-heading {
animation: fadeInUp 1s;
animation: fadeInUp 1s;
}
@keyframes fadeInUp {
0% {
opacity:0;
// transform: translateY(10px);
}
100% {
opacity:1;
0% {
opacity: 0;
// transform: translateY(10px);
}
100% {
opacity: 1;
// transform: translateY(0px);
}
// transform: translateY(0px);
}
}
.bloom {
@apply absolute w-96 h-96;
will-change: opacity;
opacity: 0;
filter: blur(160px);
border-radius: 50%;
transform: scale(1.5);
animation-name: bloomBurst;
animation-duration: 1s;
animation-timing-function: ease-in-out;
animation-fill-mode: forwards;
animation-iteration-count: 1;
animation-direction: forwards;
&.bloom-one {
background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca);
animation-delay: 500ms;
}
&.bloom-two {
background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1D054B);
animation-delay: 300ms;
}
&.bloom-three {
background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1D054B);
animation-delay: 1100ms;
}
@apply absolute w-96 h-96;
will-change: opacity;
opacity: 0;
filter: blur(160px);
border-radius: 50%;
transform: scale(1.5);
animation-name: bloomBurst;
animation-duration: 1s;
animation-timing-function: ease-in-out;
animation-fill-mode: forwards;
animation-iteration-count: 1;
animation-direction: forwards;
&.bloom-one {
background: conic-gradient(from 90deg at 50% 50%, #255bef, #aa1cca);
animation-delay: 500ms;
}
&.bloom-two {
background: conic-gradient(from 90deg at 50% 50%, #c62dbb, #1d054b);
animation-delay: 300ms;
}
&.bloom-three {
background: conic-gradient(from 90deg at 50% 50%, #2d53c6, #1d054b);
animation-delay: 1100ms;
}
}
@keyframes bloomBurst {
from {
opacity: 0;
}
40% {
opacity: 1;
}
to {
opacity: 0.6;
}
from {
opacity: 0;
}
40% {
opacity: 1;
}
to {
opacity: 0.6;
}
}
.shadow-iframe {
box-shadow: 0px 0px 100px 0px rgba(0,0,0,0.5);
box-shadow: 0px 0px 100px 0px rgba(0, 0, 0, 0.5);
}
// Gradient colors
@@ -90,4 +86,4 @@ html {
// #7A1D77
// #8E4CAB
// #1D054B
// #9A3F8C
// #9A3F8C

View file

@@ -2,24 +2,24 @@
/// <reference types="vite-plugin-pages/client-react" />
interface ImportMetaEnv {
readonly VITE_SDWEB_BASE_URL: string;
readonly VITE_SDWEB_BASE_URL: string;
}
interface ImportMeta {
readonly env: ImportMetaEnv;
readonly env: ImportMetaEnv;
}
declare module '*.md' {
// "unknown" would be more detailed depends on how you structure frontmatter
const attributes: Record<string, unknown>;
// "unknown" would be more detailed depends on how you structure frontmatter
const attributes: Record<string, unknown>;
// When "Mode.TOC" is requested
const toc: { level: string; content: string }[];
// When "Mode.TOC" is requested
const toc: { level: string; content: string }[];
// When "Mode.HTML" is requested
const html: string;
// When "Mode.HTML" is requested
const html: string;
// When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
import React from 'react';
const ReactComponent: React.VFC;
// When "Mode.React" is requested. VFC could take a generic like React.VFC<{ MyComponent: TypeOfMyComponent }>
import React from 'react';
const ReactComponent: React.VFC;
}

View file

@@ -1,5 +1,5 @@
{
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
}

View file

@@ -1,3 +1,3 @@
{
"rewrites": [{ "source": "/(.*)", "destination": "/" }]
"rewrites": [{ "source": "/(.*)", "destination": "/" }]
}

View file

@@ -6,23 +6,23 @@ import svg from 'vite-plugin-svgr';
// https://vitejs.dev/config/
export default defineConfig({
// @ts-ignore
plugins: [
react(),
pages({
dirs: 'src/pages'
// onRoutesGenerated: (routes) => generateSitemap({ routes })
}),
svg(),
md({ mode: [Mode.REACT] })
],
resolve: {
alias: {
'~/docs': __dirname + '../../../docs'
}
},
server: {
port: 8003
},
publicDir: 'public'
// @ts-ignore
plugins: [
react(),
pages({
dirs: 'src/pages'
// onRoutesGenerated: (routes) => generateSitemap({ routes })
}),
svg(),
md({ mode: [Mode.REACT] })
],
resolve: {
alias: {
'~/docs': __dirname + '../../../docs'
}
},
server: {
port: 8003
},
publicDir: 'public'
});

View file

@@ -1,6 +1,6 @@
{
"name": "mobile",
"version": "0.0.0",
"main": "index.js",
"license": "MIT"
"name": "mobile",
"version": "0.0.0",
"main": "index.js",
"license": "MIT"
}

View file

@@ -1,6 +1,6 @@
{
"name": "@sd/server",
"version": "0.0.0",
"main": "index.js",
"license": "MIT"
"name": "@sd/server",
"version": "0.0.0",
"main": "index.js",
"license": "MIT"
}

View file

@@ -1,31 +1,31 @@
{
"name": "@sd/web",
"private": true,
"version": "0.0.0",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"@fontsource/inter": "^4.5.7",
"@sd/client": "*",
"@sd/core": "*",
"@sd/interface": "*",
"@sd/ui": "*",
"react": "^18.0.0",
"react-dom": "^18.0.0"
},
"devDependencies": {
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4",
"postcss": "^8.4.12",
"tailwind": "^4.0.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-svgr": "^1.1.0",
"vite-plugin-tsconfig-paths": "^1.0.5"
}
"name": "@sd/web",
"private": true,
"version": "0.0.0",
"scripts": {
"dev": "vite",
"build": "vite build",
"preview": "vite preview"
},
"dependencies": {
"@fontsource/inter": "^4.5.7",
"@sd/client": "*",
"@sd/core": "*",
"@sd/interface": "*",
"@sd/ui": "*",
"react": "^18.0.0",
"react-dom": "^18.0.0"
},
"devDependencies": {
"@types/react": "^18.0.8",
"@types/react-dom": "^18.0.0",
"@vitejs/plugin-react": "^1.3.1",
"autoprefixer": "^10.4.4",
"postcss": "^8.4.12",
"tailwind": "^4.0.0",
"typescript": "^4.6.3",
"vite": "^2.9.5",
"vite-plugin-svgr": "^1.1.0",
"vite-plugin-tsconfig-paths": "^1.0.5"
}
}

View file

@@ -1,25 +1,25 @@
{
"short_name": "Spacedrive",
"name": "Spacedrive",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
"short_name": "Spacedrive",
"name": "Spacedrive",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
},
{
"src": "logo192.png",
"type": "image/png",
"sizes": "192x192"
},
{
"src": "logo512.png",
"type": "image/png",
"sizes": "512x512"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View file

@@ -10,83 +10,83 @@ const randomId = () => Math.random().toString(36).slice(2);
// bind state to core via Tauri
class Transport extends BaseTransport {
requestMap = new Map<string, (data: any) => void>();
requestMap = new Map<string, (data: any) => void>();
constructor() {
super();
constructor() {
super();
websocket.addEventListener('message', (event) => {
if (!event.data) return;
websocket.addEventListener('message', (event) => {
if (!event.data) return;
const { id, payload } = JSON.parse(event.data);
const { id, payload } = JSON.parse(event.data);
const { type, data } = payload;
if (type === 'event') {
this.emit('core_event', data);
} else if (type === 'query' || type === 'command') {
if (this.requestMap.has(id)) {
this.requestMap.get(id)?.(data);
this.requestMap.delete(id);
}
}
});
}
async query(query: ClientQuery) {
const id = randomId();
let resolve: (data: any) => void;
const { type, data } = payload;
if (type === 'event') {
this.emit('core_event', data);
} else if (type === 'query' || type === 'command') {
if (this.requestMap.has(id)) {
this.requestMap.get(id)?.(data);
this.requestMap.delete(id);
}
}
});
}
async query(query: ClientQuery) {
const id = randomId();
let resolve: (data: any) => void;
const promise = new Promise((res) => {
resolve = res;
});
const promise = new Promise((res) => {
resolve = res;
});
// @ts-ignore
this.requestMap.set(id, resolve);
// @ts-ignore
this.requestMap.set(id, resolve);
websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
websocket.send(JSON.stringify({ id, payload: { type: 'query', data: query } }));
return await promise;
}
async command(command: ClientCommand) {
const id = randomId();
let resolve: (data: any) => void;
return await promise;
}
async command(command: ClientCommand) {
const id = randomId();
let resolve: (data: any) => void;
const promise = new Promise((res) => {
resolve = res;
});
const promise = new Promise((res) => {
resolve = res;
});
// @ts-ignore
this.requestMap.set(id, resolve);
// @ts-ignore
this.requestMap.set(id, resolve);
websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
websocket.send(JSON.stringify({ id, payload: { type: 'command', data: command } }));
return await promise;
}
return await promise;
}
}
function App() {
useEffect(() => {
window.parent.postMessage('spacedrive-hello', '*');
}, []);
useEffect(() => {
window.parent.postMessage('spacedrive-hello', '*');
}, []);
return (
<div className="App">
{/* <header className="App-header"></header> */}
<SpacedriveInterface
demoMode
useMemoryRouter={true}
transport={new Transport()}
platform={'browser'}
convertFileSrc={function (url: string): string {
return url;
}}
openDialog={function (options: {
directory?: boolean | undefined;
}): Promise<string | string[]> {
return Promise.resolve([]);
}}
/>
</div>
);
return (
<div className="App">
{/* <header className="App-header"></header> */}
<SpacedriveInterface
demoMode
useMemoryRouter={true}
transport={new Transport()}
platform={'browser'}
convertFileSrc={function (url: string): string {
return url;
}}
openDialog={function (options: {
directory?: boolean | undefined;
}): Promise<string | string[]> {
return Promise.resolve([]);
}}
/>
</div>
);
}
export default App;
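A minimal usage sketch of the Transport defined above (hypothetical, not part of this commit): each call generates a random id, stashes its resolver in requestMap, and resolves once the websocket echoes a payload carrying the same id.
// Hypothetical usage sketch; both query keys appear in the ClientQuery type
// later in this diff.
const transport = new Transport();

async function demoQueries() {
	// Each query resolves when a message with the matching id arrives.
	const state = await transport.query({ key: 'ClientGetState' });
	const volumes = await transport.query({ key: 'SysGetVolumes' });
	console.log(state, volumes);
}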

View file

@@ -1,9 +1,9 @@
/// <reference types="vite/client" />
interface ImportMetaEnv {
readonly VITE_SDSERVER_BASE_URL: string;
readonly VITE_SDSERVER_BASE_URL: string;
}
interface ImportMeta {
readonly env: ImportMetaEnv;
readonly env: ImportMetaEnv;
}

View file

@@ -1,12 +1,12 @@
<!DOCTYPE html>
<html class="dark">
<head>
<meta charset="utf-8" />
<title>Spacedrive</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body>
<div id="root"></div>
<script type="module" src="./index.tsx"></script>
</body>
<head>
<meta charset="utf-8" />
<title>Spacedrive</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head>
<body>
<div id="root"></div>
<script type="module" src="./index.tsx"></script>
</body>
</html>

View file

@@ -5,7 +5,7 @@ import '@sd/ui/style';
const root = ReactDOM.createRoot(document.getElementById('root') as HTMLElement);
root.render(
<React.StrictMode>
<App />
</React.StrictMode>
<React.StrictMode>
<App />
</React.StrictMode>
);

View file

@@ -1,5 +1,5 @@
{
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
"extends": "../../packages/config/interface.tsconfig.json",
"compilerOptions": {},
"include": ["src"]
}

View file

@@ -1,3 +1,3 @@
{
"rewrites": [{ "source": "/(.*)", "destination": "/" }]
"rewrites": [{ "source": "/(.*)", "destination": "/" }]
}

View file

@@ -7,24 +7,24 @@ import { name, version } from './package.json';
// https://vitejs.dev/config/
export default defineConfig({
server: {
port: 8002
},
plugins: [
// @ts-ignore
react({
jsxRuntime: 'classic'
}),
svg({ svgrOptions: { icon: true } }),
tsconfigPaths()
],
root: 'src',
publicDir: '../../packages/interface/src/assets',
define: {
pkgJson: { name, version }
},
build: {
outDir: '../dist',
assetsDir: '.'
}
server: {
port: 8002
},
plugins: [
// @ts-ignore
react({
jsxRuntime: 'classic'
}),
svg({ svgrOptions: { icon: true } }),
tsconfigPaths()
],
root: 'src',
publicDir: '../../packages/interface/src/assets',
define: {
pkgJson: { name, version }
},
build: {
outDir: '../dist',
assetsDir: '.'
}
});

View file

@@ -1,6 +1,5 @@
max_width = 100
hard_tabs = false
tab_spaces = 2
hard_tabs = true
newline_style = "Unix"
use_small_heuristics = "Default"
reorder_imports = true

View file

@@ -1,3 +1,10 @@
import type { Platform } from "./Platform";
import type { Platform } from './Platform';
export interface Client { uuid: string, name: string, platform: Platform, tcp_address: string, last_seen: string, last_synchronized: string, }
export interface Client {
uuid: string;
name: string;
platform: Platform;
tcp_address: string;
last_seen: string;
last_synchronized: string;
}

View file

@@ -1,2 +1,14 @@
export type ClientCommand = { key: "FileRead", params: { id: number, } } | { key: "FileDelete", params: { id: number, } } | { key: "LibDelete", params: { id: number, } } | { key: "TagCreate", params: { name: string, color: string, } } | { key: "TagUpdate", params: { name: string, color: string, } } | { key: "TagAssign", params: { file_id: number, tag_id: number, } } | { key: "TagDelete", params: { id: number, } } | { key: "LocCreate", params: { path: string, } } | { key: "LocUpdate", params: { id: number, name: string | null, } } | { key: "LocDelete", params: { id: number, } } | { key: "SysVolumeUnmount", params: { id: number, } } | { key: "GenerateThumbsForLocation", params: { id: number, path: string, } } | { key: "IdentifyUniqueFiles" };
export type ClientCommand =
| { key: 'FileRead'; params: { id: number } }
| { key: 'FileDelete'; params: { id: number } }
| { key: 'LibDelete'; params: { id: number } }
| { key: 'TagCreate'; params: { name: string; color: string } }
| { key: 'TagUpdate'; params: { name: string; color: string } }
| { key: 'TagAssign'; params: { file_id: number; tag_id: number } }
| { key: 'TagDelete'; params: { id: number } }
| { key: 'LocCreate'; params: { path: string } }
| { key: 'LocUpdate'; params: { id: number; name: string | null } }
| { key: 'LocDelete'; params: { id: number } }
| { key: 'SysVolumeUnmount'; params: { id: number } }
| { key: 'GenerateThumbsForLocation'; params: { id: number; path: string } }
| { key: 'IdentifyUniqueFiles' };
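These variants pair with Transport.command() from the web app shown earlier in this diff. A hypothetical sketch, where the path and id values are made-up examples:
// Hypothetical: register a location, then queue thumbnail generation for it,
// using a Transport instance like the one sketched earlier.
async function demoCommands(transport: Transport) {
	await transport.command({ key: 'LocCreate', params: { path: '/Users/demo/Photos' } });
	await transport.command({ key: 'GenerateThumbsForLocation', params: { id: 1, path: '/Users/demo/Photos' } });
}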

View file

@@ -1,2 +1,10 @@
export type ClientQuery = { key: "ClientGetState" } | { key: "SysGetVolumes" } | { key: "LibGetTags" } | { key: "JobGetRunning" } | { key: "JobGetHistory" } | { key: "SysGetLocations" } | { key: "SysGetLocation", params: { id: number, } } | { key: "LibGetExplorerDir", params: { location_id: number, path: string, limit: number, } } | { key: "GetLibraryStatistics" };
export type ClientQuery =
| { key: 'ClientGetState' }
| { key: 'SysGetVolumes' }
| { key: 'LibGetTags' }
| { key: 'JobGetRunning' }
| { key: 'JobGetHistory' }
| { key: 'SysGetLocations' }
| { key: 'SysGetLocation'; params: { id: number } }
| { key: 'LibGetExplorerDir'; params: { location_id: number; path: string; limit: number } }
| { key: 'GetLibraryStatistics' };

View file

@@ -1,3 +1,11 @@
import type { LibraryState } from "./LibraryState";
import type { LibraryState } from './LibraryState';
export interface ClientState { client_uuid: string, client_id: number, client_name: string, data_path: string, tcp_port: number, libraries: Array<LibraryState>, current_library_uuid: string, }
export interface ClientState {
client_uuid: string;
client_id: number;
client_name: string;
data_path: string;
tcp_port: number;
libraries: Array<LibraryState>;
current_library_uuid: string;
}

View file

@@ -1,4 +1,10 @@
import type { ClientQuery } from "./ClientQuery";
import type { CoreResource } from "./CoreResource";
import type { ClientQuery } from './ClientQuery';
import type { CoreResource } from './CoreResource';
export type CoreEvent = { key: "InvalidateQuery", data: ClientQuery } | { key: "InvalidateQueryDebounced", data: ClientQuery } | { key: "InvalidateResource", data: CoreResource } | { key: "NewThumbnail", data: { cas_id: string, } } | { key: "Log", data: { message: string, } } | { key: "DatabaseDisconnected", data: { reason: string | null, } };
export type CoreEvent =
| { key: 'InvalidateQuery'; data: ClientQuery }
| { key: 'InvalidateQueryDebounced'; data: ClientQuery }
| { key: 'InvalidateResource'; data: CoreResource }
| { key: 'NewThumbnail'; data: { cas_id: string } }
| { key: 'Log'; data: { message: string } }
| { key: 'DatabaseDisconnected'; data: { reason: string | null } };

View file

@@ -1,5 +1,11 @@
import type { File } from "./File";
import type { JobReport } from "./JobReport";
import type { LocationResource } from "./LocationResource";
import type { File } from './File';
import type { JobReport } from './JobReport';
import type { LocationResource } from './LocationResource';
export type CoreResource = "Client" | "Library" | { Location: LocationResource } | { File: File } | { Job: JobReport } | "Tag";
export type CoreResource =
| 'Client'
| 'Library'
| { Location: LocationResource }
| { File: File }
| { Job: JobReport }
| 'Tag';

View file

@@ -1,8 +1,18 @@
import type { ClientState } from "./ClientState";
import type { DirectoryWithContents } from "./DirectoryWithContents";
import type { JobReport } from "./JobReport";
import type { LocationResource } from "./LocationResource";
import type { Statistics } from "./Statistics";
import type { Volume } from "./Volume";
import type { ClientState } from './ClientState';
import type { DirectoryWithContents } from './DirectoryWithContents';
import type { JobReport } from './JobReport';
import type { LocationResource } from './LocationResource';
import type { Statistics } from './Statistics';
import type { Volume } from './Volume';
export type CoreResponse = { key: "Success", data: null } | { key: "SysGetVolumes", data: Array<Volume> } | { key: "SysGetLocation", data: LocationResource } | { key: "SysGetLocations", data: Array<LocationResource> } | { key: "LibGetExplorerDir", data: DirectoryWithContents } | { key: "ClientGetState", data: ClientState } | { key: "LocCreate", data: LocationResource } | { key: "JobGetRunning", data: Array<JobReport> } | { key: "JobGetHistory", data: Array<JobReport> } | { key: "GetLibraryStatistics", data: Statistics };
export type CoreResponse =
| { key: 'Success'; data: null }
| { key: 'SysGetVolumes'; data: Array<Volume> }
| { key: 'SysGetLocation'; data: LocationResource }
| { key: 'SysGetLocations'; data: Array<LocationResource> }
| { key: 'LibGetExplorerDir'; data: DirectoryWithContents }
| { key: 'ClientGetState'; data: ClientState }
| { key: 'LocCreate'; data: LocationResource }
| { key: 'JobGetRunning'; data: Array<JobReport> }
| { key: 'JobGetHistory'; data: Array<JobReport> }
| { key: 'GetLibraryStatistics'; data: Statistics };
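Since every variant above carries a string key discriminant, TypeScript narrows data inside a switch on it. A small sketch with a hypothetical helper, not part of this commit:
// Hypothetical helper: switch on the discriminant to get the typed payload.
function describe(res: CoreResponse): string {
	switch (res.key) {
		case 'SysGetVolumes':
			// data is narrowed to Array<Volume> here
			return `${res.data.length} volumes`;
		case 'ClientGetState':
			// data is narrowed to ClientState here
			return res.data.client_name;
		default:
			return res.key;
	}
}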

View file

@@ -1,3 +1,6 @@
import type { FilePath } from "./FilePath";
import type { FilePath } from './FilePath';
export interface DirectoryWithContents { directory: FilePath, contents: Array<FilePath>, }
export interface DirectoryWithContents {
directory: FilePath;
contents: Array<FilePath>;
}

View file

@@ -1,2 +1 @@
export type EncryptionAlgorithm = "None" | "AES128" | "AES192" | "AES256";
export type EncryptionAlgorithm = 'None' | 'AES128' | 'AES192' | 'AES256';

View file

@@ -1,5 +1,24 @@
import type { EncryptionAlgorithm } from "./EncryptionAlgorithm";
import type { FileKind } from "./FileKind";
import type { FilePath } from "./FilePath";
import type { EncryptionAlgorithm } from './EncryptionAlgorithm';
import type { FileKind } from './FileKind';
import type { FilePath } from './FilePath';
export interface File { id: number, cas_id: string, integrity_checksum: string | null, size_in_bytes: string, kind: FileKind, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, encryption: EncryptionAlgorithm, ipfs_id: string | null, comment: string | null, date_created: string, date_modified: string, date_indexed: string, paths: Array<FilePath>, }
export interface File {
id: number;
cas_id: string;
integrity_checksum: string | null;
size_in_bytes: string;
kind: FileKind;
hidden: boolean;
favorite: boolean;
important: boolean;
has_thumbnail: boolean;
has_thumbstrip: boolean;
has_video_preview: boolean;
encryption: EncryptionAlgorithm;
ipfs_id: string | null;
comment: string | null;
date_created: string;
date_modified: string;
date_indexed: string;
paths: Array<FilePath>;
}

View file

@ -1,2 +1,10 @@
export type FileKind = "Unknown" | "Directory" | "Package" | "Archive" | "Image" | "Video" | "Audio" | "Plaintext" | "Alias";
export type FileKind =
| 'Unknown'
| 'Directory'
| 'Package'
| 'Archive'
| 'Image'
| 'Video'
| 'Audio'
| 'Plaintext'
| 'Alias';

View file

@ -1,2 +1,16 @@
export interface FilePath { id: number, is_dir: boolean, location_id: number, materialized_path: string, name: string, extension: string | null, file_id: number | null, parent_id: number | null, temp_cas_id: string | null, has_local_thumbnail: boolean, date_created: string, date_modified: string, date_indexed: string, permissions: string | null, }
export interface FilePath {
id: number;
is_dir: boolean;
location_id: number;
materialized_path: string;
name: string;
extension: string | null;
file_id: number | null;
parent_id: number | null;
temp_cas_id: string | null;
has_local_thumbnail: boolean;
date_created: string;
date_modified: string;
date_indexed: string;
permissions: string | null;
}

View file

@ -1,3 +1,12 @@
import type { JobStatus } from "./JobStatus";
import type { JobStatus } from './JobStatus';
export interface JobReport { id: string, date_created: string, date_modified: string, status: JobStatus, task_count: number, completed_task_count: number, message: string, seconds_elapsed: string, }
export interface JobReport {
id: string;
date_created: string;
date_modified: string;
status: JobStatus;
task_count: number;
completed_task_count: number;
message: string;
seconds_elapsed: string;
}

View file

@ -1,2 +1 @@
export type JobStatus = "Queued" | "Running" | "Completed" | "Canceled" | "Failed";
export type JobStatus = 'Queued' | 'Running' | 'Completed' | 'Canceled' | 'Failed';

View file

@ -1,2 +1,6 @@
export interface LibraryState { library_uuid: string, library_id: number, library_path: string, offline: boolean, }
export interface LibraryState {
library_uuid: string;
library_id: number;
library_path: string;
offline: boolean;
}

View file

@ -1,2 +1,10 @@
export interface LocationResource { id: number, name: string | null, path: string | null, total_capacity: number | null, available_capacity: number | null, is_removable: boolean | null, is_online: boolean, date_created: string, }
export interface LocationResource {
id: number;
name: string | null;
path: string | null;
total_capacity: number | null;
available_capacity: number | null;
is_removable: boolean | null;
is_online: boolean;
date_created: string;
}

View file

@ -1,2 +1 @@
export type Platform = "Unknown" | "Windows" | "MacOS" | "Linux" | "IOS" | "Android";
export type Platform = 'Unknown' | 'Windows' | 'MacOS' | 'Linux' | 'IOS' | 'Android';

View file

@ -1,2 +1,9 @@
export interface Statistics { total_file_count: number, total_bytes_used: string, total_bytes_capacity: string, total_bytes_free: string, total_unique_bytes: string, preview_media_bytes: string, library_db_size: string, }
export interface Statistics {
total_file_count: number;
total_bytes_used: string;
total_bytes_capacity: string;
total_bytes_free: string;
total_unique_bytes: string;
preview_media_bytes: string;
library_db_size: string;
}

View file

@ -1,2 +1,10 @@
export interface Volume { name: string, mount_point: string, total_capacity: bigint, available_capacity: bigint, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean, }
export interface Volume {
name: string;
mount_point: string;
total_capacity: bigint;
available_capacity: bigint;
is_removable: boolean;
disk_type: string | null;
file_system: string | null;
is_root_filesystem: boolean;
}

View file

@ -15,28 +15,28 @@ use syn::{parse_macro_input, Data, DeriveInput};
/// ```
#[proc_macro_derive(PropertyOperationApply)]
pub fn property_operation_apply(input: TokenStream) -> TokenStream {
let DeriveInput { ident, data, .. } = parse_macro_input!(input);
let DeriveInput { ident, data, .. } = parse_macro_input!(input);
if let Data::Enum(data) = data {
let impls = data.variants.iter().map(|variant| {
let variant_ident = &variant.ident;
quote! {
#ident::#variant_ident(method) => method.apply(ctx),
}
});
if let Data::Enum(data) = data {
let impls = data.variants.iter().map(|variant| {
let variant_ident = &variant.ident;
quote! {
#ident::#variant_ident(method) => method.apply(ctx),
}
});
let expanded = quote! {
impl #ident {
fn apply(operation: CrdtCtx<PropertyOperation>, ctx: self::engine::SyncContext) {
match operation.resource {
#(#impls)*
};
}
}
};
let expanded = quote! {
impl #ident {
fn apply(operation: CrdtCtx<PropertyOperation>, ctx: self::engine::SyncContext) {
match operation.resource {
#(#impls)*
};
}
}
};
TokenStream::from(expanded)
} else {
panic!("The 'PropertyOperationApply' macro can only be used on enums!");
}
TokenStream::from(expanded)
} else {
panic!("The 'PropertyOperationApply' macro can only be used on enums!");
}
}

View file

@ -1,18 +1,18 @@
{
"name": "@sd/core",
"version": "0.0.0",
"main": "index.js",
"license": "MIT",
"scripts": {
"codegen": "cargo test && ts-node ./scripts/bindingsIndex.ts",
"build": "cargo build",
"test": "cargo test",
"test:log": "cargo test -- --nocapture",
"prisma": "cargo prisma"
},
"devDependencies": {
"@types/node": "^17.0.23",
"ts-node": "^10.7.0",
"typescript": "^4.6.3"
}
"name": "@sd/core",
"version": "0.0.0",
"main": "index.js",
"license": "MIT",
"scripts": {
"codegen": "cargo test && ts-node ./scripts/bindingsIndex.ts",
"build": "cargo build",
"test": "cargo test",
"test:log": "cargo test -- --nocapture",
"prisma": "cargo prisma"
},
"devDependencies": {
"@types/node": "^17.0.23",
"ts-node": "^10.7.0",
"typescript": "^4.6.3"
}
}

View file

@ -1,3 +1,3 @@
fn main() {
prisma_client_rust_cli::run();
prisma_client_rust_cli::run();
}

View file

@ -2,29 +2,29 @@ import * as fs from 'fs/promises';
import * as path from 'path';
(async function main() {
async function exists(path: string) {
try {
await fs.access(path);
return true;
} catch {
return false;
}
}
async function exists(path: string) {
try {
await fs.access(path);
return true;
} catch {
return false;
}
}
const files = await fs.readdir(path.join(__dirname, '../bindings'));
const bindings = files.filter((f) => f.endsWith('.ts'));
let str = '';
// str += `export * from './types';\n`;
const files = await fs.readdir(path.join(__dirname, '../bindings'));
const bindings = files.filter((f) => f.endsWith('.ts'));
let str = '';
// str += `export * from './types';\n`;
for (let binding of bindings) {
str += `export * from './bindings/${binding.split('.')[0]}';\n`;
}
for (let binding of bindings) {
str += `export * from './bindings/${binding.split('.')[0]}';\n`;
}
let indexExists = await exists(path.join(__dirname, '../index.ts'));
let indexExists = await exists(path.join(__dirname, '../index.ts'));
if (indexExists) {
await fs.rm(path.join(__dirname, '../index.ts'));
}
if (indexExists) {
await fs.rm(path.join(__dirname, '../index.ts'));
}
await fs.writeFile(path.join(__dirname, '../index.ts'), str);
await fs.writeFile(path.join(__dirname, '../index.ts'), str);
})();

View file

@ -6,8 +6,8 @@ use ts_rs::TS;
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum EncryptionAlgorithm {
None = 0,
AES128 = 1,
AES192 = 2,
AES256 = 3,
None = 0,
AES128 = 1,
AES192 = 2,
AES256 = 3,
}

View file

@ -11,140 +11,142 @@ const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_tab
static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations");
pub fn sha256_digest<R: Read>(mut reader: R) -> Result<Digest> {
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
context.update(&buffer[..count]);
}
Ok(context.finish())
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
context.update(&buffer[..count]);
}
Ok(context.finish())
}
pub async fn run_migrations(ctx: &CoreContext) -> Result<()> {
let client = &ctx.database;
let client = &ctx.database;
match client
._query_raw::<serde_json::Value>(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
)
.await
{
Ok(data) => {
if data.len() == 0 {
#[cfg(debug_assertions)]
println!("Migration table does not exist");
// execute migration
match client._execute_raw(INIT_MIGRATION).await {
Ok(_) => {}
Err(e) => {
println!("Failed to create migration table: {}", e);
}
};
match client
._query_raw::<serde_json::Value>(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
)
.await
{
Ok(data) => {
if data.len() == 0 {
#[cfg(debug_assertions)]
println!("Migration table does not exist");
// execute migration
match client._execute_raw(INIT_MIGRATION).await {
Ok(_) => {}
Err(e) => {
println!("Failed to create migration table: {}", e);
}
};
let value: Vec<serde_json::Value> = client
._query_raw("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'")
.await
.unwrap();
let value: Vec<serde_json::Value> = client
._query_raw(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
)
.await
.unwrap();
#[cfg(debug_assertions)]
println!("Migration table created: {:?}", value);
} else {
#[cfg(debug_assertions)]
println!("Migration table exists: {:?}", data);
}
#[cfg(debug_assertions)]
println!("Migration table created: {:?}", value);
} else {
#[cfg(debug_assertions)]
println!("Migration table exists: {:?}", data);
}
let mut migration_subdirs = MIGRATIONS_DIR
.dirs()
.filter(|subdir| {
subdir
.path()
.file_name()
.map(|name| name != OsStr::new("migration_table"))
.unwrap_or(false)
})
.collect::<Vec<_>>();
let mut migration_subdirs = MIGRATIONS_DIR
.dirs()
.filter(|subdir| {
subdir
.path()
.file_name()
.map(|name| name != OsStr::new("migration_table"))
.unwrap_or(false)
})
.collect::<Vec<_>>();
migration_subdirs.sort_by(|a, b| {
let a_name = a.path().file_name().unwrap().to_str().unwrap();
let b_name = b.path().file_name().unwrap().to_str().unwrap();
migration_subdirs.sort_by(|a, b| {
let a_name = a.path().file_name().unwrap().to_str().unwrap();
let b_name = b.path().file_name().unwrap().to_str().unwrap();
let a_time = a_name[..14].parse::<i64>().unwrap();
let b_time = b_name[..14].parse::<i64>().unwrap();
let a_time = a_name[..14].parse::<i64>().unwrap();
let b_time = b_name[..14].parse::<i64>().unwrap();
a_time.cmp(&b_time)
});
a_time.cmp(&b_time)
});
for subdir in migration_subdirs {
println!("{:?}", subdir.path());
let migration_file = subdir
.get_file(subdir.path().join("./migration.sql"))
.unwrap();
let migration_sql = migration_file.contents_utf8().unwrap();
for subdir in migration_subdirs {
println!("{:?}", subdir.path());
let migration_file = subdir
.get_file(subdir.path().join("./migration.sql"))
.unwrap();
let migration_sql = migration_file.contents_utf8().unwrap();
let digest = sha256_digest(BufReader::new(migration_file.contents()))?;
// create a lowercase hex checksum from the digest
let checksum = HEXLOWER.encode(digest.as_ref());
let name = subdir.path().file_name().unwrap().to_str().unwrap();
let digest = sha256_digest(BufReader::new(migration_file.contents()))?;
// create a lowercase hex checksum from the digest
let checksum = HEXLOWER.encode(digest.as_ref());
let name = subdir.path().file_name().unwrap().to_str().unwrap();
// get existing migration by checksum; if it doesn't exist, run the migration
let existing_migration = client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.exec()
.await?;
// get existing migration by checksum; if it doesn't exist, run the migration
let existing_migration = client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.exec()
.await?;
if existing_migration.is_none() {
#[cfg(debug_assertions)]
println!("Running migration: {}", name);
if existing_migration.is_none() {
#[cfg(debug_assertions)]
println!("Running migration: {}", name);
let steps = migration_sql.split(";").collect::<Vec<&str>>();
let steps = &steps[0..steps.len() - 1];
let steps = migration_sql.split(";").collect::<Vec<&str>>();
let steps = &steps[0..steps.len() - 1];
client
.migration()
.create(
migration::name::set(name.to_string()),
migration::checksum::set(checksum.clone()),
vec![],
)
.exec()
.await?;
client
.migration()
.create(
migration::name::set(name.to_string()),
migration::checksum::set(checksum.clone()),
vec![],
)
.exec()
.await?;
for (i, step) in steps.iter().enumerate() {
match client._execute_raw(&format!("{};", step)).await {
Ok(_) => {
#[cfg(debug_assertions)]
println!("Step {} ran successfully", i);
client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.update(vec![migration::steps_applied::set(i as i32 + 1)])
.exec()
.await?;
}
Err(e) => {
println!("Error running migration: {}", name);
println!("{}", e);
break;
}
}
}
for (i, step) in steps.iter().enumerate() {
match client._execute_raw(&format!("{};", step)).await {
Ok(_) => {
#[cfg(debug_assertions)]
println!("Step {} ran successfully", i);
client
.migration()
.find_unique(migration::checksum::equals(checksum.clone()))
.update(vec![migration::steps_applied::set(i as i32 + 1)])
.exec()
.await?;
}
Err(e) => {
println!("Error running migration: {}", name);
println!("{}", e);
break;
}
}
}
#[cfg(debug_assertions)]
println!("Migration {} recorded successfully", name);
} else {
#[cfg(debug_assertions)]
println!("Migration {} already exists", name);
}
}
}
Err(err) => {
panic!("Failed to check migration table existence: {:?}", err);
}
}
#[cfg(debug_assertions)]
println!("Migration {} recorded successfully", name);
} else {
#[cfg(debug_assertions)]
println!("Migration {} already exists", name);
}
}
}
Err(err) => {
panic!("Failed to check migration table existence: {:?}", err);
}
}
Ok(())
Ok(())
}

View file

@ -4,17 +4,17 @@ pub mod migrate;
#[derive(Error, Debug)]
pub enum DatabaseError {
#[error("Failed to connect to database")]
MissingConnection,
#[error("Unable find current_library in the client config")]
MalformedConfig,
#[error("Unable to initialize the Prisma client")]
ClientError(#[from] prisma::NewClientError),
#[error("Failed to connect to database")]
MissingConnection,
#[error("Unable find current_library in the client config")]
MalformedConfig,
#[error("Unable to initialize the Prisma client")]
ClientError(#[from] prisma::NewClientError),
}
pub async fn create_connection(path: &str) -> Result<PrismaClient, DatabaseError> {
println!("Creating database connection: {:?}", path);
let client = prisma::new_client_with_url(&format!("file:{}", &path)).await?;
println!("Creating database connection: {:?}", path);
let client = prisma::new_client_with_url(&format!("file:{}", &path)).await?;
Ok(client)
Ok(client)
}

View file

@ -5,132 +5,132 @@ use std::{ffi::OsStr, path::Path};
#[derive(Default, Debug)]
pub struct MediaItem {
pub created_at: Option<String>,
pub brand: Option<String>,
pub model: Option<String>,
pub duration_seconds: f64,
pub best_video_stream_index: usize,
pub best_audio_stream_index: usize,
pub best_subtitle_stream_index: usize,
pub steams: Vec<Stream>,
pub created_at: Option<String>,
pub brand: Option<String>,
pub model: Option<String>,
pub duration_seconds: f64,
pub best_video_stream_index: usize,
pub best_audio_stream_index: usize,
pub best_subtitle_stream_index: usize,
pub steams: Vec<Stream>,
}
#[derive(Debug)]
pub struct Stream {
pub codec: String,
pub frames: f64,
pub duration_seconds: f64,
pub kind: Option<StreamKind>,
pub codec: String,
pub frames: f64,
pub duration_seconds: f64,
pub kind: Option<StreamKind>,
}
#[derive(Debug)]
pub enum StreamKind {
Video(VideoStream),
Audio(AudioStream),
Video(VideoStream),
Audio(AudioStream),
}
#[derive(Debug)]
pub struct VideoStream {
pub width: u32,
pub height: u32,
pub aspect_ratio: String,
pub format: format::Pixel,
pub bitrate: usize,
pub width: u32,
pub height: u32,
pub aspect_ratio: String,
pub format: format::Pixel,
pub bitrate: usize,
}
#[derive(Debug)]
pub struct AudioStream {
pub channels: u16,
pub format: format::Sample,
pub bitrate: usize,
pub rate: u32,
pub channels: u16,
pub format: format::Sample,
pub bitrate: usize,
pub rate: u32,
}
fn extract(iter: &mut Iter, key: &str) -> Option<String> {
iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
iter.find(|k| k.0.contains(key)).map(|k| k.1.to_string())
}
pub fn get_video_metadata(path: &str) -> Result<(), ffmpeg::Error> {
ffmpeg::init().unwrap();
ffmpeg::init().unwrap();
let mut name = Path::new(path)
.file_name()
.and_then(OsStr::to_str)
.map(ToString::to_string)
.unwrap_or(String::new());
let mut name = Path::new(path)
.file_name()
.and_then(OsStr::to_str)
.map(ToString::to_string)
.unwrap_or(String::new());
// strip to exact potential date length and attempt to parse
name = name.chars().take(19).collect();
// specifically, OBS uses this format for time; other checks could be added
let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
// strip to exact potential date length and attempt to parse
name = name.chars().take(19).collect();
// specifically, OBS uses this format for time; other checks could be added
let potential_date = NaiveDateTime::parse_from_str(&name, "%Y-%m-%d %H-%M-%S");
match ffmpeg::format::input(&path) {
Ok(context) => {
let mut media_item = MediaItem::default();
let metadata = context.metadata();
let mut iter = metadata.iter();
match ffmpeg::format::input(&path) {
Ok(context) => {
let mut media_item = MediaItem::default();
let metadata = context.metadata();
let mut iter = metadata.iter();
// creation_time is usually the creation date of the file
media_item.created_at = extract(&mut iter, "creation_time");
// Apple Photos uses "com.apple.quicktime.creationdate", which we care about more than creation_time
media_item.created_at = extract(&mut iter, "creationdate");
// fallback to potential time if exists
if media_item.created_at.is_none() {
media_item.created_at = potential_date.map(|d| d.to_string()).ok();
}
// origin metadata
media_item.brand = extract(&mut iter, "major_brand");
media_item.brand = extract(&mut iter, "make");
media_item.model = extract(&mut iter, "model");
// creation_time is usually the creation date of the file
media_item.created_at = extract(&mut iter, "creation_time");
// Apple Photos uses "com.apple.quicktime.creationdate", which we care about more than creation_time
media_item.created_at = extract(&mut iter, "creationdate");
// fallback to potential time if exists
if media_item.created_at.is_none() {
media_item.created_at = potential_date.map(|d| d.to_string()).ok();
}
// origin metadata
media_item.brand = extract(&mut iter, "major_brand");
media_item.brand = extract(&mut iter, "make");
media_item.model = extract(&mut iter, "model");
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
media_item.best_video_stream_index = stream.index();
}
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
media_item.best_audio_stream_index = stream.index();
}
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
media_item.best_subtitle_stream_index = stream.index();
}
media_item.duration_seconds =
context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Video) {
media_item.best_video_stream_index = stream.index();
}
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Audio) {
media_item.best_audio_stream_index = stream.index();
}
if let Some(stream) = context.streams().best(ffmpeg::media::Type::Subtitle) {
media_item.best_subtitle_stream_index = stream.index();
}
media_item.duration_seconds =
context.duration() as f64 / f64::from(ffmpeg::ffi::AV_TIME_BASE);
for stream in context.streams() {
let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
for stream in context.streams() {
let codec = ffmpeg::codec::context::Context::from_parameters(stream.parameters())?;
let mut stream_item = Stream {
codec: codec.id().name().to_string(),
frames: stream.frames() as f64,
duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
kind: None,
};
let mut stream_item = Stream {
codec: codec.id().name().to_string(),
frames: stream.frames() as f64,
duration_seconds: stream.duration() as f64 * f64::from(stream.time_base()),
kind: None,
};
if codec.medium() == ffmpeg::media::Type::Video {
if let Ok(video) = codec.decoder().video() {
stream_item.kind = Some(StreamKind::Video(VideoStream {
bitrate: video.bit_rate(),
format: video.format(),
width: video.width(),
height: video.height(),
aspect_ratio: video.aspect_ratio().to_string(),
}));
}
} else if codec.medium() == ffmpeg::media::Type::Audio {
if let Ok(audio) = codec.decoder().audio() {
stream_item.kind = Some(StreamKind::Audio(AudioStream {
channels: audio.channels(),
bitrate: audio.bit_rate(),
rate: audio.rate(),
format: audio.format(),
}));
}
}
media_item.steams.push(stream_item);
}
println!("{:#?}", media_item);
}
if codec.medium() == ffmpeg::media::Type::Video {
if let Ok(video) = codec.decoder().video() {
stream_item.kind = Some(StreamKind::Video(VideoStream {
bitrate: video.bit_rate(),
format: video.format(),
width: video.width(),
height: video.height(),
aspect_ratio: video.aspect_ratio().to_string(),
}));
}
} else if codec.medium() == ffmpeg::media::Type::Audio {
if let Ok(audio) = codec.decoder().audio() {
stream_item.kind = Some(StreamKind::Audio(AudioStream {
channels: audio.channels(),
bitrate: audio.bit_rate(),
rate: audio.rate(),
format: audio.format(),
}));
}
}
media_item.steams.push(stream_item);
}
println!("{:#?}", media_item);
}
Err(error) => println!("error: {}", error),
}
Ok(())
Err(error) => println!("error: {}", error),
}
Ok(())
}

View file

@ -1,9 +1,9 @@
use crate::job::jobs::JobReportUpdate;
use crate::node::state;
use crate::{
job::{jobs::Job, worker::WorkerContext},
prisma::file_path,
CoreContext,
job::{jobs::Job, worker::WorkerContext},
prisma::file_path,
CoreContext,
};
use crate::{sys, CoreEvent};
use anyhow::Result;
@ -15,9 +15,9 @@ use webp::*;
#[derive(Debug, Clone)]
pub struct ThumbnailJob {
pub location_id: i32,
pub path: String,
pub background: bool,
pub location_id: i32,
pub path: String,
pub background: bool,
}
static THUMBNAIL_SIZE_FACTOR: f32 = 0.2;
@ -26,133 +26,136 @@ pub static THUMBNAIL_CACHE_DIR_NAME: &str = "thumbnails";
#[async_trait::async_trait]
impl Job for ThumbnailJob {
fn name(&self) -> &'static str {
"file_identifier"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
let config = state::get();
let core_ctx = ctx.core_ctx.clone();
fn name(&self) -> &'static str {
"file_identifier"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
let config = state::get();
let core_ctx = ctx.core_ctx.clone();
let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
let location = sys::locations::get_location(&core_ctx, self.location_id).await?;
fs::create_dir_all(
Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", self.location_id)),
)?;
fs::create_dir_all(
Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", self.location_id)),
)?;
let root_path = location.path.unwrap();
let root_path = location.path.unwrap();
let image_files = get_images(&core_ctx, self.location_id, &self.path).await?;
let image_files = get_images(&core_ctx, self.location_id, &self.path).await?;
let location_id = location.id.clone();
let location_id = location.id.clone();
println!("Found {:?} files", image_files.len());
println!("Found {:?} files", image_files.len());
let is_background = self.background.clone();
let is_background = self.background.clone();
tokio::task::spawn_blocking(move || {
ctx.progress(vec![
JobReportUpdate::TaskCount(image_files.len()),
JobReportUpdate::Message(format!("Preparing to process {} files", image_files.len())),
]);
tokio::task::spawn_blocking(move || {
ctx.progress(vec![
JobReportUpdate::TaskCount(image_files.len()),
JobReportUpdate::Message(format!(
"Preparing to process {} files",
image_files.len()
)),
]);
for (i, image_file) in image_files.iter().enumerate() {
ctx.progress(vec![JobReportUpdate::Message(format!(
"Processing {}",
image_file.materialized_path.clone()
))]);
let path = format!("{}{}", root_path, image_file.materialized_path);
println!("image_file {:?}", image_file);
for (i, image_file) in image_files.iter().enumerate() {
ctx.progress(vec![JobReportUpdate::Message(format!(
"Processing {}",
image_file.materialized_path.clone()
))]);
let path = format!("{}{}", root_path, image_file.materialized_path);
println!("image_file {:?}", image_file);
let cas_id = match image_file.file() {
Ok(i) => i.unwrap().cas_id.clone(),
Err(_) => todo!(),
};
let cas_id = match image_file.file() {
Ok(i) => i.unwrap().cas_id.clone(),
Err(_) => todo!(),
};
// Define and write the WebP-encoded file to a given path
let output_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location_id))
.join(&cas_id)
.with_extension("webp");
// Define and write the WebP-encoded file to a given path
let output_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location_id))
.join(&cas_id)
.with_extension("webp");
// check if file exists at output path
if !output_path.exists() {
println!("writing {:?} to {}", output_path, path);
generate_thumbnail(&path, &output_path)
.map_err(|e| {
println!("error generating thumb {:?}", e);
})
.unwrap_or(());
// check if file exists at output path
if !output_path.exists() {
println!("writing {:?} to {}", output_path, path);
generate_thumbnail(&path, &output_path)
.map_err(|e| {
println!("error generating thumb {:?}", e);
})
.unwrap_or(());
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(i + 1)]);
if !is_background {
block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }));
};
} else {
println!("Thumb exists, skipping... {}", output_path.display());
}
}
})
.await?;
if !is_background {
block_on(ctx.core_ctx.emit(CoreEvent::NewThumbnail { cas_id }));
};
} else {
println!("Thumb exists, skipping... {}", output_path.display());
}
}
})
.await?;
Ok(())
}
Ok(())
}
}
pub fn generate_thumbnail(file_path: &str, output_path: &PathBuf) -> Result<()> {
// Using the `image` crate, open the source image file
let img = image::open(file_path)?;
let (w, h) = img.dimensions();
// Optionally, resize the existing photo and convert back into DynamicImage
let img: DynamicImage = image::DynamicImage::ImageRgba8(imageops::resize(
&img,
(w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
(h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
imageops::FilterType::Triangle,
));
// Create the WebP encoder for the above image
let encoder: Encoder = Encoder::from_image(&img).map_err(|_| anyhow::anyhow!("jeff"))?;
// Using the `image` crate, open the source image file
let img = image::open(file_path)?;
let (w, h) = img.dimensions();
// Optionally, resize the existing photo and convert back into DynamicImage
let img: DynamicImage = image::DynamicImage::ImageRgba8(imageops::resize(
&img,
(w as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
(h as f32 * THUMBNAIL_SIZE_FACTOR) as u32,
imageops::FilterType::Triangle,
));
// Create the WebP encoder for the above image
let encoder: Encoder = Encoder::from_image(&img).map_err(|_| anyhow::anyhow!("jeff"))?;
// Encode the image at a specified quality 0-100
let webp: WebPMemory = encoder.encode(THUMBNAIL_QUALITY);
// Encode the image at a specified quality 0-100
let webp: WebPMemory = encoder.encode(THUMBNAIL_QUALITY);
println!("Writing to {}", output_path.display());
println!("Writing to {}", output_path.display());
std::fs::write(&output_path, &*webp)?;
std::fs::write(&output_path, &*webp)?;
Ok(())
Ok(())
}
pub async fn get_images(
ctx: &CoreContext,
location_id: i32,
path: &str,
ctx: &CoreContext,
location_id: i32,
path: &str,
) -> Result<Vec<file_path::Data>> {
let mut params = vec![
file_path::location_id::equals(location_id),
file_path::extension::in_vec(vec![
"png".to_string(),
"jpeg".to_string(),
"jpg".to_string(),
"gif".to_string(),
"webp".to_string(),
]),
];
let mut params = vec![
file_path::location_id::equals(location_id),
file_path::extension::in_vec(vec![
"png".to_string(),
"jpeg".to_string(),
"jpg".to_string(),
"gif".to_string(),
"webp".to_string(),
]),
];
if !path.is_empty() {
params.push(file_path::materialized_path::starts_with(path.to_string()))
}
if !path.is_empty() {
params.push(file_path::materialized_path::starts_with(path.to_string()))
}
let image_files = ctx
.database
.file_path()
.find_many(params)
.with(file_path::file::fetch())
.exec()
.await?;
let image_files = ctx
.database
.file_path()
.find_many(params)
.with(file_path::file::fetch())
.exec()
.await?;
Ok(image_files)
Ok(image_files)
}

View file

@ -0,0 +1 @@

View file

@ -0,0 +1 @@

View file

@ -16,62 +16,62 @@ static SAMPLE_COUNT: u64 = 4;
static SAMPLE_SIZE: u64 = 10000;
fn read_at(file: &File, offset: u64, size: u64) -> Result<Vec<u8>> {
let mut buf = vec![0u8; size as usize];
let mut buf = vec![0u8; size as usize];
#[cfg(target_family = "unix")]
file.read_exact_at(&mut buf, offset)?;
#[cfg(target_family = "unix")]
file.read_exact_at(&mut buf, offset)?;
#[cfg(target_family = "windows")]
file.seek_read(&mut buf, offset)?;
#[cfg(target_family = "windows")]
file.seek_read(&mut buf, offset)?;
Ok(buf)
Ok(buf)
}
pub fn generate_cas_id(path: &str, size: u64) -> Result<String> {
// open file reference
let file = File::open(path)?;
// open file reference
let file = File::open(path)?;
let mut context = Context::new(&SHA256);
let mut context = Context::new(&SHA256);
// include the file size in the checksum
context.update(&size.to_le_bytes());
// include the file size in the checksum
context.update(&size.to_le_bytes());
// if size is small enough, just read the whole thing
if SAMPLE_COUNT * SAMPLE_SIZE > size {
let buf = read_at(&file, 0, size.try_into()?)?;
context.update(&buf);
} else {
// loop over samples
for i in 0..SAMPLE_COUNT {
let buf = read_at(&file, (size / SAMPLE_COUNT) * i, SAMPLE_SIZE.try_into()?)?;
context.update(&buf);
}
// sample end of file
let buf = read_at(&file, size - SAMPLE_SIZE, SAMPLE_SIZE.try_into()?)?;
context.update(&buf);
}
// if size is small enough, just read the whole thing
if SAMPLE_COUNT * SAMPLE_SIZE > size {
let buf = read_at(&file, 0, size.try_into()?)?;
context.update(&buf);
} else {
// loop over samples
for i in 0..SAMPLE_COUNT {
let buf = read_at(&file, (size / SAMPLE_COUNT) * i, SAMPLE_SIZE.try_into()?)?;
context.update(&buf);
}
// sample end of file
let buf = read_at(&file, size - SAMPLE_SIZE, SAMPLE_SIZE.try_into()?)?;
context.update(&buf);
}
let digest = context.finish();
let hex = HEXLOWER.encode(digest.as_ref());
let digest = context.finish();
let hex = HEXLOWER.encode(digest.as_ref());
Ok(hex)
Ok(hex)
}
pub fn full_checksum(path: &str) -> Result<String> {
// read file as buffer and convert to digest
let mut reader = BufReader::new(File::open(path).unwrap());
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
context.update(&buffer[..count]);
}
let digest = context.finish();
// create a lowercase hex hash from the digest
let hex = HEXLOWER.encode(digest.as_ref());
// read file as buffer and convert to digest
let mut reader = BufReader::new(File::open(path).unwrap());
let mut context = Context::new(&SHA256);
let mut buffer = [0; 1024];
loop {
let count = reader.read(&mut buffer)?;
if count == 0 {
break;
}
context.update(&buffer[..count]);
}
let digest = context.finish();
// create a lowercase hex hash from the digest
let hex = HEXLOWER.encode(digest.as_ref());
Ok(hex)
Ok(hex)
}

View file

@ -2,22 +2,22 @@ use std::fs;
use crate::job::jobs::JobReportUpdate;
use crate::{
file::FileError,
job::{jobs::Job, worker::WorkerContext},
prisma::{file_path},
CoreContext,
file::FileError,
job::{jobs::Job, worker::WorkerContext},
prisma::file_path,
CoreContext,
};
use anyhow::Result;
use futures::executor::block_on;
use serde::{Deserialize, Serialize};
use prisma_client_rust::Direction;
use serde::{Deserialize, Serialize};
use super::checksum::generate_cas_id;
#[derive(Deserialize, Serialize, Debug)]
pub struct FileCreated {
pub id: i32,
pub cas_id: String,
pub id: i32,
pub cas_id: String,
}
#[derive(Debug)]
@ -25,24 +25,24 @@ pub struct FileIdentifierJob;
#[async_trait::async_trait]
impl Job for FileIdentifierJob {
fn name(&self) -> &'static str {
"file_identifier"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
println!("Identifying files");
let total_count = count_orphan_file_paths(&ctx.core_ctx).await?;
println!("Found {} orphan file paths", total_count);
fn name(&self) -> &'static str {
"file_identifier"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
println!("Identifying files");
let total_count = count_orphan_file_paths(&ctx.core_ctx).await?;
println!("Found {} orphan file paths", total_count);
let task_count = (total_count as f64 / 100f64).ceil() as usize;
let task_count = (total_count as f64 / 100f64).ceil() as usize;
println!("Will process {} tasks", task_count);
println!("Will process {} tasks", task_count);
// update job with total task count based on orphan file_paths count
ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
// update job with total task count based on orphan file_paths count
ctx.progress(vec![JobReportUpdate::TaskCount(task_count)]);
let db = ctx.core_ctx.database.clone();
let db = ctx.core_ctx.database.clone();
let ctx = tokio::task::spawn_blocking(move || {
let ctx = tokio::task::spawn_blocking(move || {
let mut completed: usize = 0;
let mut cursor: i32 = 1;
@ -102,69 +102,68 @@ impl Job for FileIdentifierJob {
ctx
}).await?;
let remaining = count_orphan_file_paths(&ctx.core_ctx).await?;
let remaining = count_orphan_file_paths(&ctx.core_ctx).await?;
println!("Finished with {} files remaining because your code is bad.", remaining);
println!(
"Finished with {} files remaining because your code is bad.",
remaining
);
// if remaining > 0 {
// ctx.core_ctx.spawn_job(Box::new(FileIdentifierJob));
// }
// if remaining > 0 {
// ctx.core_ctx.spawn_job(Box::new(FileIdentifierJob));
// }
Ok(())
}
Ok(())
}
}
#[derive(Deserialize, Serialize, Debug)]
struct CountRes {
count: Option<usize>,
count: Option<usize>,
}
pub async fn count_orphan_file_paths(ctx: &CoreContext) -> Result<usize, FileError> {
let db = &ctx.database;
let files_count = db
._query_raw::<CountRes>(
r#"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE"#,
)
.await?;
Ok(files_count[0].count.unwrap_or(0))
let db = &ctx.database;
let files_count = db
._query_raw::<CountRes>(
r#"SELECT COUNT(*) AS count FROM file_paths WHERE file_id IS NULL AND is_dir IS FALSE"#,
)
.await?;
Ok(files_count[0].count.unwrap_or(0))
}
pub async fn get_orphan_file_paths(
ctx: &CoreContext,
cursor: i32,
ctx: &CoreContext,
cursor: i32,
) -> Result<Vec<file_path::Data>, FileError> {
let db = &ctx.database;
println!("cursor: {:?}", cursor);
let files = db
.file_path()
.find_many(vec![
file_path::file_id::equals(None),
file_path::is_dir::equals(false),
])
.order_by(file_path::id::order(Direction::Asc))
.cursor(file_path::id::cursor(cursor))
.take(100)
.exec()
.await?;
Ok(files)
let db = &ctx.database;
println!("cursor: {:?}", cursor);
let files = db
.file_path()
.find_many(vec![
file_path::file_id::equals(None),
file_path::is_dir::equals(false),
])
.order_by(file_path::id::order(Direction::Asc))
.cursor(file_path::id::cursor(cursor))
.take(100)
.exec()
.await?;
Ok(files)
}
pub fn prepare_file_values(file_path: &file_path::Data) -> Result<String> {
let metadata = fs::metadata(&file_path.materialized_path)?;
let cas_id = {
if !file_path.is_dir {
// TODO: remove unwrap
let mut x = generate_cas_id(&file_path.materialized_path, metadata.len()).unwrap();
x.truncate(16);
x
} else {
"".to_string()
}
};
// TODO: add all metadata
Ok(format!(
"(\"{}\",\"{}\")",
cas_id,
"0"
))
let metadata = fs::metadata(&file_path.materialized_path)?;
let cas_id = {
if !file_path.is_dir {
// TODO: remove unwrap
let mut x = generate_cas_id(&file_path.materialized_path, metadata.len()).unwrap();
x.truncate(16);
x
} else {
"".to_string()
}
};
// TODO: add all metadata
Ok(format!("(\"{}\",\"{}\")", cas_id, "0"))
}

View file

@ -1,62 +1,62 @@
use crate::{
encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, File, FileError},
node::state,
prisma::{file, file_path},
sys::locations::get_location,
CoreContext,
encode::thumb::THUMBNAIL_CACHE_DIR_NAME,
file::{DirectoryWithContents, File, FileError},
node::state,
prisma::{file, file_path},
sys::locations::get_location,
CoreContext,
};
use std::path::Path;
pub async fn open_dir(
ctx: &CoreContext,
location_id: &i32,
path: &str,
ctx: &CoreContext,
location_id: &i32,
path: &str,
) -> Result<DirectoryWithContents, FileError> {
let db = &ctx.database;
let config = state::get();
let db = &ctx.database;
let config = state::get();
// get location
let location = get_location(ctx, location_id.clone()).await?;
// get location
let location = get_location(ctx, location_id.clone()).await?;
let directory = db
.file_path()
.find_first(vec![
file_path::location_id::equals(location.id),
file_path::materialized_path::equals(path.into()),
file_path::is_dir::equals(true),
])
.exec()
.await?
.ok_or(FileError::DirectoryNotFound(path.to_string()))?;
let directory = db
.file_path()
.find_first(vec![
file_path::location_id::equals(location.id),
file_path::materialized_path::equals(path.into()),
file_path::is_dir::equals(true),
])
.exec()
.await?
.ok_or(FileError::DirectoryNotFound(path.to_string()))?;
// TODO: this is incorrect, we need to query on file paths
let files: Vec<File> = db
.file()
.find_many(vec![file::paths::some(vec![file_path::parent_id::equals(
Some(directory.id),
)])])
.exec()
.await?
.into_iter()
.map(Into::into)
.collect();
// TODO: this is incorrect, we need to query on file paths
let files: Vec<File> = db
.file()
.find_many(vec![file::paths::some(vec![file_path::parent_id::equals(
Some(directory.id),
)])])
.exec()
.await?
.into_iter()
.map(Into::into)
.collect();
let mut contents: Vec<File> = vec![];
let mut contents: Vec<File> = vec![];
for mut file in files {
let thumb_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location.id))
.join(file.cas_id.clone())
.with_extension("webp");
for mut file in files {
let thumb_path = Path::new(&config.data_path)
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(format!("{}", location.id))
.join(file.cas_id.clone())
.with_extension("webp");
file.has_thumbnail = thumb_path.exists();
contents.push(file);
}
file.has_thumbnail = thumb_path.exists();
contents.push(file);
}
Ok(DirectoryWithContents {
directory: directory.into(),
contents,
})
Ok(DirectoryWithContents {
directory: directory.into(),
contents,
})
}

View file

@ -1,6 +1,6 @@
use crate::job::{
jobs::{Job, JobReportUpdate},
worker::WorkerContext,
jobs::{Job, JobReportUpdate},
worker::WorkerContext,
};
use anyhow::Result;
@ -12,28 +12,28 @@ pub use {pathctx::PathContext, scan::scan_path};
#[derive(Debug)]
pub struct IndexerJob {
pub path: String,
pub path: String,
}
#[async_trait::async_trait]
impl Job for IndexerJob {
fn name(&self) -> &'static str {
"indexer"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
let core_ctx = ctx.core_ctx.clone();
scan_path(&core_ctx, self.path.as_str(), move |p| {
ctx.progress(
p.iter()
.map(|p| match p.clone() {
ScanProgress::ChunkCount(c) => JobReportUpdate::TaskCount(c),
ScanProgress::SavedChunks(p) => JobReportUpdate::CompletedTaskCount(p),
ScanProgress::Message(m) => JobReportUpdate::Message(m),
})
.collect(),
)
})
.await?;
Ok(())
}
fn name(&self) -> &'static str {
"indexer"
}
async fn run(&self, ctx: WorkerContext) -> Result<()> {
let core_ctx = ctx.core_ctx.clone();
scan_path(&core_ctx, self.path.as_str(), move |p| {
ctx.progress(
p.iter()
.map(|p| match p.clone() {
ScanProgress::ChunkCount(c) => JobReportUpdate::TaskCount(c),
ScanProgress::SavedChunks(p) => JobReportUpdate::CompletedTaskCount(p),
ScanProgress::Message(m) => JobReportUpdate::Message(m),
})
.collect(),
)
})
.await?;
Ok(())
}
}

View file

@ -1,13 +1,13 @@
// PathContext provides the indexer with instructions to handle particular directory structures and identify rich context.
pub struct PathContext {
// an app specific key "com.github.repo"
pub key: String,
pub name: String,
pub is_dir: bool,
// possible file extensions for this path
pub extensions: Vec<String>,
// sub-paths that must be found
pub must_contain_sub_paths: Vec<String>,
// sub-paths that are ignored
pub always_ignored_sub_paths: Option<String>,
// an app specific key "com.github.repo"
pub key: String,
pub name: String,
pub is_dir: bool,
// possible file extensions for this path
pub extensions: Vec<String>,
// sub-paths that must be found
pub must_contain_sub_paths: Vec<String>,
// sub-paths that are ignored
pub always_ignored_sub_paths: Option<String>,
}

View file

@ -10,283 +10,283 @@ use walkdir::{DirEntry, WalkDir};
#[derive(Clone)]
pub enum ScanProgress {
ChunkCount(usize),
SavedChunks(usize),
Message(String),
ChunkCount(usize),
SavedChunks(usize),
Message(String),
}
static BATCH_SIZE: usize = 100;
// creates a vector of valid path buffers from a directory
pub async fn scan_path(
ctx: &CoreContext,
path: &str,
on_progress: impl Fn(Vec<ScanProgress>) + Send + Sync + 'static,
ctx: &CoreContext,
path: &str,
on_progress: impl Fn(Vec<ScanProgress>) + Send + Sync + 'static,
) -> Result<()> {
let db = &ctx.database;
let path = path.to_string();
let db = &ctx.database;
let path = path.to_string();
let location = create_location(&ctx, &path).await?;
let location = create_location(&ctx, &path).await?;
// query db for the highest id, so we can increment it for the new files indexed
#[derive(Deserialize, Serialize, Debug)]
struct QueryRes {
id: Option<i32>,
}
// grab the next id so we can increment in memory for batch inserting
let first_file_id = match db
._query_raw::<QueryRes>(r#"SELECT MAX(id) id FROM file_paths"#)
.await
{
Ok(rows) => rows[0].id.unwrap_or(0),
Err(e) => Err(anyhow!("Error querying for next file id: {}", e))?,
};
// query db for the highest id, so we can increment it for the new files indexed
#[derive(Deserialize, Serialize, Debug)]
struct QueryRes {
id: Option<i32>,
}
// grab the next id so we can increment in memory for batch inserting
let first_file_id = match db
._query_raw::<QueryRes>(r#"SELECT MAX(id) id FROM file_paths"#)
.await
{
Ok(rows) => rows[0].id.unwrap_or(0),
Err(e) => Err(anyhow!("Error querying for next file id: {}", e))?,
};
// check if the path is a directory
if !PathBuf::from(&path).is_dir() {
return Err(anyhow::anyhow!("{} is not a directory", &path));
}
let dir_path = path.clone();
// check if the path is a directory
if !PathBuf::from(&path).is_dir() {
return Err(anyhow::anyhow!("{} is not a directory", &path));
}
let dir_path = path.clone();
// spawn a dedicated thread to scan the directory for performance
let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
// store every valid path discovered
let mut paths: Vec<(PathBuf, i32, Option<i32>, bool)> = Vec::new();
// store a hashmap of directories to their file ids for fast lookup
let mut dirs: HashMap<String, i32> = HashMap::new();
// begin timer for logging purposes
let scan_start = Instant::now();
// spawn a dedicated thread to scan the directory for performance
let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
// store every valid path discovered
let mut paths: Vec<(PathBuf, i32, Option<i32>, bool)> = Vec::new();
// store a hashmap of directories to their file ids for fast lookup
let mut dirs: HashMap<String, i32> = HashMap::new();
// begin timer for logging purposes
let scan_start = Instant::now();
let mut next_file_id = first_file_id;
let mut get_id = || {
next_file_id += 1;
next_file_id
};
// walk through directory recursively
for entry in WalkDir::new(&dir_path).into_iter().filter_entry(|dir| {
let approved =
!is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
approved
}) {
// extract directory entry or log and continue if failed
let entry = match entry {
Ok(entry) => entry,
Err(e) => {
println!("Error reading file {}", e);
continue;
}
};
let path = entry.path();
let mut next_file_id = first_file_id;
let mut get_id = || {
next_file_id += 1;
next_file_id
};
// walk through directory recursively
for entry in WalkDir::new(&dir_path).into_iter().filter_entry(|dir| {
let approved =
!is_hidden(dir) && !is_app_bundle(dir) && !is_node_modules(dir) && !is_library(dir);
approved
}) {
// extract directory entry or log and continue if failed
let entry = match entry {
Ok(entry) => entry,
Err(e) => {
println!("Error reading file {}", e);
continue;
}
};
let path = entry.path();
println!("found: {:?}", path);
println!("found: {:?}", path);
let parent_path = path
.parent()
.unwrap_or(Path::new(""))
.to_str()
.unwrap_or("");
let parent_dir_id = dirs.get(&*parent_path);
let parent_path = path
.parent()
.unwrap_or(Path::new(""))
.to_str()
.unwrap_or("");
let parent_dir_id = dirs.get(&*parent_path);
let str = match path.as_os_str().to_str() {
Some(str) => str,
None => {
println!("Error reading file {}", &path.display());
continue;
}
};
let str = match path.as_os_str().to_str() {
Some(str) => str,
None => {
println!("Error reading file {}", &path.display());
continue;
}
};
on_progress(vec![
ScanProgress::Message(format!("{}", str)),
ScanProgress::ChunkCount(paths.len() / BATCH_SIZE),
]);
on_progress(vec![
ScanProgress::Message(format!("{}", str)),
ScanProgress::ChunkCount(paths.len() / BATCH_SIZE),
]);
let file_id = get_id();
let file_type = entry.file_type();
let is_dir = file_type.is_dir();
let file_id = get_id();
let file_type = entry.file_type();
let is_dir = file_type.is_dir();
if is_dir || file_type.is_file() {
paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir));
}
if is_dir || file_type.is_file() {
paths.push((path.to_owned(), file_id, parent_dir_id.cloned(), is_dir));
}
if is_dir {
let _path = match path.to_str() {
Some(path) => path.to_owned(),
None => continue,
};
dirs.insert(_path, file_id);
}
}
(paths, scan_start, on_progress)
})
.await
.unwrap();
if is_dir {
let _path = match path.to_str() {
Some(path) => path.to_owned(),
None => continue,
};
dirs.insert(_path, file_id);
}
}
(paths, scan_start, on_progress)
})
.await
.unwrap();
let db_write_start = Instant::now();
let scan_read_time = scan_start.elapsed();
let db_write_start = Instant::now();
let scan_read_time = scan_start.elapsed();
for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() {
on_progress(vec![
ScanProgress::SavedChunks(i as usize),
ScanProgress::Message(format!(
"Writing {} of {} to library",
i * chunk.len(),
paths.len(),
)),
]);
for (i, chunk) in paths.chunks(BATCH_SIZE).enumerate() {
on_progress(vec![
ScanProgress::SavedChunks(i as usize),
ScanProgress::Message(format!(
"Writing {} of {} to library",
i * chunk.len(),
paths.len(),
)),
]);
// vector to store active models
let mut files: Vec<String> = Vec::new();
for (file_path, file_id, parent_dir_id, is_dir) in chunk {
files.push(
match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) {
Ok(file) => file,
Err(e) => {
println!("Error creating file model from path {:?}: {}", file_path, e);
continue;
}
},
);
}
let raw_sql = format!(
r#"
// vector to store active models
let mut files: Vec<String> = Vec::new();
for (file_path, file_id, parent_dir_id, is_dir) in chunk {
files.push(
match prepare_values(&file_path, *file_id, &location, parent_dir_id, *is_dir) {
Ok(file) => file,
Err(e) => {
println!("Error creating file model from path {:?}: {}", file_path, e);
continue;
}
},
);
}
let raw_sql = format!(
r#"
INSERT INTO file_paths (id, is_dir, location_id, materialized_path, name, extension, parent_id)
VALUES {}
"#,
files.join(", ")
);
// println!("{}", raw_sql);
let count = db._execute_raw(&raw_sql).await;
println!("Inserted {:?} records", count);
}
println!(
"scan of {:?} completed in {:?}. {:?} files found. db write completed in {:?}",
&path,
scan_read_time,
paths.len(),
db_write_start.elapsed()
);
Ok(())
files.join(", ")
);
// println!("{}", raw_sql);
let count = db._execute_raw(&raw_sql).await;
println!("Inserted {:?} records", count);
}
println!(
"scan of {:?} completed in {:?}. {:?} files found. db write completed in {:?}",
&path,
scan_read_time,
paths.len(),
db_write_start.elapsed()
);
Ok(())
}
// reads a file at a path and creates an ActiveModel with metadata
fn prepare_values(
file_path: &PathBuf,
id: i32,
location: &LocationResource,
parent_id: &Option<i32>,
is_dir: bool,
file_path: &PathBuf,
id: i32,
location: &LocationResource,
parent_id: &Option<i32>,
is_dir: bool,
) -> Result<String> {
// let metadata = fs::metadata(&file_path)?;
let location_path = location.path.as_ref().unwrap().as_str();
// let size = metadata.len();
let name;
let extension;
// let metadata = fs::metadata(&file_path)?;
let location_path = location.path.as_ref().unwrap().as_str();
// let size = metadata.len();
let name;
let extension;
// if the 'file_path' is not a directory, then get the extension and name.
// if the 'file_path' is not a directory, then get the extension and name.
// if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names
// - being interpreted as file extensions
if is_dir {
extension = "".to_string();
name = extract_name(file_path.file_name());
} else {
extension = extract_name(file_path.extension());
name = extract_name(file_path.file_stem());
}
// if 'file_path' is a directory, set extension to an empty string to avoid periods in folder names
// - being interpreted as file extensions
if is_dir {
extension = "".to_string();
name = extract_name(file_path.file_name());
} else {
extension = extract_name(file_path.extension());
name = extract_name(file_path.file_stem());
}
let materialized_path = match file_path.to_str() {
Some(p) => p
.clone()
.strip_prefix(&location_path)
// .and_then(|p| p.strip_suffix(format!("{}{}", name, extension).as_str()))
.unwrap_or_default(),
None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())),
};
let materialized_path = match file_path.to_str() {
Some(p) => p
.clone()
.strip_prefix(&location_path)
// .and_then(|p| p.strip_suffix(format!("{}{}", name, extension).as_str()))
.unwrap_or_default(),
None => return Err(anyhow!("{}", file_path.to_str().unwrap_or_default())),
};
// let cas_id = {
// if !metadata.is_dir() {
// // TODO: remove unwrap, skip and make sure to continue loop
// let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
// x.truncate(16);
// x
// } else {
// "".to_string()
// }
// };
// let cas_id = {
// if !metadata.is_dir() {
// // TODO: remove unwrap, skip and make sure to continue loop
// let mut x = generate_cas_id(&file_path.to_str().unwrap(), metadata.len()).unwrap();
// x.truncate(16);
// x
// } else {
// "".to_string()
// }
// };
// let date_created: DateTime<Utc> = metadata.created().unwrap().into();
// let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
// let date_created: DateTime<Utc> = metadata.created().unwrap().into();
// let parsed_date_created = date_created.to_rfc3339_opts(SecondsFormat::Millis, true);
let values = format!(
"({}, {}, {}, \"{}\", \"{}\", \"{}\", {})",
id,
is_dir,
location.id,
materialized_path,
name,
extension.to_lowercase(),
parent_id
.clone()
.map(|id| format!("\"{}\"", &id))
.unwrap_or("NULL".to_string()),
// parsed_date_created,
// cas_id
);
let values = format!(
"({}, {}, {}, \"{}\", \"{}\", \"{}\", {})",
id,
is_dir,
location.id,
materialized_path,
name,
extension.to_lowercase(),
parent_id
.clone()
.map(|id| format!("\"{}\"", &id))
.unwrap_or("NULL".to_string()),
// parsed_date_created,
// cas_id
);
println!("{}", values);
println!("{}", values);
Ok(values)
Ok(values)
}
// extract name from the OsStr returned by PathBuf
fn extract_name(os_string: Option<&OsStr>) -> String {
os_string
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_owned()
os_string
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.to_owned()
}
fn is_hidden(entry: &DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.starts_with("."))
.unwrap_or(false)
entry
.file_name()
.to_str()
.map(|s| s.starts_with("."))
.unwrap_or(false)
}
fn is_library(entry: &DirEntry) -> bool {
entry
.path()
.to_str()
// make better this is shit
.map(|s| s.contains("/Library/"))
.unwrap_or(false)
entry
.path()
.to_str()
// make better this is shit
.map(|s| s.contains("/Library/"))
.unwrap_or(false)
}
fn is_node_modules(entry: &DirEntry) -> bool {
entry
.file_name()
.to_str()
.map(|s| s.contains("node_modules"))
.unwrap_or(false)
entry
.file_name()
.to_str()
.map(|s| s.contains("node_modules"))
.unwrap_or(false)
}
fn is_app_bundle(entry: &DirEntry) -> bool {
let is_dir = entry.metadata().unwrap().is_dir();
let contains_dot = entry
.file_name()
.to_str()
.map(|s| s.contains(".app") | s.contains(".bundle"))
.unwrap_or(false);
let is_dir = entry.metadata().unwrap().is_dir();
let contains_dot = entry
.file_name()
.to_str()
.map(|s| s.contains(".app") | s.contains(".bundle"))
.unwrap_or(false);
let is_app_bundle = is_dir && contains_dot;
// if is_app_bundle {
// let path_buff = entry.path();
// let path = path_buff.to_str().unwrap();
let is_app_bundle = is_dir && contains_dot;
// if is_app_bundle {
// let path_buff = entry.path();
// let path = path_buff.to_str().unwrap();
// self::path(&path, );
// }
// self::path(&path, );
// }
is_app_bundle
is_app_bundle
}

View file

@ -4,9 +4,9 @@ use thiserror::Error;
use ts_rs::TS;
use crate::{
crypto::encryption::EncryptionAlgorithm,
prisma::{self, file, file_path},
sys::SysError,
crypto::encryption::EncryptionAlgorithm,
prisma::{self, file, file_path},
sys::SysError,
};
pub mod cas;
pub mod explorer;
@ -17,133 +17,133 @@ pub mod watcher;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct File {
pub id: i32,
pub cas_id: String,
pub integrity_checksum: Option<String>,
pub size_in_bytes: String,
pub kind: FileKind,
pub id: i32,
pub cas_id: String,
pub integrity_checksum: Option<String>,
pub size_in_bytes: String,
pub kind: FileKind,
pub hidden: bool,
pub favorite: bool,
pub important: bool,
pub has_thumbnail: bool,
pub has_thumbstrip: bool,
pub has_video_preview: bool,
// pub encryption: EncryptionAlgorithm,
pub ipfs_id: Option<String>,
pub comment: Option<String>,
pub hidden: bool,
pub favorite: bool,
pub important: bool,
pub has_thumbnail: bool,
pub has_thumbstrip: bool,
pub has_video_preview: bool,
// pub encryption: EncryptionAlgorithm,
pub ipfs_id: Option<String>,
pub comment: Option<String>,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>,
pub paths: Vec<FilePath>,
// pub media_data: Option<MediaData>,
// pub tags: Vec<Tag>,
// pub label: Vec<Label>,
pub paths: Vec<FilePath>,
// pub media_data: Option<MediaData>,
// pub tags: Vec<Tag>,
// pub label: Vec<Label>,
}
// A physical file path
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct FilePath {
pub id: i32,
pub is_dir: bool,
pub location_id: i32,
pub materialized_path: String,
pub name: String,
pub extension: Option<String>,
pub file_id: Option<i32>,
pub parent_id: Option<i32>,
// pub temp_cas_id: Option<String>,
pub has_local_thumbnail: bool,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>,
pub id: i32,
pub is_dir: bool,
pub location_id: i32,
pub materialized_path: String,
pub name: String,
pub extension: Option<String>,
pub file_id: Option<i32>,
pub parent_id: Option<i32>,
// pub temp_cas_id: Option<String>,
pub has_local_thumbnail: bool,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>,
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum FileKind {
Unknown = 0,
Directory = 1,
Package = 2,
Archive = 3,
Image = 4,
Video = 5,
Audio = 6,
Plaintext = 7,
Alias = 8,
Unknown = 0,
Directory = 1,
Package = 2,
Archive = 3,
Image = 4,
Video = 5,
Audio = 6,
Plaintext = 7,
Alias = 8,
}
impl Into<File> for file::Data {
fn into(self) -> File {
File {
id: self.id,
cas_id: self.cas_id,
integrity_checksum: self.integrity_checksum,
kind: IntEnum::from_int(self.kind).unwrap(),
size_in_bytes: self.size_in_bytes.to_string(),
// encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
ipfs_id: self.ipfs_id,
hidden: self.hidden,
favorite: self.favorite,
important: self.important,
has_thumbnail: self.has_thumbnail,
has_thumbstrip: self.has_thumbstrip,
has_video_preview: self.has_video_preview,
comment: self.comment,
date_created: self.date_created,
date_modified: self.date_modified,
date_indexed: self.date_indexed,
paths: vec![],
}
}
fn into(self) -> File {
File {
id: self.id,
cas_id: self.cas_id,
integrity_checksum: self.integrity_checksum,
kind: IntEnum::from_int(self.kind).unwrap(),
size_in_bytes: self.size_in_bytes.to_string(),
// encryption: EncryptionAlgorithm::from_int(self.encryption).unwrap(),
ipfs_id: self.ipfs_id,
hidden: self.hidden,
favorite: self.favorite,
important: self.important,
has_thumbnail: self.has_thumbnail,
has_thumbstrip: self.has_thumbstrip,
has_video_preview: self.has_video_preview,
comment: self.comment,
date_created: self.date_created,
date_modified: self.date_modified,
date_indexed: self.date_indexed,
paths: vec![],
}
}
}
impl Into<FilePath> for file_path::Data {
fn into(self) -> FilePath {
FilePath {
id: self.id,
is_dir: self.is_dir,
materialized_path: self.materialized_path,
file_id: self.file_id,
parent_id: self.parent_id,
location_id: self.location_id,
date_indexed: self.date_indexed,
// permissions: self.permissions,
has_local_thumbnail: false,
name: self.name,
extension: self.extension,
// temp_cas_id: self.temp_cas_id,
date_created: self.date_created,
date_modified: self.date_modified,
}
}
fn into(self) -> FilePath {
FilePath {
id: self.id,
is_dir: self.is_dir,
materialized_path: self.materialized_path,
file_id: self.file_id,
parent_id: self.parent_id,
location_id: self.location_id,
date_indexed: self.date_indexed,
// permissions: self.permissions,
has_local_thumbnail: false,
name: self.name,
extension: self.extension,
// temp_cas_id: self.temp_cas_id,
date_created: self.date_created,
date_modified: self.date_modified,
}
}
}
#[derive(Serialize, Deserialize, TS, Debug)]
#[ts(export)]
pub struct DirectoryWithContents {
pub directory: FilePath,
pub contents: Vec<File>,
pub directory: FilePath,
pub contents: Vec<File>,
}
#[derive(Error, Debug)]
pub enum FileError {
#[error("Directory not found (path: {0:?})")]
DirectoryNotFound(String),
#[error("File not found (path: {0:?})")]
FileNotFound(String),
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("System error")]
SysError(#[from] SysError),
#[error("Directory not found (path: {0:?})")]
DirectoryNotFound(String),
#[error("File not found (path: {0:?})")]
FileNotFound(String),
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("System error")]
SysError(#[from] SysError),
}
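
FileKind is persisted as its #[repr(i32)] discriminant and recovered with int_enum in the Into<File> impl above. A small illustrative sketch of that round-trip, not part of the diff, assuming FileKind and int_enum::IntEnum are in scope:

use int_enum::IntEnum;

fn demo_file_kind_round_trip() {
	// stored exactly as the database holds the `kind` value
	let stored: i32 = FileKind::Image.int_value();
	assert_eq!(stored, 4);
	// the same call the Into<File> impl makes (that impl unwraps; here the error is surfaced)
	let kind = FileKind::from_int(stored).expect("unrecognised FileKind discriminant");
	assert_eq!(kind, FileKind::Image);
}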

View file

@@ -1,25 +1,25 @@
use std::path::Path;
use hotwatch::{
blocking::{Flow, Hotwatch},
Event,
blocking::{Flow, Hotwatch},
Event,
};
pub fn watch_dir(path: &str) {
let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!");
hotwatch
.watch(&path, |event: Event| {
if let Event::Write(path) = event {
println!("{:?} changed!", path);
// Flow::Exit
Flow::Continue
} else {
Flow::Continue
}
})
.expect("failed to watch file!");
let mut hotwatch = Hotwatch::new().expect("hotwatch failed to initialize!");
hotwatch
.watch(&path, |event: Event| {
if let Event::Write(path) = event {
println!("{:?} changed!", path);
// Flow::Exit
Flow::Continue
} else {
Flow::Continue
}
})
.expect("failed to watch file!");
hotwatch.run();
hotwatch.run();
println!("watching directory {:?}", Path::new(&path));
println!("watching directory {:?}", Path::new(&path));
}
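
watch_dir relies on hotwatch's blocking API: hotwatch.run() does not return under normal operation, so a caller would typically hand it a dedicated thread. A hedged usage sketch; the path is an assumption, and the module path assumes this file is file/watcher.rs as the pub mod watcher above suggests:

fn start_watcher_thread() {
	std::thread::spawn(|| {
		// blocks this thread for the lifetime of the watcher
		crate::file::watcher::watch_dir("/tmp/spacedrive-demo");
	});
}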

View file

@@ -1,12 +1,12 @@
use super::{
worker::{Worker, WorkerContext},
JobError,
worker::{Worker, WorkerContext},
JobError,
};
use crate::{
node::state,
prisma::{job, node},
sync::{crdt::Replicate, engine::SyncContext},
CoreContext,
node::state,
prisma::{job, node},
sync::{crdt::Replicate, engine::SyncContext},
CoreContext,
};
use anyhow::Result;
use int_enum::IntEnum;
@@ -19,166 +19,164 @@ const MAX_WORKERS: usize = 4;
#[async_trait::async_trait]
pub trait Job: Send + Sync + Debug {
async fn run(&self, ctx: WorkerContext) -> Result<()>;
fn name(&self) -> &'static str;
async fn run(&self, ctx: WorkerContext) -> Result<()>;
fn name(&self) -> &'static str;
}
// jobs struct is maintained by the core
pub struct Jobs {
job_queue: Vec<Box<dyn Job>>,
// workers are spawned when jobs are picked off the queue
running_workers: HashMap<String, Arc<Mutex<Worker>>>,
job_queue: Vec<Box<dyn Job>>,
// workers are spawned when jobs are picked off the queue
running_workers: HashMap<String, Arc<Mutex<Worker>>>,
}
impl Jobs {
pub fn new() -> Self {
Self {
job_queue: vec![],
running_workers: HashMap::new(),
}
}
pub async fn ingest(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
// create worker to process job
if self.running_workers.len() < MAX_WORKERS {
let worker = Worker::new(job);
let id = worker.id();
pub fn new() -> Self {
Self {
job_queue: vec![],
running_workers: HashMap::new(),
}
}
pub async fn ingest(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
// create worker to process job
if self.running_workers.len() < MAX_WORKERS {
let worker = Worker::new(job);
let id = worker.id();
let wrapped_worker = Arc::new(Mutex::new(worker));
let wrapped_worker = Arc::new(Mutex::new(worker));
Worker::spawn(wrapped_worker.clone(), ctx).await;
Worker::spawn(wrapped_worker.clone(), ctx).await;
self.running_workers.insert(id, wrapped_worker);
} else {
self.job_queue.push(job);
}
}
pub fn ingest_queue(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
self.job_queue.push(job);
}
pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
// remove worker from running workers
self.running_workers.remove(&job_id);
// continue queue
let job = self.job_queue.pop();
if let Some(job) = job {
self.ingest(ctx, job).await;
}
}
pub async fn get_running(&self) -> Vec<JobReport> {
let mut ret = vec![];
self.running_workers.insert(id, wrapped_worker);
} else {
self.job_queue.push(job);
}
}
pub fn ingest_queue(&mut self, ctx: &CoreContext, job: Box<dyn Job>) {
self.job_queue.push(job);
}
pub async fn complete(&mut self, ctx: &CoreContext, job_id: String) {
// remove worker from running workers
self.running_workers.remove(&job_id);
// continue queue
let job = self.job_queue.pop();
if let Some(job) = job {
self.ingest(ctx, job).await;
}
}
pub async fn get_running(&self) -> Vec<JobReport> {
let mut ret = vec![];
for worker in self.running_workers.values() {
let worker = worker.lock().await;
ret.push(worker.job_report.clone());
}
ret
}
pub async fn get_history(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> {
let db = &ctx.database;
let jobs = db
.job()
.find_many(vec![job::status::not(JobStatus::Running.int_value())])
.exec()
.await?;
for worker in self.running_workers.values() {
let worker = worker.lock().await;
ret.push(worker.job_report.clone());
}
ret
}
pub async fn get_history(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> {
let db = &ctx.database;
let jobs = db
.job()
.find_many(vec![job::status::not(JobStatus::Running.int_value())])
.exec()
.await?;
Ok(jobs.into_iter().map(|j| j.into()).collect())
}
Ok(jobs.into_iter().map(|j| j.into()).collect())
}
}
#[derive(Debug)]
pub enum JobReportUpdate {
TaskCount(usize),
CompletedTaskCount(usize),
Message(String),
SecondsElapsed(u64),
TaskCount(usize),
CompletedTaskCount(usize),
Message(String),
SecondsElapsed(u64),
}
#[derive(Debug, Serialize, Deserialize, TS, Clone)]
#[ts(export)]
pub struct JobReport {
pub id: String,
pub name: String,
// client_id: i32,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
pub id: String,
pub name: String,
// client_id: i32,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
pub status: JobStatus,
pub task_count: i32,
pub completed_task_count: i32,
pub status: JobStatus,
pub task_count: i32,
pub completed_task_count: i32,
pub message: String,
// pub percentage_complete: f64,
#[ts(type = "string")]
pub seconds_elapsed: i32,
pub message: String,
// pub percentage_complete: f64,
#[ts(type = "string")]
pub seconds_elapsed: i32,
}
// convert database struct into a resource struct
impl Into<JobReport> for job::Data {
fn into(self) -> JobReport {
JobReport {
id: self.id,
name: self.name,
// client_id: self.client_id,
status: JobStatus::from_int(self.status).unwrap(),
task_count: self.task_count,
completed_task_count: self.completed_task_count,
date_created: self.date_created,
date_modified: self.date_modified,
message: String::new(),
seconds_elapsed: self.seconds_elapsed,
}
}
fn into(self) -> JobReport {
JobReport {
id: self.id,
name: self.name,
// client_id: self.client_id,
status: JobStatus::from_int(self.status).unwrap(),
task_count: self.task_count,
completed_task_count: self.completed_task_count,
date_created: self.date_created,
date_modified: self.date_modified,
message: String::new(),
seconds_elapsed: self.seconds_elapsed,
}
}
}
impl JobReport {
pub fn new(uuid: String, name: String) -> Self {
Self {
id: uuid,
name,
// client_id: 0,
date_created: chrono::Utc::now(),
date_modified: chrono::Utc::now(),
status: JobStatus::Queued,
task_count: 0,
completed_task_count: 0,
message: String::new(),
seconds_elapsed: 0,
}
}
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
let config = state::get();
ctx
.database
.job()
.create(
job::id::set(self.id.clone()),
job::name::set(self.name.clone()),
job::action::set(1),
job::nodes::link(node::id::equals(config.node_id)),
vec![],
)
.exec()
.await?;
Ok(())
}
pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
ctx
.database
.job()
.find_unique(job::id::equals(self.id.clone()))
.update(vec![
job::status::set(self.status.int_value()),
job::task_count::set(self.task_count),
job::completed_task_count::set(self.completed_task_count),
job::date_modified::set(chrono::Utc::now()),
job::seconds_elapsed::set(self.seconds_elapsed),
])
.exec()
.await?;
Ok(())
}
pub fn new(uuid: String, name: String) -> Self {
Self {
id: uuid,
name,
// client_id: 0,
date_created: chrono::Utc::now(),
date_modified: chrono::Utc::now(),
status: JobStatus::Queued,
task_count: 0,
completed_task_count: 0,
message: String::new(),
seconds_elapsed: 0,
}
}
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
let config = state::get();
ctx.database
.job()
.create(
job::id::set(self.id.clone()),
job::name::set(self.name.clone()),
job::action::set(1),
job::nodes::link(node::id::equals(config.node_id)),
vec![],
)
.exec()
.await?;
Ok(())
}
pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
ctx.database
.job()
.find_unique(job::id::equals(self.id.clone()))
.update(vec![
job::status::set(self.status.int_value()),
job::task_count::set(self.task_count),
job::completed_task_count::set(self.completed_task_count),
job::date_modified::set(chrono::Utc::now()),
job::seconds_elapsed::set(self.seconds_elapsed),
])
.exec()
.await?;
Ok(())
}
}
#[derive(Clone)]
@@ -186,19 +184,19 @@ pub struct JobReportCreate {}
#[async_trait::async_trait]
impl Replicate for JobReport {
type Create = JobReportCreate;
type Create = JobReportCreate;
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum JobStatus {
Queued = 0,
Running = 1,
Completed = 2,
Canceled = 3,
Failed = 4,
Queued = 0,
Running = 1,
Completed = 2,
Canceled = 3,
Failed = 4,
}
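
For context on the trait at the top of this file: a Job only has to provide name() and an async run() that receives a WorkerContext (defined in worker.rs below). A minimal hedged sketch of an implementor follows; ExampleJob and its body are hypothetical, but the trait shape, the progress() call and the JobReportUpdate variants are the ones shown in this diff, and the sketch assumes this file's existing use statements:

// Assumes the imports already present in this file (Job, WorkerContext,
// JobReportUpdate, anyhow::Result, async_trait).
#[derive(Debug)]
struct ExampleJob;

#[async_trait::async_trait]
impl Job for ExampleJob {
	fn name(&self) -> &'static str {
		"example_job"
	}

	async fn run(&self, ctx: WorkerContext) -> Result<()> {
		ctx.progress(vec![JobReportUpdate::TaskCount(1)]);
		// ... the actual work would happen here ...
		ctx.progress(vec![JobReportUpdate::CompletedTaskCount(1)]);
		Ok(())
	}
}

Handing such a job to CoreContext::spawn_job (lib.rs below) lands it in Jobs::ingest, which runs it immediately while fewer than MAX_WORKERS (4) workers are active and pushes it onto job_queue otherwise.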

View file

@@ -8,8 +8,8 @@ pub mod worker;
#[derive(Error, Debug)]
pub enum JobError {
#[error("Failed to create job (job_id {job_id:?})")]
CreateFailure { job_id: String },
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("Failed to create job (job_id {job_id:?})")]
CreateFailure { job_id: String },
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
}

View file

@@ -2,190 +2,186 @@ use super::jobs::{JobReport, JobReportUpdate, JobStatus};
use crate::{ClientQuery, CoreContext, CoreEvent, InternalEvent, Job};
use std::{sync::Arc, time::Duration};
use tokio::{
sync::{
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
Mutex,
},
time::{sleep, Instant},
sync::{
mpsc::{unbounded_channel, UnboundedReceiver, UnboundedSender},
Mutex,
},
time::{sleep, Instant},
};
// used to update the worker state from inside the worker thread
pub enum WorkerEvent {
Progressed(Vec<JobReportUpdate>),
Completed,
Failed,
Progressed(Vec<JobReportUpdate>),
Completed,
Failed,
}
enum WorkerState {
Pending(Box<dyn Job>, UnboundedReceiver<WorkerEvent>),
Running,
Pending(Box<dyn Job>, UnboundedReceiver<WorkerEvent>),
Running,
}
#[derive(Clone)]
pub struct WorkerContext {
pub uuid: String,
pub core_ctx: CoreContext,
pub sender: UnboundedSender<WorkerEvent>,
pub uuid: String,
pub core_ctx: CoreContext,
pub sender: UnboundedSender<WorkerEvent>,
}
impl WorkerContext {
pub fn progress(&self, updates: Vec<JobReportUpdate>) {
self
.sender
.send(WorkerEvent::Progressed(updates))
.unwrap_or(());
}
pub fn progress(&self, updates: Vec<JobReportUpdate>) {
self.sender
.send(WorkerEvent::Progressed(updates))
.unwrap_or(());
}
}
// a worker is a dedicated thread that runs a single job
// once the job is complete the worker will exit
pub struct Worker {
pub job_report: JobReport,
state: WorkerState,
worker_sender: UnboundedSender<WorkerEvent>,
pub job_report: JobReport,
state: WorkerState,
worker_sender: UnboundedSender<WorkerEvent>,
}
impl Worker {
pub fn new(job: Box<dyn Job>) -> Self {
let (worker_sender, worker_receiver) = unbounded_channel();
let uuid = uuid::Uuid::new_v4().to_string();
let name = job.name();
pub fn new(job: Box<dyn Job>) -> Self {
let (worker_sender, worker_receiver) = unbounded_channel();
let uuid = uuid::Uuid::new_v4().to_string();
let name = job.name();
Self {
state: WorkerState::Pending(job, worker_receiver),
job_report: JobReport::new(uuid, name.to_string()),
worker_sender,
}
}
// spawns a thread and extracts channel sender to communicate with it
pub async fn spawn(worker: Arc<Mutex<Self>>, ctx: &CoreContext) {
// we capture the worker receiver channel so state can be updated from inside the worker
let mut worker_mut = worker.lock().await;
// extract owned job and receiver from Self
let (job, worker_receiver) =
match std::mem::replace(&mut worker_mut.state, WorkerState::Running) {
WorkerState::Pending(job, worker_receiver) => {
worker_mut.state = WorkerState::Running;
(job, worker_receiver)
}
WorkerState::Running => unreachable!(),
};
let worker_sender = worker_mut.worker_sender.clone();
let core_ctx = ctx.clone();
Self {
state: WorkerState::Pending(job, worker_receiver),
job_report: JobReport::new(uuid, name.to_string()),
worker_sender,
}
}
// spawns a thread and extracts channel sender to communicate with it
pub async fn spawn(worker: Arc<Mutex<Self>>, ctx: &CoreContext) {
// we capture the worker receiver channel so state can be updated from inside the worker
let mut worker_mut = worker.lock().await;
// extract owned job and receiver from Self
let (job, worker_receiver) =
match std::mem::replace(&mut worker_mut.state, WorkerState::Running) {
WorkerState::Pending(job, worker_receiver) => {
worker_mut.state = WorkerState::Running;
(job, worker_receiver)
}
WorkerState::Running => unreachable!(),
};
let worker_sender = worker_mut.worker_sender.clone();
let core_ctx = ctx.clone();
worker_mut.job_report.status = JobStatus::Running;
worker_mut.job_report.status = JobStatus::Running;
worker_mut.job_report.create(&ctx).await.unwrap_or(());
worker_mut.job_report.create(&ctx).await.unwrap_or(());
// spawn task to handle receiving events from the worker
tokio::spawn(Worker::track_progress(
worker.clone(),
worker_receiver,
ctx.clone(),
));
// spawn task to handle receiving events from the worker
tokio::spawn(Worker::track_progress(
worker.clone(),
worker_receiver,
ctx.clone(),
));
let uuid = worker_mut.job_report.id.clone();
// spawn task to handle running the job
tokio::spawn(async move {
let worker_ctx = WorkerContext {
uuid,
core_ctx,
sender: worker_sender,
};
let job_start = Instant::now();
let uuid = worker_mut.job_report.id.clone();
// spawn task to handle running the job
tokio::spawn(async move {
let worker_ctx = WorkerContext {
uuid,
core_ctx,
sender: worker_sender,
};
let job_start = Instant::now();
// track time
let sender = worker_ctx.sender.clone();
tokio::spawn(async move {
loop {
let elapsed = job_start.elapsed().as_secs();
sender
.send(WorkerEvent::Progressed(vec![
JobReportUpdate::SecondsElapsed(elapsed),
]))
.unwrap_or(());
sleep(Duration::from_millis(1000)).await;
}
});
// track time
let sender = worker_ctx.sender.clone();
tokio::spawn(async move {
loop {
let elapsed = job_start.elapsed().as_secs();
sender
.send(WorkerEvent::Progressed(vec![
JobReportUpdate::SecondsElapsed(elapsed),
]))
.unwrap_or(());
sleep(Duration::from_millis(1000)).await;
}
});
let result = job.run(worker_ctx.clone()).await;
let result = job.run(worker_ctx.clone()).await;
if let Err(e) = result {
println!("job failed {:?}", e);
worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
} else {
// handle completion
worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
}
worker_ctx
.core_ctx
.internal_sender
.send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
.unwrap_or(());
});
}
if let Err(e) = result {
println!("job failed {:?}", e);
worker_ctx.sender.send(WorkerEvent::Failed).unwrap_or(());
} else {
// handle completion
worker_ctx.sender.send(WorkerEvent::Completed).unwrap_or(());
}
worker_ctx
.core_ctx
.internal_sender
.send(InternalEvent::JobComplete(worker_ctx.uuid.clone()))
.unwrap_or(());
});
}
pub fn id(&self) -> String {
self.job_report.id.to_owned()
}
pub fn id(&self) -> String {
self.job_report.id.to_owned()
}
async fn track_progress(
worker: Arc<Mutex<Self>>,
mut channel: UnboundedReceiver<WorkerEvent>,
ctx: CoreContext,
) {
while let Some(command) = channel.recv().await {
let mut worker = worker.lock().await;
async fn track_progress(
worker: Arc<Mutex<Self>>,
mut channel: UnboundedReceiver<WorkerEvent>,
ctx: CoreContext,
) {
while let Some(command) = channel.recv().await {
let mut worker = worker.lock().await;
match command {
WorkerEvent::Progressed(changes) => {
// protect against updates if job is not running
if worker.job_report.status != JobStatus::Running {
continue;
};
for change in changes {
match change {
JobReportUpdate::TaskCount(task_count) => {
worker.job_report.task_count = task_count as i32;
}
JobReportUpdate::CompletedTaskCount(completed_task_count) => {
worker.job_report.completed_task_count = completed_task_count as i32;
}
JobReportUpdate::Message(message) => {
worker.job_report.message = message;
}
JobReportUpdate::SecondsElapsed(seconds) => {
worker.job_report.seconds_elapsed = seconds as i32;
}
}
}
ctx
.emit(CoreEvent::InvalidateQueryDebounced(
ClientQuery::JobGetRunning,
))
.await;
}
WorkerEvent::Completed => {
worker.job_report.status = JobStatus::Completed;
worker.job_report.update(&ctx).await.unwrap_or(());
match command {
WorkerEvent::Progressed(changes) => {
// protect against updates if job is not running
if worker.job_report.status != JobStatus::Running {
continue;
};
for change in changes {
match change {
JobReportUpdate::TaskCount(task_count) => {
worker.job_report.task_count = task_count as i32;
}
JobReportUpdate::CompletedTaskCount(completed_task_count) => {
worker.job_report.completed_task_count =
completed_task_count as i32;
}
JobReportUpdate::Message(message) => {
worker.job_report.message = message;
}
JobReportUpdate::SecondsElapsed(seconds) => {
worker.job_report.seconds_elapsed = seconds as i32;
}
}
}
ctx.emit(CoreEvent::InvalidateQueryDebounced(
ClientQuery::JobGetRunning,
))
.await;
}
WorkerEvent::Completed => {
worker.job_report.status = JobStatus::Completed;
worker.job_report.update(&ctx).await.unwrap_or(());
ctx
.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
.await;
ctx
.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.await;
break;
}
WorkerEvent::Failed => {
worker.job_report.status = JobStatus::Failed;
worker.job_report.update(&ctx).await.unwrap_or(());
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning))
.await;
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.await;
break;
}
WorkerEvent::Failed => {
worker.job_report.status = JobStatus::Failed;
worker.job_report.update(&ctx).await.unwrap_or(());
ctx
.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.await;
break;
}
}
}
}
ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory))
.await;
break;
}
}
}
}
}

View file

@@ -1,6 +1,6 @@
use crate::{
file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
node::state::NodeState,
file::cas::identifier::FileIdentifierJob, library::loader::get_library_path,
node::state::NodeState,
};
use job::jobs::{Job, JobReport, Jobs};
use prisma::PrismaClient;
@@ -8,8 +8,8 @@ use serde::{Deserialize, Serialize};
use std::{fs, sync::Arc};
use thiserror::Error;
use tokio::sync::{
mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender},
oneshot,
mpsc::{self, unbounded_channel, UnboundedReceiver, UnboundedSender},
oneshot,
};
use ts_rs::TS;
@@ -34,308 +34,310 @@ pub mod util;
// a wrapper around external input with a returning sender channel for core to respond
#[derive(Debug)]
pub struct ReturnableMessage<D, R = Result<CoreResponse, CoreError>> {
data: D,
return_sender: oneshot::Sender<R>,
data: D,
return_sender: oneshot::Sender<R>,
}
// core controller is passed to the client to communicate with the core which runs in a dedicated thread
pub struct CoreController {
query_sender: UnboundedSender<ReturnableMessage<ClientQuery>>,
command_sender: UnboundedSender<ReturnableMessage<ClientCommand>>,
query_sender: UnboundedSender<ReturnableMessage<ClientQuery>>,
command_sender: UnboundedSender<ReturnableMessage<ClientCommand>>,
}
impl CoreController {
pub async fn query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
// a one time use channel to send and await a response
let (sender, recv) = oneshot::channel();
self
.query_sender
.send(ReturnableMessage {
data: query,
return_sender: sender,
})
.unwrap_or(());
// wait for response and return
recv.await.unwrap_or(Err(CoreError::QueryError))
}
pub async fn query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
// a one time use channel to send and await a response
let (sender, recv) = oneshot::channel();
self.query_sender
.send(ReturnableMessage {
data: query,
return_sender: sender,
})
.unwrap_or(());
// wait for response and return
recv.await.unwrap_or(Err(CoreError::QueryError))
}
pub async fn command(&self, command: ClientCommand) -> Result<CoreResponse, CoreError> {
let (sender, recv) = oneshot::channel();
self
.command_sender
.send(ReturnableMessage {
data: command,
return_sender: sender,
})
.unwrap_or(());
pub async fn command(&self, command: ClientCommand) -> Result<CoreResponse, CoreError> {
let (sender, recv) = oneshot::channel();
self.command_sender
.send(ReturnableMessage {
data: command,
return_sender: sender,
})
.unwrap_or(());
recv.await.unwrap()
}
recv.await.unwrap()
}
}
#[derive(Debug)]
pub enum InternalEvent {
JobIngest(Box<dyn Job>),
JobQueue(Box<dyn Job>),
JobComplete(String),
JobIngest(Box<dyn Job>),
JobQueue(Box<dyn Job>),
JobComplete(String),
}
#[derive(Clone)]
pub struct CoreContext {
pub database: Arc<PrismaClient>,
pub event_sender: mpsc::Sender<CoreEvent>,
pub internal_sender: UnboundedSender<InternalEvent>,
pub database: Arc<PrismaClient>,
pub event_sender: mpsc::Sender<CoreEvent>,
pub internal_sender: UnboundedSender<InternalEvent>,
}
impl CoreContext {
pub fn spawn_job(&self, job: Box<dyn Job>) {
self
.internal_sender
.send(InternalEvent::JobIngest(job))
.unwrap_or_else(|e| {
println!("Failed to spawn job. {:?}", e);
});
}
pub fn queue_job(&self, job: Box<dyn Job>) {
self
.internal_sender
.send(InternalEvent::JobIngest(job))
.unwrap_or_else(|e| {
println!("Failed to queue job. {:?}", e);
});
}
pub async fn emit(&self, event: CoreEvent) {
self.event_sender.send(event).await.unwrap_or_else(|e| {
println!("Failed to emit event. {:?}", e);
});
}
pub fn spawn_job(&self, job: Box<dyn Job>) {
self.internal_sender
.send(InternalEvent::JobIngest(job))
.unwrap_or_else(|e| {
println!("Failed to spawn job. {:?}", e);
});
}
pub fn queue_job(&self, job: Box<dyn Job>) {
self.internal_sender
.send(InternalEvent::JobIngest(job))
.unwrap_or_else(|e| {
println!("Failed to queue job. {:?}", e);
});
}
pub async fn emit(&self, event: CoreEvent) {
self.event_sender.send(event).await.unwrap_or_else(|e| {
println!("Failed to emit event. {:?}", e);
});
}
}
pub struct Node {
state: NodeState,
jobs: job::jobs::Jobs,
database: Arc<PrismaClient>,
// filetype_registry: library::TypeRegistry,
// extension_registry: library::ExtensionRegistry,
state: NodeState,
jobs: job::jobs::Jobs,
database: Arc<PrismaClient>,
// filetype_registry: library::TypeRegistry,
// extension_registry: library::ExtensionRegistry,
// global messaging channels
query_channel: (
UnboundedSender<ReturnableMessage<ClientQuery>>,
UnboundedReceiver<ReturnableMessage<ClientQuery>>,
),
command_channel: (
UnboundedSender<ReturnableMessage<ClientCommand>>,
UnboundedReceiver<ReturnableMessage<ClientCommand>>,
),
event_sender: mpsc::Sender<CoreEvent>,
// global messaging channels
query_channel: (
UnboundedSender<ReturnableMessage<ClientQuery>>,
UnboundedReceiver<ReturnableMessage<ClientQuery>>,
),
command_channel: (
UnboundedSender<ReturnableMessage<ClientCommand>>,
UnboundedReceiver<ReturnableMessage<ClientCommand>>,
),
event_sender: mpsc::Sender<CoreEvent>,
// a channel for child threads to send events back to the core
internal_channel: (
UnboundedSender<InternalEvent>,
UnboundedReceiver<InternalEvent>,
),
// a channel for child threads to send events back to the core
internal_channel: (
UnboundedSender<InternalEvent>,
UnboundedReceiver<InternalEvent>,
),
}
impl Node {
// create new instance of node, run startup tasks
pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver<CoreEvent>) {
let (event_sender, event_recv) = mpsc::channel(100);
// create new instance of node, run startup tasks
pub async fn new(mut data_dir: std::path::PathBuf) -> (Node, mpsc::Receiver<CoreEvent>) {
let (event_sender, event_recv) = mpsc::channel(100);
data_dir = data_dir.join("spacedrive");
let data_dir = data_dir.to_str().unwrap();
// create data directory if it doesn't exist
fs::create_dir_all(&data_dir).unwrap();
// prepare basic client state
let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
// load from disk
state
.read_disk()
.unwrap_or(println!("Error: No node state found, creating new one..."));
data_dir = data_dir.join("spacedrive");
let data_dir = data_dir.to_str().unwrap();
// create data directory if it doesn't exist
fs::create_dir_all(&data_dir).unwrap();
// prepare basic client state
let mut state = NodeState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
// load from disk
state
.read_disk()
.unwrap_or(println!("Error: No node state found, creating new one..."));
state.save();
state.save();
println!("Node State: {:?}", state);
println!("Node State: {:?}", state);
// connect to default library
let database = Arc::new(
db::create_connection(&get_library_path(&data_dir))
.await
.unwrap(),
);
// connect to default library
let database = Arc::new(
db::create_connection(&get_library_path(&data_dir))
.await
.unwrap(),
);
let internal_channel = unbounded_channel::<InternalEvent>();
let internal_channel = unbounded_channel::<InternalEvent>();
let node = Node {
state,
query_channel: unbounded_channel(),
command_channel: unbounded_channel(),
jobs: Jobs::new(),
event_sender,
database,
internal_channel,
};
let node = Node {
state,
query_channel: unbounded_channel(),
command_channel: unbounded_channel(),
jobs: Jobs::new(),
event_sender,
database,
internal_channel,
};
#[cfg(feature = "p2p")]
tokio::spawn(async move {
p2p::listener::listen(None).await.unwrap_or(());
});
#[cfg(feature = "p2p")]
tokio::spawn(async move {
p2p::listener::listen(None).await.unwrap_or(());
});
(node, event_recv)
}
(node, event_recv)
}
pub fn get_context(&self) -> CoreContext {
CoreContext {
database: self.database.clone(),
event_sender: self.event_sender.clone(),
internal_sender: self.internal_channel.0.clone(),
}
}
pub fn get_context(&self) -> CoreContext {
CoreContext {
database: self.database.clone(),
event_sender: self.event_sender.clone(),
internal_sender: self.internal_channel.0.clone(),
}
}
pub fn get_controller(&self) -> CoreController {
CoreController {
query_sender: self.query_channel.0.clone(),
command_sender: self.command_channel.0.clone(),
}
}
pub fn get_controller(&self) -> CoreController {
CoreController {
query_sender: self.query_channel.0.clone(),
command_sender: self.command_channel.0.clone(),
}
}
pub async fn start(&mut self) {
let ctx = self.get_context();
loop {
// listen on global messaging channels for incoming messages
tokio::select! {
Some(msg) = self.query_channel.1.recv() => {
let res = self.exec_query(msg.data).await;
msg.return_sender.send(res).unwrap_or(());
}
Some(msg) = self.command_channel.1.recv() => {
let res = self.exec_command(msg.data).await;
msg.return_sender.send(res).unwrap_or(());
}
Some(event) = self.internal_channel.1.recv() => {
match event {
InternalEvent::JobIngest(job) => {
self.jobs.ingest(&ctx, job).await;
},
InternalEvent::JobQueue(job) => {
self.jobs.ingest_queue(&ctx, job);
},
InternalEvent::JobComplete(id) => {
self.jobs.complete(&ctx, id).await;
},
}
}
}
}
}
// load library database + initialize client with db
pub async fn initializer(&self) {
println!("Initializing...");
let ctx = self.get_context();
pub async fn start(&mut self) {
let ctx = self.get_context();
loop {
// listen on global messaging channels for incoming messages
tokio::select! {
Some(msg) = self.query_channel.1.recv() => {
let res = self.exec_query(msg.data).await;
msg.return_sender.send(res).unwrap_or(());
}
Some(msg) = self.command_channel.1.recv() => {
let res = self.exec_command(msg.data).await;
msg.return_sender.send(res).unwrap_or(());
}
Some(event) = self.internal_channel.1.recv() => {
match event {
InternalEvent::JobIngest(job) => {
self.jobs.ingest(&ctx, job).await;
},
InternalEvent::JobQueue(job) => {
self.jobs.ingest_queue(&ctx, job);
},
InternalEvent::JobComplete(id) => {
self.jobs.complete(&ctx, id).await;
},
}
}
}
}
}
// load library database + initialize client with db
pub async fn initializer(&self) {
println!("Initializing...");
let ctx = self.get_context();
if self.state.libraries.len() == 0 {
match library::loader::create(&ctx, None).await {
Ok(library) => println!("Created new library: {:?}", library),
Err(e) => println!("Error creating library: {:?}", e),
}
} else {
for library in self.state.libraries.iter() {
// init database for library
match library::loader::load(&ctx, &library.library_path, &library.library_uuid).await {
Ok(library) => println!("Loaded library: {:?}", library),
Err(e) => println!("Error loading library: {:?}", e),
}
}
}
// init node data within library
match node::LibraryNode::create(&self).await {
Ok(_) => println!("Spacedrive online"),
Err(e) => println!("Error initializing node: {:?}", e),
};
}
if self.state.libraries.len() == 0 {
match library::loader::create(&ctx, None).await {
Ok(library) => println!("Created new library: {:?}", library),
Err(e) => println!("Error creating library: {:?}", e),
}
} else {
for library in self.state.libraries.iter() {
// init database for library
match library::loader::load(&ctx, &library.library_path, &library.library_uuid)
.await
{
Ok(library) => println!("Loaded library: {:?}", library),
Err(e) => println!("Error loading library: {:?}", e),
}
}
}
// init node data within library
match node::LibraryNode::create(&self).await {
Ok(_) => println!("Spacedrive online"),
Err(e) => println!("Error initializing node: {:?}", e),
};
}
async fn exec_command(&mut self, cmd: ClientCommand) -> Result<CoreResponse, CoreError> {
println!("Core command: {:?}", cmd);
let ctx = self.get_context();
Ok(match cmd {
// CRUD for locations
ClientCommand::LocCreate { path } => {
let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
ctx.queue_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc)
}
ClientCommand::LocUpdate { id: _, name: _ } => todo!(),
ClientCommand::LocDelete { id: _ } => todo!(),
// CRUD for files
ClientCommand::FileRead { id: _ } => todo!(),
// ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
ClientCommand::FileDelete { id: _ } => todo!(),
// CRUD for tags
ClientCommand::TagCreate { name: _, color: _ } => todo!(),
ClientCommand::TagAssign {
file_id: _,
tag_id: _,
} => todo!(),
ClientCommand::TagDelete { id: _ } => todo!(),
// CRUD for libraries
ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
ClientCommand::LibDelete { id: _ } => todo!(),
ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
ClientCommand::GenerateThumbsForLocation { id, path } => {
ctx.spawn_job(Box::new(ThumbnailJob {
location_id: id,
path,
background: false, // fix
}));
CoreResponse::Success(())
}
// ClientCommand::PurgeDatabase => {
// println!("Purging database...");
// fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
// CoreResponse::Success(())
// }
ClientCommand::IdentifyUniqueFiles => {
ctx.spawn_job(Box::new(FileIdentifierJob));
CoreResponse::Success(())
}
})
}
async fn exec_command(&mut self, cmd: ClientCommand) -> Result<CoreResponse, CoreError> {
println!("Core command: {:?}", cmd);
let ctx = self.get_context();
Ok(match cmd {
// CRUD for locations
ClientCommand::LocCreate { path } => {
let loc = sys::locations::new_location_and_scan(&ctx, &path).await?;
ctx.queue_job(Box::new(FileIdentifierJob));
CoreResponse::LocCreate(loc)
}
ClientCommand::LocUpdate { id: _, name: _ } => todo!(),
ClientCommand::LocDelete { id: _ } => todo!(),
// CRUD for files
ClientCommand::FileRead { id: _ } => todo!(),
// ClientCommand::FileEncrypt { id: _, algorithm: _ } => todo!(),
ClientCommand::FileDelete { id: _ } => todo!(),
// CRUD for tags
ClientCommand::TagCreate { name: _, color: _ } => todo!(),
ClientCommand::TagAssign {
file_id: _,
tag_id: _,
} => todo!(),
ClientCommand::TagDelete { id: _ } => todo!(),
// CRUD for libraries
ClientCommand::SysVolumeUnmount { id: _ } => todo!(),
ClientCommand::LibDelete { id: _ } => todo!(),
ClientCommand::TagUpdate { name: _, color: _ } => todo!(),
ClientCommand::GenerateThumbsForLocation { id, path } => {
ctx.spawn_job(Box::new(ThumbnailJob {
location_id: id,
path,
background: false, // fix
}));
CoreResponse::Success(())
}
// ClientCommand::PurgeDatabase => {
// println!("Purging database...");
// fs::remove_file(Path::new(&self.state.data_path).join("library.db")).unwrap();
// CoreResponse::Success(())
// }
ClientCommand::IdentifyUniqueFiles => {
ctx.spawn_job(Box::new(FileIdentifierJob));
CoreResponse::Success(())
}
})
}
// query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
#[cfg(debug_assertions)]
println!("Core query: {:?}", query);
let ctx = self.get_context();
Ok(match query {
// return the client state from memory
ClientQuery::ClientGetState => CoreResponse::ClientGetState(self.state.clone()),
// get system volumes without saving to library
ClientQuery::SysGetVolumes => {
CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?)
}
ClientQuery::SysGetLocations => {
CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?)
}
// get location from library
ClientQuery::SysGetLocation { id } => {
CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?)
}
// return contents of a directory for the explorer
ClientQuery::LibGetExplorerDir {
path,
location_id,
limit: _,
} => CoreResponse::LibGetExplorerDir(
file::explorer::open::open_dir(&ctx, &location_id, &path).await?,
),
ClientQuery::LibGetTags => todo!(),
ClientQuery::JobGetRunning => CoreResponse::JobGetRunning(self.jobs.get_running().await),
ClientQuery::JobGetHistory => CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?),
ClientQuery::GetLibraryStatistics => {
CoreResponse::GetLibraryStatistics(library::statistics::Statistics::calculate(&ctx).await?)
}
ClientQuery::GetNodes => todo!(),
})
}
// query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
#[cfg(debug_assertions)]
println!("Core query: {:?}", query);
let ctx = self.get_context();
Ok(match query {
// return the client state from memory
ClientQuery::ClientGetState => CoreResponse::ClientGetState(self.state.clone()),
// get system volumes without saving to library
ClientQuery::SysGetVolumes => {
CoreResponse::SysGetVolumes(sys::volumes::Volume::get_volumes()?)
}
ClientQuery::SysGetLocations => {
CoreResponse::SysGetLocations(sys::locations::get_locations(&ctx).await?)
}
// get location from library
ClientQuery::SysGetLocation { id } => {
CoreResponse::SysGetLocation(sys::locations::get_location(&ctx, id).await?)
}
// return contents of a directory for the explorer
ClientQuery::LibGetExplorerDir {
path,
location_id,
limit: _,
} => CoreResponse::LibGetExplorerDir(
file::explorer::open::open_dir(&ctx, &location_id, &path).await?,
),
ClientQuery::LibGetTags => todo!(),
ClientQuery::JobGetRunning => {
CoreResponse::JobGetRunning(self.jobs.get_running().await)
}
ClientQuery::JobGetHistory => {
CoreResponse::JobGetHistory(Jobs::get_history(&ctx).await?)
}
ClientQuery::GetLibraryStatistics => CoreResponse::GetLibraryStatistics(
library::statistics::Statistics::calculate(&ctx).await?,
),
ClientQuery::GetNodes => todo!(),
})
}
}
// represents an event this library can emit
@@ -343,26 +345,26 @@ impl Node {
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientCommand {
// Files
FileRead { id: i32 },
// FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
FileDelete { id: i32 },
// Library
LibDelete { id: i32 },
// Tags
TagCreate { name: String, color: String },
TagUpdate { name: String, color: String },
TagAssign { file_id: i32, tag_id: i32 },
TagDelete { id: i32 },
// Locations
LocCreate { path: String },
LocUpdate { id: i32, name: Option<String> },
LocDelete { id: i32 },
// System
SysVolumeUnmount { id: i32 },
GenerateThumbsForLocation { id: i32, path: String },
// PurgeDatabase,
IdentifyUniqueFiles,
// Files
FileRead { id: i32 },
// FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
FileDelete { id: i32 },
// Library
LibDelete { id: i32 },
// Tags
TagCreate { name: String, color: String },
TagUpdate { name: String, color: String },
TagAssign { file_id: i32, tag_id: i32 },
TagDelete { id: i32 },
// Locations
LocCreate { path: String },
LocUpdate { id: i32, name: Option<String> },
LocDelete { id: i32 },
// System
SysVolumeUnmount { id: i32 },
GenerateThumbsForLocation { id: i32, path: String },
// PurgeDatabase,
IdentifyUniqueFiles,
}
// represents an event this library can emit
@@ -370,22 +372,22 @@ pub enum ClientCommand {
#[serde(tag = "key", content = "params")]
#[ts(export)]
pub enum ClientQuery {
ClientGetState,
SysGetVolumes,
LibGetTags,
JobGetRunning,
JobGetHistory,
SysGetLocations,
SysGetLocation {
id: i32,
},
LibGetExplorerDir {
location_id: i32,
path: String,
limit: i32,
},
GetLibraryStatistics,
GetNodes,
ClientGetState,
SysGetVolumes,
LibGetTags,
JobGetRunning,
JobGetHistory,
SysGetLocations,
SysGetLocation {
id: i32,
},
LibGetExplorerDir {
location_id: i32,
path: String,
limit: i32,
},
GetLibraryStatistics,
GetNodes,
}
// represents an event this library can emit
@@ -393,54 +395,54 @@ pub enum ClientQuery {
#[serde(tag = "key", content = "data")]
#[ts(export)]
pub enum CoreEvent {
// almost all events should be one of these two
InvalidateQuery(ClientQuery),
InvalidateQueryDebounced(ClientQuery),
InvalidateResource(CoreResource),
NewThumbnail { cas_id: String },
Log { message: String },
DatabaseDisconnected { reason: Option<String> },
// almost all events should be one of these two
InvalidateQuery(ClientQuery),
InvalidateQueryDebounced(ClientQuery),
InvalidateResource(CoreResource),
NewThumbnail { cas_id: String },
Log { message: String },
DatabaseDisconnected { reason: Option<String> },
}
#[derive(Serialize, Deserialize, Debug, TS)]
#[serde(tag = "key", content = "data")]
#[ts(export)]
pub enum CoreResponse {
Success(()),
SysGetVolumes(Vec<sys::volumes::Volume>),
SysGetLocation(sys::locations::LocationResource),
SysGetLocations(Vec<sys::locations::LocationResource>),
LibGetExplorerDir(file::DirectoryWithContents),
ClientGetState(NodeState),
LocCreate(sys::locations::LocationResource),
JobGetRunning(Vec<JobReport>),
JobGetHistory(Vec<JobReport>),
GetLibraryStatistics(library::statistics::Statistics),
Success(()),
SysGetVolumes(Vec<sys::volumes::Volume>),
SysGetLocation(sys::locations::LocationResource),
SysGetLocations(Vec<sys::locations::LocationResource>),
LibGetExplorerDir(file::DirectoryWithContents),
ClientGetState(NodeState),
LocCreate(sys::locations::LocationResource),
JobGetRunning(Vec<JobReport>),
JobGetHistory(Vec<JobReport>),
GetLibraryStatistics(library::statistics::Statistics),
}
#[derive(Error, Debug)]
pub enum CoreError {
#[error("Query error")]
QueryError,
#[error("System error")]
SysError(#[from] sys::SysError),
#[error("File error")]
FileError(#[from] file::FileError),
#[error("Job error")]
JobError(#[from] job::JobError),
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("Database error")]
LibraryError(#[from] library::LibraryError),
#[error("Query error")]
QueryError,
#[error("System error")]
SysError(#[from] sys::SysError),
#[error("File error")]
FileError(#[from] file::FileError),
#[error("Job error")]
JobError(#[from] job::JobError),
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
#[error("Database error")]
LibraryError(#[from] library::LibraryError),
}
#[derive(Serialize, Deserialize, Debug, TS)]
#[ts(export)]
pub enum CoreResource {
Client,
Library,
Location(sys::locations::LocationResource),
File(file::File),
Job(JobReport),
Tag,
Client,
Library,
Location(sys::locations::LocationResource),
File(file::File),
Job(JobReport),
Tag,
}
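
Taken together, the types in this file give an embedder (the desktop shell, for instance) a small surface: construct the Node, grab a CoreController, drive start() on a task, and exchange ClientQuery/ClientCommand messages over it. A hedged sketch of that wiring follows; the data directory, call order and error handling are assumptions, while the individual calls (Node::new, get_controller, initializer, start, CoreController::query) are the ones defined above.

async fn boot_and_query() -> Result<(), CoreError> {
	// the second value delivers CoreEvents (invalidations, new thumbnails, logs) to the UI layer
	let (node, _event_receiver) = Node::new(std::path::PathBuf::from("/tmp/sd-data")).await;
	let controller = node.get_controller();

	node.initializer().await;

	// start() is the core's message loop; it runs for the life of the process
	tokio::spawn(async move {
		let mut node = node;
		node.start().await;
	});

	if let CoreResponse::SysGetVolumes(volumes) = controller.query(ClientQuery::SysGetVolumes).await? {
		println!("found {} volumes", volumes.len());
	}
	Ok(())
}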

View file

@@ -11,86 +11,86 @@ pub static LIBRARY_DB_NAME: &str = "library.db";
pub static DEFAULT_NAME: &str = "My Library";
pub fn get_library_path(data_path: &str) -> String {
let path = data_path.to_owned();
format!("{}/{}", path, LIBRARY_DB_NAME)
let path = data_path.to_owned();
format!("{}/{}", path, LIBRARY_DB_NAME)
}
pub async fn get(core: &Node) -> Result<library::Data, LibraryError> {
let config = state::get();
let db = &core.database;
let config = state::get();
let db = &core.database;
let library_state = config.get_current_library();
let library_state = config.get_current_library();
println!("{:?}", library_state);
println!("{:?}", library_state);
// get library from db
let library = match db
.library()
.find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
.exec()
.await?
{
Some(library) => Ok(library),
None => {
// update config library state to offline
// config.libraries
// get library from db
let library = match db
.library()
.find_unique(library::pub_id::equals(library_state.library_uuid.clone()))
.exec()
.await?
{
Some(library) => Ok(library),
None => {
// update config library state to offline
// config.libraries
Err(anyhow::anyhow!("library_not_found"))
}
};
Err(anyhow::anyhow!("library_not_found"))
}
};
Ok(library.unwrap())
Ok(library.unwrap())
}
pub async fn load(ctx: &CoreContext, library_path: &str, library_id: &str) -> Result<()> {
let mut config = state::get();
let mut config = state::get();
println!("Initializing library: {} {}", &library_id, library_path);
println!("Initializing library: {} {}", &library_id, library_path);
if config.current_library_uuid != library_id {
config.current_library_uuid = library_id.to_string();
config.save();
}
// create connection with library database & run migrations
migrate::run_migrations(&ctx).await?;
// if doesn't exist, mark as offline
Ok(())
if config.current_library_uuid != library_id {
config.current_library_uuid = library_id.to_string();
config.save();
}
// create connection with library database & run migrations
migrate::run_migrations(&ctx).await?;
// if doesn't exist, mark as offline
Ok(())
}
pub async fn create(ctx: &CoreContext, name: Option<String>) -> Result<()> {
let mut config = state::get();
let mut config = state::get();
let uuid = Uuid::new_v4().to_string();
let uuid = Uuid::new_v4().to_string();
println!("Creating library {:?}, UUID: {:?}", name, uuid);
println!("Creating library {:?}, UUID: {:?}", name, uuid);
let library_state = LibraryState {
library_uuid: uuid.clone(),
library_path: get_library_path(&config.data_path),
..LibraryState::default()
};
let library_state = LibraryState {
library_uuid: uuid.clone(),
library_path: get_library_path(&config.data_path),
..LibraryState::default()
};
migrate::run_migrations(&ctx).await?;
migrate::run_migrations(&ctx).await?;
config.libraries.push(library_state);
config.libraries.push(library_state);
config.current_library_uuid = uuid;
config.current_library_uuid = uuid;
config.save();
config.save();
let db = &ctx.database;
let db = &ctx.database;
let _library = db
.library()
.create(
library::pub_id::set(config.current_library_uuid),
library::name::set(name.unwrap_or(DEFAULT_NAME.into())),
vec![],
)
.exec()
.await;
let _library = db
.library()
.create(
library::pub_id::set(config.current_library_uuid),
library::name::set(name.unwrap_or(DEFAULT_NAME.into())),
vec![],
)
.exec()
.await;
println!("library created in database: {:?}", _library);
println!("library created in database: {:?}", _library);
Ok(())
Ok(())
}
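
The create/load pair above is what Node::initializer (lib.rs, earlier in this diff) drives at startup: create a first library when node state lists none, otherwise load each recorded library and re-run migrations. A compact, hedged sketch of that flow; the function name is hypothetical, while the loader calls and state fields are the ones used above.

async fn ensure_default_library(ctx: &CoreContext) -> anyhow::Result<()> {
	let state = state::get();
	if state.libraries.is_empty() {
		// first run: generates a UUID, runs migrations and inserts the library row
		library::loader::create(ctx, Some(DEFAULT_NAME.to_string())).await?;
	} else {
		for lib in state.libraries.iter() {
			library::loader::load(ctx, &lib.library_path, &lib.library_uuid).await?;
		}
	}
	Ok(())
}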

Some files were not shown because too many files have changed in this diff.