Merge pull request #4 from oscartbeaumont/main

Linux support + upgrade prisma-client-rust
This commit is contained in:
Jamie Pine 2022-03-29 07:04:42 -07:00 committed by GitHub
commit 72bd5b21ef
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
27 changed files with 846 additions and 4899 deletions

View file

@ -112,4 +112,4 @@ Install instructions:
- `$ git clone https://github.com/jamiepine/spacedrive`
- `$ cd spacedrive`
- `$ yarn`
- `$ yarn desktop dev`
- `$ yarn dev`

View file

@ -1514,7 +1514,7 @@ checksum = "3ee2393c4a91429dffb4bedf19f4d6abf27d8a732c8ce4980305d782e5426d57"
[[package]]
name = "datamodel"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"bigdecimal 0.2.2",
"chrono",
@ -1537,7 +1537,7 @@ dependencies = [
[[package]]
name = "datamodel-connector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"diagnostics",
"enumflags2 0.7.3",
@ -1602,7 +1602,7 @@ dependencies = [
[[package]]
name = "diagnostics"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"colored",
"pest",
@ -1688,7 +1688,7 @@ dependencies = [
[[package]]
name = "dml"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"chrono",
"cuid",
@ -1704,7 +1704,7 @@ dependencies = [
[[package]]
name = "dmmf"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"bigdecimal 0.2.2",
"datamodel",
@ -4337,7 +4337,7 @@ dependencies = [
[[package]]
name = "mongodb-client"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"mongodb",
"once_cell",
@ -4348,7 +4348,7 @@ dependencies = [
[[package]]
name = "mongodb-datamodel-connector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"datamodel-connector",
"enumflags2 0.7.3",
@ -4360,7 +4360,7 @@ dependencies = [
[[package]]
name = "mongodb-query-connector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"anyhow",
"async-trait",
@ -4553,7 +4553,7 @@ dependencies = [
[[package]]
name = "native-types"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"serde",
"serde_json",
@ -5180,7 +5180,7 @@ dependencies = [
[[package]]
name = "parser-database"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"diagnostics",
"enumflags2 0.7.3",
@ -5628,7 +5628,7 @@ checksum = "be91bcc43e73799dc46a6c194a55e7aae1d86cc867c860fd4a436019af21bd8c"
[[package]]
name = "prisma-client-rust"
version = "0.3.0"
source = "git+https://github.com/Brendonovich/prisma-client-rust.git?branch=master#55ae4638502bf36aaf1ec2512f6d7520ba9ed096"
source = "git+https://github.com/Brendonovich/prisma-client-rust.git?rev=022b799929c0e5b1a7dc1565ec8228ba52a32991#022b799929c0e5b1a7dc1565ec8228ba52a32991"
dependencies = [
"chrono",
"datamodel",
@ -5638,12 +5638,13 @@ dependencies = [
"request-handlers",
"serde",
"serde_json",
"thiserror",
]
[[package]]
name = "prisma-inflector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"once_cell",
"regex",
@ -5653,7 +5654,7 @@ dependencies = [
[[package]]
name = "prisma-models"
version = "0.0.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"bigdecimal 0.2.2",
"chrono",
@ -5674,7 +5675,7 @@ dependencies = [
[[package]]
name = "prisma-value"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"base64 0.12.3",
"bigdecimal 0.2.2",
@ -5873,7 +5874,7 @@ dependencies = [
[[package]]
name = "query-connector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"anyhow",
"async-trait",
@ -5893,7 +5894,7 @@ dependencies = [
[[package]]
name = "query-core"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"async-trait",
"base64 0.12.3",
@ -5908,6 +5909,7 @@ dependencies = [
"im",
"indexmap",
"itertools 0.10.3",
"lazy_static",
"mongodb-client",
"mongodb-query-connector",
"once_cell",
@ -6183,7 +6185,7 @@ dependencies = [
[[package]]
name = "request-handlers"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"bigdecimal 0.2.2",
"connection-string",
@ -6479,7 +6481,7 @@ dependencies = [
[[package]]
name = "schema-ast"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"diagnostics",
"pest",
@ -7029,7 +7031,7 @@ dependencies = [
[[package]]
name = "sql-datamodel-connector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"datamodel-connector",
"enumflags2 0.7.3",
@ -7042,7 +7044,7 @@ dependencies = [
[[package]]
name = "sql-query-connector"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"anyhow",
"async-trait",
@ -8160,7 +8162,7 @@ dependencies = [
[[package]]
name = "user-facing-error-macros"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"proc-macro2",
"quote",
@ -8170,7 +8172,7 @@ dependencies = [
[[package]]
name = "user-facing-errors"
version = "0.1.0"
source = "git+https://github.com/prisma/prisma-engines?tag=3.10.0#73e60b76d394f8d37d8ebd1f8918c79029f0db86"
source = "git+https://github.com/prisma/prisma-engines?tag=3.11.0#b371888aaf8f51357c7457d836b86d12da91658b"
dependencies = [
"backtrace",
"indoc",

Binary file not shown.

After

Width:  |  Height:  |  Size: 128 KiB

412
packages/core/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -29,7 +29,7 @@ once_cell = "1.8.0"
int-enum = "0.4.0"
# Project dependencies
ts-rs = "6.1"
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust.git", branch = "master" }
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust.git", rev = "022b799929c0e5b1a7dc1565ec8228ba52a32991" }
walkdir = "^2.3.2"
bytesize = "1.1.0"
env_logger = "0.9.0"

View file

@ -1,6 +1,15 @@
use crate::{db, prisma::Client, state, Core};
use anyhow::Result;
use crate::{
prisma::{self, Client},
state, Core,
};
use std::env;
use thiserror::Error;
#[derive(Error, Debug)]
pub enum ClientError {
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
}
pub enum Platform {
Unknown = 0,
@ -11,7 +20,7 @@ pub enum Platform {
Android,
}
pub async fn create(core: &Core) -> Result<()> {
pub async fn create(core: &Core) -> Result<(), ClientError> {
println!("Creating client...");
let mut config = state::client::get();
@ -33,7 +42,7 @@ pub async fn create(core: &Core) -> Result<()> {
.client()
.find_unique(Client::uuid().equals(config.client_uuid.clone()))
.exec()
.await
.await?
{
Some(client) => client,
None => {
@ -41,10 +50,13 @@ pub async fn create(core: &Core) -> Result<()> {
.create_one(
Client::uuid().set(config.client_uuid.clone()),
Client::name().set(hostname.clone()),
vec![Client::platform().set(platform as i64), Client::online().set(true)],
vec![
Client::platform().set(platform as i32),
Client::online().set(true),
],
)
.exec()
.await
.await?
},
};

View file

@ -2,7 +2,7 @@ use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
#[repr(i64)]
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum EncryptionAlgorithm {

View file

@ -6,14 +6,17 @@ use include_dir::{include_dir, Dir};
use std::ffi::OsStr;
use std::io::BufReader;
const INIT_MIGRATION: &str = include_str!("../../prisma/migrations/migration_table/migration.sql");
const INIT_MIGRATION: &str =
include_str!("../../prisma/migrations/migration_table/migration.sql");
static MIGRATIONS_DIR: Dir = include_dir!("$CARGO_MANIFEST_DIR/prisma/migrations");
pub async fn run_migrations(db_url: &str) -> Result<()> {
let client = prisma::new_client_with_url(&format!("file:{}", &db_url)).await;
match client
._query_raw::<serde_json::Value>("SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'")
._query_raw::<serde_json::Value>(
"SELECT name FROM sqlite_master WHERE type='table' AND name='_migrations'",
)
.await
{
Ok(data) => {
@ -60,7 +63,9 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
for subdir in migration_subdirs {
println!("{:?}", subdir.path());
let migration_file = subdir.get_file(subdir.path().join("./migration.sql")).unwrap();
let migration_file = subdir
.get_file(subdir.path().join("./migration.sql"))
.unwrap();
let migration_sql = migration_file.contents_utf8().unwrap();
let digest = sha256_digest(BufReader::new(migration_file.contents()))?;
@ -73,7 +78,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
.migration()
.find_unique(Migration::checksum().equals(checksum.clone()))
.exec()
.await;
.await?;
if existing_migration.is_none() {
println!("Running migration: {}", name);
@ -89,7 +94,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
vec![],
)
.exec()
.await;
.await?;
for (i, step) in steps.iter().enumerate() {
match client._execute_raw(&format!("{};", step)).await {
@ -97,10 +102,14 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
println!("Step {} ran successfully", i);
client
.migration()
.find_unique(Migration::checksum().equals(checksum.clone()))
.update(vec![Migration::steps_applied().set(i as i64 + 1)])
.find_unique(
Migration::checksum().equals(checksum.clone()),
)
.update(vec![
Migration::steps_applied().set(i as i32 + 1)
])
.exec()
.await;
.await?;
},
Err(e) => {
println!("Error running migration: {}", name);

View file

@ -1,6 +1,5 @@
use crate::state;
use crate::{prisma, prisma::PrismaClient};
use anyhow::Result;
use thiserror::Error;
pub mod migrate;

View file

@ -2,7 +2,7 @@ use anyhow::Result;
use data_encoding::HEXLOWER;
use ring::digest::{Context, Digest, SHA256};
use std::convert::TryInto;
use std::fs::{self, File};
use std::fs::File;
use std::io::{self, BufReader, Read};
use std::os::unix::prelude::FileExt;

View file

@ -1,9 +1,11 @@
use crate::{db, file::DirectoryWithContents, prisma::FilePath, Core, CoreContext};
use anyhow::Result;
use crate::{file::DirectoryWithContents, prisma::FilePath, CoreContext};
use super::{File, FileError};
use super::FileError;
pub async fn open_dir(ctx: &CoreContext, path: &str) -> Result<DirectoryWithContents, FileError> {
pub async fn open_dir(
ctx: &CoreContext,
path: &str,
) -> Result<DirectoryWithContents, FileError> {
let db = &ctx.database;
println!("getting files... {:?}", &path);
@ -15,14 +17,14 @@ pub async fn open_dir(ctx: &CoreContext, path: &str) -> Result<DirectoryWithCont
FilePath::is_dir().equals(true),
])
.exec()
.await
.await?
.ok_or(FileError::FileNotFound(path.to_string()))?;
let files = db
.file_path()
.find_many(vec![FilePath::parent_id().equals(directory.id)])
.exec()
.await;
.await?;
Ok(DirectoryWithContents {
directory: directory.into(),

View file

@ -59,7 +59,7 @@ pub async fn scan_path(
// query db to highest id, so we can increment it for the new files indexed
#[derive(Deserialize, Serialize, Debug)]
struct QueryRes {
id: Option<i64>,
id: Option<i32>,
}
// grab the next id so we can increment in memory for batch inserting
let first_file_id = match db
@ -79,9 +79,9 @@ pub async fn scan_path(
// spawn a dedicated thread to scan the directory for performance
let (paths, scan_start, on_progress) = tokio::task::spawn_blocking(move || {
// store every valid path discovered
let mut paths: Vec<(PathBuf, i64, Option<i64>)> = Vec::new();
let mut paths: Vec<(PathBuf, i32, Option<i32>)> = Vec::new();
// store a hashmap of directories to their file ids for fast lookup
let mut dirs: HashMap<String, i64> = HashMap::new();
let mut dirs: HashMap<String, i32> = HashMap::new();
// begin timer for logging purposes
let scan_start = Instant::now();
@ -188,9 +188,9 @@ pub async fn scan_path(
// reads a file at a path and creates an ActiveModel with metadata
fn prepare_values(
file_path: &PathBuf,
id: i64,
id: i32,
location: &LocationResource,
parent_id: &Option<i64>,
parent_id: &Option<i32>,
) -> Result<String> {
let metadata = fs::metadata(&file_path)?;
let location_path = location.path.as_ref().unwrap().as_str();

View file

@ -5,8 +5,7 @@ use ts_rs::TS;
use crate::{
crypto::encryption::EncryptionAlgorithm,
db,
prisma::{FileData, FilePathData},
prisma::{self, FileData, FilePathData},
};
pub mod checksum;
pub mod explorer;
@ -18,7 +17,7 @@ pub mod watcher;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct File {
pub id: i64,
pub id: i32,
pub partial_checksum: String,
pub checksum: Option<String>,
pub size_in_bytes: String,
@ -38,18 +37,18 @@ pub struct File {
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct FilePath {
pub id: i64,
pub id: i32,
pub is_dir: bool,
pub location_id: i64,
pub location_id: i32,
pub materialized_path: String,
pub file_id: Option<i64>,
pub parent_id: Option<i64>,
pub file_id: Option<i32>,
pub parent_id: Option<i32>,
#[ts(type = "string")]
pub date_indexed: chrono::DateTime<chrono::Utc>,
pub permissions: Option<String>,
}
#[repr(i64)]
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum FileType {
@ -111,5 +110,5 @@ pub enum FileError {
#[error("File not found (path: {0:?})")]
FileNotFound(String),
#[error("Database error")]
DatabaseError(#[from] db::DatabaseError),
DatabaseError(#[from] prisma::QueryError),
}

View file

@ -9,7 +9,8 @@ pub async fn create_thumb(path: &str) -> Result<()> {
let file = File::open(path).unwrap();
let reader = BufReader::new(file);
let mut thumbnails = create_thumbnails(reader, mime::IMAGE_PNG, [ThumbnailSize::Small]).unwrap();
let mut thumbnails =
create_thumbnails(reader, mime::IMAGE_PNG, [ThumbnailSize::Small]).unwrap();
let thumbnail = thumbnails.pop().unwrap();

View file

@ -71,7 +71,7 @@ impl Jobs {
prisma::Job::status().equals(JobStatus::Queued.int_value()),
])])
.exec()
.await;
.await?;
Ok(jobs.into_iter().map(|j| j.into()).collect())
}
@ -88,20 +88,20 @@ pub enum JobReportUpdate {
#[ts(export)]
pub struct JobReport {
pub id: String,
// client_id: i64,
// client_id: i32,
#[ts(type = "string")]
pub date_created: chrono::DateTime<chrono::Utc>,
#[ts(type = "string")]
pub date_modified: chrono::DateTime<chrono::Utc>,
pub status: JobStatus,
pub task_count: i64,
pub completed_task_count: i64,
pub task_count: i32,
pub completed_task_count: i32,
pub message: String,
pub percentage_complete: f64,
// pub percentage_complete: f64,
#[ts(type = "string")]
pub seconds_elapsed: i64,
pub seconds_elapsed: i32,
}
// convert database struct into a resource struct
@ -116,7 +116,6 @@ impl Into<JobReport> for JobData {
date_created: self.date_created,
date_modified: self.date_modified,
message: String::new(),
percentage_complete: 0.0,
seconds_elapsed: self.seconds_elapsed,
}
}
@ -132,12 +131,11 @@ impl JobReport {
status: JobStatus::Queued,
task_count: 0,
completed_task_count: 0,
percentage_complete: 0.0,
message: String::new(),
seconds_elapsed: 0,
}
}
pub async fn create(&self, ctx: &CoreContext) -> Result<()> {
pub async fn create(&self, ctx: &CoreContext) -> Result<(), JobError> {
// let config = client::get();
ctx.database
.job()
@ -148,10 +146,10 @@ impl JobReport {
vec![],
)
.exec()
.await;
.await?;
Ok(())
}
pub async fn update(&self, ctx: &CoreContext) -> Result<()> {
pub async fn update(&self, ctx: &CoreContext) -> Result<(), JobError> {
// let config = client::get();
ctx.database
.job()
@ -164,12 +162,12 @@ impl JobReport {
prisma::Job::seconds_elapsed().set(self.seconds_elapsed),
])
.exec()
.await;
.await?;
Ok(())
}
}
#[repr(i64)]
#[repr(i32)]
#[derive(Debug, Clone, Copy, Serialize, Deserialize, TS, Eq, PartialEq, IntEnum)]
#[ts(export)]
pub enum JobStatus {

View file

@ -1,7 +1,7 @@
use std::fmt::Debug;
use thiserror::Error;
use crate::db;
use crate::prisma;
pub mod jobs;
pub mod worker;
@ -11,5 +11,212 @@ pub enum JobError {
#[error("Failed to create job (job_id {job_id:?})")]
CreateFailure { job_id: String },
#[error("Database error")]
DatabaseError(#[from] db::DatabaseError),
DatabaseError(#[from] prisma::QueryError),
}
// pub struct JobContext {
// pub core_ctx: CoreContext,
// pub job_data: JobReport,
// }
// #[derive(Debug)]
// pub enum JobCommand {
// Create(Box<dyn Job>),
// Update { id: i32, data: JobUpdateEvent },
// Completed { id: i32 },
// }
// #[derive(Debug)]
// pub struct JobUpdateEvent {
// pub task_count: Option<i32>,
// pub completed_task_count: Option<i32>,
// pub message: Option<String>,
// }
// // a struct to handle the runtime and execution of jobs
// pub struct Jobs {
// pub job_sender_channel: Sender<JobCommand>,
// pub running_job: Mutex<Option<JobReport>>,
// }
// impl Jobs {
// pub fn new() -> (Self, mpsc::Receiver<JobCommand>) {
// let (job_sender, job_receiver) = mpsc::channel(100);
// (
// Self {
// job_sender_channel: job_sender,
// running_job: Mutex::new(None),
// },
// job_receiver,
// )
// }
// pub fn start(&self, ctx: CoreContext, mut job_receiver: mpsc::Receiver<JobCommand>) {
// // open a thread to handle job execution
// tokio::spawn(async move {
// // local memory for job queue
// let mut queued_jobs: Vec<(Box<dyn Job>, JobReport)> = vec![];
// loop {
// tokio::select! {
// // when job is received via message channel
// Some(request) = job_receiver.recv() => {
// match request {
// // create a new job
// JobCommand::Create(job) => {
// // create job report and save to database
// let mut report = JobReport::new();
// println!("Creating job: {:?} Metadata: {:?}", &job, &report);
// report.create(&ctx).await;
// // queue the job
// queued_jobs.push((job, report));
// let current_running_job = self.running_job.lock().await;
// if current_running_job.is_none() {
// // replace the running job mutex with this job
// let (current_job, current_report) = queued_jobs.pop().unwrap();
// current_running_job.replace(current_report);
// // push job id into running jobs vector
// let id = report.id;
// let ctx = ctx.clone();
// // open a dedicated blocking thread to run job
// tokio::task::spawn_blocking(move || {
// // asynchronously call run method
// let handle = tokio::runtime::Handle::current();
// let job_sender = ctx.job_sender.clone();
// handle.block_on(current_report.update(&ctx, None, Some(JobStatus::Running))).unwrap();
// handle.block_on(job.run(JobContext { core_ctx: ctx.clone(), job_data: current_report.clone() })).unwrap();
// job_sender.send(JobCommand::Completed { id }).unwrap();
// });
// }
// }
// // update a running job
// JobCommand::Update { id, data } => {
// let ctx = ctx.clone();
// // find running job in memory by id
// let running_job = get_job(&id).unwrap_or_else(|| panic!("Job not found"));
// // update job data
// running_job.update(&ctx, Some(data), None).await.unwrap();
// // emit event to invalidate client cache
// ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning)).await;
// },
// JobCommand::Completed { id } => {
// let ctx = ctx.clone();
// let running_job = get_job(&id).unwrap_or_else(|| panic!("Job not found"));
// running_job.update(&ctx, None, Some(JobStatus::Completed)).await.unwrap();
// ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetRunning)).await;
// ctx.emit(CoreEvent::InvalidateQuery(ClientQuery::JobGetHistory)).await;
// }
// }
// }
// }
// }
// });
// }
// pub async fn handle_job_command(&mut self, job: JobCommand) {
// self.job_sender_channel.send(job).await.unwrap_or(());
// }
// }
// impl JobReport {
// pub fn new() -> Self {
// Self {
// id: 0,
// // client_id: 0,
// date_created: chrono::Utc::now(),
// date_modified: chrono::Utc::now(),
// status: JobStatus::Queued,
// task_count: 0,
// completed_task_count: 0,
// message: String::new(),
// }
// }
// pub async fn create(&mut self, ctx: &CoreContext) {
// // let config = client::get();
// let job = ctx
// .database
// .job()
// .create_one(
// prisma::Job::action().set(1),
// // prisma::Job::clients().link(prisma::Client::id().equals(config.client_uuid)),
// vec![],
// )
// .exec()
// .await;
// self.id = job.id;
// }
// pub async fn update(
// &mut self,
// ctx: &CoreContext,
// changes: Option<JobUpdateEvent>,
// status: Option<JobStatus>,
// ) -> Result<()> {
// match changes {
// Some(changes) => {
// if changes.task_count.is_some() {
// self.task_count = changes.task_count.unwrap();
// }
// if changes.completed_task_count.is_some() {
// self.completed_task_count = changes.completed_task_count.unwrap();
// }
// if changes.message.is_some() {
// self.message = changes.message.unwrap();
// }
// },
// None => {},
// }
// if status.is_some() {
// self.status = status.unwrap();
// if self.status == JobStatus::Completed {
// ctx.database
// .job()
// .find_unique(prisma::Job::id().equals(self.id))
// .update(vec![
// prisma::Job::status().set(self.status.int_value()),
// prisma::Job::task_count().set(self.task_count),
// prisma::Job::completed_task_count().set(self.completed_task_count),
// prisma::Job::date_modified().set(chrono::Utc::now()),
// ])
// .exec()
// .await;
// }
// }
// println!("JOB REPORT: {:?}", self);
// Ok(())
// }
// pub async fn get_running(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> {
// let db = &ctx.database;
// let jobs = db
// .job()
// .find_many(vec![prisma::Job::status().equals(JobStatus::Running.int_value())])
// .exec()
// .await;
// Ok(jobs.into_iter().map(|j| j.into()).collect())
// }
// pub async fn get_history(ctx: &CoreContext) -> Result<Vec<JobReport>, JobError> {
// let db = &ctx.database;
// let jobs = db
// .job()
// .find_many(vec![or(vec![
// prisma::Job::status().equals(JobStatus::Completed.int_value()),
// prisma::Job::status().equals(JobStatus::Canceled.int_value()),
// prisma::Job::status().equals(JobStatus::Queued.int_value()),
// ])])
// .exec()
// .await;
// Ok(jobs.into_iter().map(|j| j.into()).collect())
// }
// }

View file

@ -142,23 +142,19 @@ impl Worker {
for change in changes {
match change {
JobReportUpdate::TaskCount(task_count) => {
worker.job_report.task_count = task_count as i64;
worker.job_report.task_count = task_count as i32;
},
JobReportUpdate::CompletedTaskCount(completed_task_count) => {
worker.job_report.completed_task_count =
completed_task_count as i64;
worker.job_report.percentage_complete =
(worker.job_report.completed_task_count as f64
/ worker.job_report.task_count as f64) * 100.0;
completed_task_count as i32;
},
JobReportUpdate::Message(message) => {
worker.job_report.message = message;
},
JobReportUpdate::SecondsElapsed(seconds) => {
worker.job_report.seconds_elapsed = seconds as i64;
worker.job_report.seconds_elapsed = seconds as i32;
},
}
worker.job_report.date_modified = chrono::Utc::now();
}
ctx.emit(CoreEvent::InvalidateQueryDebounced(
ClientQuery::JobGetRunning,

View file

@ -1,4 +1,3 @@
use anyhow::Result;
use job::jobs::{Job, JobReport, Jobs};
use log::{error, info};
use prisma::PrismaClient;
@ -308,22 +307,22 @@ impl Core {
#[ts(export)]
pub enum ClientCommand {
// Files
FileRead { id: i64 },
// FileEncrypt { id: i64, algorithm: EncryptionAlgorithm },
FileDelete { id: i64 },
FileRead { id: i32 },
// FileEncrypt { id: i32, algorithm: EncryptionAlgorithm },
FileDelete { id: i32 },
// Library
LibDelete { id: i64 },
LibDelete { id: i32 },
// Tags
TagCreate { name: String, color: String },
TagUpdate { name: String, color: String },
TagAssign { file_id: i64, tag_id: i64 },
TagDelete { id: i64 },
TagAssign { file_id: i32, tag_id: i32 },
TagDelete { id: i32 },
// Locations
LocCreate { path: String },
LocUpdate { id: i64, name: Option<String> },
LocDelete { id: i64 },
LocUpdate { id: i32, name: Option<String> },
LocDelete { id: i32 },
// System
SysVolumeUnmount { id: i64 },
SysVolumeUnmount { id: i32 },
}
// represents an event this library can emit
@ -336,8 +335,8 @@ pub enum ClientQuery {
LibGetTags,
JobGetRunning,
JobGetHistory,
SysGetLocation { id: i64 },
LibGetExplorerDir { path: String, limit: i64 },
SysGetLocation { id: i32 },
LibGetExplorerDir { path: String, limit: i32 },
}
// represents an event this library can emit
@ -378,7 +377,7 @@ pub enum CoreError {
#[error("Job error")]
JobError(#[from] job::JobError),
#[error("Database error")]
DatabaseError(#[from] db::DatabaseError),
DatabaseError(#[from] prisma::QueryError),
}
#[derive(Serialize, Deserialize, Debug, TS)]

View file

@ -1,18 +1,25 @@
use anyhow::Result;
use thiserror::Error;
use uuid::Uuid;
use crate::state::client::LibraryState;
use crate::Core;
use crate::{
db::{self, migrate},
db::migrate,
prisma::{Library, LibraryData},
state,
};
use crate::{prisma, Core};
pub static LIBRARY_DB_NAME: &str = "library.db";
pub static DEFAULT_NAME: &str = "My Library";
pub async fn get(core: &Core) -> Result<LibraryData> {
#[derive(Error, Debug)]
pub enum LibraryError {
#[error("Database error")]
DatabaseError(#[from] prisma::QueryError),
}
pub async fn get(core: &Core) -> Result<LibraryData, LibraryError> {
let config = state::client::get();
let db = &core.database;
@ -25,7 +32,7 @@ pub async fn get(core: &Core) -> Result<LibraryData> {
.library()
.find_unique(Library::uuid().equals(library_state.library_uuid.clone()))
.exec()
.await
.await?
{
Some(library) => Ok(library),
None => {

View file

@ -38,7 +38,9 @@ pub async fn listen(port: Option<u32>) -> Result<(), Box<dyn Error>> {
loop {
match swarm.select_next_some().await {
SwarmEvent::NewListenAddr { address, .. } => println!("Listening on {:?}", address),
SwarmEvent::NewListenAddr { address, .. } => {
println!("Listening on {:?}", address)
},
SwarmEvent::Behaviour(event) => println!("{:?}", event),
_ => {},
}

File diff suppressed because it is too large Load diff

View file

@ -1,11 +1,9 @@
use crate::{
db,
file::indexer::IndexerJob,
job,
prisma::{File, FilePath, Location},
prisma::{FilePath, Location},
state::client,
sys::{volumes, volumes::Volume},
Core, CoreContext,
CoreContext,
};
use anyhow::Result;
use log::info;
@ -21,11 +19,11 @@ use super::SysError;
#[derive(Debug, Clone, Serialize, Deserialize, TS)]
#[ts(export)]
pub struct LocationResource {
pub id: i64,
pub id: i32,
pub name: Option<String>,
pub path: Option<String>,
pub total_capacity: Option<i64>,
pub available_capacity: Option<i64>,
pub total_capacity: Option<i32>,
pub available_capacity: Option<i32>,
pub is_removable: bool,
pub is_ejectable: bool,
pub is_root_filesystem: bool,
@ -63,15 +61,19 @@ static DOTFILE_NAME: &str = ".spacedrive";
// - accessible from the local filesystem
// - already exists in the database
pub async fn check_location(path: &str) -> Result<DotSpacedrive, LocationError> {
let dotfile: DotSpacedrive = match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME)) {
Ok(file) => serde_json::from_reader(file).unwrap_or(DotSpacedrive::default()),
Err(e) => return Err(LocationError::DotfileReadFailure(e)),
};
let dotfile: DotSpacedrive =
match fs::File::open(format!("{}/{}", path.clone(), DOTFILE_NAME)) {
Ok(file) => serde_json::from_reader(file).unwrap_or(DotSpacedrive::default()),
Err(e) => return Err(LocationError::DotfileReadFailure(e)),
};
Ok(dotfile)
}
pub async fn get_location(ctx: &CoreContext, location_id: i64) -> Result<LocationResource, SysError> {
pub async fn get_location(
ctx: &CoreContext,
location_id: i32,
) -> Result<LocationResource, SysError> {
let db = &ctx.database;
// get location by location_id from db and include location_paths
@ -81,7 +83,7 @@ pub async fn get_location(ctx: &CoreContext, location_id: i64) -> Result<Locatio
Location::file_paths().some(vec![FilePath::id().equals(location_id.into())])
])
.exec()
.await
.await?
{
Some(location) => location,
None => Err(LocationError::NotFound(location_id.to_string()))?,
@ -92,15 +94,23 @@ pub async fn get_location(ctx: &CoreContext, location_id: i64) -> Result<Locatio
Ok(location.into())
}
pub async fn new_location_and_scan(ctx: &CoreContext, path: &str) -> Result<LocationResource, SysError> {
pub async fn new_location_and_scan(
ctx: &CoreContext,
path: &str,
) -> Result<LocationResource, SysError> {
let location = create_location(&ctx, path).await?;
ctx.spawn_job(Box::new(IndexerJob { path: path.to_string() }));
ctx.spawn_job(Box::new(IndexerJob {
path: path.to_string(),
}));
Ok(location)
}
pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationResource, SysError> {
pub async fn create_location(
ctx: &CoreContext,
path: &str,
) -> Result<LocationResource, SysError> {
let db = &ctx.database;
let config = client::get();
@ -114,11 +124,14 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
.location()
.find_first(vec![Location::local_path().equals(path.to_string())])
.exec()
.await
.await?
{
Some(location) => location,
None => {
info!("Location does not exist, creating new location for '{}'", &path);
info!(
"Location does not exist, creating new location for '{}'",
&path
);
let uuid = uuid::Uuid::new_v4();
// create new location
let create_location_params = {
@ -128,7 +141,9 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
};
info!("Loaded mounted volumes: {:?}", volumes);
// find mount with matching path
let volume = volumes.into_iter().find(|mount| path.starts_with(&mount.mount_point));
let volume = volumes
.into_iter()
.find(|mount| path.starts_with(&mount.mount_point));
let volume_data = match volume {
Some(mount) => mount,
@ -137,8 +152,9 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
vec![
Location::name().set(volume_data.name.to_string()),
Location::total_capacity().set(volume_data.total_capacity as i64),
Location::available_capacity().set(volume_data.available_capacity as i64),
Location::total_capacity().set(volume_data.total_capacity as i32),
Location::available_capacity()
.set(volume_data.available_capacity as i32),
Location::is_ejectable().set(false), // remove this
Location::is_removable().set(volume_data.is_removable),
Location::is_root_filesystem().set(false), // remove this
@ -147,15 +163,22 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
]
};
let location = db.location().create_one(create_location_params).exec().await;
let location = db
.location()
.create_one(create_location_params)
.exec()
.await?;
info!("Created location: {:?}", location);
// write a file called .spacedrive to path containing the location id in JSON format
let mut dotfile = match fs::File::create(format!("{}/{}", path.clone(), DOTFILE_NAME)) {
Ok(file) => file,
Err(e) => Err(LocationError::DotfileWriteFailure(e, path.to_string()))?,
};
let mut dotfile =
match fs::File::create(format!("{}/{}", path.clone(), DOTFILE_NAME)) {
Ok(file) => file,
Err(e) => {
Err(LocationError::DotfileWriteFailure(e, path.to_string()))?
},
};
let data = DotSpacedrive {
location_uuid: uuid.to_string(),
@ -164,7 +187,9 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
let json = match serde_json::to_string(&data) {
Ok(json) => json,
Err(e) => Err(LocationError::DotfileSerializeFailure(e, path.to_string()))?,
Err(e) => {
Err(LocationError::DotfileSerializeFailure(e, path.to_string()))?
},
};
match dotfile.write_all(json.as_bytes()) {

View file

@ -2,7 +2,7 @@ pub mod locations;
pub mod volumes;
use thiserror::Error;
use crate::{db, job};
use crate::{job, prisma};
use self::locations::LocationError;
@ -15,5 +15,5 @@ pub enum SysError {
#[error("Error from job runner")]
JobError(#[from] job::JobError),
#[error("Database error")]
DatabaseError(#[from] db::DatabaseError),
DatabaseError(#[from] prisma::QueryError),
}

View file

@ -34,7 +34,8 @@ pub fn get_volumes() -> Result<Vec<Volume>, SysError> {
let mut name = disk.name().to_str().unwrap_or("Volume").to_string();
let is_removable = disk.is_removable();
let file_system = String::from_utf8(disk.file_system().to_vec()).unwrap_or_else(|_| "Err".to_string());
let file_system = String::from_utf8(disk.file_system().to_vec())
.unwrap_or_else(|_| "Err".to_string());
let disk_type = match disk.type_() {
sysinfo::DiskType::SSD => "SSD".to_string(),
@ -50,11 +51,16 @@ pub fn get_volumes() -> Result<Vec<Volume>, SysError> {
let mut caption = mount_point.clone();
caption.pop();
let wmic_process = Command::new("cmd")
.args(["/C", &format!("wmic logical disk where Caption='{caption}' get Size")])
.args([
"/C",
&format!("wmic logical disk where Caption='{caption}' get Size"),
])
.output()
.expect("failed to execute process");
let wmic_process_output = String::from_utf8(wmic_process.stdout).unwrap();
let parsed_size = wmic_process_output.split("\r\r\n").collect::<Vec<&str>>()[1].to_string();
let parsed_size =
wmic_process_output.split("\r\r\n").collect::<Vec<&str>>()[1]
.to_string();
if let Ok(n) = parsed_size.trim().parse::<u64>() {
total_space = n;

View file

@ -3,7 +3,9 @@ use chrono::NaiveDateTime;
use std::io;
use std::time::{SystemTime, UNIX_EPOCH};
pub fn system_time_to_date_time(system_time: io::Result<SystemTime>) -> Result<NaiveDateTime> {
pub fn system_time_to_date_time(
system_time: io::Result<SystemTime>,
) -> Result<NaiveDateTime> {
// extract system time or resort to current time if failure
let system_time = system_time.unwrap_or(SystemTime::now());
let std_duration = system_time.duration_since(UNIX_EPOCH)?;

View file

@ -3,7 +3,7 @@ const plugin = require('tailwindcss/plugin');
const defaultTheme = require('tailwindcss/defaultTheme');
module.exports = {
purge: ['./src/index.html', './src/**/*.{vue,js,ts,jsx,tsx}'],
content: ['./src/index.html', './src/**/*.{vue,js,ts,jsx,tsx}'],
darkMode: 'media',
mode: 'jit',
theme: {

View file

@ -468,7 +468,7 @@
"@babel/types@^7.12.6", "@babel/types@^7.16.7":
version "7.16.7"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.16.7.tgz#4ed19d51f840ed4bd5645be6ce40775fecf03159"
integrity sha512-E8HuV7FO9qLpx6OtoGfUQ2cjIYnbFwvZWYBS+87EwtdMvmUPJSwykpovFB+8insbpF0uJcpr8KMUi64XZntZcg==
integrity sha512-E8HuV7FO9qLpx6OtoGfUQ2cjIYnbFwvZWYBS+87EwtdMvmUPJSwykpovFB+8insbpF0uJcpr8KMUi32XZntZcg==
dependencies:
"@babel/helper-validator-identifier" "^7.16.7"
to-fast-properties "^2.0.0"