feature gated p2p + update deps + pushed bindings

Oscar Beaumont 2022-04-25 16:42:37 +08:00
parent aa55027089
commit d834fe3b2c
18 changed files with 249 additions and 409 deletions

2
.gitignore vendored
View file

@ -59,3 +59,5 @@ yalc.lock
todos.md
examples/*/*.lock
/target
/sdserver_data

474
Cargo.lock generated

File diff suppressed because it is too large

View file

@ -3,8 +3,6 @@ name = "debug"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
# anyhow = "1.0.56"
# data-encoding = "2.3.2"

View file

@ -5,7 +5,7 @@
},
"build": {
"distDir": "../dist",
"devPath": "http://localhost:8085",
"devPath": "http://localhost:8001",
"beforeDevCommand": "",
"beforeBuildCommand": ""
},

View file

@ -3,13 +3,11 @@ name = "server"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
actix = "0.13.0"
actix-web = "4.0.1"
actix-web-actors = "4.1.0"
sdcore = { path = "../../core" }
sdcore = { path = "../../core", features = [] }
serde = "1.0.136"
serde_json = "1.0.79"
tokio = { version = "1.17.0", features = ["sync", "rt"] }
tokio = { version = "1.17.0", features = ["sync", "rt"] }

View file

@ -14,9 +14,11 @@ use serde::{Deserialize, Serialize};
use tokio::sync::mpsc;
const DATA_DIR_ENV_VAR: &'static str = "DATA_DIR";
/// Define HTTP actor
struct Socket {
event_receiver: web::Data<mpsc::Receiver<CoreEvent>>,
_event_receiver: web::Data<mpsc::Receiver<CoreEvent>>,
core: web::Data<CoreController>,
}
@ -65,7 +67,7 @@ impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for Socket {
payload: SocketResponsePayload::Query(response),
}),
Err(err) => {
// println!("query error: {:?}", err);
println!("query error: {:?}", err);
// Err(err.to_string())
},
};
@ -77,12 +79,11 @@ impl StreamHandler<Result<ws::Message, ws::ProtocolError>> for Socket {
payload: SocketResponsePayload::Query(response),
}),
Err(err) => {
// println!("command error: {:?}", err);
println!("command error: {:?}", err);
// Err(err.to_string())
},
};
},
_ => {},
}
};
@ -113,7 +114,6 @@ impl Handler<SocketResponse> for Socket {
fn handle(&mut self, msg: SocketResponse, ctx: &mut Self::Context) {
let string = serde_json::to_string(&msg).unwrap();
println!("sending response: {string}");
ctx.text(string);
}
}
@ -137,16 +137,21 @@ async fn ws_handler(
) -> Result<HttpResponse, Error> {
let resp = ws::start(
Socket {
event_receiver,
_event_receiver: event_receiver,
core: controller,
},
&req,
stream,
);
println!("{:?}", resp);
resp
}
#[get("/file/{file:.*}")]
async fn file() -> impl Responder {
// TODO
format!("OK")
}
async fn not_found() -> impl Responder {
HttpResponse::build(StatusCode::OK).body("We're past the event horizon...")
}
@ -163,6 +168,7 @@ async fn main() -> std::io::Result<()> {
.service(index)
.service(healthcheck)
.service(ws_handler)
.service(file)
.default_service(web::route().to(not_found))
})
.bind(("0.0.0.0", 8080))?
@ -174,15 +180,23 @@ async fn setup() -> (
web::Data<mpsc::Receiver<CoreEvent>>,
web::Data<CoreController>,
) {
let data_dir_var = "DATA_DIR";
let data_dir = match env::var(data_dir_var) {
Ok(path) => path,
Err(e) => panic!("${} is not set ({})", data_dir_var, e),
let data_dir_path = match env::var(DATA_DIR_ENV_VAR) {
Ok(path) => Path::new(&path).to_path_buf(),
Err(_e) => {
#[cfg(not(debug_assertions))]
{
panic!("${} is not set ({})", DATA_DIR_ENV_VAR, _e)
}
std::env::current_dir()
.expect(
"Unable to get your currrent directory. Maybe try setting $DATA_DIR?",
)
.join("sdserver_data")
},
};
let data_dir_path = Path::new(&data_dir);
let (mut core, event_receiver) = Core::new(data_dir_path.to_path_buf()).await;
let (mut core, event_receiver) = Core::new(data_dir_path).await;
core.initializer().await;
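
In plain terms, the reworked setup() above resolves the data directory as: use $DATA_DIR when it is set, refuse to start (panic) in release builds when it is not, and fall back to ./sdserver_data under the current directory in debug builds. A standalone sketch of that same logic, with an illustrative helper name (not code from this commit):

use std::{env, path::PathBuf};

const DATA_DIR_ENV_VAR: &str = "DATA_DIR";

// Mirrors the fallback behaviour used in setup() above.
fn resolve_data_dir() -> PathBuf {
    match env::var(DATA_DIR_ENV_VAR) {
        Ok(path) => PathBuf::from(path),
        Err(_e) => {
            // Release builds refuse to guess where data should live.
            #[cfg(not(debug_assertions))]
            {
                panic!("${} is not set ({})", DATA_DIR_ENV_VAR, _e)
            }
            // Debug builds fall back to ./sdserver_data in the working directory.
            env::current_dir()
                .expect("Unable to get your current directory. Maybe try setting $DATA_DIR?")
                .join("sdserver_data")
        }
    }
}

fn main() {
    println!("data dir: {}", resolve_data_dir().display());
}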

View file

@ -7,13 +7,15 @@ license = "GNU GENERAL PUBLIC LICENSE"
repository = "https://github.com/jamiepine/spacedrive"
edition = "2021"
[features]
p2p = ["dep:libp2p"] # This feature controls whether the Spacedrive Core contains the peer-to-peer syncing engine (it isn't required for the hosted core, so we can disable it).
[dependencies]
swift-rs = "0.2.3"
hotwatch = "0.4.6"
hostname = "0.3.1"
# Universal Dependencies
anyhow = "1.0.44"
log = "0.4.14"
base64 = "0.13.0"
serde = { version = "1.0", features = ["derive"] }
chrono = { version = "0.4.0", features = ["serde"] }
@ -21,13 +23,13 @@ serde_json = "1.0"
futures = "0.3"
data-encoding = "2.3.2"
ring = "0.17.0-alpha.10"
once_cell = "1.8.0"
int-enum = "0.4.0"
# Project dependencies
ts-rs = "6.1"
prisma-client-rust = { git = "https://github.com/Brendonovich/prisma-client-rust.git", tag = "0.4.0" }
walkdir = "^2.3.2"
libp2p = "0.43.0"
libp2p = { version = "0.43.0", optional = true }
lazy_static = "1.4.0"
uuid = "0.8"
sysinfo = "0.23.9"
@ -41,3 +43,6 @@ image = "0.24.1"
webp = "0.2.2"
uhlc = "0.4.1"
ffmpeg-next = "5.0.3"
[target.'cfg(target_os = "macos")'.dependencies]
swift-rs = "0.2.3"
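
The new p2p feature declared above pairs an optional libp2p dependency with cfg gates in the code. A minimal, self-contained sketch of that pattern (not code from this commit; it assumes a crate that declares a p2p feature and depends on tokio, and the module body is illustrative only):

// Built with `cargo run --features p2p`, the module and the spawned task exist;
// built without the feature, none of this code is compiled into the binary.
#[cfg(feature = "p2p")]
mod p2p {
    pub async fn listen() {
        println!("p2p listener running");
    }
}

#[tokio::main]
async fn main() {
    #[cfg(feature = "p2p")]
    tokio::spawn(p2p::listen());

    // `cfg!` exposes the same flag as a runtime boolean, handy for logging.
    println!("p2p enabled: {}", cfg!(feature = "p2p"));
}

A dependent crate opts out simply by depending on the core without the feature, as the `sdcore = { path = "../../core", features = [] }` line earlier in this commit does for the hosted server.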

View file

@ -3,7 +3,5 @@ name = "prisma-cli"
version = "0.1.0"
edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
prisma-client-rust-cli = { git = "https://github.com/Brendonovich/prisma-client-rust", tag = "0.4.0" }

View file

@ -33,6 +33,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
{
Ok(data) => {
if data.len() == 0 {
#[cfg(debug_assertions)]
println!("Migration table does not exist");
// execute migration
match client._execute_raw(INIT_MIGRATION).await {
@ -47,8 +48,10 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
.await
.unwrap();
#[cfg(debug_assertions)]
println!("Migration table created: {:?}", value);
} else {
#[cfg(debug_assertions)]
println!("Migration table exists: {:?}", data);
}
@ -93,6 +96,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
.await?;
if existing_migration.is_none() {
#[cfg(debug_assertions)]
println!("Running migration: {}", name);
let steps = migration_sql.split(";").collect::<Vec<&str>>();
@ -111,6 +115,7 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
for (i, step) in steps.iter().enumerate() {
match client._execute_raw(&format!("{};", step)).await {
Ok(_) => {
#[cfg(debug_assertions)]
println!("Step {} ran successfully", i);
client
.migration()
@ -127,8 +132,10 @@ pub async fn run_migrations(db_url: &str) -> Result<()> {
}
}
#[cfg(debug_assertions)]
println!("Migration {} recorded successfully", name);
} else {
#[cfg(debug_assertions)]
println!("Migration {} already exists", name);
}
}
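
The #[cfg(debug_assertions)] attributes added throughout the migration runner above keep the progress println! calls in debug builds only. A tiny illustration of the mechanism (the function and loop are made up for the example):

// The attribute removes the annotated statement from release builds entirely,
// so `cargo build --release` ships none of this output.
fn report(step: usize) {
    #[cfg(debug_assertions)]
    println!("Step {} ran successfully", step);
    // Keep the parameter "used" in release builds, where the println! is gone.
    let _ = step;
}

fn main() {
    for step in 0..3 {
        report(step);
    }
}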

View file

@ -2,7 +2,7 @@ use crate::job::jobs::JobReportUpdate;
use crate::{
file::FileError,
job::{jobs::Job, worker::WorkerContext},
prisma::{self, file_path},
prisma::{file_path},
CoreContext,
};
use anyhow::Result;

View file

@ -173,8 +173,8 @@ pub struct JobReportCreate {}
impl Replicate for JobReport {
type Create = JobReportCreate;
async fn create(_data: Self::Create, ctx: SyncContext) {}
async fn delete(ctx: SyncContext) {}
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}
#[repr(i32)]

View file

@ -1,6 +1,5 @@
use crate::file::cas::identifier::FileIdentifierJob;
use job::jobs::{Job, JobReport, Jobs};
use log::{error, info};
use prisma::PrismaClient;
use serde::{Deserialize, Serialize};
use state::client::ClientState;
@ -22,6 +21,7 @@ pub mod encode;
pub mod file;
pub mod job;
pub mod library;
#[cfg(feature = "p2p")]
pub mod p2p;
pub mod prisma;
pub mod state;
@ -91,12 +91,12 @@ impl CoreContext {
.internal_sender
.send(InternalEvent::JobIngest(job))
.unwrap_or_else(|e| {
error!("Failed to spawn job. {:?}", e);
println!("Failed to spawn job. {:?}", e);
});
}
pub async fn emit(&self, event: CoreEvent) {
self.event_sender.send(event).await.unwrap_or_else(|e| {
error!("Failed to emit event. {:?}", e);
println!("Failed to emit event. {:?}", e);
});
}
}
@ -138,9 +138,9 @@ impl Core {
// prepare basic client state
let mut state = ClientState::new(data_dir, "diamond-mastering-space-dragon").unwrap();
// load from disk
state
.read_disk()
.unwrap_or(error!("No client state found, creating new one..."));
state.read_disk().unwrap_or_else(|_| {
println!("Error: No client state found, creating new one...")
});
state.save();
@ -160,6 +160,7 @@ impl Core {
internal_channel,
};
#[cfg(feature = "p2p")]
tokio::spawn(async move {
p2p::listener::listen(None).await.unwrap_or(());
});
@ -213,22 +214,22 @@ impl Core {
println!("Initializing...");
if self.state.libraries.len() == 0 {
match library::loader::create(&self, None).await {
Ok(library) => info!("Created new library: {:?}", library),
Err(e) => info!("Error creating library: {:?}", e),
Ok(library) => println!("Created new library: {:?}", library),
Err(e) => println!("Error creating library: {:?}", e),
}
} else {
for library in self.state.libraries.iter() {
// init database for library
match library::loader::load(&library.library_path, &library.library_uuid).await {
Ok(library) => info!("Loaded library: {:?}", library),
Err(e) => info!("Error loading library: {:?}", e),
Ok(library) => println!("Loaded library: {:?}", library),
Err(e) => println!("Error loading library: {:?}", e),
}
}
}
// init client
match client::create(&self).await {
Ok(_) => info!("Spacedrive online"),
Err(e) => info!("Error initializing client: {:?}", e),
Ok(_) => println!("Spacedrive online"),
Err(e) => println!("Error initializing client: {:?}", e),
};
}
@ -279,6 +280,7 @@ impl Core {
// query sources of data
async fn exec_query(&self, query: ClientQuery) -> Result<CoreResponse, CoreError> {
#[cfg(debug_assertions)]
println!("Core query: {:?}", query);
let ctx = self.get_context();
Ok(match query {

View file

@ -3,7 +3,7 @@ use uuid::Uuid;
use crate::state::client::LibraryState;
use crate::{db::migrate, prisma::library, state};
use crate::{prisma, Core};
use crate::Core;
use super::LibraryError;

View file

@ -1,9 +1,7 @@
use crate::{prisma::library_statistics, state::client, CoreContext};
use crate::prisma::library_statistics;
use serde::{Deserialize, Serialize};
use ts_rs::TS;
use super::LibraryError;
#[derive(Debug, Serialize, Deserialize, TS, Clone)]
#[ts(export)]
pub struct Statistics {
@ -44,24 +42,24 @@ impl Default for Statistics {
}
}
impl Statistics {
pub async fn recalculate(ctx: &CoreContext) -> Result<(), LibraryError> {
let config = client::get();
let db = &ctx.database;
// impl Statistics {
// pub async fn recalculate(ctx: &CoreContext) -> Result<(), LibraryError> {
// let config = client::get();
// let db = &ctx.database;
let library_data = config.get_current_library();
// let library_data = config.get_current_library();
let library_statistics_db = match db
.library_statistics()
.find_unique(library_statistics::id::equals(library_data.library_id))
.exec()
.await?
{
Some(library_statistics_db) => library_statistics_db.into(),
// create the default values if database has no entry
None => Statistics::default(),
};
// let library_statistics_db = match db
// .library_statistics()
// .find_unique(library_statistics::id::equals(library_data.library_id))
// .exec()
// .await?
// {
// Some(library_statistics_db) => library_statistics_db.into(),
// // create the default values if database has no entry
// None => Statistics::default(),
// };
Ok(())
}
}
// Ok(())
// }
// }

View file

@ -48,7 +48,7 @@ pub enum SyncTransport {
}
impl SyncEngine {
pub fn new(core_ctx: &FakeCoreContext) -> Self {
pub fn new(_core_ctx: &FakeCoreContext) -> Self {
let (client_pool_sender, _client_pool_receiver) = mpsc::channel(10);
SyncEngine {

View file

@ -28,15 +28,15 @@ impl PropertyOperation for File {
type Create = FileCreate;
type Update = FileUpdate;
async fn create(_data: Self::Create, ctx: SyncContext) {}
async fn update(_data: Self::Update, ctx: SyncContext) {}
async fn delete(ctx: SyncContext) {}
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn update(_data: Self::Update, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}
#[async_trait::async_trait]
impl Replicate for File {
type Create = FileCreate;
async fn create(_data: Self::Create, ctx: SyncContext) {}
async fn delete(ctx: SyncContext) {}
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}

View file

@ -29,7 +29,7 @@ impl PropertyOperation for Tag {
type Create = TagCreate;
type Update = TagUpdate;
async fn create(_data: Self::Create, ctx: SyncContext) {}
async fn update(_data: Self::Update, ctx: SyncContext) {}
async fn delete(ctx: SyncContext) {}
async fn create(_data: Self::Create, _ctx: SyncContext) {}
async fn update(_data: Self::Update, _ctx: SyncContext) {}
async fn delete(_ctx: SyncContext) {}
}

View file

@ -1,12 +1,7 @@
use crate::{
file::indexer::IndexerJob,
prisma::{file_path, location},
state::client,
sys::{volumes, volumes::Volume},
ClientQuery, CoreContext, CoreEvent,
file::indexer::IndexerJob, prisma::location, state::client, ClientQuery, CoreContext, CoreEvent,
};
use anyhow::Result;
use log::info;
use serde::{Deserialize, Serialize};
use std::{fs, io, io::Write, path::Path};
use thiserror::Error;
@ -80,7 +75,7 @@ pub async fn get_location(
None => Err(LocationError::NotFound(location_id.to_string()))?,
};
info!("Retrieved location: {:?}", location);
println!("Retrieved location: {:?}", location);
Ok(location.into())
}
@ -100,7 +95,6 @@ pub async fn new_location_and_scan(
pub async fn get_locations(ctx: &CoreContext) -> Result<Vec<LocationResource>, SysError> {
let db = &ctx.database;
let config = client::get();
let locations = db.location().find_many(vec![]).exec().await?;
@ -119,7 +113,7 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
// check if we have access to this location
match fs::File::open(&path) {
Ok(_) => info!("Path is valid, creating location for '{}'", &path),
Ok(_) => println!("Path is valid, creating location for '{}'", &path),
Err(e) => Err(LocationError::FileReadError(e))?,
}
// check if location already exists
@ -131,7 +125,7 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
{
Some(location) => location,
None => {
info!(
println!(
"Location does not exist, creating new location for '{}'",
&path
);
@ -152,7 +146,7 @@ pub async fn create_location(ctx: &CoreContext, path: &str) -> Result<LocationRe
.exec()
.await?;
info!("Created location: {:?}", location);
println!("Created location: {:?}", location);
// write a file called .spacedrive to path containing the location id in JSON format
let mut dotfile = match fs::File::create(format!("{}/{}", path.clone(), DOTFILE_NAME)) {