add (working!) cut job

This commit is contained in:
brxken128 2023-01-17 15:32:28 +00:00
parent 1397d84e15
commit 6196df4e48
2 changed files with 109 additions and 15 deletions

87
core/src/object/fs/cut.rs Normal file
View file

@ -0,0 +1,87 @@
use super::{context_menu_fs_info, get_path_from_location_id, FsInfo};
use crate::job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext};
use serde::{Deserialize, Serialize};
use specta::Type;
use std::{collections::VecDeque, hash::Hash, path::PathBuf};
/// Job that moves ("cuts") a file or directory from a source location into a
/// target directory within another (or the same) location.
pub struct FileCutterJob {}
/// Serializable job state for [`FileCutterJob`]; currently carries no data,
/// but exists to satisfy the `StatefulJob::Data` associated type.
#[derive(Serialize, Deserialize, Debug)]
pub struct FileCutterJobState {}
/// Initialization arguments for a cut job, supplied by the caller when the
/// job is dispatched (serialized, hashed for deduplication, and exported to
/// the frontend via `specta::Type`).
#[derive(Serialize, Deserialize, Hash, Type)]
pub struct FileCutterJobInit {
// Location containing the item to move.
pub source_location_id: i32,
// `file_path` id of the item to move within the source location.
pub source_path_id: i32,
// Location the item should be moved into.
pub target_location_id: i32,
// Directory path relative to the target location's root.
pub target_path: PathBuf,
}
/// A single unit of work for the cut job: move one filesystem item into a
/// target directory.
#[derive(Serialize, Deserialize, Debug)]
pub struct FileCutterJobStep {
// Resolved filesystem info (absolute path, object type) of the source item.
pub source_fs_info: FsInfo,
// Absolute path of the directory the item will be moved into.
pub target_directory: PathBuf,
}
// Stable identifier used to register and report this job type.
const JOB_NAME: &str = "file_cutter";
#[async_trait::async_trait]
impl StatefulJob for FileCutterJob {
type Data = FileCutterJobState;
type Init = FileCutterJobInit;
type Step = FileCutterJobStep;
fn name(&self) -> &'static str {
JOB_NAME
}
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let source_fs_info = context_menu_fs_info(
&ctx.library_ctx.db,
state.init.source_location_id,
state.init.source_path_id,
)
.await?;
let mut full_target_path =
get_path_from_location_id(&ctx.library_ctx.db, state.init.target_location_id).await?;
full_target_path.push(state.init.target_path.clone());
state.steps = VecDeque::new();
state.steps.push_back(FileCutterJobStep {
source_fs_info,
target_directory: full_target_path,
});
ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]);
Ok(())
}
async fn execute_step(
&self,
ctx: WorkerContext,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
let source_info = &step.source_fs_info;
let mut full_output = step.target_directory.clone();
full_output.push(source_info.obj_path.clone().file_name().unwrap());
dbg!(source_info.obj_path.clone());
dbg!(full_output.clone());
std::fs::rename(source_info.obj_path.clone(), full_output.clone())?;
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(
state.step_number + 1,
)]);
Ok(())
}
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
Ok(Some(serde_json::to_value(&state.init)?))
}
}

View file

@ -8,6 +8,7 @@ use crate::{
};
pub mod copy;
pub mod cut;
pub mod decrypt;
pub mod delete;
pub mod duplicate;
@ -28,11 +29,10 @@ pub struct FsInfo {
pub obj_type: ObjectType,
}
pub async fn context_menu_fs_info(
pub async fn get_path_from_location_id(
db: &PrismaClient,
location_id: i32,
path_id: i32,
) -> Result<FsInfo, JobError> {
) -> Result<PathBuf, JobError> {
let location = db
.location()
.find_unique(location::id::equals(location_id))
@ -42,6 +42,22 @@ pub async fn context_menu_fs_info(
value: String::from("location which matches location_id"),
})?;
Ok(location
.local_path
.as_ref()
.map(PathBuf::from)
.ok_or(JobError::MissingData {
value: String::from("path when cast as `PathBuf`"),
})?)
}
pub async fn context_menu_fs_info(
db: &PrismaClient,
location_id: i32,
path_id: i32,
) -> Result<FsInfo, JobError> {
let location_path = get_path_from_location_id(db, location_id).await?;
let item = db
.file_path()
.find_unique(file_path::location_id_id(location_id, path_id))
@ -51,18 +67,9 @@ pub async fn context_menu_fs_info(
value: String::from("file_path that matches both location id and path id"),
})?;
let obj_path = [
location
.local_path
.as_ref()
.map(PathBuf::from)
.ok_or(JobError::MissingData {
value: String::from("path when cast as `PathBuf`"),
})?,
item.materialized_path.clone().into(),
]
.iter()
.collect();
let obj_path = [location_path, item.materialized_path.clone().into()]
.iter()
.collect();
// i don't know if this covers symlinks
let obj_type = if item.is_dir {