store cas ids on file paths

Brendan Allan 2023-01-28 11:42:24 +08:00
parent fcad42f388
commit bf362eadd6
34 changed files with 1387 additions and 1312 deletions

@@ -6,86 +6,94 @@ import FileItem from '../explorer/FileItem';
const placeholderFileItems: ExplorerItem[] = [
{
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
extension: '',
id: 2,
name: 'Documents',
key_id: null,
type: 'Path',
is_dir: true,
location_id: 1,
materialized_path: '/Documents',
object_id: 5,
parent_id: 1,
object: {
extension: 'tsx',
cas_id: '3',
id: 3,
name: 'App.tsx',
key_id: null,
item: {
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
extension: '',
cas_id: '3',
integrity_checksum: '',
id: 2,
name: 'Documents',
key_id: null,
is_dir: true,
location_id: 1,
materialized_path: '/Documents',
object_id: 5,
parent_id: 1,
object: {
extension: 'tsx',
id: 3,
pub_id: [3],
name: 'App.tsx',
key_id: null,
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
favorite: false,
has_thumbnail: false,
has_thumbstrip: false,
has_video_preview: false,
hidden: false,
important: false,
ipfs_id: '',
kind: 2,
note: '',
size_in_bytes: '0'
}
},
has_thumbnail: false
},
{
type: 'Object',
has_thumbnail: false,
item: {
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
extension: '',
id: 1,
pub_id: [1],
name: 'Minecraft',
key_id: null,
favorite: false,
file_paths: [],
has_thumbnail: false,
has_thumbstrip: false,
has_video_preview: false,
hidden: false,
important: false,
integrity_checksum: '',
ipfs_id: '',
kind: 2,
kind: 4,
note: '',
size_in_bytes: '0'
}
},
{
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
extension: '',
id: 1,
name: 'Minecraft',
key_id: null,
type: 'Object',
cas_id: '555',
favorite: false,
file_paths: [],
has_thumbnail: false,
has_thumbstrip: false,
has_video_preview: false,
hidden: false,
important: false,
integrity_checksum: '',
ipfs_id: '',
kind: 4,
note: '',
size_in_bytes: '0'
},
{
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
extension: '',
id: 5,
name: 'Minecraft',
key_id: null,
type: 'Object',
cas_id: '555',
favorite: false,
file_paths: [],
has_thumbnail: false,
has_thumbstrip: false,
has_video_preview: false,
hidden: false,
important: false,
integrity_checksum: '',
ipfs_id: '',
kind: 5,
note: '',
size_in_bytes: '0'
item: {
date_created: '2020-01-01T00:00:00.000Z',
date_indexed: '2020-01-01T00:00:00.000Z',
date_modified: '2020-01-01T00:00:00.000Z',
extension: '',
id: 5,
pub_id: [5],
name: 'Minecraft',
key_id: null,
favorite: false,
file_paths: [],
has_thumbnail: false,
has_thumbstrip: false,
has_video_preview: false,
hidden: false,
important: false,
ipfs_id: '',
kind: 5,
note: '',
size_in_bytes: '0'
}
}
];
@@ -104,7 +112,7 @@ const DeviceIcon = {
server: <Cloud color="white" weight="fill" size={18} style={tw`mr-2`} />
};
const Device = ({ name, locations, size, type }: DeviceProps) => {
const Device = ({ name, size, type }: DeviceProps) => {
return (
<View style={tw`my-2 bg-app-overlay border rounded-md border-app-line`}>
<View style={tw`flex flex-row items-center px-3.5 pt-3 pb-2`}>
@@ -123,7 +131,7 @@ const Device = ({ name, locations, size, type }: DeviceProps) => {
<FlatList
data={placeholderFileItems}
renderItem={({ item }) => <FileItem data={item} />}
keyExtractor={(item) => item.id.toString()}
keyExtractor={(item) => item.item.id.toString()}
horizontal
contentContainerStyle={tw`mt-3 mb-5`}
showsHorizontalScrollIndicator={false}

@@ -39,8 +39,8 @@ const Explorer = ({ data }: ExplorerProps) => {
const { fileRef, setData } = useFileModalStore();
function handlePress(item: ExplorerItem) {
if (isPath(item) && item.is_dir) {
navigation.navigate('Location', { id: item.location_id });
if (isPath(item) && item.item.is_dir) {
navigation.navigate('Location', { id: item.item.location_id });
} else {
setData(item);
fileRef.current.present();
@@ -70,7 +70,7 @@ const Explorer = ({ data }: ExplorerProps) => {
key={layoutMode}
numColumns={layoutMode === 'grid' ? getExplorerStore().gridNumColumns : 1}
data={data.items}
keyExtractor={(item) => item.id.toString()}
keyExtractor={(item) => item.item.id.toString()}
renderItem={({ item }) => (
<Pressable onPress={() => handlePress(item)}>
{layoutMode === 'grid' ? <FileItem data={item} /> : <FileRow data={item} />}

@@ -10,7 +10,9 @@ type FileItemProps = {
};
const FileItem = ({ data }: FileItemProps) => {
const isVid = isVideoExt(data.extension || '');
const { item } = data;
const isVid = isVideoExt(item.extension || '');
const gridItemSize = Layout.window.width / getExplorerStore().gridNumColumns;
@@ -21,16 +23,19 @@ const FileItem = ({ data }: FileItemProps) => {
height: gridItemSize
})}
>
<FileThumb data={data} kind={data.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'} />
{data?.extension && isVid && (
<FileThumb
data={data}
kind={data.item.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'}
/>
{item.extension && isVid && (
<View style={tw`absolute bottom-8 opacity-70 right-5 py-0.5 px-1 bg-black/70 rounded`}>
<Text style={tw`text-[9px] text-white uppercase font-semibold`}>{data.extension}</Text>
<Text style={tw`text-[9px] text-white uppercase font-semibold`}>{item.extension}</Text>
</View>
)}
<View style={tw`px-1.5 py-[1px] mt-1`}>
<Text numberOfLines={1} style={tw`text-xs font-medium text-center text-white`}>
{data?.name}
{data?.extension && `.${data.extension}`}
{item?.name}
{item?.extension && `.${item.extension}`}
</Text>
</View>
</View>

@@ -10,7 +10,9 @@ type FileRowProps = {
};
const FileRow = ({ data }: FileRowProps) => {
const isVid = isVideoExt(data.extension || '');
const { item } = data;
const isVid = isVideoExt(item.extension || '');
return (
<View
@@ -20,13 +22,13 @@ const FileRow = ({ data }: FileRowProps) => {
>
<FileThumb
data={data}
kind={data.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'}
kind={item.extension === 'zip' ? 'zip' : isVid ? 'video' : 'other'}
size={0.6}
/>
<View style={tw`ml-3`}>
<Text numberOfLines={1} style={tw`text-xs font-medium text-center text-ink-dull`}>
{data?.name}
{data?.extension && `.${data.extension}`}
{item?.name}
{item?.extension && `.${item.extension}`}
</Text>
</View>
</View>

@@ -1,7 +1,6 @@
import { Image, View } from 'react-native';
import { DocumentDirectoryPath } from 'react-native-fs';
import { ExplorerItem } from '@sd/client';
import { useExplorerStore } from '~/stores/explorerStore';
import { isObject, isPath } from '~/types/helper';
// import icons from '../../assets/icons/file';
import tw from '../../lib/tailwind';
@@ -27,28 +26,26 @@ const FileThumbWrapper = ({ children, size = 1 }) => (
);
export default function FileThumb({ data, size = 1, kind }: FileThumbProps) {
const explorerStore = useExplorerStore();
// const Icon = useMemo(() => {
// const Icon = icons[data.extension];
// return Icon;
// }, [data.extension]);
if (isPath(data) && data.is_dir)
if (isPath(data) && data.item.is_dir)
return (
<FileThumbWrapper size={size}>
<FolderIcon size={70 * size} />
</FileThumbWrapper>
);
const cas_id = isObject(data) ? data.cas_id : data.object?.cas_id;
const cas_id = isObject(data) ? data.item.file_paths[0].cas_id : data.item.cas_id;
if (!cas_id) return undefined;
// Icon
let icon = undefined;
if (kind === 'Archive') icon = require('@sd/assets/images/Archive.png');
else if (kind === 'Video') icon = require('@sd/assets/images/Video.png');
else if (kind === 'Document' && data.extension === 'pdf')
else if (kind === 'Document' && data.item.extension === 'pdf')
icon = require('@sd/assets/images/Document_pdf.png');
else if (kind === 'Executable') icon = require('@sd/assets/images/Executable.png');
@@ -60,17 +57,10 @@ export default function FileThumb({ data, size = 1, kind }: FileThumbProps) {
);
}
// Thumbnail
const has_thumbnail = isObject(data)
? data.has_thumbnail
: isPath(data)
? data.object?.has_thumbnail
: !!explorerStore.newThumbnails[cas_id];
const url = getThumbnailUrlById(cas_id);
// TODO: Not styled yet
if (has_thumbnail && url) {
if (data.has_thumbnail && url) {
return (
<FileThumbWrapper size={size}>
<Image source={{ uri: url }} resizeMode="contain" style={tw`w-full h-full`} />

@@ -28,6 +28,8 @@ export const FileModal = () => {
const fileDetailsRef = useRef<BottomSheetModal>(null);
const item = data.item;
return (
<>
<Modal ref={fileRef} snapPoints={['60%', '90%']}>
@@ -38,11 +40,11 @@
<FileIcon data={data} size={1.6} />
{/* File Name, Details etc. */}
<View style={tw`ml-2`}>
<Text style={tw`text-base font-bold text-gray-200`}>{data?.name}</Text>
<Text style={tw`text-base font-bold text-gray-200`}>{item.name}</Text>
<View style={tw`flex flex-row mt-2`}>
<Text style={tw`text-xs text-gray-400`}>5 MB,</Text>
<Text style={tw`ml-1 text-xs text-gray-400`}>
{data?.extension.toUpperCase()},
{item.extension.toUpperCase()},
</Text>
<Text style={tw`ml-1 text-xs text-gray-400`}>15 Aug</Text>
</View>
@@ -77,24 +79,24 @@
{/* File Icon / Name */}
<View style={tw`items-center`}>
<FileThumb data={data} size={1.8} />
<Text style={tw`text-base font-bold text-gray-200 mt-3`}>{data.name}</Text>
<Text style={tw`text-base font-bold text-gray-200 mt-3`}>{item.name}</Text>
</View>
{/* Details */}
<Divider style={tw`mt-6 mb-4`} />
<>
{/* Temp, we need cas id */}
{data?.id && <MetaItem title="Unique Content ID" value={'555555555'} />}
{item.id && <MetaItem title="Unique Content ID" value={'555555555'} />}
<Divider style={tw`my-4`} />
<MetaItem title="URI" value={`/Users/utku/Somewhere/vite.config.js`} />
<Divider style={tw`my-4`} />
<MetaItem
title="Date Created"
value={dayjs(data.date_created).format('MMMM Do yyyy, h:mm:ss aaa')}
value={dayjs(item.date_created).format('MMMM Do yyyy, h:mm:ss aaa')}
/>
<Divider style={tw`my-4`} />
<MetaItem
title="Date Indexed"
value={dayjs(data.date_indexed).format('MMMM Do yyyy, h:mm:ss aaa')}
value={dayjs(item.date_indexed).format('MMMM Do yyyy, h:mm:ss aaa')}
/>
</>
</BottomSheetScrollView>

@@ -113,37 +113,34 @@ model Location {
@@map("location")
}
/// @shared(id: cas_id)
/// @shared(id: pub_id)
model Object {
id Int @id @default(autoincrement())
// content addressable storage id - blake3 sampled checksum
cas_id String @unique
// full byte contents digested into blake3 checksum
integrity_checksum String? @unique
id Int @id @default(autoincrement())
pub_id Bytes @unique
// basic metadata
name String?
extension String?
kind Int @default(0)
size_in_bytes String @default("0")
key_id Int?
name String?
extension String?
kind Int @default(0)
size_in_bytes String @default("0")
key_id Int?
// handy ways to mark an object
hidden Boolean @default(false)
favorite Boolean @default(false)
important Boolean @default(false)
hidden Boolean @default(false)
favorite Boolean @default(false)
important Boolean @default(false)
// if we have generated preview media for this object
has_thumbnail Boolean @default(false)
has_thumbstrip Boolean @default(false)
has_video_preview Boolean @default(false)
has_thumbnail Boolean @default(false)
has_thumbstrip Boolean @default(false)
has_video_preview Boolean @default(false)
// integration with ipfs
ipfs_id String?
ipfs_id String?
// plain text note
note String?
note String?
// the original known creation date of this object
date_created DateTime @default(now())
date_created DateTime @default(now())
// the last time this object was modified
date_modified DateTime @default(now())
date_modified DateTime @default(now())
// when this object was first indexed
date_indexed DateTime @default(now())
date_indexed DateTime @default(now())
tags TagOnObject[]
labels LabelOnObject[]
@@ -160,22 +157,31 @@ model Object {
/// @shared(id: [location, id])
model FilePath {
id Int
is_dir Boolean @default(false)
id Int
is_dir Boolean @default(false)
// content addressable storage id - blake3 sampled checksum
cas_id String?
// full byte contents digested into blake3 checksum
integrity_checksum String? @unique
// location that owns this path
location_id Int
location Location @relation(fields: [location_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
location_id Int
location Location @relation(fields: [location_id], references: [id], onDelete: Cascade, onUpdate: Cascade)
// a path generated from local file_path ids eg: "34/45/67/890"
materialized_path String
// the name and extension
name String
extension String?
// the unique Object for this file path
object_id Int?
object Object? @relation(fields: [object_id], references: [id], onDelete: Restrict)
object_id Int?
object Object? @relation(fields: [object_id], references: [id], onDelete: Restrict)
// the parent in the file tree
parent_id Int?
key_id Int? // replacement for encryption
parent_id Int?
key_id Int? // replacement for encryption
// permissions String?
// temp_cas_id String? // so a filepath can be created without its File, as they're created lazily
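
The schema change above is the heart of this commit: cas_id moves off Object, where it used to be the unique identity, onto FilePath as a nullable column, while Object gains a random pub_id. As a rough sketch of what a lookup now looks like, here is a hypothetical helper (cas_id_for_path is not part of this commit) built only from prisma-client-rust accessors that appear elsewhere in this diff:

use crate::prisma::{file_path, PrismaClient};
use prisma_client_rust::QueryError;

// cas_id is now read straight off the FilePath row; it is an
// Option<String>, since directories never get one.
async fn cas_id_for_path(
    db: &PrismaClient,
    location_id: i32,
    path_id: i32,
) -> Result<Option<String>, QueryError> {
    Ok(db
        .file_path()
        .find_unique(file_path::location_id_id(location_id, path_id))
        .exec()
        .await?
        .and_then(|fp| fp.cas_id))
}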

@@ -4,7 +4,6 @@ use crate::{
indexer::{indexer_job::indexer_job_location, rules::IndexerRuleCreateArgs},
relink_location, scan_location, LocationCreateArgs, LocationError, LocationUpdateArgs,
},
object::preview::THUMBNAIL_CACHE_DIR_NAME,
prisma::{file_path, indexer_rule, indexer_rules_in_location, location, object, tag},
};
@@ -12,7 +11,6 @@ use std::path::PathBuf;
use rspc::{self, internal::MiddlewareBuilderLike, ErrorCode, Type};
use serde::{Deserialize, Serialize};
use tokio::{fs, io};
use super::{utils::LibraryRequest, Ctx, RouterBuilder};
@@ -27,8 +25,14 @@ pub enum ExplorerContext {
#[derive(Serialize, Deserialize, Type, Debug)]
#[serde(tag = "type")]
pub enum ExplorerItem {
Path(Box<file_path_with_object::Data>),
Object(Box<object_with_file_paths::Data>),
Path {
has_thumbnail: bool,
item: Box<file_path_with_object::Data>,
},
Object {
has_thumbnail: bool,
item: Box<object_with_file_paths::Data>,
},
}
#[derive(Serialize, Deserialize, Type, Debug)]
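
The serde(tag = "type") attribute makes this an internally tagged enum, so has_thumbnail and item serialize as siblings of the type tag. That wire shape is exactly what the React Native changes above consume via const { item } = data and data.has_thumbnail. A self-contained sketch of the serialization, with a hypothetical Payload standing in for the real file_path_with_object::Data:

use serde::Serialize;

#[derive(Serialize)]
#[serde(tag = "type")]
enum ExplorerItem {
    Path { has_thumbnail: bool, item: Payload },
}

// Stand-in for the generated Prisma type, for illustration only.
#[derive(Serialize)]
struct Payload {
    id: i32,
    is_dir: bool,
}

fn main() {
    let json = serde_json::to_string(&ExplorerItem::Path {
        has_thumbnail: false,
        item: Payload { id: 2, is_dir: true },
    })
    .unwrap();
    // {"type":"Path","has_thumbnail":false,"item":{"id":2,"is_dir":true}}
    println!("{json}");
}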
@@ -130,24 +134,20 @@ pub(crate) fn mount() -> rspc::RouterBuilder<
// .await;
let mut items = Vec::with_capacity(file_paths.len());
for mut file_path in file_paths {
if let Some(object) = &mut file_path.object.as_mut() {
// TODO: Use helper function to build this url the same way as the Rust file loading layer
let thumb_path = library
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(&object.cas_id)
.with_extension("webp");
object.has_thumbnail = (match fs::metadata(thumb_path).await {
Ok(_) => Ok(true),
Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(false),
Err(e) => Err(e),
})
.map_err(LocationError::IOError)?;
}
items.push(ExplorerItem::Path(Box::new(file_path)));
for file_path in file_paths {
let has_thumbnail = match &file_path.cas_id {
None => false,
Some(cas_id) => library
.thumbnail_exists(cas_id)
.await
.map_err(LocationError::IOError)?,
};
items.push(ExplorerItem::Path {
has_thumbnail,
item: Box::new(file_path),
});
}
Ok(ExplorerData {

@@ -1,12 +1,13 @@
use rspc::{ErrorCode, Type};
use serde::Deserialize;
use tracing::info;
use uuid::Uuid;
use crate::{
api::locations::{object_with_file_paths, ExplorerContext, ExplorerData, ExplorerItem},
invalidate_query,
object::preview::THUMBNAIL_CACHE_DIR_NAME,
library::LibraryContext,
prisma::{object, tag, tag_on_object},
};
@@ -23,8 +24,9 @@ pub(crate) fn mount() -> RouterBuilder {
t(|_, tag_id: i32, library| async move {
info!("Getting files for tag {}", tag_id);
let tag = library
.db
let LibraryContext { db, .. } = &library;
let tag = db
.tag()
.find_unique(tag::id::equals(tag_id))
.exec()
@@ -36,42 +38,47 @@
)
})?;
let objects: Vec<ExplorerItem> = library
.db
let objects = db
.object()
.find_many(vec![object::tags::some(vec![
tag_on_object::tag_id::equals(tag_id),
])])
.include(object_with_file_paths::include())
.exec()
.await?
.into_iter()
.map(|mut object| {
// sorry brendan
// grab the first path and tack on the name
let oldest_path = &object.file_paths[0];
object.name = Some(oldest_path.name.clone());
object.extension = oldest_path.extension.clone();
// a long-term fix for this would be to have the indexer give the Object a name and extension, sacrificing its own, and only store newly found Path names that differ from the Object name
.await?;
let thumb_path = library
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(&object.cas_id)
.with_extension("webp");
let mut items = Vec::with_capacity(objects.len());
object.has_thumbnail = thumb_path.try_exists().unwrap();
for mut object in objects {
// sorry brendan
// grab the first path and tack on the name
let oldest_path = &object.file_paths[0];
object.name = Some(oldest_path.name.clone());
object.extension = oldest_path.extension.clone();
// a long-term fix for this would be to have the indexer give the Object a name and extension, sacrificing its own, and only store newly found Path names that differ from the Object name
ExplorerItem::Object(Box::new(object))
})
.collect();
let cas_id = object
.file_paths
.iter()
.map(|fp| fp.cas_id.as_ref())
.find_map(|c| c);
info!("Got objects {}", objects.len());
let has_thumbnail = match cas_id {
None => false,
Some(cas_id) => library.thumbnail_exists(cas_id).await.unwrap(),
};
items.push(ExplorerItem::Object {
has_thumbnail,
item: Box::new(object),
});
}
info!("Got objects {}", items.len());
Ok(ExplorerData {
context: ExplorerContext::Tag(tag),
items: objects,
items,
})
})
})
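
Because only file paths carry a cas_id now and an Object can own several paths, the loop above takes the first path that has one; the .map(...).find_map(|c| c) chain is a two-step spelling of a single find_map. A minimal, self-contained sketch of that selection (first_cas_id is illustrative, not part of the commit):

// The first Some wins, exactly as in the router above.
fn first_cas_id(cas_ids: &[Option<String>]) -> Option<&String> {
    cas_ids.iter().find_map(|cas_id| cas_id.as_ref())
}

fn main() {
    let cas_ids = vec![None, Some("abc123".to_string()), Some("def456".to_string())];
    assert_eq!(first_cas_id(&cas_ids).map(String::as_str), Some("abc123"));
}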

@@ -1,6 +1,7 @@
use crate::{
api::CoreEvent, job::DynJob, location::LocationManager, node::NodeConfigManager,
prisma::PrismaClient, sync::SyncManager, NodeContext,
object::preview::THUMBNAIL_CACHE_DIR_NAME, prisma::PrismaClient, sync::SyncManager,
NodeContext,
};
use std::{
@@ -69,4 +70,19 @@ impl LibraryContext {
pub(crate) fn location_manager(&self) -> &Arc<LocationManager> {
&self.node_context.location_manager
}
pub async fn thumbnail_exists(&self, cas_id: &str) -> tokio::io::Result<bool> {
let thumb_path = self
.config()
.data_directory()
.join(THUMBNAIL_CACHE_DIR_NAME)
.join(cas_id)
.with_extension("webp");
match tokio::fs::metadata(thumb_path).await {
Ok(_) => Ok(true),
Err(e) if e.kind() == tokio::io::ErrorKind::NotFound => Ok(false),
Err(e) => Err(e),
}
}
}
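
This helper unifies two call sites that had drifted apart: the locations router already matched on fs::metadata like this, while the old tags router used try_exists().unwrap() and would panic on any unexpected I/O error. Centralized here, only NotFound maps to false; every other failure propagates. A sketch of the new call-site shape (path_has_thumbnail is hypothetical; thumbnail_exists and the nullable cas_id column are what this commit adds):

use crate::library::LibraryContext;

// Mirrors the loop in the locations router above: no cas_id means no
// thumbnail was ever generated for this path.
async fn path_has_thumbnail(
    library: &LibraryContext,
    cas_id: Option<&str>,
) -> tokio::io::Result<bool> {
    match cas_id {
        None => Ok(false),
        Some(cas_id) => library.thumbnail_exists(cas_id).await,
    }
}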

@@ -2,7 +2,7 @@ use crate::{
job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext},
location::indexer::rules::RuleKind,
prisma::{file_path, location},
prisma_sync,
sync,
};
use std::{
@@ -269,9 +269,9 @@ impl StatefulJob for IndexerJob {
(
(
prisma_sync::file_path::SyncId {
sync::file_path::SyncId {
id: entry.file_id,
location: prisma_sync::location::SyncId {
location: sync::location::SyncId {
pub_id: state.init.location.pub_id.clone(),
},
},

@@ -1,3 +1,4 @@
use crate::prisma::file_path;
use crate::{
invalidate_query,
library::LibraryContext,
@@ -8,13 +9,13 @@ use crate::{
manager::{helpers::subtract_location_path, LocationId, LocationManagerError},
},
object::{
identifier_job::{assemble_object_metadata, ObjectCreationMetadata},
identifier_job::FileMetadata,
preview::{
can_generate_thumbnail_for_image, generate_image_thumbnail, THUMBNAIL_CACHE_DIR_NAME,
},
validation::hash::file_checksum,
},
prisma::{file_path, object},
prisma::object,
};
use std::{
@@ -23,13 +24,14 @@ use std::{
str::FromStr,
};
use chrono::{FixedOffset, Utc};
use chrono::{DateTime, FixedOffset, Local, Utc};
use int_enum::IntEnum;
use notify::{event::RemoveKind, Event};
use prisma_client_rust::{raw, PrismaValue};
use sd_file_ext::extensions::ImageExtension;
use tokio::{fs, io::ErrorKind};
use tracing::{error, info, trace, warn};
use uuid::Uuid;
use super::file_path_with_object;
@@ -128,59 +130,69 @@
"Location: <root_path ='{location_local_path}'> creating file: {}",
event.paths[0].display()
);
if let Some(materialized_path) = subtract_location_path(location_local_path, &event.paths[0]) {
if let Some(parent_directory) =
get_parent_dir(location_id, &materialized_path, library_ctx).await?
{
let created_file = create_file_path(
library_ctx,
location_id,
materialized_path
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
materialized_path
.file_stem()
.unwrap_or_default()
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
materialized_path.extension().and_then(|ext| {
if ext.is_empty() {
None
} else {
Some(ext.to_str().expect("Found non-UTF-8 path").to_string())
}
}),
Some(parent_directory.id),
false,
)
.await?;
info!("Created path: {}", created_file.materialized_path);
let db = &library_ctx.db;
// generate provisional object
let ObjectCreationMetadata {
cas_id,
size_str,
kind,
date_created,
} = assemble_object_metadata(location_local_path, &created_file).await?;
let Some(materialized_path) = subtract_location_path(location_local_path, &event.paths[0]) else { return Ok(()) };
// upsert object because it can be from a file that previously existed and was moved
let object = library_ctx
.db
.object()
.upsert(
object::cas_id::equals(cas_id.clone()),
object::create_unchecked(
cas_id.clone(),
vec![
object::date_created::set(date_created),
object::kind::set(kind.int_value()),
object::size_in_bytes::set(size_str.clone()),
],
),
let Some(parent_directory) =
get_parent_dir(location_id, &materialized_path, library_ctx).await?
else {
warn!("Watcher found a path without parent");
return Ok(())
};
let created_file = create_file_path(
library_ctx,
location_id,
materialized_path
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
materialized_path
.file_stem()
.unwrap_or_default()
.to_str()
.expect("Found non-UTF-8 path")
.to_string(),
materialized_path.extension().and_then(|ext| {
if ext.is_empty() {
None
} else {
Some(ext.to_str().expect("Found non-UTF-8 path").to_string())
}
}),
Some(parent_directory.id),
false,
)
.await?;
info!("Created path: {}", created_file.materialized_path);
// generate provisional object
let FileMetadata {
cas_id,
kind,
fs_metadata,
} = FileMetadata::new(location_local_path, &created_file.materialized_path).await?;
let existing_object = db
.object()
.find_first(vec![object::file_paths::some(vec![
file_path::cas_id::equals(Some(cas_id.clone())),
])])
.exec()
.await?;
object::select!(object_id { id has_thumbnail });
let size_str = fs_metadata.len().to_string();
let object = match existing_object {
Some(object) => {
db.object()
.update(
object::id::equals(object.id),
vec![
object::size_in_bytes::set(size_str),
object::date_indexed::set(
@@ -188,32 +200,45 @@
),
],
)
.select(object_id::select())
.exec()
.await?;
library_ctx
.db
.file_path()
.update(
file_path::location_id_id(location_id, created_file.id),
vec![file_path::object_id::set(Some(object.id))],
.await?
}
None => {
db.object()
.create(
Uuid::new_v4().as_bytes().to_vec(),
vec![
object::date_created::set(
DateTime::<Local>::from(fs_metadata.created().unwrap()).into(),
),
object::kind::set(kind.int_value()),
object::size_in_bytes::set(size_str.clone()),
],
)
.select(object_id::select())
.exec()
.await?;
.await?
}
};
trace!("object: {:#?}", object);
if !object.has_thumbnail {
if let Some(ref extension) = created_file.extension {
generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
}
}
db.file_path()
.update(
file_path::location_id_id(location_id, created_file.id),
vec![file_path::object_id::set(Some(object.id))],
)
.exec()
.await?;
invalidate_query!(library_ctx, "locations.getExplorerData");
} else {
warn!("Watcher found a path without parent");
trace!("object: {:#?}", object);
if !object.has_thumbnail {
if let Some(ref extension) = created_file.extension {
generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
}
}
invalidate_query!(library_ctx, "locations.getExplorerData");
Ok(())
}
@@ -226,7 +251,14 @@ pub(super) async fn file_creation_or_update(
if let Some(file_path) =
get_existing_file_path(&location, &event.paths[0], false, library_ctx).await?
{
inner_update_file(location_local_path, file_path, event, library_ctx).await
inner_update_file(
&location,
location_local_path,
file_path,
event,
library_ctx,
)
.await
} else {
// We received None because it is a new file
inner_create_file(location.id, location_local_path, event, library_ctx).await
@@ -245,7 +277,14 @@ pub(super) async fn update_file(
if let Some(file_path) =
get_existing_file_path(&location, &event.paths[0], false, library_ctx).await?
{
let ret = inner_update_file(location_local_path, file_path, event, library_ctx).await;
let ret = inner_update_file(
&location,
location_local_path,
file_path,
event,
library_ctx,
)
.await;
invalidate_query!(library_ctx, "locations.getExplorerData");
ret
} else {
@@ -259,6 +298,7 @@
}
async fn inner_update_file(
location: &indexer_job_location::Data,
location_local_path: &str,
file_path: file_path_with_object::Data,
event: Event,
@@ -268,59 +308,48 @@
"Location: <root_path ='{location_local_path}'> updating file: {}",
event.paths[0].display()
);
// We have to separate this object, as the `assemble_object_metadata` doesn't
// accept `file_path_with_object::Data`
let file_path_only = file_path::Data {
id: file_path.id,
is_dir: file_path.is_dir,
location_id: file_path.location_id,
location: None,
materialized_path: file_path.materialized_path,
name: file_path.name,
extension: file_path.extension,
object_id: file_path.object_id,
object: None,
parent_id: file_path.parent_id,
key_id: file_path.key_id,
date_created: file_path.date_created,
date_modified: file_path.date_modified,
date_indexed: file_path.date_indexed,
key: None,
};
let ObjectCreationMetadata {
cas_id,
size_str,
kind,
date_created,
} = assemble_object_metadata(location_local_path, &file_path_only).await?;
if let Some(ref object) = file_path.object {
if object.cas_id != cas_id {
let FileMetadata {
cas_id,
kind,
fs_metadata,
} = FileMetadata::new(location_local_path, &file_path.materialized_path).await?;
if let Some(old_cas_id) = &file_path.cas_id {
if old_cas_id != &cas_id {
// file content changed
library_ctx
.db
.object()
.file_path()
.update(
object::id::equals(object.id),
file_path::location_id_id(location.id, file_path.id),
vec![
object::cas_id::set(cas_id.clone()),
object::size_in_bytes::set(size_str),
object::kind::set(kind.int_value()),
object::date_modified::set(date_created),
object::integrity_checksum::set(if object.integrity_checksum.is_some() {
// If a checksum was already computed, we need to recompute it
Some(file_checksum(&event.paths[0]).await?)
} else {
None
}),
file_path::cas_id::set(Some(old_cas_id.clone())),
// file_path::size_in_bytes::set(fs_metadata.len().to_string()),
// file_path::kind::set(kind.int_value()),
file_path::date_modified::set(
DateTime::<Local>::from(fs_metadata.created().unwrap()).into(),
),
file_path::integrity_checksum::set(
if file_path.integrity_checksum.is_some() {
// If a checksum was already computed, we need to recompute it
Some(file_checksum(&event.paths[0]).await?)
} else {
None
},
),
],
)
.exec()
.await?;
if object.has_thumbnail {
if file_path
.object
.map(|o| o.has_thumbnail)
.unwrap_or_default()
{
// if this file had a thumbnail previously, we update it to match the new content
if let Some(ref extension) = file_path_only.extension {
if let Some(extension) = &file_path.extension {
generate_thumbnail(extension, &cas_id, &event.paths[0], library_ctx).await;
}
}

@@ -7,7 +7,7 @@ use crate::{
preview::{ThumbnailJob, ThumbnailJobInit},
},
prisma::{file_path, indexer_rules_in_location, location, node, object},
prisma_sync,
sync,
};
use rspc::Type;
@@ -340,7 +340,7 @@ async fn create_location(
.write_op(
db,
ctx.sync.owned_create(
prisma_sync::location::SyncId {
sync::location::SyncId {
pub_id: location_pub_id.as_bytes().to_vec(),
},
[

@@ -6,15 +6,14 @@ use serde::{Deserialize, Serialize};
use specta::Type;
use tracing::trace;
use super::{context_menu_fs_info, get_path_from_location_id, osstr_to_string, FsInfo, ObjectType};
use super::{context_menu_fs_info, get_path_from_location_id, osstr_to_string, FsInfo};
pub struct FileCopierJob {}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FileCopierJobState {
pub target_path: PathBuf, // target dir prefix too
pub source_path: PathBuf,
pub root_type: ObjectType,
pub source_fs_info: FsInfo,
}
#[derive(Serialize, Deserialize, Hash, Type)]
@@ -27,8 +26,22 @@
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FileCopierJobStep {
pub source_fs_info: FsInfo,
pub enum FileCopierJobStep {
Directory { path: PathBuf },
File { path: PathBuf },
}
impl From<FsInfo> for FileCopierJobStep {
fn from(value: FsInfo) -> Self {
match value.path_data.is_dir {
true => Self::Directory {
path: value.fs_path,
},
false => Self::File {
path: value.fs_path,
},
}
}
}
pub const COPY_JOB_NAME: &str = "file_copier";
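
Modeling the copier's step as an enum (the eraser below gets the same treatment) trades the old obj_type field checks for exhaustive matches, so a new step kind cannot silently fall through. A self-contained sketch of the conversion pattern, with a simplified stand-in FsInfo keeping only the two fields the From impl actually reads:

use std::path::PathBuf;

// Simplified stand-in for the job's FsInfo, for illustration only.
struct FsInfo {
    fs_path: PathBuf,
    is_dir: bool,
}

enum Step {
    Directory { path: PathBuf },
    File { path: PathBuf },
}

impl From<FsInfo> for Step {
    fn from(info: FsInfo) -> Self {
        match info.is_dir {
            true => Self::Directory { path: info.fs_path },
            false => Self::File { path: info.fs_path },
        }
    }
}

fn main() {
    let step: Step = FsInfo {
        fs_path: PathBuf::from("/tmp/example"),
        is_dir: false,
    }
    .into();
    // Every consumer now dispatches with an exhaustive match.
    match step {
        Step::File { path } => println!("copy file {}", path.display()),
        Step::Directory { path } => println!("walk dir {}", path.display()),
    }
}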
@@ -61,17 +74,21 @@ impl StatefulJob for FileCopierJob {
// if no suffix has been selected, just use the file name
// if a suffix is provided and it's a directory, use the directory name + suffix
// if a suffix is provided and it's a file, use the (file name + suffix).extension
let file_name = osstr_to_string(source_fs_info.obj_path.file_name())?;
let file_name = osstr_to_string(source_fs_info.fs_path.file_name())?;
let target_file_name = state.init.target_file_name_suffix.as_ref().map_or_else(
|| Ok::<_, JobError>(file_name.clone()),
|s| match source_fs_info.obj_type {
ObjectType::Directory => Ok(format!("{file_name}{s}")),
ObjectType::File => Ok(osstr_to_string(source_fs_info.obj_path.file_stem())?
+ s + &source_fs_info.obj_path.extension().map_or_else(
|| Ok::<_, JobError>(String::new()),
|x| Ok(format!(".{}", x.to_str().ok_or(JobError::OsStr)?)),
)?),
|suffix| {
Ok(match source_fs_info.path_data.is_dir {
true => format!("{file_name}{suffix}"),
false => {
osstr_to_string(source_fs_info.fs_path.file_stem())?
+ suffix + &source_fs_info.fs_path.extension().map_or_else(
|| Ok(String::new()),
|ext| ext.to_str().map(|e| format!(".{e}")).ok_or(JobError::OsStr),
)?
}
})
},
)?;
@@ -79,11 +96,10 @@
state.data = Some(FileCopierJobState {
target_path: full_target_path,
source_path: source_fs_info.obj_path.clone(),
root_type: source_fs_info.obj_type.clone(),
source_fs_info: source_fs_info.clone(),
});
state.steps = [FileCopierJobStep { source_fs_info }].into_iter().collect();
state.steps = [source_fs_info.into()].into_iter().collect();
ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]);
@@ -96,68 +112,57 @@
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
let info = &step.source_fs_info;
let job_state = state.data.as_ref().ok_or(JobError::MissingData {
value: String::from("job state"),
})?;
match info.obj_type {
ObjectType::File => {
let mut path = job_state.target_path.clone();
match step {
FileCopierJobStep::File { path } => {
let mut target_path = job_state.target_path.clone();
if job_state.root_type == ObjectType::Directory {
if job_state.source_fs_info.path_data.is_dir {
// if root type is a dir, we need to preserve structure by making paths relative
path.push(
info.obj_path
.strip_prefix(&job_state.source_path)
target_path.push(
path.strip_prefix(&job_state.source_fs_info.fs_path)
.map_err(|_| JobError::Path)?,
);
}
trace!("Copying from {:?} to {:?}", info.obj_path, path);
trace!("Copying from {:?} to {:?}", path, target_path);
tokio::fs::copy(&info.obj_path, &path).await?;
tokio::fs::copy(&path, &target_path).await?;
}
ObjectType::Directory => {
FileCopierJobStep::Directory { path } => {
// if this is the very first path, create the target dir
// fixes copying dirs with no child directories
if job_state.root_type == ObjectType::Directory
&& job_state.source_path == info.obj_path
if job_state.source_fs_info.path_data.is_dir
&& &job_state.source_fs_info.fs_path == path
{
tokio::fs::create_dir_all(&job_state.target_path).await?;
}
let mut dir = tokio::fs::read_dir(&info.obj_path).await?;
let mut dir = tokio::fs::read_dir(&path).await?;
while let Some(entry) = dir.next_entry().await? {
if entry.metadata().await?.is_dir() {
state.steps.push_back(FileCopierJobStep {
source_fs_info: FsInfo {
obj_id: None,
obj_name: String::new(),
obj_path: entry.path(),
obj_type: ObjectType::Directory,
},
});
state
.steps
.push_back(FileCopierJobStep::Directory { path: entry.path() });
tokio::fs::create_dir_all(
job_state.target_path.join(
entry
.path()
.strip_prefix(&job_state.source_path)
.strip_prefix(&job_state.source_fs_info.fs_path)
.map_err(|_| JobError::Path)?,
),
)
.await?;
} else {
state.steps.push_back(FileCopierJobStep {
source_fs_info: FsInfo {
obj_id: None,
obj_name: osstr_to_string(Some(&entry.file_name()))?,
obj_path: entry.path(),
obj_type: ObjectType::File,
},
});
state
.steps
.push_back(FileCopierJobStep::File { path: entry.path() });
};
ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]);

@@ -73,11 +73,11 @@ impl StatefulJob for FileCutterJob {
let full_output = step
.target_directory
.join(source_info.obj_path.file_name().ok_or(JobError::OsStr)?);
.join(source_info.fs_path.file_name().ok_or(JobError::OsStr)?);
trace!("Cutting {:?} to {:?}", source_info.obj_path, full_output);
trace!("Cutting {:?} to {:?}", source_info.fs_path, full_output);
tokio::fs::rename(&source_info.obj_path, &full_output).await?;
tokio::fs::rename(&source_info.fs_path, &full_output).await?;
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(
state.step_number + 1,

@@ -68,7 +68,7 @@ impl StatefulJob for FileDecryptorJob {
// handle overwriting checks, and making sure there's enough available space
let output_path = state.init.output_path.clone().map_or_else(
|| {
let mut path = info.obj_path.clone();
let mut path = info.fs_path.clone();
let extension = path.extension().map_or("decrypted", |ext| {
if ext == ".sdenc" {
""
@@ -82,7 +82,7 @@
|p| p,
);
let mut reader = File::open(info.obj_path.clone())?;
let mut reader = File::open(info.fs_path.clone())?;
let mut writer = File::create(output_path)?;
let (header, aad) = FileHeader::from_reader(&mut reader)?;

@@ -1,4 +1,4 @@
use super::{context_menu_fs_info, FsInfo, ObjectType};
use super::{context_menu_fs_info, FsInfo};
use crate::job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext};
use serde::{Deserialize, Serialize};
use specta::Type;
@@ -59,9 +59,9 @@ impl StatefulJob for FileDeleterJob {
// need to handle stuff such as querying prisma for all paths of a file, and deleting all of those if requested (with a checkbox in the ui)
// maybe a files.countOccurrences and/or files.getPath(location_id, path_id) to show how many of these files would be deleted (and where?)
match info.obj_type {
ObjectType::File => tokio::fs::remove_file(info.obj_path.clone()).await,
ObjectType::Directory => tokio::fs::remove_dir_all(info.obj_path.clone()).await,
match info.path_data.is_dir {
false => tokio::fs::remove_file(info.fs_path.clone()).await,
true => tokio::fs::remove_dir_all(info.fs_path.clone()).await,
}?;
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(

@@ -1,8 +1,6 @@
use std::{collections::VecDeque, fs::File, io::Read, path::PathBuf};
use super::{context_menu_fs_info, FsInfo};
use crate::{job::*, library::LibraryContext};
use tokio::task;
use chrono::FixedOffset;
use sd_crypto::{
crypto::stream::{Algorithm, StreamEncryption},
header::{file::FileHeader, keyslot::Keyslot},
@@ -11,17 +9,14 @@
LATEST_PREVIEW_MEDIA,
},
};
use chrono::FixedOffset;
use serde::{Deserialize, Serialize};
use specta::Type;
use std::{fs::File, io::Read, path::PathBuf};
use tokio::task;
use tracing::warn;
use crate::{
job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext},
prisma::object,
};
use super::{context_menu_fs_info, FsInfo, ObjectType};
pub struct FileEncryptorJob;
#[derive(Serialize, Deserialize, Debug)]
@@ -38,11 +33,6 @@
pub output_path: Option<PathBuf>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct FileEncryptorJobStep {
pub fs_info: FsInfo,
}
#[derive(Serialize, Deserialize)]
pub struct Metadata {
pub path_id: i32,
@@ -61,22 +51,24 @@ const JOB_NAME: &str = "file_encryptor";
impl StatefulJob for FileEncryptorJob {
type Init = FileEncryptorJobInit;
type Data = FileEncryptorJobState;
type Step = FileEncryptorJobStep;
type Step = FsInfo;
fn name(&self) -> &'static str {
JOB_NAME
}
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let fs_info = context_menu_fs_info(
let step = context_menu_fs_info(
&ctx.library_ctx.db,
state.init.location_id,
state.init.path_id,
)
.await?;
.await
.map_err(|_| JobError::MissingData {
value: String::from("file_path that matches both location id and path id"),
})?;
state.steps = VecDeque::new();
state.steps.push_back(FileEncryptorJobStep { fs_info });
state.steps = [step].into_iter().collect();
ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]);
@@ -88,27 +80,21 @@
ctx: WorkerContext,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
let info = &step.fs_info;
let info = &state.steps[0];
match info.obj_type {
ObjectType::File => {
let LibraryContext { key_manager, .. } = &ctx.library_ctx;
match info.path_data.is_dir {
false => {
// handle overwriting checks, and making sure there's enough available space
let user_key = ctx
.library_ctx
.key_manager
.access_keymount(state.init.key_uuid)?
.hashed_key;
let user_key = key_manager.access_keymount(state.init.key_uuid)?.hashed_key;
let user_key_details = ctx
.library_ctx
.key_manager
.access_keystore(state.init.key_uuid)?;
let user_key_details = key_manager.access_keystore(state.init.key_uuid)?;
let output_path = state.init.output_path.clone().map_or_else(
|| {
let mut path = info.obj_path.clone();
let mut path = info.fs_path.clone();
let extension = path.extension().map_or_else(
|| Ok("sdenc".to_string()),
|extension| {
@@ -141,7 +127,7 @@ impl StatefulJob for FileEncryptorJob {
)
.await?;
let mut reader = task::block_in_place(|| File::open(&info.obj_path))?;
let mut reader = task::block_in_place(|| File::open(&info.fs_path))?;
let mut writer = task::block_in_place(|| File::create(output_path))?;
let master_key = generate_master_key();
@@ -161,24 +147,11 @@
if state.init.metadata || state.init.preview_media {
// if any are requested, we can make the query as it'll be used at least once
if let Some(obj_id) = info.obj_id {
let object = ctx
.library_ctx
.db
.object()
.find_unique(object::id::equals(obj_id))
.exec()
.await?
.ok_or_else(|| {
JobError::JobDataNotFound(String::from(
"can't find information about the object",
))
})?;
if let Some(object) = info.path_data.object.clone() {
if state.init.metadata {
let metadata = Metadata {
path_id: state.init.path_id,
name: info.obj_name.clone(),
name: info.path_data.materialized_path.clone(),
hidden: object.hidden,
favourite: object.favorite,
important: object.important,
@@ -205,7 +178,8 @@ impl StatefulJob for FileEncryptorJob {
.config()
.data_directory()
.join("thumbnails")
.join(object.cas_id + ".webp");
.join(info.path_data.cas_id.as_ref().unwrap())
.with_extension("webp");
if tokio::fs::metadata(&pvm_path).await.is_ok() {
let mut pvm_bytes = Vec::new();
@@ -242,7 +216,7 @@
}
_ => warn!(
"encryption is skipping {} as it isn't a file",
info.obj_name
info.path_data.materialized_path
),
}

@@ -7,14 +7,13 @@ use specta::Type;
use tokio::{fs::OpenOptions, io::AsyncWriteExt};
use tracing::{trace, warn};
use super::{context_menu_fs_info, osstr_to_string, FsInfo, ObjectType};
use super::{context_menu_fs_info, FsInfo};
pub struct FileEraserJob {}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FileEraserJobState {
pub root_path: PathBuf,
pub root_type: ObjectType,
pub fs_info: FsInfo,
}
#[derive(Serialize, Deserialize, Hash, Type)]
@@ -25,8 +24,22 @@
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FileEraserJobStep {
pub fs_info: FsInfo,
pub enum FileEraserJobStep {
Directory { path: PathBuf },
File { path: PathBuf },
}
impl From<FsInfo> for FileEraserJobStep {
fn from(value: FsInfo) -> Self {
match value.path_data.is_dir {
true => Self::Directory {
path: value.fs_path,
},
false => Self::File {
path: value.fs_path,
},
}
}
}
pub const ERASE_JOB_NAME: &str = "file_eraser";
@@ -50,11 +63,10 @@ impl StatefulJob for FileEraserJob {
.await?;
state.data = Some(FileEraserJobState {
root_path: fs_info.obj_path.clone(),
root_type: fs_info.obj_type.clone(),
fs_info: fs_info.clone(),
});
state.steps = [FileEraserJobStep { fs_info }].into_iter().collect();
state.steps = [fs_info.into()].into_iter().collect();
ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]);
@@ -67,17 +79,16 @@
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let step = &state.steps[0];
let info = &step.fs_info;
// need to handle stuff such as querying prisma for all paths of a file, and deleting all of those if requested (with a checkbox in the ui)
// maybe a files.countOccurrences and/or files.getPath(location_id, path_id) to show how many of these files would be erased (and where?)
match info.obj_type {
ObjectType::File => {
match step {
FileEraserJobStep::File { path } => {
let mut file = OpenOptions::new()
.read(true)
.write(true)
.open(&info.obj_path)
.open(&path)
.await?;
let file_len = file.metadata().await?.len();
@@ -87,32 +98,20 @@
file.flush().await?;
drop(file);
trace!("Erasing file: {:?}", info.obj_path);
trace!("Erasing file: {:?}", path);
tokio::fs::remove_file(&info.obj_path).await?;
tokio::fs::remove_file(&path).await?;
}
ObjectType::Directory => {
let mut dir = tokio::fs::read_dir(&info.obj_path).await?;
FileEraserJobStep::Directory { path } => {
let mut dir = tokio::fs::read_dir(&path).await?;
while let Some(entry) = dir.next_entry().await? {
if entry.metadata().await?.is_dir() {
state.steps.push_back(FileEraserJobStep {
fs_info: FsInfo {
obj_id: None,
obj_name: String::new(),
obj_path: entry.path(),
obj_type: ObjectType::Directory,
},
state
.steps
.push_back(match entry.metadata().await?.is_dir() {
true => FileEraserJobStep::Directory { path: entry.path() },
false => FileEraserJobStep::File { path: entry.path() },
});
} else {
state.steps.push_back(FileEraserJobStep {
fs_info: FsInfo {
obj_id: None,
obj_name: osstr_to_string(Some(&entry.file_name()))?,
obj_path: entry.path(),
obj_type: ObjectType::File,
},
});
};
ctx.progress(vec![JobReportUpdate::TaskCount(state.steps.len())]);
}
@@ -127,8 +126,8 @@
async fn finalize(&self, _ctx: WorkerContext, state: &mut JobState<Self>) -> JobResult {
if let Some(ref info) = state.data {
if info.root_type == ObjectType::Directory {
tokio::fs::remove_dir_all(&info.root_path).await?;
if info.fs_info.path_data.is_dir {
tokio::fs::remove_dir_all(&info.fs_info.fs_path).await?;
}
} else {
warn!("missing job state, unable to fully finalise erase job");

@@ -7,6 +7,8 @@ use std::{ffi::OsStr, path::PathBuf};
use serde::{Deserialize, Serialize};
use super::preview::file_path_with_object;
pub mod copy;
pub mod cut;
pub mod decrypt;
@@ -22,20 +24,15 @@ pub enum ObjectType {
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FsInfo {
pub obj_id: Option<i32>,
pub obj_name: String,
pub obj_path: PathBuf,
pub obj_type: ObjectType,
pub path_data: file_path_with_object::Data,
pub fs_path: PathBuf,
}
pub fn osstr_to_string(os_str: Option<&OsStr>) -> Result<String, JobError> {
let string = os_str
.ok_or(JobError::OsStr)?
.to_str()
.ok_or(JobError::OsStr)?
.to_string();
Ok(string)
os_str
.and_then(OsStr::to_str)
.map(str::to_string)
.ok_or(JobError::OsStr)
}
pub async fn get_path_from_location_id(
@@ -65,30 +62,20 @@ pub async fn context_menu_fs_info(
location_id: i32,
path_id: i32,
) -> Result<FsInfo, JobError> {
let location_path = get_path_from_location_id(db, location_id).await?;
let item = db
let path_data = db
.file_path()
.find_unique(file_path::location_id_id(location_id, path_id))
.include(file_path_with_object::include())
.exec()
.await?
.ok_or(JobError::MissingData {
value: String::from("file_path that matches both location id and path id"),
})?;
let obj_path = location_path.join(&item.materialized_path);
// i don't know if this covers symlinks
let obj_type = if item.is_dir {
ObjectType::Directory
} else {
ObjectType::File
};
Ok(FsInfo {
obj_id: item.object_id,
obj_name: item.materialized_path,
obj_type,
obj_path,
fs_path: get_path_from_location_id(db, location_id)
.await?
.join(&path_data.materialized_path),
path_data,
})
}
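
The rewritten osstr_to_string above collapses its two failure points, a missing component and non-UTF-8 bytes, into one combinator chain with unchanged behavior. A runnable sketch of both outcomes, with OsStrError standing in for this crate's JobError::OsStr:

use std::ffi::OsStr;

// Hypothetical stand-in for JobError::OsStr.
#[derive(Debug)]
struct OsStrError;

fn osstr_to_string(os_str: Option<&OsStr>) -> Result<String, OsStrError> {
    os_str
        .and_then(OsStr::to_str)
        .map(str::to_string)
        .ok_or(OsStrError)
}

fn main() {
    // Present and valid UTF-8:
    assert_eq!(osstr_to_string(Some(OsStr::new("a.txt"))).unwrap(), "a.txt");
    // Missing entirely (e.g. a path with no file name):
    assert!(osstr_to_string(None).is_err());
}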

@@ -2,29 +2,30 @@ use crate::{
job::JobError,
library::LibraryContext,
object::cas::generate_cas_id,
prisma::{file_path, location, object},
prisma_sync,
};
use chrono::{DateTime, FixedOffset};
use serde_json::json;
use std::{
collections::{HashMap, HashSet},
ffi::OsStr,
path::{Path, PathBuf},
prisma::{file_path, location, object, PrismaClient},
sync,
sync::SyncManager,
};
use sd_file_ext::{extensions::Extension, kind::ObjectKind};
use sd_sync::CRDTOperation;
use futures::future::join_all;
use int_enum::IntEnum;
use prisma_client_rust::QueryError;
use sd_file_ext::{extensions::Extension, kind::ObjectKind};
use serde_json::json;
use std::{
collections::{HashMap, HashSet},
path::{Path, PathBuf},
};
use thiserror::Error;
use tokio::{fs, io};
use tracing::{error, info};
use uuid::Uuid;
pub mod full_identifier_job;
// we break these jobs into chunks of 100 to improve performance
static CHUNK_SIZE: usize = 100;
const CHUNK_SIZE: usize = 100;
#[derive(Error, Debug)]
pub enum IdentifierJobError {
@@ -37,261 +38,280 @@
}
#[derive(Debug, Clone)]
pub struct ObjectCreationMetadata {
pub struct FileMetadata {
pub cas_id: String,
pub size_str: String,
pub kind: ObjectKind,
pub date_created: DateTime<FixedOffset>,
pub fs_metadata: std::fs::Metadata,
}
pub async fn assemble_object_metadata(
location_path: impl AsRef<Path>,
file_path: &file_path::Data,
) -> Result<ObjectCreationMetadata, io::Error> {
assert!(
!file_path.is_dir,
"We can't generate cas_id for directories"
);
impl FileMetadata {
/// Assembles `create_unchecked` params for a given file path
pub async fn new(
location_path: impl AsRef<Path>,
materialized_path: impl AsRef<Path>, // TODO: use dedicated CreateUnchecked type
) -> Result<FileMetadata, io::Error> {
let path = location_path.as_ref().join(materialized_path.as_ref());
let path = location_path.as_ref().join(&file_path.materialized_path);
let fs_metadata = fs::metadata(&path).await?;
let metadata = fs::metadata(&path).await?;
assert!(
!fs_metadata.is_dir(),
"We can't generate cas_id for directories"
);
// derive Object kind
let object_kind = match path.extension().and_then(OsStr::to_str) {
Some(ext) => {
let mut file = fs::File::open(&path).await?;
// derive Object kind
let kind = Extension::resolve_conflicting(&path, false)
.await
.map(Into::into)
.unwrap_or(ObjectKind::Unknown);
Extension::resolve_conflicting(&ext.to_lowercase(), &mut file, false)
.await
.map(Into::into)
.unwrap_or(ObjectKind::Unknown)
}
None => ObjectKind::Unknown,
};
let cas_id = generate_cas_id(&path, fs_metadata.len()).await?;
let size = metadata.len();
info!("Analyzed file: {:?} {:?} {:?}", path, cas_id, kind);
let cas_id = generate_cas_id(&path, size).await?;
info!("Analyzed file: {:?} {:?} {:?}", path, cas_id, object_kind);
Ok(ObjectCreationMetadata {
cas_id,
size_str: size.to_string(),
kind: object_kind,
date_created: file_path.date_created,
})
}
async fn batch_update_file_paths(
library: &LibraryContext,
location: &location::Data,
objects: &[object::Data],
cas_id_lookup: &HashMap<String, Vec<i32>>,
) -> Result<Vec<file_path::Data>, QueryError> {
let (sync, updates): (Vec<_>, Vec<_>) = objects
.iter()
.flat_map(|object| {
let file_path_ids = cas_id_lookup.get(&object.cas_id).unwrap();
let sync = &library.sync;
file_path_ids.iter().map(|file_path_id| {
info!(
"Linking: <file_path_id = '{}', object_id = '{}'>",
file_path_id, object.id
);
(
sync.owned_update(
prisma_sync::file_path::SyncId {
id: *file_path_id,
location: prisma_sync::location::SyncId {
pub_id: location.pub_id.clone(),
},
},
[("object", json!({ "cas_id": object.cas_id }))],
),
library.db.file_path().update(
file_path::location_id_id(location.id, *file_path_id),
vec![file_path::object::connect(object::id::equals(object.id))],
),
)
})
Ok(FileMetadata {
cas_id,
kind,
fs_metadata,
})
.unzip();
info!(
"Updating {} file paths for {} objects",
updates.len(),
objects.len()
);
library.sync.write_ops(&library.db, sync, updates).await
}
async fn generate_provisional_objects(
location_path: impl AsRef<Path>,
file_paths: &[file_path::Data],
) -> HashMap<i32, (String, Vec<object::SetParam>)> {
let mut provisional_objects = HashMap::with_capacity(file_paths.len());
// analyze each file_path
let location_path = location_path.as_ref();
for (file_path_id, objects_result) in join_all(file_paths.iter().map(|file_path| async move {
(
file_path.id,
assemble_object_metadata(location_path, file_path).await,
)
}))
.await
{
// get the cas_id and extract metadata
match objects_result {
Ok(ObjectCreationMetadata {
cas_id,
size_str,
kind,
date_created,
}) => {
// create entry into chunks for created file data
provisional_objects.insert(
file_path_id,
object::create_unchecked(
cas_id,
vec![
object::date_created::set(date_created),
object::kind::set(kind.int_value()),
object::size_in_bytes::set(size_str),
],
),
);
}
Err(e) => {
error!("Error assembling Object metadata: {:#?}", e);
continue;
}
};
}
provisional_objects
}
async fn identifier_job_step(
library: &LibraryContext,
LibraryContext { db, sync, .. }: &LibraryContext,
location: &location::Data,
location_path: impl AsRef<Path>,
file_paths: &[file_path::Data],
) -> Result<(usize, usize), JobError> {
let location_path = location_path.as_ref();
// generate objects for all file paths
let provisional_objects = generate_provisional_objects(location_path, file_paths).await;
let file_path_metas = join_all(file_paths.iter().map(|file_path| async move {
FileMetadata::new(location_path, &file_path.materialized_path)
.await
.map(|params| (file_path.id, (params, file_path)))
}))
.await
.into_iter()
.flat_map(|data| {
if let Err(e) = &data {
error!("Error assembling Object metadata: {:#?}", e);
}
let unique_cas_ids = provisional_objects
data
})
.collect::<HashMap<i32, _>>();
// Assign cas_id to each file path
sync.write_ops(
db,
file_path_metas
.iter()
.map(|(id, (meta, _))| {
(
sync.owned_update(
sync::file_path::SyncId {
id: *id,
location: sync::location::SyncId {
pub_id: location.pub_id.clone(),
},
},
[("cas_id", json!(&meta.cas_id))],
),
db.file_path().update(
file_path::location_id_id(location.id, *id),
vec![file_path::cas_id::set(Some(meta.cas_id.clone()))],
),
)
})
.unzip::<_, _, _, Vec<_>>(),
)
.await?;
let unique_cas_ids = file_path_metas
.values()
.map(|(cas_id, _)| cas_id.clone())
.map(|(meta, _)| meta.cas_id.clone())
.collect::<HashSet<_>>()
.into_iter()
.collect::<Vec<_>>();
.collect();
// allow easy lookup of cas_id to many file_path_ids
let mut cas_id_lookup: HashMap<String, Vec<i32>> = HashMap::with_capacity(unique_cas_ids.len());
// populate cas_id_lookup with file_path_ids
for (file_path_id, (cas_id, _)) in provisional_objects.iter() {
cas_id_lookup
.entry(cas_id.clone())
.or_insert_with(Vec::new)
.push(*file_path_id);
}
// info!("{:#?}", cas_id_lookup);
// get all objects that already exist in the database
let existing_objects = library
.db
// Retrieves objects that are already connected to file paths with the same cas_id
let existing_objects = db
.object()
.find_many(vec![object::cas_id::in_vec(unique_cas_ids)])
.find_many(vec![object::file_paths::some(vec![
file_path::cas_id::in_vec(unique_cas_ids),
])])
.select(object::select!({
pub_id
file_paths: select { id cas_id }
}))
.exec()
.await?;
let existing_object_cas_ids = existing_objects
.iter()
.flat_map(|o| o.file_paths.iter().filter_map(|fp| fp.cas_id.as_ref()))
.collect::<HashSet<_>>();
// Attempt to associate each file path with an object that has been
// connected to file paths with the same cas_id
let updated_file_paths = sync
.write_ops(
db,
file_path_metas
.iter()
.flat_map(|(id, (meta, _))| {
existing_objects
.iter()
.find(|o| {
o.file_paths
.iter()
.any(|fp| fp.cas_id.as_ref() == Some(&meta.cas_id))
})
.map(|o| (*id, o))
})
.map(|(id, object)| {
file_path_object_connect_ops(
id,
Uuid::from_slice(&object.pub_id).unwrap(),
location,
sync,
db,
)
})
.unzip::<_, _, Vec<_>, Vec<_>>(),
)
.await?;
info!(
"Found {} existing Objects in Library, linking file paths...",
existing_objects.len()
);
let existing_objects_linked = if !existing_objects.is_empty() {
// link file_path.object_id to existing objects
batch_update_file_paths(library, location, &existing_objects, &cas_id_lookup)
.await?
.len()
} else {
0
};
let existing_object_cas_ids = existing_objects
.iter()
.map(|object| object.cas_id.clone())
.collect::<HashSet<_>>();
// extract objects that don't already exist in the database
let new_objects = provisional_objects
let file_paths_requiring_new_object = file_path_metas
.into_iter()
.filter(|(_, (cas_id, _))| !existing_object_cas_ids.contains(cas_id))
.filter(|(_, (meta, _))| !existing_object_cas_ids.contains(&meta.cas_id))
.collect::<Vec<_>>();
let new_objects_cas_ids = new_objects
.iter()
.map(|(_, (cas_id, _))| cas_id.clone())
.collect::<Vec<_>>();
let total_created = if !file_paths_requiring_new_object.is_empty() {
let new_objects_cas_ids = file_paths_requiring_new_object
.iter()
.map(|(_, (meta, _))| &meta.cas_id)
.collect::<HashSet<_>>();
info!(
"Creating {} new Objects in Library... {:#?}",
new_objects.len(),
new_objects_cas_ids
);
info!(
"Creating {} new Objects in Library... {:#?}",
file_paths_requiring_new_object.len(),
new_objects_cas_ids
);
let (object_create_args, file_path_update_args): (Vec<_>, Vec<_>) =
file_paths_requiring_new_object
.iter()
.map(|(id, (meta, fp))| {
let pub_id = Uuid::new_v4();
let pub_id_vec = pub_id.as_bytes().to_vec();
let sync_id = || sync::object::SyncId {
pub_id: pub_id_vec.clone(),
};
let size = meta.fs_metadata.len().to_string();
let kind = meta.kind.int_value();
let object_creation_args = (
[sync.shared_create(sync_id())]
.into_iter()
.chain(
[
("date_created", json!(fp.date_created)),
("kind", json!(kind)),
("size_in_bytes", json!(size)),
]
.into_iter()
.map(|(f, v)| sync.shared_update(sync_id(), f, v)),
)
.collect::<Vec<_>>(),
object::create_unchecked(
pub_id_vec.clone(),
vec![
object::date_created::set(fp.date_created),
object::kind::set(kind),
object::size_in_bytes::set(size),
],
),
);
(
object_creation_args,
file_path_object_connect_ops(*id, pub_id, location, sync, db),
)
})
.unzip();
let mut total_created: usize = 0;
if !new_objects.is_empty() {
// create new object records with assembled values
let total_created_files = library
.db
.object()
.create_many(new_objects.into_iter().map(|(_, p)| p).collect())
.skip_duplicates()
.exec()
let total_created_files = sync
.write_ops(db, {
let (sync, db_params): (Vec<_>, Vec<_>) = object_create_args.into_iter().unzip();
(sync.concat(), db.object().create_many(db_params))
})
.await
.unwrap_or_else(|e| {
error!("Error inserting files: {:#?}", e);
0
});
total_created = total_created_files as usize;
info!("Created {} new Objects in Library", total_created_files);
info!("Created {} new Objects in Library", total_created);
if total_created_files > 0 {
sync.write_ops(db, {
let (sync, db): (Vec<_>, Vec<_>) = file_path_update_args.into_iter().unzip();
// fetch newly created objects so we can link them to file_paths by their id
let created_files = library
.db
.object()
.find_many(vec![object::cas_id::in_vec(new_objects_cas_ids)])
.exec()
.await
.unwrap_or_else(|e| {
error!("Error finding created files: {:#?}", e);
vec![]
});
info!(
"Retrieved {} newly created Objects in Library",
created_files.len()
);
if !created_files.is_empty() {
batch_update_file_paths(library, &location, &created_files, &cas_id_lookup).await?;
(sync, db)
})
.await?;
}
}
Ok((total_created, existing_objects_linked))
total_created_files as usize
} else {
0
};
Ok((total_created, updated_file_paths.len()))
}
file_path::select!(file_path_only_id { id });
fn file_path_object_connect_ops<'db>(
file_path_id: i32,
object_id: Uuid,
location: &location::Data,
sync: &SyncManager,
db: &'db PrismaClient,
) -> (
CRDTOperation,
prisma_client_rust::Select<'db, file_path_only_id::Data>,
) {
info!("Connecting <FilePath id={file_path_id}> to <Object pub_id={object_id}'>");
(
sync.owned_update(
sync::file_path::SyncId {
id: file_path_id,
location: sync::location::SyncId {
pub_id: location.pub_id.clone(),
},
},
[("object", json!({ "pub_id": object_id }))],
),
db.file_path()
.update(
file_path::location_id_id(location.id, file_path_id),
vec![file_path::object::connect(object::pub_id::equals(
object_id.as_bytes().to_vec(),
))],
)
.select(file_path_only_id::select()),
)
}
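file_path_object_connect_ops returns a (sync operation, database query) pair so callers can collect many of them and unzip into parallel batches, as the identifier job does above. A minimal standalone sketch of that pair-and-unzip pattern, with stub types in place of CRDTOperation and the Prisma query:

fn main() {
    #[derive(Debug)]
    struct SyncOp(String);
    #[derive(Debug)]
    struct DbQuery(String);

    // like file_path_object_connect_ops: one (op, query) pair per file path
    let connect_ops: Vec<(SyncOp, DbQuery)> = (1..=3)
        .map(|id| {
            (
                SyncOp(format!("file_path {id} -> object")),
                DbQuery(format!("connect file_path {id}")),
            )
        })
        .collect();

    // like the caller: split into parallel vectors so the sync log entries
    // and the row updates can be committed in a single batch
    let (ops, queries): (Vec<SyncOp>, Vec<DbQuery>) = connect_ops.into_iter().unzip();

    println!("{ops:?}");
    println!("{queries:?}");
}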


@ -209,18 +209,17 @@ impl StatefulJob for ThumbnailJob {
trace!("image_file {:?}", step);
// get cas_id, if none found skip
let Some(cas_id) = &step.file_path.cas_id else {
warn!(
"skipping thumbnail generation for {}",
step.file_path.materialized_path
);
return Ok(());
};
// Define and write the WebP-encoded file to a given path
let output_path = data.thumbnail_dir.join(cas_id).with_extension("webp");
// check if file exists at output path
if !output_path.try_exists().unwrap() {
@ -277,7 +276,9 @@ impl StatefulJob for ThumbnailJob {
}
if !state.init.background {
ctx.library_ctx.emit(CoreEvent::NewThumbnail {
cas_id: cas_id.clone(),
});
};
// With this invalidate query, we update the user interface to show each new thumbnail


@ -1,10 +1,13 @@
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::{collections::VecDeque, path::PathBuf};
use crate::{
job::{JobError, JobReportUpdate, JobResult, JobState, StatefulJob, WorkerContext},
library::LibraryContext,
prisma::{file_path, location},
sync,
};
use tracing::info;
@ -34,10 +37,15 @@ pub struct ObjectValidatorJobInit {
}
file_path::select!(file_path_and_object {
    id
    materialized_path
    integrity_checksum
    location: select {
        id
        pub_id
    }
});
@ -52,14 +60,14 @@ impl StatefulJob for ObjectValidatorJob {
}
async fn init(&self, ctx: WorkerContext, state: &mut JobState<Self>) -> Result<(), JobError> {
let db = &ctx.library_ctx.db;
state.steps = db
.file_path()
.find_many(vec![
file_path::location_id::equals(state.init.location_id),
file_path::is_dir::equals(false),
file_path::integrity_checksum::equals(None),
])
.select(file_path_and_object::select())
.exec()
@ -67,9 +75,7 @@ impl StatefulJob for ObjectValidatorJob {
.into_iter()
.collect::<VecDeque<_>>();
let location = db
.location()
.find_unique(location::id::equals(state.init.location_id))
.exec()
@ -91,27 +97,35 @@ impl StatefulJob for ObjectValidatorJob {
ctx: WorkerContext,
state: &mut JobState<Self>,
) -> Result<(), JobError> {
let LibraryContext { db, sync, .. } = &ctx.library_ctx;
let file_path = &state.steps[0];
let data = state.data.as_ref().expect("fatal: missing job state");
// this is to skip files that already have checksums
// i'm unsure what the desired behaviour is in this case
// we can also compare old and new checksums here
// This check is just a safeguard; we only queried file paths whose
// integrity_checksum is null
if file_path.integrity_checksum.is_none() {
let checksum = file_checksum(data.root_path.join(&file_path.materialized_path)).await?;
sync.write_op(
db,
sync.owned_update(
sync::file_path::SyncId {
id: file_path.id,
location: sync::location::SyncId {
pub_id: file_path.location.pub_id.clone(),
},
},
[("integrity_checksum", json!(Some(&checksum)))],
),
db.file_path().update(
file_path::location_id_id(file_path.location.id, file_path.id),
vec![file_path::integrity_checksum::set(Some(checksum))],
),
)
.await?;
}
ctx.progress(vec![JobReportUpdate::CompletedTaskCount(

core/src/sync/manager.rs (new file)

@ -0,0 +1,528 @@
use crate::{
prisma::{file_path, location, node, object, owned_operation, shared_operation, PrismaClient},
prisma_sync,
};
use sd_sync::*;
use futures::future::join_all;
use serde_json::{from_slice, from_value, json, to_vec, Value};
use std::{
collections::{HashMap, HashSet},
sync::Arc,
};
use tokio::sync::mpsc::{self, Receiver, Sender};
use uhlc::{HLCBuilder, HLC, NTP64};
use uuid::Uuid;
pub struct SyncManager {
db: Arc<PrismaClient>,
node: Uuid,
_clocks: HashMap<Uuid, NTP64>,
clock: HLC,
tx: Sender<CRDTOperation>,
}
impl SyncManager {
pub fn new(db: Arc<PrismaClient>, node: Uuid) -> (Self, Receiver<CRDTOperation>) {
let (tx, rx) = mpsc::channel(64);
(
Self {
db,
node,
clock: HLCBuilder::new().with_id(node.into()).build(),
_clocks: Default::default(),
tx,
},
rx,
)
}
pub async fn write_ops<'item, I: prisma_client_rust::BatchItem<'item>>(
&self,
tx: &PrismaClient,
(ops, queries): (Vec<CRDTOperation>, I),
) -> prisma_client_rust::Result<<I as prisma_client_rust::BatchItemParent>::ReturnValue> {
let owned = ops
.iter()
.filter_map(|op| match &op.typ {
CRDTOperationType::Owned(owned_op) => Some(tx.owned_operation().create(
op.id.as_bytes().to_vec(),
op.timestamp.0 as i64,
to_vec(&owned_op.items).unwrap(),
owned_op.model.clone(),
node::pub_id::equals(op.node.as_bytes().to_vec()),
vec![],
)),
_ => None,
})
.collect::<Vec<_>>();
let shared = ops
.iter()
.filter_map(|op| match &op.typ {
CRDTOperationType::Shared(shared_op) => {
let kind = match &shared_op.data {
SharedOperationData::Create(_) => "c",
SharedOperationData::Update { .. } => "u",
SharedOperationData::Delete => "d",
};
Some(tx.shared_operation().create(
op.id.as_bytes().to_vec(),
op.timestamp.0 as i64,
shared_op.model.to_string(),
to_vec(&shared_op.record_id).unwrap(),
kind.to_string(),
to_vec(&shared_op.data).unwrap(),
node::pub_id::equals(op.node.as_bytes().to_vec()),
vec![],
))
}
_ => None,
})
.collect::<Vec<_>>();
let (res, _) = tx._batch((queries, (owned, shared))).await?;
for op in ops {
self.tx.send(op).await.ok();
}
Ok(res)
}
pub async fn write_op<'item, Q: prisma_client_rust::BatchItem<'item>>(
&self,
tx: &PrismaClient,
op: CRDTOperation,
query: Q,
) -> prisma_client_rust::Result<<Q as prisma_client_rust::BatchItemParent>::ReturnValue> {
let ret = match &op.typ {
CRDTOperationType::Owned(owned_op) => {
tx._batch((
tx.owned_operation().create(
op.id.as_bytes().to_vec(),
op.timestamp.0 as i64,
to_vec(&owned_op.items).unwrap(),
owned_op.model.clone(),
node::pub_id::equals(op.node.as_bytes().to_vec()),
vec![],
),
query,
))
.await?
.1
}
CRDTOperationType::Shared(shared_op) => {
let kind = match &shared_op.data {
SharedOperationData::Create(_) => "c",
SharedOperationData::Update { .. } => "u",
SharedOperationData::Delete => "d",
};
tx._batch((
tx.shared_operation().create(
op.id.as_bytes().to_vec(),
op.timestamp.0 as i64,
shared_op.model.to_string(),
to_vec(&shared_op.record_id).unwrap(),
kind.to_string(),
to_vec(&shared_op.data).unwrap(),
node::pub_id::equals(op.node.as_bytes().to_vec()),
vec![],
),
query,
))
.await?
.1
}
_ => todo!(),
};
self.tx.send(op).await.ok();
Ok(ret)
}
pub async fn get_ops(&self) -> prisma_client_rust::Result<Vec<CRDTOperation>> {
owned_operation::include!(owned_op_with_node { node });
impl TryInto<CRDTOperation> for owned_op_with_node::Data {
type Error = ();
fn try_into(self) -> Result<CRDTOperation, Self::Error> {
let id = Uuid::from_slice(&self.id).map_err(|_| ())?;
let node = Uuid::from_slice(&self.node.pub_id).map_err(|_| ())?;
Ok(CRDTOperation {
id,
node,
timestamp: NTP64(self.timestamp as u64),
typ: CRDTOperationType::Owned(OwnedOperation {
model: self.model,
items: serde_json::from_slice(&self.data).unwrap(),
}),
})
}
}
shared_operation::include!(shared_op_with_node { node });
impl TryInto<CRDTOperation> for shared_op_with_node::Data {
type Error = ();
fn try_into(self) -> Result<CRDTOperation, Self::Error> {
let id = Uuid::from_slice(&self.id).map_err(|_| ())?;
let node = Uuid::from_slice(&self.node.pub_id).map_err(|_| ())?;
Ok(CRDTOperation {
id,
node,
timestamp: NTP64(self.timestamp as u64),
typ: CRDTOperationType::Shared(SharedOperation {
record_id: serde_json::from_slice(&self.record_id).unwrap(),
model: self.model,
data: from_slice(&self.data).unwrap(),
}),
})
}
}
let owned = self
.db
.owned_operation()
.find_many(vec![])
.include(owned_op_with_node::include())
.exec()
.await?
.into_iter()
.map(TryInto::try_into);
let shared = self
.db
.shared_operation()
.find_many(vec![])
.include(shared_op_with_node::include())
.exec()
.await?
.into_iter()
.map(TryInto::try_into);
let mut result: Vec<CRDTOperation> = owned.chain(shared).flatten().collect();
result.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));
Ok(result)
}
pub async fn ingest_op(&self, op: CRDTOperation) -> prisma_client_rust::Result<()> {
match op.typ {
CRDTOperationType::Owned(owned_op) => match owned_op.model.as_str() {
"FilePath" => {
for item in owned_op.items {
let id: prisma_sync::file_path::SyncId =
serde_json::from_value(item.id).unwrap();
let location = self
.db
.location()
.find_unique(location::pub_id::equals(id.location.pub_id))
.select(location::select!({ id }))
.exec()
.await?
.unwrap();
match item.data {
OwnedOperationData::Create(mut data) => {
self.db
.file_path()
.create(
id.id,
location::id::equals(location.id),
serde_json::from_value(
data.remove("materialized_path").unwrap(),
)
.unwrap(),
serde_json::from_value(data.remove("name").unwrap())
.unwrap(),
data.into_iter()
.flat_map(|(k, v)| {
file_path::SetParam::deserialize(&k, v)
})
.collect(),
)
.exec()
.await?;
}
OwnedOperationData::CreateMany {
values,
skip_duplicates,
} => {
let location_ids =
values
.iter()
.map(|(id, _)| {
serde_json::from_value::<prisma_sync::file_path::SyncId>(id.clone())
.unwrap()
.location
.pub_id
})
.collect::<HashSet<_>>();
let location_id_mappings =
join_all(location_ids.iter().map(|id| async move {
self.db
.location()
.find_unique(location::pub_id::equals(id.clone()))
.exec()
.await
.map(|o| o.map(|v| (id, v.id)))
}))
.await
.into_iter()
.flatten()
.flatten()
.collect::<HashMap<_, _>>();
let mut q = self.db.file_path().create_many(
values
.into_iter()
.map(|(id, mut data)| {
let id: prisma_sync::file_path::SyncId =
serde_json::from_value(id).unwrap();
file_path::create_unchecked(
id.id,
*location_id_mappings
.get(&id.location.pub_id)
.unwrap(),
serde_json::from_value(
data.remove("materialized_path").unwrap(),
)
.unwrap(),
serde_json::from_value(
data.remove("name").unwrap(),
)
.unwrap(),
data.into_iter()
.flat_map(|(k, v)| {
file_path::SetParam::deserialize(&k, v)
})
.collect(),
)
})
.collect(),
);
if skip_duplicates {
q = q.skip_duplicates()
}
q.exec().await?;
}
OwnedOperationData::Update(data) => {
self.db
.file_path()
.update(
file_path::location_id_id(location.id, id.id),
data.into_iter()
.flat_map(|(k, v)| {
file_path::SetParam::deserialize(&k, v)
})
.collect(),
)
.exec()
.await?;
}
_ => todo!(),
}
}
}
"Location" => {
for item in owned_op.items {
let id: prisma_sync::location::SyncId = from_value(item.id).unwrap();
match item.data {
OwnedOperationData::Create(mut data) => {
self.db
.location()
.create(
id.pub_id,
{
let val: std::collections::HashMap<String, Value> =
from_value(data.remove("node").unwrap()).unwrap();
let val = val.into_iter().next().unwrap();
node::UniqueWhereParam::deserialize(&val.0, val.1)
.unwrap()
},
data.into_iter()
.flat_map(|(k, v)| {
location::SetParam::deserialize(&k, v)
})
.collect(),
)
.exec()
.await?;
}
_ => todo!(),
}
}
}
_ => {}
},
CRDTOperationType::Shared(shared_op) => match shared_op.model.as_str() {
"Object" => {
let id: prisma_sync::object::SyncId = from_value(shared_op.record_id).unwrap();
match shared_op.data {
SharedOperationData::Create(_) => {
self.db
.object()
.upsert(
object::pub_id::equals(id.pub_id.clone()),
(id.pub_id, vec![]),
vec![],
)
.exec()
.await
.ok();
}
SharedOperationData::Update { field, value } => {
self.db
.object()
.update(
object::pub_id::equals(id.pub_id),
vec![object::SetParam::deserialize(&field, value).unwrap()],
)
.exec()
.await?;
}
_ => todo!(),
}
}
_ => todo!(),
},
_ => {}
}
Ok(())
}
fn new_op(&self, typ: CRDTOperationType) -> CRDTOperation {
let timestamp = self.clock.new_timestamp();
CRDTOperation {
node: self.node,
timestamp: *timestamp.get_time(),
id: Uuid::new_v4(),
typ,
}
}
pub fn owned_create<
const SIZE: usize,
TSyncId: SyncId<ModelTypes = TModel>,
TModel: SyncType<Marker = OwnedSyncType>,
>(
&self,
id: TSyncId,
values: [(&'static str, Value); SIZE],
) -> CRDTOperation {
self.new_op(CRDTOperationType::Owned(OwnedOperation {
model: TModel::MODEL.to_string(),
items: [(id, values)]
.into_iter()
.map(|(id, data)| OwnedOperationItem {
id: json!(id),
data: OwnedOperationData::Create(
data.into_iter().map(|(k, v)| (k.to_string(), v)).collect(),
),
})
.collect(),
}))
}
pub fn owned_create_many<
const SIZE: usize,
TSyncId: SyncId<ModelTypes = TModel>,
TModel: SyncType<Marker = SharedSyncType>,
>(
&self,
data: impl IntoIterator<Item = (TSyncId, [(&'static str, Value); SIZE])>,
skip_duplicates: bool,
) -> CRDTOperation {
self.new_op(CRDTOperationType::Owned(OwnedOperation {
model: TModel::MODEL.to_string(),
items: vec![OwnedOperationItem {
id: Value::Null,
data: OwnedOperationData::CreateMany {
values: data
.into_iter()
.map(|(id, data)| {
(
json!(id),
data.into_iter().map(|(k, v)| (k.to_string(), v)).collect(),
)
})
.collect(),
skip_duplicates,
},
}],
}))
}
pub fn owned_update<
const SIZE: usize,
TSyncId: SyncId<ModelTypes = TModel>,
TModel: SyncType<Marker = SharedSyncType>,
>(
&self,
id: TSyncId,
values: [(&'static str, Value); SIZE],
) -> CRDTOperation {
self.new_op(CRDTOperationType::Owned(OwnedOperation {
model: TModel::MODEL.to_string(),
items: [(id, values)]
.into_iter()
.map(|(id, data)| OwnedOperationItem {
id: json!(id),
data: OwnedOperationData::Update(
data.into_iter().map(|(k, v)| (k.to_string(), v)).collect(),
),
})
.collect(),
}))
}
pub fn shared_create<
TSyncId: SyncId<ModelTypes = TModel>,
TModel: SyncType<Marker = SharedSyncType>,
>(
&self,
id: TSyncId,
) -> CRDTOperation {
self.new_op(CRDTOperationType::Shared(SharedOperation {
model: TModel::MODEL.to_string(),
record_id: json!(id),
data: SharedOperationData::Create(SharedOperationCreateData::Atomic),
}))
}
pub fn shared_update<
TSyncId: SyncId<ModelTypes = TModel>,
TModel: SyncType<Marker = SharedSyncType>,
>(
&self,
id: TSyncId,
field: &str,
value: Value,
) -> CRDTOperation {
self.new_op(CRDTOperationType::Shared(SharedOperation {
model: TModel::MODEL.to_string(),
record_id: json!(id),
data: SharedOperationData::Update {
field: field.to_string(),
value,
},
}))
}
}
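One detail worth seeing in isolation: get_ops chains the owned and shared logs and replays them in hybrid-logical-clock order, so a node ingests operations in a causally consistent sequence regardless of which table they came from. A self-contained sketch of that merge-and-sort, with a plain u64 standing in for uhlc::NTP64:

#[derive(Debug)]
struct Op {
    timestamp: u64, // stand-in for uhlc::NTP64
    shared: bool,   // owned vs shared log
}

fn main() {
    let owned = vec![
        Op { timestamp: 3, shared: false },
        Op { timestamp: 1, shared: false },
    ];
    let shared = vec![Op { timestamp: 2, shared: true }];

    // chain both logs, then order by timestamp, exactly like get_ops
    let mut result: Vec<Op> = owned.into_iter().chain(shared).collect();
    result.sort_by(|a, b| a.timestamp.cmp(&b.timestamp));

    let order: Vec<u64> = result.iter().map(|op| op.timestamp).collect();
    assert_eq!(order, vec![1, 2, 3]);
}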


@ -1,528 +1,4 @@
mod manager;
pub use crate::prisma_sync::*;
pub use manager::SyncManager;


@ -1,10 +1,10 @@
#![allow(dead_code)]
use crate::extensions::{CodeExtension, Extension, VideoExtension};
use std::{ffi::OsStr, io::SeekFrom, path::Path};
use tokio::{
fs::{self, File},
io::{AsyncReadExt, AsyncSeekExt},
};
@ -172,15 +172,23 @@ pub async fn verify_magic_bytes<T: MagicBytes>(ext: T, file: &mut File) -> Optio
impl Extension {
pub async fn resolve_conflicting(
path: impl AsRef<Path>,
always_check_magic_bytes: bool,
) -> Option<Extension> {
let Some(ext_str) = path.as_ref().extension().and_then(OsStr::to_str) else {
    return None;
};
let Some(ext) = Extension::from_str(ext_str) else {
    return None;
};
let Ok(mut file) = fs::File::open(&path).await else {
return None
};
let file = &mut file;
match ext {
// we don't need to check the magic bytes unless there is conflict
// always_check_magic_bytes forces the check for tests
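The rewrite above swaps the nested match for let-else guards that bail out at each fallible step: no extension, unknown extension, unopenable file. A self-contained sketch of the first guard, extracting a UTF-8 extension from a path:

use std::ffi::OsStr;
use std::path::Path;

// Mirror of the first early-return above: pull a UTF-8 extension out of a
// path, or give up
fn extension_of(path: &Path) -> Option<&str> {
    let Some(ext) = path.extension().and_then(OsStr::to_str) else {
        return None;
    };
    Some(ext)
}

fn main() {
    assert_eq!(extension_of(Path::new("movie.mp4")), Some("mp4"));
    assert_eq!(extension_of(Path::new("no_extension")), None);
}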


@ -4,7 +4,7 @@
export type Procedures = {
queries:
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number, cas_id: string, integrity_checksum: string | null, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: Array<FilePath>, media_data: MediaData | null } | null } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number, pub_id: Array<number>, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: Array<FilePath>, media_data: MediaData | null } | null } |
{ key: "jobs.getHistory", input: LibraryArgs<null>, result: Array<JobReport> } |
{ key: "jobs.getRunning", input: LibraryArgs<null>, result: Array<JobReport> } |
{ key: "jobs.isRunning", input: LibraryArgs<null>, result: boolean } |
@ -97,7 +97,7 @@ export type ExplorerContext = { type: "Location" } & Location | { type: "Tag" }
export interface ExplorerData { context: ExplorerContext, items: Array<ExplorerItem> }
export type ExplorerItem = { type: "Path" } & FilePathWithObject | { type: "Object" } & ObjectWithFilePaths
export type ExplorerItem = { type: "Path", has_thumbnail: boolean, item: FilePathWithObject } | { type: "Object", has_thumbnail: boolean, item: ObjectWithFilePaths }
export interface FileCopierJobInit { source_location_id: number, source_path_id: number, target_location_id: number, target_path: string, target_file_name_suffix: string | null }
@ -111,7 +111,7 @@ export interface FileEncryptorJobInit { location_id: number, path_id: number, ke
export interface FileEraserJobInit { location_id: number, path_id: number, passes: number }
export interface FilePath { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string }
export interface GenerateThumbsForLocationArgs { id: number, path: string }
@ -165,7 +165,7 @@ export interface NormalisedUser { $type: string, $id: any, id: string, name: str
export interface NormalizedVec<T> { $type: string, edges: Array<T> }
export interface Object { id: number, pub_id: Array<number>, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string }
export interface ObjectValidatorArgs { id: number, path: string }
@ -197,6 +197,6 @@ export interface TagUpdateArgs { id: number, name: string | null, color: string
export interface Volume { name: string, mount_point: string, total_capacity: bigint, available_capacity: bigint, is_removable: boolean, disk_type: string | null, file_system: string | null, is_root_filesystem: boolean }
export interface FilePathWithObject { id: number, is_dir: boolean, cas_id: string | null, integrity_checksum: string | null, location_id: number, materialized_path: string, name: string, extension: string | null, object_id: number | null, parent_id: number | null, key_id: number | null, date_created: string, date_modified: string, date_indexed: string, object: Object | null }
export interface ObjectWithFilePaths { id: number, pub_id: Array<number>, name: string | null, extension: string | null, kind: number, size_in_bytes: string, key_id: number | null, hidden: boolean, favorite: boolean, important: boolean, has_thumbnail: boolean, has_thumbstrip: boolean, has_video_preview: boolean, ipfs_id: string | null, note: string | null, date_created: string, date_modified: string, date_indexed: string, file_paths: Array<FilePath> }


@ -68,7 +68,7 @@ export default function Explorer(props: Props) {
};
});
}}
key={props.data?.items[expStore.selectedRowIndex]?.item.id}
data={props.data?.items[expStore.selectedRowIndex]}
/>
</div>


@ -203,13 +203,13 @@ export function ExplorerContextMenu(props: PropsWithChildren) {
}
export interface FileItemContextMenuProps extends PropsWithChildren {
data: ExplorerItem;
}
export function FileItemContextMenu({ data, ...props }: FileItemContextMenuProps) {
const store = useExplorerStore();
const params = useExplorerParams();
const objectData = data ? (isObject(data) ? data.item : data.item.object) : null;
const hasMasterPasswordQuery = useLibraryQuery(['keys.hasMasterPassword']);
const hasMasterPassword =
@ -243,7 +243,7 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
onClick={(e) => {
copyFiles.mutate({
source_location_id: store.locationId!,
source_path_id: data.item.id,
target_location_id: store.locationId!,
target_path: params.path,
target_file_name_suffix: ' - Clone'
@ -257,7 +257,7 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
onClick={(e) => {
getExplorerStore().cutCopyState = {
sourceLocationId: store.locationId!,
sourcePathId: data.item.id,
actionType: CutCopyType.Cut,
active: true
};
@ -271,7 +271,7 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
onClick={(e) => {
getExplorerStore().cutCopyState = {
sourceLocationId: store.locationId!,
sourcePathId: data.item.id,
actionType: CutCopyType.Copy,
active: true
};
@ -323,8 +323,8 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
dialogManager.create((dp) => (
<EncryptFileDialog
{...dp}
location_id={store.locationId!}
path_id={data.item.id}
/>
));
} else if (!hasMasterPassword) {
@ -351,7 +351,7 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
<DecryptFileDialog
{...dp}
location_id={useExplorerStore().locationId!}
path_id={data.item.id}
/>
));
} else {
@ -378,7 +378,7 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
<EraseFileDialog
{...dp}
location_id={getExplorerStore().locationId!}
path_id={data.item.id}
/>
));
}}
@ -397,7 +397,7 @@ export function FileItemContextMenu({ ...props }: FileItemContextMenuProps) {
<DeleteFileDialog
{...dp}
location_id={getExplorerStore().locationId!}
path_id={data.item.id}
/>
));
}}


@ -3,6 +3,7 @@ import { HTMLAttributes } from 'react';
import { ExplorerItem, isVideoExt } from '@sd/client';
import { cva, tw } from '@sd/ui';
import { getExplorerStore } from '~/hooks/useExplorerStore';
import { ObjectKind } from '~/util/kind';
import { FileItemContextMenu } from './ExplorerContextMenu';
import FileThumb from './FileThumb';
import { isObject } from './utils';
@ -27,11 +28,12 @@ interface Props extends HTMLAttributes<HTMLDivElement> {
}
function FileItem({ data, selected, index, ...rest }: Props) {
const objectData = data ? (isObject(data) ? data.item : data.item.object) : null;
const isVid = isVideoExt(data.item.extension || '');
const item = data.item;
return (
<FileItemContextMenu data={data}>
<div
onContextMenu={(e) => {
if (index != undefined) {
@ -65,20 +67,20 @@ function FileItem({ data, selected, index, ...rest }: Props) {
isVid && '!border-black rounded border-x-0 border-y-[7px]'
)}
data={data}
kind={ObjectKind[objectData?.kind || 0]}
size={getExplorerStore().gridItemSize}
/>
{item.extension && isVid && (
<div className="absolute bottom-4 font-semibold opacity-70 right-2 py-0.5 px-1 text-[9px] uppercase bg-black/60 rounded">
{item.extension}
</div>
)}
</div>
</div>
<NameArea>
<span className={nameContainerStyles({ selected })}>
{item.name}
{item.extension && `.${item.extension}`}
</span>
</NameArea>
</div>


@ -55,13 +55,13 @@ const RenderCell: React.FC<{
return <DocumentIcon className="flex-shrink-0 w-5 h-5 mr-3 text-gray-300" />;
}
})()} */}
<span className="text-xs truncate">{data[colKey]}</span>
<span className="text-xs truncate">{data.item[colKey]}</span>
</div>
);
// case 'size_in_bytes':
// return <span className="text-xs text-left">{byteSize(Number(value || 0))}</span>;
case 'extension':
return <span className="text-xs text-left">{data[colKey]}</span>;
return <span className="text-xs text-left">{data.item[colKey]}</span>;
// case 'meta_integrity_hash':
// return <span className="truncate">{value}</span>;
// case 'tags':


@ -26,30 +26,26 @@ export default function FileThumb({ data, ...props }: Props) {
const platform = usePlatform();
const store = useExplorerStore();
const item = data.item;
const Icon = useMemo(() => {
const icon = icons[`../../../../assets/icons/${item.extension}.svg`];
const Icon = icon
? lazy(() => icon().then((v) => ({ default: (v as any).ReactComponent })))
: undefined;
return Icon;
}, [item.extension]);
if (isPath(data) && data.item.is_dir) return <Folder size={props.size * 0.7} />;
const cas_id = isObject(data) ? data.item.file_paths[0]?.cas_id : data.item.cas_id;
if (!cas_id) return <div></div>;
const url = platform.getThumbnailUrlById(cas_id);
if (data.has_thumbnail && url)
return (
<img
style={props.style}
@ -64,7 +60,7 @@ export default function FileThumb({ data, ...props }: Props) {
// Hacky (and temporary) way to integrate thumbnails
if (props.kind === 'Archive') icon = archive;
else if (props.kind === 'Video') icon = video;
else if (props.kind === 'Document' && data.item.extension === 'pdf') icon = documentPdf;
else if (props.kind === 'Executable') icon = executable;
else if (props.kind === 'Encrypted') icon = archive;


@ -35,13 +35,11 @@ interface Props extends DefaultProps<HTMLDivElement> {
data?: ExplorerItem;
}
export const Inspector = ({ data, context, ...elementProps }: Props) => {
const objectData = data ? (isObject(data) ? data.item : data.item.object) : null;
const filePathData = data ? (isObject(data) ? data.item.file_paths[0] : data.item) : null;
const isDir = data?.type === 'Path' ? data.item.is_dir : false;
// this prevents the inspector from fetching data when the user is navigating quickly
const [readyToFetch, setReadyToFetch] = useState(false);
@ -50,7 +48,7 @@ export const Inspector = (props: Props) => {
setReadyToFetch(true);
}, 350);
return () => clearTimeout(timeout);
}, [data?.item.id]);
// this is causing LAG
const tags = useLibraryQuery(['tags.getForObject', objectData?.id || -1], {
@ -61,12 +59,14 @@ export const Inspector = (props: Props) => {
enabled: readyToFetch && objectData?.id !== undefined
});
const item = data?.item;
return (
<div
{...elementProps}
className="-mt-[50px] pt-[55px] z-10 pl-1.5 pr-1 w-full h-screen overflow-x-hidden custom-scroll inspector-scroll pb-4"
>
{data && (
<>
<div
className={clsx(
@ -79,13 +79,13 @@ export const Inspector = (props: Props) => {
size={230}
kind={ObjectKind[objectData?.kind || 0]}
className="flex flex-grow-0 flex-shrink bg-green-500"
data={data}
/>
</div>
<div className="flex flex-col w-full pt-0.5 pb-0.5 overflow-hidden bg-app-box rounded-lg select-text shadow-app-shade/10 border border-app-line">
<h3 className="px-3 pt-2 pb-1 text-base font-bold truncate">
{item?.name}
{item?.extension && `.${item.extension}`}
</h3>
{objectData && (
<div className="flex flex-row mt-1 mb-0.5 mx-3 space-x-0.5">
@ -106,10 +106,10 @@ export const Inspector = (props: Props) => {
</div>
)}
{context?.type == 'Location' && data?.type === 'Path' && (
<MetaContainer>
<MetaTitle>URI</MetaTitle>
<MetaValue>{`${context.local_path}/${data.item.materialized_path}`}</MetaValue>
</MetaContainer>
)}
<Divider />
@ -117,7 +117,7 @@ export const Inspector = (props: Props) => {
<MetaContainer>
<div className="flex flex-wrap gap-1">
<InfoPill>{isDir ? 'Folder' : ObjectKind[objectData?.kind || 0]}</InfoPill>
{item && <InfoPill>{item.extension}</InfoPill>}
{tags?.data?.map((tag) => (
<InfoPill
className="!text-white"
@ -148,40 +148,40 @@ export const Inspector = (props: Props) => {
</MetaContainer>
<Divider />
<MetaContainer>
<Tooltip label={dayjs(item?.date_created).format('h:mm:ss a')}>
<MetaTextLine>
<InspectorIcon component={Clock} />
<span className="mr-1.5">Created</span>
<MetaValue>{dayjs(item?.date_created).format('MMM Do YYYY')}</MetaValue>
</MetaTextLine>
</Tooltip>
<Tooltip label={dayjs(item?.date_created).format('h:mm:ss a')}>
<MetaTextLine>
<InspectorIcon component={Barcode} />
<span className="mr-1.5">Indexed</span>
<MetaValue>{dayjs(item?.date_indexed).format('MMM Do YYYY')}</MetaValue>
</MetaTextLine>
</Tooltip>
</MetaContainer>
{!isDir && objectData && (
<>
<Note data={objectData} />
<Divider />
<MetaContainer>
<Tooltip label={filePathData?.cas_id || ''}>
<MetaTextLine>
<InspectorIcon component={Snowflake} />
<span className="mr-1.5">Content ID</span>
<MetaValue>{filePathData?.cas_id || ''}</MetaValue>
</MetaTextLine>
</Tooltip>
{filePathData?.integrity_checksum && (
<Tooltip label={filePathData?.integrity_checksum || ''}>
<MetaTextLine>
<InspectorIcon component={CircleWavyCheck} />
<span className="mr-1.5">Checksum</span>
<MetaValue>{filePathData?.integrity_checksum}</MetaValue>
</MetaTextLine>
</Tooltip>
)}


@ -183,7 +183,7 @@ const WrappedItem = memo(({ item, index, isSelected, kind }: WrappedItemProps) =
const [_, setSearchParams] = useSearchParams();
const onDoubleClick = useCallback(() => {
if (isPath(item) && item.item.is_dir) setSearchParams({ path: item.item.materialized_path });
}, [item, setSearchParams]);
const onClick = useCallback(() => {