commit before I tear out rusqlite

This commit is contained in:
Jamie 2021-09-30 00:34:42 -07:00
parent 6470067c08
commit c50728a4d6
15 changed files with 904 additions and 103 deletions

View file

@ -1,6 +1,3 @@
{
"cSpell.words": [
"ipfs",
"tailwindcss"
]
}
"cSpell.words": ["ipfs", "repr", "tailwindcss"]
}

683
src-tauri/Cargo.lock generated

File diff suppressed because it is too large Load diff

View file

@ -21,9 +21,13 @@ serde_json = "1.0"
rebind = "0.2.1"
data-encoding = "2.3.2"
ring = "0.17.0-alpha.10"
rusqlite = "0.25.3"
rusqlite = { version = "0.25.3", features = ["chrono"] }
chrono = { version = "0.4.0", features = ["serde"] }
crossbeam = "0.8.1"
cargo-edit = "0.8.0"
sha256 = "1.0.2"
int-enum = "0.4.0"
rusqlite_migration = "0.5.0"
[features]
default = [ "custom-protocol" ]

View file

@ -1,5 +1,7 @@
use crate::filesystem::checksum;
use crate::filesystem::file;
use crate::filesystem::file::File;
use tauri::InvokeError;
#[tauri::command(async)]
@ -7,5 +9,17 @@ pub async fn read_file_command(path: &str) -> Result<File, InvokeError> {
let file = file::read_file(path)
.await
.map_err(|error| InvokeError::from(format!("Failed to read file: {}", error)))?;
file::commit_file(&file).await;
Ok(file)
}
#[tauri::command(async)]
pub async fn generate_buffer_checksum(path: &str) -> Result<File, InvokeError> {
let mut file = file::read_file(path)
.await
.map_err(|error| InvokeError::from(format!("Failed to read file: {}", error)))?;
file.buffer_checksum = Some(checksum::create_hash(&file.uri).await.unwrap());
Ok(file)
}

View file

@ -1,9 +1,22 @@
use int_enum::IntEnum;
use serde::{Deserialize, Serialize};
#[derive(Debug, Serialize, Deserialize)]
#[repr(u8)]
#[derive(Clone, Copy, Debug, Serialize, Deserialize, IntEnum)]
pub enum Encryption {
NONE,
AES128,
AES192,
AES256,
NONE = 0,
AES128 = 1,
AES192 = 2,
AES256 = 3,
}
/// Converts a raw database integer into an `Encryption` variant.
///
/// `From` must be total, and `i64` has far more values than the four
/// variants, so the original non-exhaustive match did not compile.
/// Unknown values fall back to `Encryption::NONE`.
/// NOTE(review): if an unknown value should be treated as data
/// corruption instead, switch this to `TryFrom<i64>`.
impl From<i64> for Encryption {
    fn from(val: i64) -> Self {
        match val {
            1 => Encryption::AES128,
            2 => Encryption::AES192,
            3 => Encryption::AES256,
            // 0 and any unexpected value mean "no encryption".
            _ => Encryption::NONE,
        }
    }
}

View file

@ -0,0 +1,10 @@
use crate::app::config;
use rusqlite;
use tauri::InvokeError;
pub fn get_connection() -> Result<rusqlite::Connection, InvokeError> {
let config = config::get_config();
rusqlite::Connection::open(config.primary_db)
.map_err(|error| InvokeError::from("database_connection_failure"))
}

View file

@ -1,9 +0,0 @@
use rusqlite;
// use tauri::api::path;
use crate::app::config;
/// Opens the primary SQLite database from the application config,
/// returning the raw rusqlite error on failure.
pub fn create_connection() -> Result<rusqlite::Connection, rusqlite::Error> {
    let db_path = config::get_config().primary_db;
    rusqlite::Connection::open(db_path)
}

View file

@ -0,0 +1,15 @@
/// Drains a mapped-row iterator into a `Vec`, panicking on any
/// row-mapping error.
pub(crate) trait ToVec<T> {
    fn to_vec(self) -> Vec<T>;
}

/// Row-mapping function signature used with `Statement::query_map`.
pub(crate) type QueryMapper<T> = fn(&rusqlite::Row) -> rusqlite::Result<T>;

impl<T> ToVec<T> for rusqlite::MappedRows<'_, QueryMapper<T>> {
    fn to_vec(self) -> Vec<T> {
        // Idiomatic collect instead of a manual push loop; `size_hint`
        // lets `collect` size the Vec when available.
        // NOTE(review): `unwrap` preserves the original panic-on-error
        // behavior — consider returning `rusqlite::Result<Vec<T>>`.
        self.map(|row| row.unwrap()).collect()
    }
}

View file

@ -0,0 +1,29 @@
use rusqlite::Connection;
use rusqlite_migration::{Migrations, M};
pub fn run_migrations(mut connection: Connection) {
let migrations = Migrations::new(vec![M::up(
"
CREATE TABLE IF NOT EXISTS files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
uri STRING NOT NULL,
meta_checksum STRING NOT NULL,
buffer_checksum STRING,
name STRING,
extension STRING,
size_in_bytes INTEGER NOT NULL,
encryption INTEGER DEFAULT 0,
ipfs_id STRING,
user_id STRING,
storage_device_id STRING,
capture_device_id STRING,
parent_file_id STRING
date_created TEXT NOT NULL,
date_modified TEXT NOT NULL,
date_indexed TEXT NOT NULL,
);
",
)]);
migrations.to_latest(&mut connection).unwrap();
}

View file

@ -1 +1,4 @@
pub mod init;
pub mod connection;
pub mod mapper;
pub mod migrate;
pub mod page;

49
src-tauri/src/db/page.rs Normal file
View file

@ -0,0 +1,49 @@
use serde::{Deserialize, Serialize};
use std::fmt::{Display, Formatter};
/// Pagination parameters deserialized from a frontend request.
/// Every field is `#[serde(default)]`, so an omitted field falls back to
/// its type default (`0`, empty string, `SortDirection::ASC`).
#[derive(Serialize, Deserialize, Debug)]
pub(crate) struct PageRequest {
// page index to fetch — assumed zero-based; TODO confirm against query usage
#[serde(default)]
pub(crate) page: i64,
// number of items per page
#[serde(default)]
pub(crate) size: i64,
// column name to sort by — NOTE(review): if interpolated into SQL,
// verify it is validated against a whitelist
#[serde(default)]
pub(crate) sort_by: String,
// sort order; defaults to ascending via the Default impl below
#[serde(default)]
pub(crate) direction: SortDirection,
}
/// Sort order for paged queries; serde serializes the variant names
/// ("ASC" / "DESC"), matching SQL ORDER BY keywords.
#[derive(Serialize, Deserialize, Debug)]
pub(crate) enum SortDirection {
ASC,
DESC,
}
impl Default for SortDirection {
fn default() -> Self {
SortDirection::ASC
}
}
impl Display for SortDirection {
fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
write!(f, "{:?}", self)
}
}
/// One page of results returned to the frontend.
#[derive(Serialize, Deserialize, Debug)]
pub(crate) struct PageResponse<T> {
// the items on this page
content: Vec<T>,
// total number of pages available for the query
total_pages: i64,
// total number of matching elements across all pages
total_elements: i64,
}
impl<T> PageResponse<T> {
pub(crate) fn new(content: Vec<T>, total_pages: i64, total_elements: i64) -> Self {
PageResponse {
content,
total_pages,
total_elements,
}
}
}

View file

@ -1,8 +1,9 @@
use data_encoding::HEXUPPER;
use data_encoding::HEXLOWER;
use ring::digest::{Context, Digest, SHA256};
use std::fs::File;
use sha256::digest;
use std::io;
use std::io::{BufReader, Read};
use std::time::Instant;
fn sha256_digest<R: Read>(mut reader: R) -> io::Result<Digest> {
let mut context = Context::new(&SHA256);
@ -18,10 +19,15 @@ fn sha256_digest<R: Read>(mut reader: R) -> io::Result<Digest> {
}
pub async fn create_hash(path: &str) -> io::Result<String> {
let input = File::open(path)?;
let reader = BufReader::new(input);
let digest = sha256_digest(reader)?;
let hash = HEXUPPER.encode(digest.as_ref());
println!("hashing complete {}", hash);
Ok(hash)
let start = Instant::now();
// read file as buffer and convert to digest
let digest = sha256_digest(BufReader::new(std::fs::File::open(path)?))?;
// create a lowercase hash from
let hex = HEXLOWER.encode(digest.as_ref());
println!("hashing complete in {:?} {}", start.elapsed(), hex);
Ok(hex)
}
/// Builds a cheap identity hash from a file's URI and byte size, used to
/// detect changes without reading the file's contents.
pub fn create_meta_hash(uri: String, size_in_bytes: u64) -> io::Result<String> {
    // `format!` stringifies the integer directly; the original's extra
    // `.to_string()` allocated an intermediate String for nothing.
    Ok(digest(format!("{}{}", uri, size_in_bytes)))
}

View file

@ -1,13 +1,16 @@
use crate::crypto;
use chrono::prelude::*;
use crossbeam::thread;
use rusqlite::named_params;
use serde::{Deserialize, Serialize};
use std::ffi::OsStr;
use std::fs;
use std::io;
use std::path;
use std::time::Instant;
use tauri::InvokeError;
use crate::db;
// use crate::db::mapper::QueryMapper;
use crate::filesystem::checksum;
use crate::util::time;
@ -15,7 +18,8 @@ use crate::util::time;
pub struct File {
// identity
pub id: Option<u64>,
pub checksum: Option<String>,
pub buffer_checksum: Option<String>,
pub meta_checksum: String,
pub uri: String,
// metadata
pub name: String,
@ -35,58 +39,38 @@ pub struct File {
}
// Read a file from path returning the File struct
// Generates checksum and extracts metadata
// Generates meta checksum and extracts metadata
pub async fn read_file(path: &str) -> io::Result<File> {
// let start = Instant::now();
let path_buff = path::PathBuf::from(path);
// extract metadata
let metadata = match fs::metadata(&path) {
Ok(metadata) => metadata,
Err(e) => return Err(e),
};
let metadata = fs::metadata(&path)?;
// if metadata.is_dir() {
// return Err();
// }
if metadata.is_dir() {
// return Err();
}
// let checksum = thread::scope(|s| {
// let res = s.spawn(move |_| checksum::create_hash(path).unwrap());
// res.join()
// })
// .unwrap()
// .unwrap();
let size = metadata.len();
let meta_checksum = checksum::create_meta_hash(path.to_owned(), size)?;
// let checksum = match checksum {
// Ok(metadata) => metadata, // Err(e) => return Err(e.into()),
// };
// generate checksum
// let checksum = match checksum::create_hash(path) {
// Ok(checksum) => checksum,
// Err(e) => return Err(e),
// };
// assemble File struct with initial values
let file = File {
name: extract_name(path_buff.file_name()),
extension: extract_name(path_buff.extension()),
uri: path.to_owned(),
size_in_bytes: metadata.len(),
size_in_bytes: size,
date_created: time::system_time_to_date_time(metadata.created()).unwrap_or(Utc::now()),
date_modified: time::system_time_to_date_time(metadata.created()).unwrap_or(Utc::now()),
date_indexed: Utc::now(),
encryption: crypto::Encryption::NONE,
// this will be populated later, either by the database or other functions
id: None,
checksum: None,
meta_checksum,
buffer_checksum: None,
ipfs_id: None,
user_id: None,
storage_device_id: None,
capture_device_id: None,
parent_file_id: None,
};
checksum::create_hash(path).await;
Ok(file)
}
@ -98,3 +82,50 @@ fn extract_name(os_string: Option<&OsStr>) -> String {
.unwrap_or_default()
.to_owned()
}
pub async fn commit_file(file: &File) -> Result<(), InvokeError> {
let connection = db::connection::get_connection()?;
connection.execute("
INSERT INTO files (uri, meta_checksum, buffer_checksum, name, extension, size_in_bytes, encryption, ipfs_id, user_id, storage_device_id, capture_device_id, parent_file_id, date_created, date_modified, date_indexed) VALUES (:uri, :meta_checksum, :buffer_checksum, :name, :extension, :size_in_bytes, :encryption, :ipfs_id, :user_id, :storage_device_id, :capture_device_id, :parent_file_id, :date_created, :date_modified, :date_indexed)
", named_params! {
":uri": &file.uri,
":meta_checksum": &file.meta_checksum,
":buffer_checksum": &file.buffer_checksum,
":name": &file.name,
":extension": &file.extension,
":size_in_bytes": &file.size_in_bytes,
":encryption": crypto::Encryption::NONE,
":ipfs_id": &file.ipfs_id,
":user_id": &file.user_id,
":storage_device_id": &file.storage_device_id,
":capture_device_id": &file.capture_device_id,
":parent_file_id": &file.parent_file_id,
":date_created": &file.date_created,
":date_modified": &file.date_modified,
":date_indexed": &file.date_indexed
});
Ok(())
}
// const FILE_MAPPER: QueryMapper<File> = |row| {
// Ok(File {
// id: row.get(0)?,
// buffer_checksum: row.get(1)?,
// meta_checksum: row.get(2)?,
// uri: row.get(3)?,
// name: row.get(4)?,
// extension: row.get(5)?,
// size_in_bytes: row.get(6)?,
// encryption: crypto::Encryption::from(row.get(7)?),
// ipfs_id: row.get(8)?,
// user_id: row.get(9)?,
// storage_device_id: row.get(10)?,
// capture_device_id: row.get(11)?,
// parent_file_id: row.get(12)?,
// date_created: chrono::DateTime::parse_from_str(row.get(13).unwrap(), "utc")?,
// date_modified: row.get(14)?,
// date_indexed: row.get(15)?,
// })
// };

View file

@ -12,12 +12,16 @@ mod util;
use crate::app::menu;
fn main() {
let connection = db::init::create_connection();
let connection = db::connection::get_connection().unwrap();
db::migrate::run_migrations(connection);
println!("primary database connected {:?}", connection);
println!("primary database connected");
tauri::Builder::default()
.invoke_handler(tauri::generate_handler![commands::read_file_command])
.invoke_handler(tauri::generate_handler![
commands::read_file_command,
commands::generate_buffer_checksum
])
.menu(menu::get_menu())
.run(tauri::generate_context!())
.expect("error while running tauri application");

View file

@ -38,11 +38,21 @@ export default function App() {
return (
<VechaiProvider theme={theme} colorScheme="pale">
<div data-tauri-drag-region className="max-w h-10 bg-primary-800"></div>
<div data-tauri-drag-region className="max-w h-4"></div>
<div className="p-2">
<div className="flex flex-wrap w-full space-x-2">
<Input value={fileInputVal} onChange={(e) => setFileInputVal(e.target.value)} />
<input ref={fileUploader} type="file" id="file" onChange={changeHandler} />
<Input
className="mb-2"
value={fileInputVal}
onChange={(e) => setFileInputVal(e.target.value)}
/>
<input
ref={fileUploader}
type="file"
id="file"
className="hidden"
onChange={changeHandler}
/>
<Button
variant="solid"
color="primary"
@ -54,8 +64,16 @@ export default function App() {
>
Load File
</Button>
<Button variant="solid" color="primary">
Reset
<Button
variant="solid"
color="primary"
onClick={() => {
invoke('generate_buffer_checksum', {
path: fileInputVal
}).then(console.log);
}}
>
Generate Buffer Checksum
</Button>
<Button variant="solid" color="primary">
Close