[ENG-974] DB Backup prototype (#1216)

* DB Backup prototype

* Put backups behind feature flag

* Warning for data folder

* nit

* Clippy

---------

Co-authored-by: Utku <74243531+utkubakir@users.noreply.github.com>
Co-authored-by: Vítor Vasconcellos <vasconcellos.dev@gmail.com>
Oscar Beaumont 2023-08-17 13:37:10 +08:00 committed by GitHub
parent 9a9af6e9c7
commit bcbbe58141
19 changed files with 565 additions and 38 deletions

Cargo.lock (generated)

@@ -6924,6 +6924,7 @@ dependencies = [
"ctor",
"dashmap",
"enumflags2 0.7.7",
"flate2",
"futures",
"futures-concurrency",
"globset",
@@ -6965,6 +6966,7 @@ dependencies = [
"strum",
"strum_macros",
"sysinfo",
"tar",
"tempfile",
"thiserror",
"tokio",

View file

@@ -1,5 +1,6 @@
import { QueryClient, QueryClientProvider } from '@tanstack/react-query';
import { dialog, invoke, os, shell } from '@tauri-apps/api';
import { confirm } from '@tauri-apps/api/dialog';
import { listen } from '@tauri-apps/api/event';
import { convertFileSrc } from '@tauri-apps/api/tauri';
import { appWindow } from '@tauri-apps/api/window';
@@ -75,6 +76,7 @@ const platform: Platform = {
openFilePickerDialog: () => dialog.open(),
saveFilePickerDialog: () => dialog.save(),
showDevtools: () => invoke('show_devtools'),
confirm: (msg, cb) => confirm(msg).then(cb),
...commands
};

View file

@@ -41,7 +41,7 @@ const platform: Platform = {
locationLocalId
)}/${encodeURIComponent(filePathId)}`,
openLink: (url) => window.open(url, '_blank')?.focus(),
demoMode: import.meta.env.VITE_SD_DEMO_MODE === 'true'
confirm: (message, cb) => cb(window.confirm(message))
};
const queryClient = new QueryClient({

View file

@@ -102,6 +102,9 @@ async-channel = "1.9"
tokio-util = "0.7"
slotmap = "1.0.6"
aovec = "1.1.0"
flate2 = "1.0.26"
tar = "0.4.40"
tempfile = "^3.5.0"
[target.'cfg(target_os = "macos")'.dependencies]
plist = "1"
@@ -110,5 +113,4 @@ plist = "1"
version = "0.1.5"
[dev-dependencies]
tempfile = "^3.5.0"
tracing-test = "^0.2.4"
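
The crates added above (flate2, tar, and tempfile in the main dependencies) are what the new backup module builds on: a backup body is a gzip-compressed tar stream, and a restore unpacks it into a temporary directory. A self-contained sketch of that round-trip using only these crates follows; the file names and contents are illustrative, not Spacedrive code.

use std::fs::File;
use std::io::BufWriter;

use flate2::{read::GzDecoder, write::GzEncoder, Compression};
use tar::{Archive, Builder};

fn main() -> std::io::Result<()> {
    let dir = tempfile::tempdir()?;
    let payload = dir.path().join("library.db");
    std::fs::write(&payload, b"example database contents")?;

    // Pack: a tar builder writing through a gzip encoder, as `do_backup` does below.
    let archive_path = dir.path().join("backup.tar.gz");
    let gz = GzEncoder::new(
        BufWriter::new(File::create(&archive_path)?),
        Compression::default(),
    );
    let mut tar = Builder::new(gz);
    tar.append_file("library.db", &mut File::open(&payload)?)?;
    tar.into_inner()?.finish()?; // finalize the tar, then flush the gzip stream

    // Unpack: a gzip decoder feeding a tar archive, as `restore_backup` does below.
    let restore_dir = dir.path().join("restored");
    std::fs::create_dir_all(&restore_dir)?;
    Archive::new(GzDecoder::new(File::open(&archive_path)?)).unpack(&restore_dir)?;

    assert_eq!(
        std::fs::read(restore_dir.join("library.db"))?,
        b"example database contents".to_vec()
    );
    Ok(())
}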

core/src/api/backups.rs (new file)

@@ -0,0 +1,371 @@
use std::{
cmp,
fs::{self, File},
io::{self, BufReader, BufWriter, Read, Write},
path::PathBuf,
sync::Arc,
time::{SystemTime, UNIX_EPOCH},
};
use flate2::{bufread::GzDecoder, write::GzEncoder, Compression};
use futures::executor::block_on;
use rspc::{alpha::AlphaRouter, ErrorCode};
use serde::{Serialize, Serializer};
use specta::Type;
use tar::Archive;
use tempfile::tempdir;
use thiserror::Error;
use tokio::task::spawn_blocking;
use tracing::{error, info};
use uuid::Uuid;
use crate::{
invalidate_query,
library::{Library, LibraryManagerError},
Node,
};
use super::{utils::library, Ctx, R};
pub(crate) fn mount() -> AlphaRouter<Ctx> {
R.router()
.procedure("getAll", {
#[derive(Serialize, Type)]
pub struct Backup {
#[serde(flatten)]
header: Header,
path: String,
}
#[derive(Serialize, Type)]
pub struct GetAll {
backups: Vec<Backup>,
directory: String,
}
R.query(|node, _: ()| async move {
let directory = node.data_dir.join("backups");
Ok(GetAll {
backups: if !directory.exists() {
vec![]
} else {
spawn_blocking(move || {
fs::read_dir(node.data_dir.join("backups"))
.map(|dir| {
dir.filter_map(|entry| {
match entry.and_then(|e| Ok((e.metadata()?, e))) {
Ok((metadata, entry)) if metadata.is_file() => {
File::open(entry.path())
.ok()
.and_then(|mut file| {
Header::read(&mut file).ok()
})
.map(|header| Backup {
header,
// TODO: Lossy strings are bad
path: entry
.path()
.to_string_lossy()
.to_string(),
})
}
_ => None,
}
})
.collect::<Vec<_>>()
})
.map_err(|e| {
rspc::Error::with_cause(
ErrorCode::InternalServerError,
"Failed to fetch backups".to_string(),
e,
)
})
})
.await
.map_err(|e| {
rspc::Error::with_cause(
ErrorCode::InternalServerError,
"Failed to fetch backups".to_string(),
e,
)
})??
},
directory: directory.to_string_lossy().to_string(),
})
})
})
.procedure("backup", {
R.with2(library())
.mutation(|(node, library), _: ()| start_backup(node, library))
})
.procedure("restore", {
R
// TODO: Paths as strings is bad but here we want the flexibility of the frontend allowing any path
.mutation(|node, path: String| start_restore(node, path.into()))
})
.procedure("delete", {
R
// TODO: Paths as strings is bad but here we want the flexibility of the frontend allowing any path
.mutation(|node, path: String| async move {
tokio::fs::remove_file(path)
.await
.map(|_| {
invalidate_query!(node; node, "backups.getAll");
})
.map_err(|_| {
rspc::Error::new(
ErrorCode::InternalServerError,
"Error deleting backup!".to_string(),
)
})
})
})
}
async fn start_backup(node: Arc<Node>, library: Arc<Library>) -> Uuid {
let bkp_id = Uuid::new_v4();
spawn_blocking(move || {
match do_backup(bkp_id, &node, &library) {
Ok(path) => {
info!(
"Backup '{bkp_id}' for library '{}' created at '{path:?}'!",
library.id
);
invalidate_query!(library, "backups.getAll");
}
Err(e) => {
error!(
"Error with backup '{bkp_id}' for library '{}': {e:?}",
library.id
);
// TODO: Alert user something went wrong
}
}
});
bkp_id
}
#[derive(Error, Debug)]
enum BackupError {
#[error("io error: {0}")]
Io(#[from] io::Error),
#[error("library manager error: {0}")]
LibraryManager(#[from] LibraryManagerError),
#[error("malformed header")]
MalformedHeader,
#[error("Library already exists, please remove it and try again!")]
LibraryAlreadyExists,
}
#[derive(Debug)]
pub struct MustRemoveLibraryErr;
// This is intended to be called in a `spawn_blocking` task.
// Async is pure overhead for an IO bound operation like this.
fn do_backup(id: Uuid, node: &Node, library: &Library) -> Result<PathBuf, BackupError> {
let backups_dir = node.data_dir.join("backups");
fs::create_dir_all(&backups_dir)?;
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.expect("Time went backwards")
.as_millis();
let bkp_path = backups_dir.join(format!("{id}.bkp"));
let mut bkp_file = BufWriter::new(File::create(&bkp_path)?);
// Header. We do this so the file is self-sufficient.
Header {
id,
timestamp,
library_id: library.id,
library_name: library.config.name.to_string(),
}
.write(&mut bkp_file)?;
// Regular tar.gz encoded data
let mut tar = tar::Builder::new(GzEncoder::new(bkp_file, Compression::default()));
tar.append_file(
"library.sdlibrary",
&mut File::open(
node.libraries
.libraries_dir
.join(format!("{}.sdlibrary", library.id)),
)?,
)?;
tar.append_file(
"library.db",
&mut File::open(
node.libraries
.libraries_dir
.join(format!("{}.db", library.id)),
)?,
)?;
Ok(bkp_path)
}
fn start_restore(node: Arc<Node>, path: PathBuf) {
spawn_blocking(move || {
match restore_backup(&node, path.clone()) {
Ok(header) => {
info!(
"Restored to '{}' for library '{}'!",
header.id, header.library_id
);
}
Err(e) => {
error!("Error restoring backup '{path:?}': {e:?}");
// TODO: Alert user something went wrong
}
}
});
}
fn restore_backup(node: &Arc<Node>, path: PathBuf) -> Result<Header, BackupError> {
let mut file = BufReader::new(fs::File::open(path)?);
let header = Header::read(&mut file)?;
// TODO: Actually handle restoring into a library that exists. For now it's easier to error out.
let None = block_on(node.libraries.get_library(&header.library_id)) else {
return Err(BackupError::LibraryAlreadyExists)
};
let temp_dir = tempdir()?;
let mut archive = Archive::new(GzDecoder::new(file));
archive.unpack(&temp_dir)?;
let library_path = temp_dir.path().join("library.sdlibrary");
let db_path = temp_dir.path().join("library.db");
fs::copy(
library_path,
node.libraries
.libraries_dir
.join(format!("{}.sdlibrary", header.library_id)),
)?;
fs::copy(
db_path,
node.libraries
.libraries_dir
.join(format!("{}.db", header.library_id)),
)?;
let config_path = node
.libraries
.libraries_dir
.join(format!("{}.sdlibrary", header.library_id));
let db_path = config_path.with_extension("db");
block_on(
node.libraries
.load(header.library_id, &db_path, config_path, None, true, node),
)?;
Ok(header)
}
#[derive(Debug, PartialEq, Eq, Serialize, Type)]
struct Header {
// Backup unique id
id: Uuid,
// Time since epoch the backup was created at
#[specta(type = String)]
#[serde(serialize_with = "as_string")]
timestamp: u128,
// Library id
library_id: Uuid,
// Library display name
library_name: String,
}
fn as_string<T: ToString, S>(x: &T, s: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
s.serialize_str(&x.to_string())
}
impl Header {
fn write(&self, file: &mut impl Write) -> Result<(), io::Error> {
// For future versioning we can bump `1` to `2` and match on it in the decoder.
file.write_all(b"sdbkp1")?;
file.write_all(&self.id.to_bytes_le())?;
file.write_all(&self.timestamp.to_le_bytes())?;
file.write_all(&self.library_id.to_bytes_le())?;
{
let bytes = &self.library_name.as_bytes()
[..cmp::min(u32::MAX as usize, self.library_name.len())];
file.write_all(&(bytes.len() as u32).to_le_bytes())?;
file.write_all(bytes)?;
}
Ok(())
}
fn read(file: &mut impl Read) -> Result<Self, BackupError> {
let mut buf = vec![0u8; 6 + 16 + 16 + 16 + 4];
file.read_exact(&mut buf)?;
if &buf[..6] != b"sdbkp1" {
return Err(BackupError::MalformedHeader);
}
Ok(Self {
id: Uuid::from_bytes_le(
buf[6..22]
.try_into()
.map_err(|_| BackupError::MalformedHeader)?,
),
timestamp: u128::from_le_bytes(
buf[22..38]
.try_into()
.map_err(|_| BackupError::MalformedHeader)?,
),
library_id: Uuid::from_bytes_le(
buf[38..54]
.try_into()
.map_err(|_| BackupError::MalformedHeader)?,
),
library_name: {
let len = u32::from_le_bytes(
buf[54..58]
.try_into()
.map_err(|_| BackupError::MalformedHeader)?,
);
let mut name = vec![0; len as usize];
file.read_exact(&mut name)?;
String::from_utf8(name).map_err(|_| BackupError::MalformedHeader)?
},
})
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_backup_header() {
let original = Header {
id: Uuid::new_v4(),
timestamp: 1234567890,
library_id: Uuid::new_v4(),
library_name: "Test Library".to_string(),
};
let mut buf = Vec::new();
original.write(&mut buf).unwrap();
let decoded = Header::read(&mut buf.as_slice()).unwrap();
assert_eq!(original, decoded);
}
}
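
As the comment in Header::write notes, the trailing digit of the sdbkp1 magic doubles as a format version that a later revision can bump to 2 and match on in the decoder. A minimal standalone sketch of what that dispatch could look like is below; the DecodeError type, the read_version function, and any HeaderV2 are hypothetical and not part of this commit.

use std::io::{self, Read};

#[derive(Debug)]
enum DecodeError {
    Io(io::Error),
    MalformedHeader,
    UnsupportedVersion(u8),
}

impl From<io::Error> for DecodeError {
    fn from(e: io::Error) -> Self {
        DecodeError::Io(e)
    }
}

// Read the 6-byte magic and branch on its final byte instead of rejecting
// anything that is not exactly `sdbkp1`.
fn read_version(file: &mut impl Read) -> Result<u8, DecodeError> {
    let mut magic = [0u8; 6];
    file.read_exact(&mut magic)?;
    if &magic[..5] != b"sdbkp" {
        return Err(DecodeError::MalformedHeader);
    }
    match magic[5] {
        b'1' => Ok(1), // parse the remaining fields exactly as `Header::read` does today
        b'2' => Ok(2), // a hypothetical `HeaderV2::read` would take over here
        other => Err(DecodeError::UnsupportedVersion(other)),
    }
}

fn main() -> Result<(), DecodeError> {
    assert_eq!(read_version(&mut &b"sdbkp1..."[..])?, 1);
    Ok(())
}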

View file

@@ -21,6 +21,7 @@ pub enum CoreEvent {
InvalidateOperation(InvalidateOperationEvent),
}
mod backups;
mod categories;
mod files;
mod jobs;
@@ -113,6 +114,7 @@ pub(crate) fn mount() -> Arc<Router> {
.merge("sync.", sync::mount())
.merge("preferences.", preferences::mount())
.merge("notifications.", notifications::mount())
.merge("backups.", backups::mount())
.merge("invalidation.", utils::mount_invalidate())
.build(
#[allow(clippy::let_and_return)]

View file

@@ -143,6 +143,33 @@ macro_rules! invalidate_query {
$crate::api::utils::InvalidateOperationEvent::dangerously_create($key, serde_json::Value::Null, None)
))
}};
(node; $ctx:expr, $key:literal) => {{
let ctx: &$crate::Node = &$ctx; // Assert the context is the correct type
#[cfg(debug_assertions)]
{
#[ctor::ctor]
fn invalidate() {
$crate::api::utils::INVALIDATION_REQUESTS
.lock()
.unwrap()
.queries
.push($crate::api::utils::InvalidationRequest {
key: $key,
arg_ty: None,
result_ty: None,
macro_src: concat!(file!(), ":", line!()),
})
}
}
::tracing::trace!(target: "sd_core::invalidate-query", "invalidate_query!(\"{}\") at {}", $key, concat!(file!(), ":", line!()));
// The errors are ignored here because they aren't mission critical. If they fail the UI might be outdated for a bit.
ctx.event_bus.0.send($crate::api::CoreEvent::InvalidateOperation(
$crate::api::utils::InvalidateOperationEvent::dangerously_create($key, serde_json::Value::Null, None)
)).ok();
}};
($ctx:expr, $key:literal: $arg_ty:ty, $arg:expr $(,)?) => {{
let _: $arg_ty = $arg; // Assert the type the user provided is correct
let ctx: &$crate::library::Library = &$ctx; // Assert the context is the correct type

View file

@@ -49,7 +49,7 @@ pub enum LibraryManagerEvent {
/// is a singleton that manages all libraries for a node.
pub struct Libraries {
/// libraries_dir holds the path to the directory where libraries are stored.
libraries_dir: PathBuf,
pub libraries_dir: PathBuf,
/// libraries holds the list of libraries which are currently loaded into the node.
libraries: RwLock<HashMap<Uuid, Arc<Library>>>,
// Transmit side of `self.rx` channel
@@ -304,8 +304,8 @@ impl Libraries {
self.libraries.read().await.get(library_id).is_some()
}
/// load the library from a given path
async fn load(
/// load the library from a given path.
pub async fn load(
self: &Arc<Self>,
id: Uuid,
db_path: impl AsRef<Path>,

View file

@@ -166,7 +166,7 @@ impl LocationManagerActor {
LibraryManagerEvent::InstancesModified(_) => {}
LibraryManagerEvent::Delete(_) => {
#[cfg(debug_assertions)]
todo!("TODO: Remove locations from location manager"); // TODO
error!("TODO: Remove locations from location manager"); // TODO
}
}
}

View file

@@ -116,7 +116,7 @@ export default () => {
key={`${library.uuid} ${key}`}
title={StatItemNames[key as keyof Statistics]!}
bytes={BigInt(value)}
isLoading={platform.demoMode ? false : stats.isLoading}
isLoading={stats.isLoading}
info={StatDescriptions[key as keyof Statistics]}
/>
);

View file

@@ -1,6 +1,7 @@
import {
Books,
Cloud,
Database,
FlyingSaucer,
GearSix,
HardDrive,
@@ -25,7 +26,8 @@ const Section = tw.div`space-y-0.5`;
export default () => {
const os = useOperatingSystem();
const isPairingEnabled = useFeatureFlag('p2pPairing');
// const isPairingEnabled = useFeatureFlag('p2pPairing');
const isBackupsEnabled = useFeatureFlag('backups');
return (
<div className="custom-scroll no-scrollbar h-full w-60 max-w-[180px] shrink-0 border-r border-app-line/50 pb-5">
@@ -56,6 +58,10 @@ export default () => {
<Icon component={PaintBrush} />
Appearance
</SidebarLink>
<SidebarLink to="client/backups" disabled={!isBackupsEnabled}>
<Icon component={Database} />
Backups
</SidebarLink>
<SidebarLink to="client/keybindings" disabled>
<Icon component={KeyReturn} />
Keybinds

View file

@@ -0,0 +1,87 @@
import dayjs from 'dayjs';
import { useBridgeMutation, useBridgeQuery, useLibraryMutation } from '@sd/client';
import { Button, Card } from '@sd/ui';
import { Database } from '~/components';
import { usePlatform } from '~/util/Platform';
import { Heading } from '../Layout';
// TODO: This is a non-library page but does a library query for backup. That will be confusing UX.
// TODO: Should this be a library or node page? If it's a library page how can a user view all their backups across libraries (say they wanted to save some storage cause their SSD is full)?
// TODO: If it were a library page what do we do when restoring a backup? It can't be a `useLibraryQuery` to restore cause we are gonna have to unload the library from the backend.
export const Component = () => {
const platform = usePlatform();
const backups = useBridgeQuery(['backups.getAll']);
const doBackup = useLibraryMutation('backups.backup');
const doRestore = useBridgeMutation('backups.restore');
const doDelete = useBridgeMutation('backups.delete');
console.log(doRestore.isLoading);
return (
<>
<Heading
title="Backups"
description="Manage your Spacedrive database backups."
rightArea={
<div className="flex flex-row items-center space-x-5">
<Button
disabled={doBackup.isLoading}
variant="gray"
size="md"
onClick={() => {
if (backups.data) {
// TODO: opening paths from the frontend is hacky cause non-UTF-8 chars in the filename break stuff
platform.openLink(backups.data.directory);
}
}}
>
Backups Directory
</Button>
<Button
disabled={doBackup.isLoading}
variant="accent"
size="md"
onClick={() => doBackup.mutate(null)}
>
Backup
</Button>
</div>
}
/>
{backups.data?.backups.map((backup) => (
<Card key={backup.id} className="hover:bg-app-box/70">
<Database className="mr-3 h-10 w-10 self-center" />
<div className="grid min-w-[110px] grid-cols-1">
<h1 className="truncate pt-0.5 text-sm font-semibold">
{dayjs(backup.timestamp).toString()}
</h1>
<p className="mt-0.5 select-text truncate text-sm text-ink-dull">
For library '{backup.library_name}'
</p>
</div>
<div className="flex grow" />
<div className="flex h-[45px] space-x-2 p-2">
<Button
disabled={doRestore.isLoading}
onClick={() => doRestore.mutate(backup.path)}
variant="gray"
>
Restore
</Button>
<Button
disabled={doDelete.isLoading}
onClick={() => doDelete.mutate(backup.path)}
size="sm"
variant="colored"
className="border-red-500 bg-red-500"
>
Delete
</Button>
</div>
</Card>
))}
</>
);
};

View file

@@ -90,20 +90,20 @@ export const Component = () => {
<div>
<NodeSettingLabel>Data Folder</NodeSettingLabel>
<div className="mt-2 flex w-full flex-row gap-2">
<Input
className="grow"
value={node.data?.data_path}
onChange={() => {
/* TODO */
}}
disabled
/>
<Input className="grow" value={node.data?.data_path} disabled />
<Button
size="sm"
variant="outline"
onClick={() => {
if (node.data && platform?.openLink) {
platform.openLink(node.data.data_path);
if (node.data && !!platform?.openLink) {
platform.confirm(
'Modifying or backing up data within this folder may cause irreparable damage! Proceed at your own risk!',
(result) => {
if (result) {
platform.openLink(node.data.data_path);
}
}
);
}
}}
>

View file

@@ -5,5 +5,6 @@ export default [
{ path: 'appearance', lazy: () => import('./appearance') },
{ path: 'keybindings', lazy: () => import('./keybindings') },
{ path: 'extensions', lazy: () => import('./extensions') },
{ path: 'privacy', lazy: () => import('./privacy') }
{ path: 'privacy', lazy: () => import('./privacy') },
{ path: 'backups', lazy: () => import('./backups') }
] satisfies RouteObject[];

View file

@@ -19,8 +19,6 @@ export default ({ location }: Props) => {
const fullRescan = useLibraryMutation('locations.fullRescan');
const onlineLocations = useOnlineLocations();
const isDark = useIsDark();
if (hide) return <></>;
const online = onlineLocations.some((l) => arraysEqual(location.pub_id, l));
@@ -32,7 +30,7 @@ export default ({ location }: Props) => {
navigate(`${location.id}`);
}}
>
<Folder white={!isDark} className="mr-3 h-10 w-10 self-center" />
<Folder className="mr-3 h-10 w-10 self-center" />
<div className="grid min-w-[110px] grid-cols-1">
<h1 className="truncate pt-0.5 text-sm font-semibold">{location.name}</h1>
<p className="mt-0.5 select-text truncate text-sm text-ink-dull">

View file

@@ -1,31 +1,44 @@
import { Folder as Folder_Dark, Folder_Light } from '@sd/assets/icons';
import {
Database as Database_Dark,
Database_Light,
Folder as Folder_Dark,
Folder_Light
} from '@sd/assets/icons';
import { useIsDark } from '~/hooks';
interface FolderProps {
interface Props {
/**
* Append additional classes to the underlying SVG
*/
className?: string;
/**
* Render a white folder icon
*/
white?: boolean;
/**
* The size of the icon to show -- uniform width and height
*/
size?: number;
}
export function Folder(props: FolderProps) {
const { size = 24 } = props;
export function Folder({ size = 24, className }: Props) {
const isDark = useIsDark();
return (
<img
className={props.className}
className={className}
width={size}
height={size}
src={props.white ? Folder_Light : Folder_Dark}
src={isDark ? Folder_Light : Folder_Dark}
alt="Folder icon"
/>
);
}
export function Database({ size = 24, className }: Props) {
const isDark = useIsDark();
return (
<img
className={className}
width={size}
height={size}
src={isDark ? Database_Light : Database_Dark}
alt="Database icon"
/>
);
}

View file

@@ -14,7 +14,8 @@ export type Platform = {
_linux_workaround?: boolean
) => string;
openLink: (url: string) => void;
demoMode?: boolean; // TODO: Remove this in favour of demo mode being handled at the React Query level
// Tauri patches `window.confirm` to return `Promise` not `bool`
confirm(msg: string, cb: (result: boolean) => void): void;
getOs?(): Promise<OperatingSystem>;
openDirectoryPickerDialog?(): Promise<null | string | string[]>;
openFilePickerDialog?(): Promise<null | string | string[]>;

View file

@@ -3,6 +3,7 @@
export type Procedures = {
queries:
{ key: "backups.getAll", input: never, result: GetAll } |
{ key: "buildInfo", input: never, result: BuildInfo } |
{ key: "categories.list", input: LibraryArgs<null>, result: { [key in Category]: number } } |
{ key: "files.get", input: LibraryArgs<GetArgs>, result: { id: number; pub_id: number[]; kind: number | null; key_id: number | null; hidden: boolean | null; favorite: boolean | null; important: boolean | null; note: string | null; date_created: string | null; date_accessed: string | null; file_paths: FilePath[]; media_data: MediaData | null } | null } |
@@ -33,6 +34,9 @@ export type Procedures = {
{ key: "tags.list", input: LibraryArgs<null>, result: Tag[] } |
{ key: "volumes.list", input: never, result: Volume[] },
mutations:
{ key: "backups.backup", input: LibraryArgs<null>, result: string } |
{ key: "backups.delete", input: string, result: null } |
{ key: "backups.restore", input: string, result: null } |
{ key: "files.copyFiles", input: LibraryArgs<FileCopierJobInit>, result: null } |
{ key: "files.cutFiles", input: LibraryArgs<FileCutterJobInit>, result: null } |
{ key: "files.deleteFiles", input: LibraryArgs<FileDeleterJobInit>, result: null } |
@@ -88,6 +92,8 @@ export type Procedures = {
{ key: "sync.newMessage", input: LibraryArgs<null>, result: null }
};
export type Backup = ({ id: string; timestamp: string; library_id: string; library_name: string }) & { path: string }
export type BuildInfo = { version: string; commit: string }
export type CRDTOperation = { instance: string; timestamp: number; id: string; typ: CRDTOperationType }
@@ -135,8 +141,12 @@ export type FullRescanArgs = { location_id: number; reidentify_objects: boolean
export type GenerateThumbsForLocationArgs = { id: number; path: string }
export type GetAll = { backups: Backup[]; directory: string }
export type GetArgs = { id: number }
export type Header = { id: string; timestamp: string; library_id: string; library_name: string }
export type IdentifyUniqueFilesArgs = { id: number; path: string }
export type IndexerRule = { id: number; pub_id: number[]; name: string | null; default: boolean | null; rules_per_kind: number[] | null; date_created: string | null; date_modified: string | null }

View file

@@ -2,9 +2,9 @@ import { useEffect } from 'react';
import { subscribe, useSnapshot } from 'valtio';
import { valtioPersist } from '../lib/valito';
export const features = ['spacedrop', 'p2pPairing', 'syncRoute'] as const;
export const features = ['spacedrop', 'p2pPairing', 'syncRoute', 'backups'] as const;
export type FeatureFlag = typeof features[number];
export type FeatureFlag = (typeof features)[number];
const featureFlagState = valtioPersist('sd-featureFlags', {
enabled: [] as FeatureFlag[]
@@ -23,7 +23,8 @@ export function useOnFeatureFlagsChange(callback: (flags: FeatureFlag[]) => void
useEffect(() => subscribe(featureFlagState, () => callback(featureFlagState.enabled)));
}
export const isEnabled = (flag: FeatureFlag) => featureFlagState.enabled.find((ff) => flag === ff);
export const isEnabled = (flag: FeatureFlag) =>
featureFlagState.enabled.find((ff) => flag === ff) !== undefined;
export function toggleFeatureFlag(flags: FeatureFlag | FeatureFlag[]) {
if (!Array.isArray(flags)) {
@@ -35,6 +36,10 @@ export function toggleFeatureFlag(flags: FeatureFlag | FeatureFlag[]) {
alert(
'Pairing will render your database broken and it WILL need to be reset! Use at your own risk!'
);
} else if (f === 'backups') {
alert(
'Backups are done on your live DB without proper Sqlite snapshotting. This will work but it could result in unintended side effects on the backup!'
);
}
featureFlagState.enabled.push(f);