[ENG-927, ENG-735, ENG-766] Fix Updater & Tauri 1.5 (#1361)

* custom updater with toasts

* new state management + updated router route

* tauri-specific update route

* ref

* update in prod only

* change 'Install' to 'Update'

* fix tsconfig

* desktop tauri

* remove tauri patch

* tauri 1.5

* tauri 1.5

* use tauri script

* native-deps

* Rework preprep and tauri script to better support tauri 1.5

* Update to tauri 1.5.1
 - Update workspace and apps/desktop dependencies
 - Fix mustache import, @types/mustache is not compatible with ES imports
 - Replace arm64 with aarch64 in machineID; they should be treated the same and this simplifies the code (see the sketch below)
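
A minimal sketch of the last two fixes, assuming a Node/TypeScript script; `createRequire` is one common workaround for the CommonJS-only mustache typings (the commit may use a different one), and `normalizedArch` is an illustrative helper, not a name from this diff:

import { createRequire } from 'node:module';

// @types/mustache only describes a CommonJS export, so load mustache through
// createRequire rather than an ES default import (one possible workaround,
// not necessarily the one used in this commit).
const require = createRequire(import.meta.url);
const mustache = require('mustache');

// Render the same kind of template as the cargo config template in this diff.
const rendered = mustache.render('PROTOC = "{{{protoc}}}"', { protoc: '/usr/local/bin/protoc' });

// Treat Node's 'arm64' and the Rust target name 'aarch64' as the same
// architecture when deriving a machine ID (illustrative helper).
function normalizedArch(): string {
	return process.arch === 'arm64' ? 'aarch64' : process.arch;
}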

* Fix tauri updater not building due to missing key
 - Fix dmg background not being found
 - Generate an ad hoc key for the tauri updater when it is enabled and the user is doing a prod build (see the sketch below)
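
A sketch of the ad hoc key fallback, assuming Tauri v1's `tauri signer generate` subcommand; the flags, temp-file location, and empty password here are assumptions, not taken from this diff:

import { execFileSync } from 'node:child_process';
import { existsSync } from 'node:fs';
import os from 'node:os';
import path from 'node:path';

// Prod builds with the updater bundle enabled need a signing key; if none is
// configured, fall back to a throwaway (ad hoc) key so `tauri build` succeeds.
// Tauri accepts either the key contents or a path in TAURI_PRIVATE_KEY.
if (!process.env.TAURI_PRIVATE_KEY) {
	const keyPath = path.join(os.tmpdir(), 'spacedrive-adhoc.key'); // assumed location
	if (!existsSync(keyPath)) {
		execFileSync('pnpm', ['tauri', 'signer', 'generate', '-w', keyPath, '--password', ''], {
			stdio: 'inherit',
			shell: process.platform === 'win32' // pnpm is a .cmd shim on Windows
		});
	}
	process.env.TAURI_PRIVATE_KEY = keyPath;
	process.env.TAURI_KEY_PASSWORD = '';
}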

* Fix ctrl+c/ctrl+v typo

* Normalize @tanstack/react-query version through workspace
 - Use undici in scripts instead of global fetch (see the sketch below)
 - Fix typecheck
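
For the undici item, a minimal sketch of swapping Node's global fetch for undici's in a repo script; the release URL is only an example, mirroring the GitHub API calls elsewhere in this diff:

import { fetch } from 'undici';

// Using undici's fetch pins the implementation instead of relying on Node's
// global fetch, which is still marked experimental on Node 18.
const res = await fetch('https://api.github.com/repos/spacedriveapp/spacedrive/releases/latest');
if (!res.ok) throw new Error(`GitHub API request failed: ${res.status} ${res.statusText}`);
const release = (await res.json()) as { tag_name: string };
console.log(`Latest release: ${release.tag_name}`);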

* Fix linux prod and dev builds
 - Improve error handling in tauri.mjs

* Normalize dev deps in workspace
 - Improve linux shared libs setup

* Fix CI and server docker

* Fix windows
 - Remove superfluous envvar

* Attempt to fix server, mobile, deb and release updater

* Attempt to fix deb and mobile again
 - Fix typo in deb dependency
 - Enable release deb for aarch64-unknown-linux-gnu

* GitHub doesn't have ARM runners
 - Fix typo in server Dockerfile

* Publish deb and updater artifacts

* remove version from asset name

* update commands

* log release

* Some logs on updater errors

* show updater errors on frontend

* fix desktop ui caching

---------

Co-authored-by: Vítor Vasconcellos <vasconcellos.dev@gmail.com>
Co-authored-by: Ericson Fogo Soares <ericson.ds999@gmail.com>
Authored by Brendan Allan on 2023-10-10 15:30:56 +08:00, committed by GitHub
parent fb8af0832b
commit 49cc098f32
60 changed files with 4194 additions and 2747 deletions

View file

@ -2,48 +2,48 @@
{{#protoc}} {{#protoc}}
PROTOC = "{{{protoc}}}" PROTOC = "{{{protoc}}}"
{{/protoc}} {{/protoc}}
{{#ffmpeg}} {{^isLinux}}
FFMPEG_DIR = "{{{ffmpeg}}}" FFMPEG_DIR = "{{{nativeDeps}}}"
{{/ffmpeg}} {{/isLinux}}
{{#isMacOS}} {{#isMacOS}}
[target.x86_64-apple-darwin] [target.x86_64-apple-darwin]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"] rustflags = ["-L", "{{{nativeDeps}}}/lib"]
[target.x86_64-apple-darwin.heif] [target.x86_64-apple-darwin.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] rustc-link-search = ["{{{nativeDeps}}}/lib"]
rustc-link-lib = ["heif"] rustc-link-lib = ["heif"]
[target.aarch64-apple-darwin] [target.aarch64-apple-darwin]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"] rustflags = ["-L", "{{{nativeDeps}}}/lib"]
[target.aarch64-apple-darwin.heif] [target.aarch64-apple-darwin.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] rustc-link-search = ["{{{nativeDeps}}}/lib"]
rustc-link-lib = ["heif"] rustc-link-lib = ["heif"]
{{/isMacOS}} {{/isMacOS}}
{{#isWin}} {{#isWin}}
[target.x86_64-pc-windows-msvc] [target.x86_64-pc-windows-msvc]
rustflags = ["-L", "{{{projectRoot}}}\\target\\Frameworks\\lib"] rustflags = ["-L", "{{{nativeDeps}}}\\lib"]
[target.x86_64-pc-windows-msvc.heif] [target.x86_64-pc-windows-msvc.heif]
rustc-link-search = ["{{{projectRoot}}}\\target\\Frameworks\\lib"] rustc-link-search = ["{{{nativeDeps}}}\\lib"]
rustc-link-lib = ["heif"] rustc-link-lib = ["heif"]
{{/isWin}} {{/isWin}}
{{#isLinux}} {{#isLinux}}
[target.x86_64-unknown-linux-gnu] [target.x86_64-unknown-linux-gnu]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"] rustflags = ["-L", "{{{nativeDeps}}}/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"]
[target.x86_64-unknown-linux-gnu.heif] [target.x86_64-unknown-linux-gnu.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] rustc-link-search = ["{{{nativeDeps}}}/lib"]
rustc-link-lib = ["heif"] rustc-link-lib = ["heif"]
[target.aarch64-unknown-linux-gnu] [target.aarch64-unknown-linux-gnu]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"] rustflags = ["-L", "{{{nativeDeps}}}/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"]
[target.aarch64-unknown-linux-gnu.heif] [target.aarch64-unknown-linux-gnu.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"] rustc-link-search = ["{{{nativeDeps}}}/lib"]
rustc-link-lib = ["heif"] rustc-link-lib = ["heif"]
{{/isLinux}} {{/isLinux}}

View file

@ -23,14 +23,14 @@ runs:
if-no-files-found: error if-no-files-found: error
retention-days: 1 retention-days: 1
# - name: Publish artifacts (Debian - deb) - name: Publish artifacts (Debian - deb)
# if: ${{ matrix.settings.host == 'ubuntu-20.04' }} if: ${{ matrix.settings.host == 'ubuntu-20.04' }}
# uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3
# with: with:
# name: Spacedrive-deb-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }} name: Spacedrive-deb-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }}
# path: target/${{ inputs.target }}/${{ inputs.profile }}/bundle/deb/*.deb path: target/${{ inputs.target }}/${{ inputs.profile }}/bundle/deb/*.deb
# if-no-files-found: error if-no-files-found: error
# retention-days: 1 retention-days: 1
- name: Publish artifacts (Windows - msi) - name: Publish artifacts (Windows - msi)
if: ${{ matrix.settings.host == 'windows-latest' }} if: ${{ matrix.settings.host == 'windows-latest' }}
@ -50,13 +50,13 @@ runs:
if-no-files-found: error if-no-files-found: error
retention-days: 1 retention-days: 1
# - name: Publish updater binaries - name: Publish updater binaries
# uses: actions/upload-artifact@v3 uses: actions/upload-artifact@v3
# with: with:
# name: Spacedrive-Updaters-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }} name: Spacedrive-Updater-${{ inputs.target }}-${{ env.GITHUB_SHA_SHORT }}
# path: | path: |
# target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.tar.gz* target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.tar.gz*
# target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.zip* target/${{ inputs.target }}/${{ inputs.profile }}/bundle/**/*.zip*
# !target/**/deb/**/*.tar.gz !target/**/deb/**/*.tar.gz
# if-no-files-found: error if-no-files-found: error
# retention-days: 1 retention-days: 1

View file

@ -64,7 +64,7 @@ runs:
TARGET_TRIPLE: ${{ inputs.target }} TARGET_TRIPLE: ${{ inputs.target }}
GITHUB_TOKEN: ${{ inputs.token }} GITHUB_TOKEN: ${{ inputs.token }}
run: | run: |
pushd .. pushd scripts
npm i archive-wasm mustache npm i --production
popd popd
node scripts/preprep.mjs node scripts/preprep.mjs

View file

@ -24,11 +24,12 @@ jobs:
# target: aarch64-pc-windows-msvc # target: aarch64-pc-windows-msvc
- host: ubuntu-20.04 - host: ubuntu-20.04
target: x86_64-unknown-linux-gnu target: x86_64-unknown-linux-gnu
bundles: appimage bundles: appimage,deb
# - host: ubuntu-20.04 # - host: ubuntu-20.04
# target: x86_64-unknown-linux-musl # target: x86_64-unknown-linux-musl
# - host: ubuntu-20.04 # - host: ubuntu-20.04
# target: aarch64-unknown-linux-gnu # target: aarch64-unknown-linux-gnu
# bundles: deb # no appimage for now unfortunetly
# - host: ubuntu-20.04 # - host: ubuntu-20.04
# target: aarch64-unknown-linux-musl # target: aarch64-unknown-linux-musl
# - host: ubuntu-20.04 # - host: ubuntu-20.04
@ -95,7 +96,7 @@ jobs:
- name: Build - name: Build
run: | run: |
pnpm tauri build --ci -v --target ${{ matrix.settings.target }} --bundles ${{ matrix.settings.bundles }} pnpm tauri build --ci -v --target ${{ matrix.settings.target }} --bundles ${{ matrix.settings.bundles }},updater
env: env:
TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }} TAURI_PRIVATE_KEY: ${{ secrets.TAURI_PRIVATE_KEY }}
TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }} TAURI_KEY_PASSWORD: ${{ secrets.TAURI_KEY_PASSWORD }}
@ -107,7 +108,6 @@ jobs:
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }} APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }} APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }}
NODE_OPTIONS: --max-old-space-size=4096
- name: Publish Artifacts - name: Publish Artifacts
uses: ./.github/actions/publish-artifacts uses: ./.github/actions/publish-artifacts

View file

@ -61,7 +61,6 @@ jobs:
build-args: | build-args: |
REPO=${{ steps.image_info.outputs.repo }} REPO=${{ steps.image_info.outputs.repo }}
REPO_REF=${{ steps.image_info.outputs.repo_ref }} REPO_REF=${{ steps.image_info.outputs.repo_ref }}
NODE_OPTIONS: "--max-old-space-size=4096"
containerfiles: | containerfiles: |
./apps/server/docker/Dockerfile ./apps/server/docker/Dockerfile

.gitignore (vendored): 2 changes
View file

@ -25,6 +25,7 @@ packages/*/node_modules
packages/*/data packages/*/data
apps/*/data apps/*/data
apps/*/stats.html apps/*/stats.html
apps/.deps
apps/releases/.vscode apps/releases/.vscode
apps/desktop/src-tauri/tauri.conf.patch.json apps/desktop/src-tauri/tauri.conf.patch.json
apps/desktop/src-tauri/*.dll apps/desktop/src-tauri/*.dll
@ -78,7 +79,6 @@ dev.db-journal
sd_init.json sd_init.json
spacedrive spacedrive
scripts/.tmp
.cargo/config .cargo/config
.cargo/config.toml .cargo/config.toml
.github/scripts/deps .github/scripts/deps

Cargo.lock (generated): 62 changes
View file

@ -401,22 +401,6 @@ dependencies = [
"crossbeam-utils", "crossbeam-utils",
] ]
[[package]]
name = "attohttpc"
version = "0.22.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1fcf00bc6d5abb29b5f97e3c61a90b6d3caa12f3faf897d4a3e3607c050a35a7"
dependencies = [
"flate2",
"http",
"log",
"native-tls",
"serde",
"serde_json",
"serde_urlencoded",
"url",
]
[[package]] [[package]]
name = "autocfg" name = "autocfg"
version = "1.1.0" version = "1.1.0"
@ -6331,10 +6315,12 @@ dependencies = [
"system-configuration", "system-configuration",
"tokio", "tokio",
"tokio-native-tls", "tokio-native-tls",
"tokio-util",
"tower-service", "tower-service",
"url", "url",
"wasm-bindgen", "wasm-bindgen",
"wasm-bindgen-futures", "wasm-bindgen-futures",
"wasm-streams",
"web-sys", "web-sys",
"winreg 0.50.0", "winreg 0.50.0",
] ]
@ -6865,7 +6851,7 @@ dependencies = [
[[package]] [[package]]
name = "sd-desktop" name = "sd-desktop"
version = "0.1.0" version = "0.0.1"
dependencies = [ dependencies = [
"axum", "axum",
"dotenv", "dotenv",
@ -7863,6 +7849,19 @@ dependencies = [
"unicode-xid", "unicode-xid",
] ]
[[package]]
name = "sys-locale"
version = "0.2.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8a11bd9c338fdba09f7881ab41551932ad42e405f61d01e8406baea71c07aee"
dependencies = [
"js-sys",
"libc",
"wasm-bindgen",
"web-sys",
"windows-sys 0.45.0",
]
[[package]] [[package]]
name = "sysinfo" name = "sysinfo"
version = "0.29.10" version = "0.29.10"
@ -8014,13 +8013,13 @@ checksum = "9d0e916b1148c8e263850e1ebcbd046f333e0683c724876bb0da63ea4373dc8a"
[[package]] [[package]]
name = "tauri" name = "tauri"
version = "1.3.0" version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d42ba3a2e8556722f31336a0750c10dbb6a81396a1c452977f515da83f69f842" checksum = "0238c5063bf9613054149a1b6bce4935922e532b7d8211f36989a490a79806be"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"attohttpc",
"base64 0.21.4", "base64 0.21.4",
"bytes",
"cocoa", "cocoa",
"dirs-next", "dirs-next",
"embed_plist", "embed_plist",
@ -8043,6 +8042,7 @@ dependencies = [
"rand 0.8.5", "rand 0.8.5",
"raw-window-handle", "raw-window-handle",
"regex", "regex",
"reqwest",
"rfd", "rfd",
"semver", "semver",
"serde", "serde",
@ -8051,6 +8051,7 @@ dependencies = [
"serialize-to-javascript", "serialize-to-javascript",
"shared_child", "shared_child",
"state", "state",
"sys-locale",
"tar", "tar",
"tauri-macros", "tauri-macros",
"tauri-runtime", "tauri-runtime",
@ -8129,9 +8130,9 @@ dependencies = [
[[package]] [[package]]
name = "tauri-runtime" name = "tauri-runtime"
version = "0.13.0" version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3b80ea3fcd5fefb60739a3b577b277e8fc30434538a2f5bba82ad7d4368c422" checksum = "07f8e9e53e00e9f41212c115749e87d5cd2a9eebccafca77a19722eeecd56d43"
dependencies = [ dependencies = [
"gtk", "gtk",
"http", "http",
@ -8150,9 +8151,9 @@ dependencies = [
[[package]] [[package]]
name = "tauri-runtime-wry" name = "tauri-runtime-wry"
version = "0.13.0" version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d1c396950b1ba06aee1b4ffe6c7cd305ff433ca0e30acbc5fa1a2f92a4ce70f1" checksum = "8141d72b6b65f2008911e9ef5b98a68d1e3413b7a1464e8f85eb3673bb19a895"
dependencies = [ dependencies = [
"cocoa", "cocoa",
"gtk", "gtk",
@ -9298,6 +9299,19 @@ version = "0.2.87"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1"
[[package]]
name = "wasm-streams"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4609d447824375f43e1ffbc051b50ad8f4b3ae8219680c94452ea05eb240ac7"
dependencies = [
"futures-util",
"js-sys",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]] [[package]]
name = "web-sys" name = "web-sys"
version = "0.3.64" version = "0.3.64"

View file

@ -1,14 +1,11 @@
{ {
"name": "@sd/desktop", "name": "@sd/desktop",
"version": "1.0.0",
"main": "index.ts",
"license": "GPL-3.0-only",
"private": true, "private": true,
"scripts": { "scripts": {
"vite": "vite", "vite": "vite",
"dev": "vite dev", "dev": "vite dev",
"build": "vite build", "build": "vite build",
"tauri": "node ./src-tauri/scripts/tauri.js", "tauri": "pnpm --filter @sd/scripts -- tauri",
"dmg": "open ../../target/release/bundle/dmg/", "dmg": "open ../../target/release/bundle/dmg/",
"typecheck": "tsc -b", "typecheck": "tsc -b",
"lint": "eslint src --cache" "lint": "eslint src --cache"
@ -19,27 +16,26 @@
"@sd/client": "workspace:*", "@sd/client": "workspace:*",
"@sd/interface": "workspace:*", "@sd/interface": "workspace:*",
"@sd/ui": "workspace:*", "@sd/ui": "workspace:*",
"@sentry/vite-plugin": "^2.7.0", "@sentry/vite-plugin": "^2.7.1",
"@tanstack/react-query": "^4.24.4", "@tanstack/react-query": "^4.35",
"@tauri-apps/api": "1.3.0", "@tauri-apps/api": "1.5.0",
"react": "^18.2.0", "react": "^18.2.0",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
"react-router-dom": "6.9.0" "react-router-dom": "6.9.0",
"sonner": "^1.0.3"
}, },
"devDependencies": { "devDependencies": {
"@iarna/toml": "^2.2.5",
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@tauri-apps/cli": "1.3.1", "@tauri-apps/cli": "^1.5",
"@types/babel__core": "^7.20.1", "@types/babel__core": "^7.20",
"@types/react": "^18.0.21", "@types/react": "^18.2.0",
"@types/react-dom": "^18.0.6", "@types/react-dom": "^18.2.0",
"@vitejs/plugin-react": "^2.1.0", "@vitejs/plugin-react": "^4.1",
"sass": "^1.55.0", "sass": "^1.68",
"semver": "^7.5.0", "typescript": "^5.2",
"typescript": "^5.0.4", "vite": "^4.4",
"vite": "^4.0.4", "vite-plugin-html": "^3.2",
"vite-plugin-html": "^3.2.0", "vite-plugin-svgr": "^3.3",
"vite-plugin-svgr": "^2.2.1", "vite-tsconfig-paths": "^4.2"
"vite-tsconfig-paths": "^4.0.3"
} }
} }

View file

@ -1,6 +1,6 @@
[package] [package]
name = "sd-desktop" name = "sd-desktop"
version = "0.1.0" version = "0.0.1"
description = "The universal file manager." description = "The universal file manager."
authors = ["Spacedrive Technology Inc."] authors = ["Spacedrive Technology Inc."]
default-run = "sd-desktop" default-run = "sd-desktop"
@ -9,7 +9,8 @@ repository = { workspace = true }
edition = { workspace = true } edition = { workspace = true }
[dependencies] [dependencies]
tauri = { version = "=1.3.0", features = ["dialog-all", "linux-protocol-headers", "macos-private-api", "os-all", "path-all", "protocol-all", "shell-all", "window-all"] } tauri = { version = "1.5.1", features = ["dialog-all", "linux-protocol-headers", "macos-private-api", "os-all", "path-all", "protocol-all", "shell-all", "updater", "window-all"] }
rspc = { workspace = true, features = ["tauri"] } rspc = { workspace = true, features = ["tauri"] }
sd-core = { path = "../../../core", features = [ sd-core = { path = "../../../core", features = [
"ffmpeg", "ffmpeg",
@ -44,9 +45,8 @@ sd-desktop-macos = { path = "../crates/macos" }
sd-desktop-windows = { path = "../crates/windows" } sd-desktop-windows = { path = "../crates/windows" }
[build-dependencies] [build-dependencies]
tauri-build = { version = "1.4.0", features = [] } tauri-build = { version = "1.5.0", features = [] }
[features] [features]
default = ["custom-protocol"] default = ["custom-protocol"]
custom-protocol = ["tauri/custom-protocol"] custom-protocol = ["tauri/custom-protocol"]
updater = ["tauri/updater"]

View file

@ -1,30 +0,0 @@
const { spawn } = require('node:child_process');
module.exports.spawn = (command, args) => {
if (typeof command !== 'string' || command.length === 0)
throw new Error('Command must be a string and not empty');
if (args == null) args = [];
else if (!Array.isArray(args) || args.some((arg) => typeof arg !== 'string'))
throw new Error('Args must be an array of strings');
return new Promise((resolve, reject) => {
const child = spawn(command, args, { shell: true, stdio: 'inherit' });
process.on('SIGTERM', () => child.kill('SIGTERM'));
process.on('SIGINT', () => child.kill('SIGINT'));
process.on('SIGBREAK', () => child.kill('SIGBREAK'));
process.on('SIGHUP', () => child.kill('SIGHUP'));
child.on('error', (error) => {
console.error(error);
reject(1);
});
child.on('exit', (code, signal) => {
if (code === null) code = signal === 'SIGINT' ? 0 : 1;
if (code === 0) {
resolve();
} else {
reject(code);
}
});
});
};

View file

@ -1,193 +0,0 @@
const fs = require('node:fs');
const path = require('node:path');
const toml = require('@iarna/toml');
const semver = require('semver');
const { spawn } = require('./spawn.js');
const workspace = path.resolve(__dirname, '../../../../');
const cargoConfig = toml.parse(
fs.readFileSync(path.resolve(workspace, '.cargo/config.toml'), { encoding: 'binary' })
);
if (cargoConfig.env && typeof cargoConfig.env === 'object')
for (const [name, value] of Object.entries(cargoConfig.env))
if (!process.env[name]) process.env[name] = value;
const toRemove = [];
const [_, __, ...args] = process.argv;
if (args.length === 0) args.push('build');
const tauriConf = JSON.parse(
fs.readFileSync(path.resolve(__dirname, '..', 'tauri.conf.json'), 'utf-8')
);
const framework = path.join(workspace, 'target/Frameworks');
switch (args[0]) {
case 'dev': {
if (process.platform === 'win32') setupSharedLibs('dll', path.join(framework, 'bin'), true);
break;
}
case 'build': {
if (
!process.env.NODE_OPTIONS ||
!process.env.NODE_OPTIONS.includes('--max_old_space_size')
) {
process.env.NODE_OPTIONS = `--max_old_space_size=4096 ${
process.env.NODE_OPTIONS ?? ''
}`;
}
if (args.findIndex((e) => e === '-c' || e === '--config') !== -1) {
throw new Error('Custom tauri build config is not supported.');
}
const targets = args
.filter((_, index, args) => {
if (index === 0) return false;
const previous = args[index - 1];
return previous === '-t' || previous === '--target';
})
.flatMap((target) => target.split(','));
const tauriPatch = {
tauri: { bundle: { macOS: {}, resources: [] } }
};
switch (process.platform) {
case 'darwin': {
// ARM64 support was added in macOS 11, but we need at least 11.2 due to our ffmpeg build
let macOSMinimumVersion = tauriConf?.tauri?.bundle?.macOS?.minimumSystemVersion;
let macOSArm64MinimumVersion = '11.2';
if (
(targets.includes('aarch64-apple-darwin') ||
(targets.length === 0 && process.arch === 'arm64')) &&
(macOSMinimumVersion == null ||
semver.lt(
semver.coerce(macOSMinimumVersion),
semver.coerce(macOSArm64MinimumVersion)
))
) {
macOSMinimumVersion = macOSArm64MinimumVersion;
console.log(
`aarch64-apple-darwin target detected, setting minimum system version to ${macOSMinimumVersion}`
);
}
if (macOSMinimumVersion) {
process.env.MACOSX_DEPLOYMENT_TARGET = macOSMinimumVersion;
tauriPatch.tauri.bundle.macOS.minimumSystemVersion = macOSMinimumVersion;
}
// Point tauri to our ffmpeg framework
tauriPatch.tauri.bundle.macOS.frameworks = [
path.join(workspace, 'target/Frameworks/FFMpeg.framework')
];
// Configure DMG background
process.env.BACKGROUND_FILE = path.resolve(__dirname, '..', 'dmg-background.png');
process.env.BACKGROUND_FILE_NAME = path.basename(process.env.BACKGROUND_FILE);
process.env.BACKGROUND_CLAUSE = `set background picture of opts to file ".background:${process.env.BACKGROUND_FILE_NAME}"`;
if (!fs.existsSync(process.env.BACKGROUND_FILE))
console.warn(
`WARNING: DMG background file not found at ${process.env.BACKGROUND_FILE}`
);
break;
}
case 'linux':
fs.rmSync(path.join(workspace, 'target/release/bundle/appimage'), {
recursive: true,
force: true
});
// Point tauri to the ffmpeg DLLs
tauriPatch.tauri.bundle.resources.push(
...setupSharedLibs('so', path.join(framework, 'lib'))
);
break;
case 'win32':
// Point tauri to the ffmpeg DLLs
tauriPatch.tauri.bundle.resources.push(
...setupSharedLibs('dll', path.join(framework, 'bin'))
);
break;
}
toRemove.push(
...tauriPatch.tauri.bundle.resources.map((file) =>
path.join(workspace, 'apps/desktop/src-tauri', file)
)
);
const tauriPatchConf = path.resolve(__dirname, '..', 'tauri.conf.patch.json');
fs.writeFileSync(tauriPatchConf, JSON.stringify(tauriPatch, null, 2));
toRemove.push(tauriPatchConf);
args.splice(1, 0, '-c', tauriPatchConf);
}
}
process.on('SIGINT', () => {
for (const file of toRemove)
try {
fs.unlinkSync(file);
} catch (e) {}
});
let code = 0;
spawn('pnpm', ['exec', 'tauri', ...args])
.catch((exitCode) => {
if (args[0] === 'build' || process.platform === 'linux') {
// Work around appimage buindling not working sometimes
appimageDir = path.join(workspace, 'target/release/bundle/appimage');
appDir = path.join(appimageDir, 'spacedrive.AppDir');
if (
fs.existsSync(path.join(appimageDir, 'build_appimage.sh')) &&
fs.existsSync(appDir) &&
!fs.readdirSync(appimageDir).filter((file) => file.endsWith('.AppImage')).length
) {
process.chdir(appimageDir);
fs.rmSync(appDir, { recursive: true, force: true });
return spawn('bash', ['build_appimage.sh']).catch((exitCode) => {
code = exitCode;
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`);
});
}
}
code = exitCode;
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`);
console.error(
`If you got an error related to FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm i\``
);
})
.finally(() => {
for (const file of toRemove)
try {
fs.unlinkSync(file);
} catch (e) {}
process.exit(code);
});
function setupSharedLibs(sufix, binDir, dev = false) {
const sharedLibs = fs
.readdirSync(binDir)
.filter((file) => file.endsWith(`.${sufix}`) || file.includes(`.${sufix}.`));
let targetDir = path.join(workspace, 'apps/desktop/src-tauri');
if (dev) {
targetDir = path.join(workspace, 'target/debug');
// Ensure the target/debug directory exists
fs.mkdirSync(targetDir, { recursive: true });
}
// Copy all shared libs to targetDir
for (const dll of sharedLibs)
fs.copyFileSync(path.join(binDir, dll), path.join(targetDir, dll));
return sharedLibs;
}

View file

@ -18,6 +18,7 @@ mod theme;
mod file; mod file;
mod menu; mod menu;
mod updater;
#[tauri::command(async)] #[tauri::command(async)]
#[specta::specta] #[specta::specta]
@ -133,9 +134,6 @@ async fn main() -> tauri::Result<()> {
let app = app let app = app
.setup(|app| { .setup(|app| {
#[cfg(feature = "updater")]
tauri::updater::builder(app.handle()).should_install(|_current, _latest| true);
let app = app.handle(); let app = app.handle();
app.windows().iter().for_each(|(_, window)| { app.windows().iter().for_each(|(_, window)| {
@ -178,6 +176,7 @@ async fn main() -> tauri::Result<()> {
}) })
.on_menu_event(menu::handle_menu_event) .on_menu_event(menu::handle_menu_event)
.menu(menu::get_menu()) .menu(menu::get_menu())
.manage(updater::State::default())
.invoke_handler(tauri_handlers![ .invoke_handler(tauri_handlers![
app_ready, app_ready,
reset_spacedrive, reset_spacedrive,
@ -189,7 +188,9 @@ async fn main() -> tauri::Result<()> {
file::open_file_path_with, file::open_file_path_with,
file::open_ephemeral_file_with, file::open_ephemeral_file_with,
file::reveal_items, file::reveal_items,
theme::lock_app_theme theme::lock_app_theme,
updater::check_for_update,
updater::install_update
]) ])
.build(tauri::generate_context!())?; .build(tauri::generate_context!())?;

View file

@ -0,0 +1,95 @@
use tauri::Manager;
use tokio::sync::Mutex;
use tracing::{error, warn};
#[derive(Debug, Clone, specta::Type, serde::Serialize)]
pub struct Update {
pub version: String,
pub body: Option<String>,
}
impl Update {
fn new(update: &tauri::updater::UpdateResponse<impl tauri::Runtime>) -> Self {
Self {
version: update.latest_version().to_string(),
body: update.body().map(|b| b.to_string()),
}
}
}
#[derive(Default)]
pub struct State {
install_lock: Mutex<()>,
}
async fn get_update(
app: tauri::AppHandle,
) -> Result<tauri::updater::UpdateResponse<impl tauri::Runtime>, String> {
tauri::updater::builder(app)
.header("X-Spacedrive-Version", "stable")
.map_err(|e| e.to_string())?
.check()
.await
.map_err(|e| e.to_string())
}
#[derive(Clone, serde::Serialize, specta::Type)]
#[serde(rename_all = "camelCase", tag = "status")]
pub enum UpdateEvent {
Loading,
Error(String),
UpdateAvailable { update: Update },
NoUpdateAvailable,
Installing,
}
#[tauri::command]
#[specta::specta]
pub async fn check_for_update(app: tauri::AppHandle) -> Result<Option<Update>, String> {
app.emit_all("updater", UpdateEvent::Loading).ok();
let update = match get_update(app.clone()).await {
Ok(update) => update,
Err(e) => {
app.emit_all("updater", UpdateEvent::Error(e.clone())).ok();
return Err(e);
}
};
let update = update.is_update_available().then(|| Update::new(&update));
app.emit_all(
"updater",
update
.clone()
.map(|update| UpdateEvent::UpdateAvailable { update })
.unwrap_or(UpdateEvent::NoUpdateAvailable),
)
.ok();
Ok(update)
}
#[tauri::command]
#[specta::specta]
pub async fn install_update(
app: tauri::AppHandle,
state: tauri::State<'_, State>,
) -> Result<(), String> {
let lock = match state.install_lock.try_lock() {
Ok(lock) => lock,
Err(_) => return Err("Update already installing".into()),
};
app.emit_all("updater", UpdateEvent::Installing).ok();
get_update(app.clone())
.await?
.download_and_install()
.await
.map_err(|e| e.to_string())?;
drop(lock);
Ok(())
}

View file

@ -1,13 +1,12 @@
{ {
"package": { "package": {
"productName": "Spacedrive", "productName": "Spacedrive"
"version": "0.1.0"
}, },
"build": { "build": {
"distDir": "../dist", "distDir": "../dist",
"devPath": "http://localhost:8001", "devPath": "http://localhost:8001",
"beforeDevCommand": "pnpm dev", "beforeDevCommand": "pnpm dev",
"beforeBuildCommand": "pnpm turbo run build --filter @sd/desktop" "beforeBuildCommand": "pnpm turbo run build --filter=@sd/desktop..."
}, },
"tauri": { "tauri": {
"macOSPrivateApi": true, "macOSPrivateApi": true,
@ -31,10 +30,16 @@
"shortDescription": "The universal file manager.", "shortDescription": "The universal file manager.",
"longDescription": "A cross-platform universal file explorer, powered by an open-source virtual distributed filesystem.", "longDescription": "A cross-platform universal file explorer, powered by an open-source virtual distributed filesystem.",
"deb": { "deb": {
"depends": [] "depends": [
"ffmpeg",
"gstreamer1.0-plugins-bad",
"gstreamer1.0-plugins-ugly",
"gstreamer1.0-gtk3",
"gstreamer1.0-libav"
]
}, },
"macOS": { "macOS": {
"frameworks": [], "frameworks": ["../../.deps/FFMpeg.framework"],
"minimumSystemVersion": "10.15", "minimumSystemVersion": "10.15",
"exceptionDomain": "", "exceptionDomain": "",
"entitlements": null "entitlements": null
@ -50,9 +55,12 @@
} }
}, },
"updater": { "updater": {
"active": false, "active": true,
"dialog": false,
"pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEZBMURCMkU5NEU3NDAyOEMKUldTTUFuUk82YklkK296dlkxUGkrTXhCT3ZMNFFVOWROcXNaS0RqWU1kMUdRV2tDdFdIS0Y3YUsK", "pubkey": "dW50cnVzdGVkIGNvbW1lbnQ6IG1pbmlzaWduIHB1YmxpYyBrZXk6IEZBMURCMkU5NEU3NDAyOEMKUldTTUFuUk82YklkK296dlkxUGkrTXhCT3ZMNFFVOWROcXNaS0RqWU1kMUdRV2tDdFdIS0Y3YUsK",
"endpoints": ["https://spacedrive.com/api/releases/alpha/{{target}}/{{arch}}"] "endpoints": [
"https://spacedrive-landing-git-eng-927-fix-updater-spacedrive.vercel.app/api/releases/tauri/{{target}}/{{arch}}"
]
}, },
"allowlist": { "allowlist": {
"all": false, "all": false,

View file

@ -22,6 +22,7 @@ import { getSpacedropState } from '@sd/interface/hooks/useSpacedropState';
import '@sd/ui/style'; import '@sd/ui/style';
import * as commands from './commands'; import * as commands from './commands';
import { updater, useUpdater } from './updater';
// TODO: Bring this back once upstream is fixed up. // TODO: Bring this back once upstream is fixed up.
// const client = hooks.createClient({ // const client = hooks.createClient({
@ -57,7 +58,7 @@ if (customUriServerUrl && !customUriServerUrl?.endsWith('/')) {
} }
const queryParams = customUriAuthToken ? `?token=${encodeURIComponent(customUriAuthToken)}` : ''; const queryParams = customUriAuthToken ? `?token=${encodeURIComponent(customUriAuthToken)}` : '';
const platform: Platform = { const platform = {
platform: 'tauri', platform: 'tauri',
getThumbnailUrlByThumbKey: (keyParts) => getThumbnailUrlByThumbKey: (keyParts) =>
`${customUriServerUrl}thumbnail/${keyParts `${customUriServerUrl}thumbnail/${keyParts
@ -75,13 +76,14 @@ const platform: Platform = {
showDevtools: () => invoke('show_devtools'), showDevtools: () => invoke('show_devtools'),
confirm: (msg, cb) => confirm(msg).then(cb), confirm: (msg, cb) => confirm(msg).then(cb),
userHomeDir: homeDir, userHomeDir: homeDir,
updater,
auth: { auth: {
start(url) { start(url) {
open(url); open(url);
} }
}, },
...commands ...commands
}; } satisfies Platform;
const queryClient = new QueryClient({ const queryClient = new QueryClient({
defaultOptions: { defaultOptions: {
@ -119,6 +121,8 @@ export default function App() {
}; };
}, []); }, []);
useUpdater();
return ( return (
<RspcProvider queryClient={queryClient}> <RspcProvider queryClient={queryClient}>
<PlatformProvider platform={platform}> <PlatformProvider platform={platform}>

View file

@ -54,6 +54,15 @@ export function lockAppTheme(themeType: AppThemeType) {
return invoke()<null>("lock_app_theme", { themeType }) return invoke()<null>("lock_app_theme", { themeType })
} }
export function checkForUpdate() {
return invoke()<Update | null>("check_for_update")
}
export function installUpdate() {
return invoke()<null>("install_update")
}
export type Update = { version: string; body: string | null }
export type OpenWithApplication = { url: string; name: string } export type OpenWithApplication = { url: string; name: string }
export type AppThemeType = "Auto" | "Light" | "Dark" export type AppThemeType = "Auto" | "Light" | "Dark"
export type EphemeralFileOpenResult = { t: "Ok"; c: string } | { t: "Err"; c: string } export type EphemeralFileOpenResult = { t: "Ok"; c: string } | { t: "Err"; c: string }

View file

@ -1,13 +1,16 @@
<!doctype html> <!doctype html>
<html lang="en" class="vanilla-theme"> <html lang="en" class="vanilla-theme">
<head>
<meta charset="UTF-8" /> <head>
<link rel="icon" type="image/svg+xml" href="/src/favicon.svg" /> <meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" /> <link rel="icon" type="image/svg+xml" href="/src/favicon.svg" />
<title>Spacedrive</title> <meta name="viewport" content="width=device-width, initial-scale=1.0" />
</head> <title>Spacedrive</title>
<body style="overflow: hidden"> </head>
<div id="root"></div>
<script type="module" src="./index.tsx"></script> <body style="overflow: hidden">
</body> <div id="root"></div>
<script type="module" src="./index.tsx"></script>
</body>
</html> </html>

View file

@ -0,0 +1,87 @@
import { listen } from '@tauri-apps/api/event';
import { useEffect, useRef } from 'react';
import { proxy, useSnapshot } from 'valtio';
import { UpdateStore } from '@sd/interface';
import { toast, ToastId } from '@sd/ui';
import * as commands from './commands';
export const updateStore = proxy<UpdateStore>({
status: 'idle'
});
listen<UpdateStore>('updater', (e) => {
Object.assign(updateStore, e.payload);
console.log(updateStore);
});
const onInstallCallbacks = new Set<() => void>();
export const updater = {
useSnapshot: () => useSnapshot(updateStore),
checkForUpdate: commands.checkForUpdate,
installUpdate: () => {
for (const cb of onInstallCallbacks) {
cb();
}
const promise = commands.installUpdate();
toast.promise(promise, {
loading: 'Downloading Update',
success: 'Update Downloaded. Restart Spacedrive to install',
error: (e: any) => (
<>
<p>Failed to download update</p>
<p className="text-gray-300">Error: {e.toString()}</p>
</>
)
});
return promise;
}
};
async function checkForUpdate() {
const update = await updater.checkForUpdate();
if (!update) return;
let id: ToastId | null = null;
const cb = () => {
if (id !== null) toast.dismiss(id);
};
onInstallCallbacks.add(cb);
toast.info(
(_id) => {
id = _id;
return {
title: 'New Update Available',
body: `Version ${update.version}`
};
},
{
onClose() {
onInstallCallbacks.delete(cb);
},
duration: 10 * 1000,
action: {
label: 'Update',
onClick: () => updater.installUpdate()
}
}
);
}
export function useUpdater() {
const alreadyChecked = useRef(false);
useEffect(() => {
if (!alreadyChecked.current && import.meta.env.PROD) checkForUpdate();
alreadyChecked.current = true;
}, []);
}

View file

@ -1,5 +1,6 @@
{ {
"name": "@sd/landing", "name": "@sd/landing",
"private": true,
"scripts": { "scripts": {
"dev": "next dev", "dev": "next dev",
"build": "contentlayer build && next build", "build": "contentlayer build && next build",
@ -21,12 +22,12 @@
"@vercel/edge-config": "^0.1.11", "@vercel/edge-config": "^0.1.11",
"autoprefixer": "^10.4.14", "autoprefixer": "^10.4.14",
"clsx": "^1.2.1", "clsx": "^1.2.1",
"contentlayer": "^0.3.2", "contentlayer": "^0.3.4",
"drizzle-orm": "^0.26.0", "drizzle-orm": "^0.26.0",
"markdown-to-jsx": "^7.2.0", "markdown-to-jsx": "^7.2.0",
"md5": "^2.3.0", "md5": "^2.3.0",
"next": "13.4.3", "next": "13.4.3",
"next-contentlayer": "^0.3.2", "next-contentlayer": "^0.3.4",
"react": "18.2.0", "react": "18.2.0",
"react-burger-menu": "^3.0.9", "react-burger-menu": "^3.0.9",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
@ -49,15 +50,14 @@
"devDependencies": { "devDependencies": {
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@svgr/webpack": "^8.0.1", "@svgr/webpack": "^8.0.1",
"@types/node": "20.2.1", "@types/node": "^18.17",
"@types/react": "18.2.6", "@types/react": "^18.2.0",
"@types/react-burger-menu": "^2.8.3", "@types/react-burger-menu": "^2.8.3",
"@types/react-dom": "18.2.4", "@types/react-dom": "^18.2.0",
"@types/react-helmet": "^6.1.6", "@types/three": "^0.152",
"@types/three": "^0.152.1",
"drizzle-kit": "db-push", "drizzle-kit": "db-push",
"postcss": "^8.4.23", "postcss": "^8.4",
"tailwindcss": "^3.3.2", "tailwindcss": "^3.3.2",
"typescript": "5.0.4" "typescript": "^5.2"
} }
} }

View file

@ -24,15 +24,11 @@ type TauriResponse = {
export const runtime = 'edge'; export const runtime = 'edge';
export async function GET(req: Request, extra: { params: Record<string, unknown> }) { export async function GET(req: Request, extra: { params: Record<string, unknown> }) {
// handles old /api/releases/[target]/[arch]/[currentVersion] requests const version = req.headers.get('X-Spacedrive-Version');
// should be removed once stable release is out
if (tauriArch.safeParse(extra.params['target']).success) {
return NextResponse.redirect(
new URL(`/api/releases/alpha/${extra.params.version}/${extra.params.target}`, req.url)
);
}
const params = await paramsSchema.parseAsync(extra.params); if (version === null) return NextResponse.json({ error: 'No version header' }, { status: 400 });
const params = await paramsSchema.parseAsync({ ...extra.params, version });
const release = await getRelease(params); const release = await getRelease(params);
@ -64,20 +60,19 @@ export async function GET(req: Request, extra: { params: Record<string, unknown>
return NextResponse.json(response); return NextResponse.json(response);
} }
const ORG = 'spacedriveapp';
const REPO = 'spacedrive';
async function getRelease({ version }: z.infer<typeof paramsSchema>): Promise<any> { async function getRelease({ version }: z.infer<typeof paramsSchema>): Promise<any> {
switch (version) { switch (version) {
case 'alpha': { case 'alpha': {
const data = await githubFetch(`/repos/${ORG}/${REPO}/releases`); const data = await githubFetch(`/repos/${env.GITHUB_ORG}/${env.GITHUB_REPO}/releases`);
return data.find((d: any) => d.tag_name.includes('alpha')); return data.find((d: any) => d.tag_name.includes('alpha'));
} }
case 'stable': case 'stable':
return githubFetch(`/repos/${ORG}/${REPO}/releases/latest`); return githubFetch(`/repos/${env.GITHUB_ORG}/${env.GITHUB_REPO}/releases/latest`);
default: default:
return githubFetch(`/repos/${ORG}/${REPO}/releases/tags/${version}`); return githubFetch(
`/repos/$${env.GITHUB_ORG}/${env.GITHUB_REPO}/releases/tags/${version}`
);
} }
} }
@ -95,10 +90,10 @@ async function githubFetch(path: string) {
return fetch(`https://api.github.com${path}`, FETCH_META).then((r) => r.json()); return fetch(`https://api.github.com${path}`, FETCH_META).then((r) => r.json());
} }
function binaryName({ version, target, arch }: z.infer<typeof paramsSchema>) { function binaryName({ target, arch }: z.infer<typeof paramsSchema>) {
const ext = extensionForTarget(target); const ext = extensionForTarget(target);
return `Spacedrive-Updater-${version}-${target}-${arch}.${ext}`; return `Spacedrive-Updater-${target}-${arch}.${ext}`;
} }
function extensionForTarget(target: z.infer<typeof tauriTarget>) { function extensionForTarget(target: z.infer<typeof tauriTarget>) {

View file

@ -12,7 +12,9 @@ export const env = createEnv({
AWS_SES_ACCESS_KEY: z.string(), AWS_SES_ACCESS_KEY: z.string(),
AWS_SES_SECRET_KEY: z.string(), AWS_SES_SECRET_KEY: z.string(),
AWS_SES_REGION: z.string(), AWS_SES_REGION: z.string(),
MAILER_FROM: z.string().default('Spacedrive <no-reply@spacedrive.com>') MAILER_FROM: z.string().default('Spacedrive <no-reply@spacedrive.com>'),
GITHUB_ORG: z.string().default('spacedriveapp'),
GITHUB_REPO: z.string().default('spacedrive')
}, },
client: {}, client: {},
runtimeEnv: { runtimeEnv: {
@ -25,7 +27,9 @@ export const env = createEnv({
AWS_SES_ACCESS_KEY: process.env.AWS_SES_ACCESS_KEY, AWS_SES_ACCESS_KEY: process.env.AWS_SES_ACCESS_KEY,
AWS_SES_SECRET_KEY: process.env.AWS_SES_SECRET_KEY, AWS_SES_SECRET_KEY: process.env.AWS_SES_SECRET_KEY,
AWS_SES_REGION: process.env.AWS_SES_REGION, AWS_SES_REGION: process.env.AWS_SES_REGION,
MAILER_FROM: process.env.MAILER_FROM MAILER_FROM: process.env.MAILER_FROM,
GITHUB_ORG: process.env.GITHUB_ORG,
GITHUB_REPO: process.env.GITHUB_REPO
}, },
// In dev or in eslint disable checking. // In dev or in eslint disable checking.
// Kinda sucks for in dev but you don't need the whole setup to change the docs. // Kinda sucks for in dev but you don't need the whole setup to change the docs.

View file

@ -1,40 +1,49 @@
#!/usr/bin/env zsh #!/usr/bin/env sh
set -e set -eu
echo "Building \'sd-mobile-ios\' library..." if [ "${CI:-}" = "true" ]; then
set -x
fi
if [ -z "${HOME:-}" ]; then
HOME="$(CDPATH='' cd -- "$(osascript -e 'set output to (POSIX path of (path to home folder))')" && pwd)"
export HOME
fi
echo "Building 'sd-mobile-ios' library..."
__dirname="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd)" __dirname="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd)"
TARGET_DIRECTORY="$(CDPATH='' cd -- "${__dirname}/../../../../../target" && pwd)"
if [[ $CONFIGURATION != "Debug" ]]; then # Ensure target dir exists
TARGET_DIRECTORY="${__dirname}/../../../../../target"
mkdir -p "$TARGET_DIRECTORY"
TARGET_DIRECTORY="$(CDPATH='' cd -- "$TARGET_DIRECTORY" && pwd)"
if [ "${CONFIGURATION:-}" != "Debug" ]; then
CARGO_FLAGS=--release CARGO_FLAGS=--release
export CARGO_FLAGS export CARGO_FLAGS
fi fi
export PROTOC="${TARGET_DIRECTORY}/Frameworks/bin/protoc"
# TODO: Also do this for non-Apple Silicon Macs # TODO: Also do this for non-Apple Silicon Macs
if [[ $SPACEDRIVE_CI == "1" ]]; then if [ "${SPACEDRIVE_CI:-}" = "1" ]; then
# Required for CI # Required for CI
export PATH="$HOME/.cargo/bin:$PATH" export PATH="${CARGO_HOME:-"${HOME}/.cargo"}/bin:$PATH"
cargo build -p sd-mobile-ios --target x86_64-apple-ios cargo build -p sd-mobile-ios --target x86_64-apple-ios
if [[ $PLATFORM_NAME = "iphonesimulator" ]] if [ "${PLATFORM_NAME:-}" = "iphonesimulator" ]; then
then lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_iossim.a "$TARGET_DIRECTORY"/x86_64-apple-ios/debug/libsd_mobile_ios.a
lipo -create -output $TARGET_DIRECTORY/libsd_mobile_iossim.a $TARGET_DIRECTORY/x86_64-apple-ios/debug/libsd_mobile_ios.a
else else
lipo -create -output $TARGET_DIRECTORY/libsd_mobile_ios.a $TARGET_DIRECTORY/x86_64-apple-ios/debug/libsd_mobile_ios.a lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_ios.a "$TARGET_DIRECTORY"/x86_64-apple-ios/debug/libsd_mobile_ios.a
fi fi
exit 0 exit 0
fi fi
if [[ $PLATFORM_NAME = "iphonesimulator" ]] if [ "${PLATFORM_NAME:-}" = "iphonesimulator" ]; then
then cargo build -p sd-mobile-ios --target aarch64-apple-ios-sim
cargo build -p sd-mobile-ios --target aarch64-apple-ios-sim lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_iossim.a "$TARGET_DIRECTORY"/aarch64-apple-ios-sim/debug/libsd_mobile_ios.a
lipo -create -output $TARGET_DIRECTORY/libsd_mobile_iossim.a $TARGET_DIRECTORY/aarch64-apple-ios-sim/debug/libsd_mobile_ios.a
else else
cargo build -p sd-mobile-ios --target aarch64-apple-ios cargo build -p sd-mobile-ios --target aarch64-apple-ios
lipo -create -output $TARGET_DIRECTORY/libsd_mobile_ios.a $TARGET_DIRECTORY/aarch64-apple-ios/debug/libsd_mobile_ios.a lipo -create -output "$TARGET_DIRECTORY"/libsd_mobile_ios.a "$TARGET_DIRECTORY"/aarch64-apple-ios/debug/libsd_mobile_ios.a
fi fi

View file

@ -28,7 +28,7 @@
"@sd/assets": "workspace:*", "@sd/assets": "workspace:*",
"@sd/client": "workspace:*", "@sd/client": "workspace:*",
"@shopify/flash-list": "1.5.0", "@shopify/flash-list": "1.5.0",
"@tanstack/react-query": "^4.29.1", "@tanstack/react-query": "^4.35",
"class-variance-authority": "^0.5.3", "class-variance-authority": "^0.5.3",
"dayjs": "^1.11.8", "dayjs": "^1.11.8",
"event-target-polyfill": "^0.0.3", "event-target-polyfill": "^0.0.3",
@ -42,7 +42,7 @@
"lottie-react-native": "6.2.0", "lottie-react-native": "6.2.0",
"moti": "^0.26.0", "moti": "^0.26.0",
"phosphor-react-native": "^1.1.2", "phosphor-react-native": "^1.1.2",
"react": "18.2.0", "react": "^18.2.0",
"react-hook-form": "~7.45.2", "react-hook-form": "~7.45.2",
"react-native": "0.72.4", "react-native": "0.72.4",
"react-native-document-picker": "^9.0.1", "react-native-document-picker": "^9.0.1",
@ -61,13 +61,13 @@
"zod": "~3.22.2" "zod": "~3.22.2"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "^7.22.11", "@babel/core": "~7",
"@rnx-kit/metro-config": "^1.3.8", "@rnx-kit/metro-config": "^1.3.8",
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@types/react": "~18.0.38", "@types/react": "^18.2.0",
"babel-plugin-module-resolver": "^5.0.0", "babel-plugin-module-resolver": "^5.0.0",
"eslint-plugin-react-native": "^4.0.0", "eslint-plugin-react-native": "^4.0.0",
"react-native-svg-transformer": "^1.1.0", "react-native-svg-transformer": "^1.1.0",
"typescript": "^5.1.3" "typescript": "^5.2"
} }
} }

View file

@ -67,6 +67,8 @@ ENV PATH="/root/.cargo/bin:$PATH"
RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \ RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \
env CI=true ./scripts/setup.sh env CI=true ./scripts/setup.sh
RUN cd ./scripts; npm i --production
RUN --mount=type=cache,target=/root/.cache/prisma/binaries/cli/ \ RUN --mount=type=cache,target=/root/.cache/prisma/binaries/cli/ \
pnpm prep pnpm prep
@ -89,11 +91,12 @@ ENV TZ=UTC \
# Note: This needs to happen before the apt call to avoid locking issues with the previous step # Note: This needs to happen before the apt call to avoid locking issues with the previous step
COPY --from=server /srv/spacedrive/target/release/sd-server /usr/bin/ COPY --from=server /srv/spacedrive/target/release/sd-server /usr/bin/
COPY --from=server /srv/spacedrive/apps/.deps/lib /usr/lib/spacedrive
RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \ RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \
apt-get install \ apt-get install \
libavdevice59 libpostproc56 libswscale6 libswresample4 libavformat59 libavutil57 libavfilter8 \ libavdevice59 libpostproc56 libswscale6 libswresample4 libavformat59 libavutil57 libavfilter8 \
libavcodec59 libheif1 libavcodec59
COPY --chmod=755 entrypoint.sh /usr/bin/ COPY --chmod=755 entrypoint.sh /usr/bin/

View file

@ -21,15 +21,15 @@
"devDependencies": { "devDependencies": {
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@sd/ui": "workspace:*", "@sd/ui": "workspace:*",
"@types/react": "^18.0.28", "@types/react": "^18.2.0",
"@types/react-dom": "^18.0.11", "@types/react-dom": "^18.2.0",
"@vitejs/plugin-react": "^3.1.0", "@vitejs/plugin-react": "^4.1",
"autoprefixer": "^10.4.12", "autoprefixer": "^10.4.12",
"postcss": "^8.4.17", "postcss": "^8.4",
"prop-types": "^15.8.1", "prop-types": "^15.8.1",
"storybook": "^7.0.5", "storybook": "^7.0.5",
"tailwindcss": "^3.3.2", "tailwindcss": "^3.3.2",
"typescript": "^5.0.4", "typescript": "^5.2",
"vite": "^4.2.0" "vite": "^4.4"
} }
} }

View file

@ -14,7 +14,7 @@
"@rspc/client": "=0.0.0-main-799eec5d", "@rspc/client": "=0.0.0-main-799eec5d",
"@sd/client": "workspace:*", "@sd/client": "workspace:*",
"@sd/interface": "workspace:*", "@sd/interface": "workspace:*",
"@tanstack/react-query": "^4.12.0", "@tanstack/react-query": "^4.35",
"react": "^18.2.0", "react": "^18.2.0",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
"react-router-dom": "6.9.0" "react-router-dom": "6.9.0"
@ -23,16 +23,16 @@
"@playwright/test": "^1.30.0", "@playwright/test": "^1.30.0",
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@sd/ui": "workspace:*", "@sd/ui": "workspace:*",
"@types/react": "^18.0.21", "@types/react": "^18.2.0",
"@types/react-dom": "^18.0.6", "@types/react-dom": "^18.2.0",
"@vitejs/plugin-react": "^2.1.0", "@vitejs/plugin-react": "^4.1",
"autoprefixer": "^10.4.12", "autoprefixer": "^10.4.12",
"postcss": "^8.4.17", "postcss": "^8.4",
"rollup-plugin-visualizer": "^5.9.0", "rollup-plugin-visualizer": "^5.9.0",
"typescript": "^5.0.4", "typescript": "^5.2",
"vite": "^4.0.4", "vite": "^4.4",
"vite-plugin-html": "^3.2.0", "vite-plugin-html": "^3.2",
"vite-plugin-svgr": "^2.2.1", "vite-plugin-svgr": "^3.3",
"vite-tsconfig-paths": "^4.0.3" "vite-tsconfig-paths": "^4.2"
} }
} }

View file

@ -10,10 +10,10 @@
}, },
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"@tanstack/react-query": "^4.10.1", "@tanstack/react-query": "^4.35",
"@vitejs/plugin-react": "^2.1.0", "@vitejs/plugin-react": "^4.1",
"typescript": "^4.8.2", "typescript": "^5.2",
"vite": "^4.0.4" "vite": "^4.4"
}, },
"dependencies": { "dependencies": {
"clsx": "^1.2.1", "clsx": "^1.2.1",

View file

@ -4,6 +4,7 @@ import { useKeys } from 'rooks';
import { JobManagerContextProvider, useClientContext, useDebugState } from '@sd/client'; import { JobManagerContextProvider, useClientContext, useDebugState } from '@sd/client';
import { Button, ButtonLink, dialogManager, modifierSymbols, Popover, Tooltip } from '@sd/ui'; import { Button, ButtonLink, dialogManager, modifierSymbols, Popover, Tooltip } from '@sd/ui';
import { useKeyMatcher } from '~/hooks'; import { useKeyMatcher } from '~/hooks';
import { usePlatform } from '~/util/Platform';
import DebugPopover from './DebugPopover'; import DebugPopover from './DebugPopover';
import FeedbackDialog from './FeedbackDialog'; import FeedbackDialog from './FeedbackDialog';
@ -20,8 +21,24 @@ export default () => {
navigate('settings/client/general'); navigate('settings/client/general');
}); });
const updater = usePlatform().updater;
const updaterState = updater?.useSnapshot();
return ( return (
<div className="space-y-2"> <div className="space-y-2">
{updater && updaterState && (
<>
{updaterState.status === 'updateAvailable' && (
<Button
variant="outline"
className="w-full"
onClick={updater.installUpdate}
>
Install Update
</Button>
)}
</>
)}
<div className="flex w-full items-center justify-between"> <div className="flex w-full items-center justify-between">
<div className="flex"> <div className="flex">
<ButtonLink <ButtonLink

View file

@ -1,7 +1,7 @@
import { useMemo } from 'react'; import { useEffect, useMemo } from 'react';
import { Navigate, Outlet, useMatches, type RouteObject } from 'react-router-dom'; import { Navigate, Outlet, useMatches, type RouteObject } from 'react-router-dom';
import { currentLibraryCache, useCachedLibraries, useInvalidateQuery } from '@sd/client'; import { currentLibraryCache, useCachedLibraries, useInvalidateQuery } from '@sd/client';
import { Dialogs, Toaster } from '@sd/ui'; import { Dialogs, toast, Toaster } from '@sd/ui';
import { RouterErrorBoundary } from '~/ErrorFallback'; import { RouterErrorBoundary } from '~/ErrorFallback';
import { useKeybindHandler, useTheme } from '~/hooks'; import { useKeybindHandler, useTheme } from '~/hooks';
@ -11,6 +11,8 @@ import { RootContext } from './RootContext';
import './style.scss'; import './style.scss';
import { usePlatform } from '..';
const Index = () => { const Index = () => {
const libraries = useCachedLibraries(); const libraries = useCachedLibraries();

View file

@ -1,16 +1,8 @@
{ {
"name": "@sd/interface", "name": "@sd/interface",
"version": "1.0.0",
"license": "GPL-3.0-only",
"private": true, "private": true,
"main": "index.tsx", "main": "index.tsx",
"types": "index.tsx", "types": "index.tsx",
"exports": {
".": "./index.tsx",
"./assets/*": "./assets/*",
"./components/*": "./components/*",
"./hooks/*": "./hooks/*"
},
"scripts": { "scripts": {
"lint": "eslint . --cache", "lint": "eslint . --cache",
"typecheck": "tsc -b" "typecheck": "tsc -b"
@ -19,6 +11,7 @@
"@fontsource/inter": "^4.5.13", "@fontsource/inter": "^4.5.13",
"@headlessui/react": "^1.7.3", "@headlessui/react": "^1.7.3",
"@icons-pack/react-simple-icons": "^7.2.0", "@icons-pack/react-simple-icons": "^7.2.0",
"@phosphor-icons/react": "^2.0.10",
"@radix-ui/react-progress": "^1.0.1", "@radix-ui/react-progress": "^1.0.1",
"@radix-ui/react-slider": "^1.1.0", "@radix-ui/react-slider": "^1.1.0",
"@radix-ui/react-toast": "^1.1.2", "@radix-ui/react-toast": "^1.1.2",
@ -31,13 +24,10 @@
"@splinetool/react-spline": "^2.2.3", "@splinetool/react-spline": "^2.2.3",
"@splinetool/runtime": "^0.9.128", "@splinetool/runtime": "^0.9.128",
"@tailwindcss/forms": "^0.5.3", "@tailwindcss/forms": "^0.5.3",
"@tanstack/react-query": "^4.12.0", "@tanstack/react-query": "^4.35",
"@tanstack/react-query-devtools": "^4.22.0", "@tanstack/react-query-devtools": "^4.35",
"@tanstack/react-table": "^8.8.5", "@tanstack/react-table": "^8.8.5",
"@tanstack/react-virtual": "3.0.0-beta.61", "@tanstack/react-virtual": "3.0.0-beta.61",
"@types/react-scroll-sync": "^0.8.4",
"@types/uuid": "^9.0.2",
"@vitejs/plugin-react": "^2.1.0",
"autoprefixer": "^10.4.12", "autoprefixer": "^10.4.12",
"class-variance-authority": "^0.5.3", "class-variance-authority": "^0.5.3",
"clsx": "^1.2.1", "clsx": "^1.2.1",
@ -45,7 +35,6 @@
"dayjs": "^1.11.8", "dayjs": "^1.11.8",
"dragselect": "^2.7.4", "dragselect": "^2.7.4",
"framer-motion": "^10.11.5", "framer-motion": "^10.11.5",
"@phosphor-icons/react": "^2.0.10",
"prismjs": "^1.29.0", "prismjs": "^1.29.0",
"react": "^18.2.0", "react": "^18.2.0",
"react-colorful": "^5.6.1", "react-colorful": "^5.6.1",
@ -76,15 +65,16 @@
}, },
"devDependencies": { "devDependencies": {
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@types/babel__core": "^7.20.1", "@types/babel__core": "^7.20",
"@types/loadable__component": "^5.13.4", "@types/loadable__component": "^5.13.4",
"@types/node": "^18.11.9", "@types/node": "^18.17",
"@types/react": "^18.0.21", "@types/react": "^18.2.0",
"@types/react-dom": "^18.0.6", "@types/react-dom": "^18.2.0",
"@types/react-router-dom": "^5.3.3", "@types/react-router-dom": "^5.3.3",
"@vitejs/plugin-react": "^1.3.1", "@types/uuid": "^9.0.2",
"typescript": "5.0.4", "@vitejs/plugin-react": "^4.1",
"vite": "^4.0.4", "typescript": "^5.2",
"vite-plugin-svgr": "^2.2.1" "vite": "^4.4",
"vite-plugin-svgr": "^3.3"
} }
} }

View file

@ -1,20 +1,13 @@
{ {
"extends": "../packages/config/base.tsconfig.json", "extends": "../packages/config/base.tsconfig.json",
"compilerOptions": { "compilerOptions": {
"declarationDir": "dist",
"paths": { "paths": {
"~/*": ["./*"] "~/*": ["./*"]
}, },
"types": ["vite-plugin-svgr/client", "vite/client", "node"] "types": ["vite-plugin-svgr/client", "vite/client", "node"],
"declarationDir": "dist"
}, },
"include": ["./**/*"], "include": ["**/*"],
"exclude": ["dist"], "exclude": ["dist"],
"references": [ "references": [{ "path": "../packages/ui" }, { "path": "../packages/client" }]
{
"path": "../packages/ui"
},
{
"path": "../packages/client"
}
]
} }

View file

@ -37,9 +37,23 @@ export type Platform = {
openFilePathWith?(library: string, fileIdsAndAppUrls: [number, string][]): Promise<unknown>; openFilePathWith?(library: string, fileIdsAndAppUrls: [number, string][]): Promise<unknown>;
openEphemeralFileWith?(pathsAndUrls: [string, string][]): Promise<unknown>; openEphemeralFileWith?(pathsAndUrls: [string, string][]): Promise<unknown>;
lockAppTheme?(themeType: 'Auto' | 'Light' | 'Dark'): any; lockAppTheme?(themeType: 'Auto' | 'Light' | 'Dark'): any;
updater?: {
useSnapshot: () => UpdateStore;
checkForUpdate(): Promise<Update | null>;
installUpdate(): Promise<any>;
};
auth: auth.ProviderConfig; auth: auth.ProviderConfig;
}; };
export type Update = { version: string; body: string | null };
export type UpdateStore =
| { status: 'idle' }
| { status: 'loading' }
| { status: 'error' }
| { status: 'updateAvailable'; update: Update }
| { status: 'noUpdateAvailable' }
| { status: 'installing' };
// Keep this private and use through helpers below // Keep this private and use through helpers below
const context = createContext<Platform>(undefined!); const context = createContext<Platform>(undefined!);

View file

@ -1,7 +1,7 @@
{ {
"private": true, "private": true,
"scripts": { "scripts": {
"preprep": "pnpm exec node scripts/preprep.mjs", "preprep": "pnpm --filter @sd/scripts -- prep",
"prep": "pnpm gen:prisma", "prep": "pnpm gen:prisma",
"postprep": "pnpm codegen", "postprep": "pnpm codegen",
"build": "turbo run build", "build": "turbo run build",
@ -27,7 +27,7 @@
"typecheck": "pnpm -r typecheck", "typecheck": "pnpm -r typecheck",
"lint": "turbo run lint", "lint": "turbo run lint",
"lint:fix": "turbo run lint -- --fix", "lint:fix": "turbo run lint -- --fix",
"clean": "rimraf -g \"node_modules/\" \"**/node_modules/\" \"target/\" \"**/.build/\" \"**/.next/\" \"**/dist/!(.gitignore)**\" \"**/tsconfig.tsbuildinfo\"" "clean": "git clean -qfX ."
}, },
"pnpm": { "pnpm": {
"overrides": { "overrides": {
@ -35,25 +35,22 @@
} }
}, },
"devDependencies": { "devDependencies": {
"@babel/plugin-syntax-import-assertions": "^7.22.5", "@babel/plugin-syntax-import-assertions": "~7",
"@cspell/dict-rust": "^2.0.1", "@cspell/dict-rust": "^4.0.1",
"@cspell/dict-typescript": "^2.0.2", "@cspell/dict-typescript": "^3.1.2",
"@ianvs/prettier-plugin-sort-imports": "^4.1.0", "@ianvs/prettier-plugin-sort-imports": "^4.1.0",
"@storybook/react-vite": "^7.0.20", "@storybook/react-vite": "^7.4.6",
"archive-wasm": "^1.5.3", "cspell": "^7.3.7",
"cspell": "^6.31.1",
"mustache": "^4.2.0",
"prettier": "^3.0.3", "prettier": "^3.0.3",
"prettier-plugin-tailwindcss": "^0.5.3", "prettier-plugin-tailwindcss": "^0.5.5",
"rimraf": "^4.4.1", "turbo": "^1.10.14",
"turbo": "^1.10.2", "turbo-ignore": "^1.10.14",
"turbo-ignore": "^0.3.0", "typescript": "^5.2",
"typescript": "^5.0.4", "vite": "^4.4"
"vite": "^4.3.9"
}, },
"overrides": { "overrides": {
"vite-plugin-svgr": "https://github.com/spacedriveapp/vite-plugin-svgr#cb4195b69849429cdb18d1f12381676bf9196a84", "vite-plugin-svgr": "https://github.com/spacedriveapp/vite-plugin-svgr#cb4195b69849429cdb18d1f12381676bf9196a84",
"@types/node": "^18.0.0" "@types/node": "^18.17"
}, },
"engines": { "engines": {
"pnpm": ">=8.0.0", "pnpm": ">=8.0.0",
@ -62,23 +59,6 @@
"node": ">=18.17 <19 || >=20.1" "node": ">=18.17 <19 || >=20.1"
}, },
"eslintConfig": { "eslintConfig": {
"root": true, "root": true
"overrides": [
{
"files": [
"*.mjs"
],
"env": {
"node": true,
"es2022": true,
"browser": false,
"commonjs": false,
"shared-node-browser": false
},
"parserOptions": {
"sourceType": "module"
}
}
]
} }
} }


@ -1,11 +1,8 @@
{ {
"name": "@sd/client", "name": "@sd/client",
"version": "0.0.0",
"private": true, "private": true,
"main": "./src/index.ts", "main": "./src/index.ts",
"files": [ "types": "./src/index.ts",
"dist/**"
],
"scripts": { "scripts": {
"test": "jest", "test": "jest",
"lint": "eslint src --cache", "lint": "eslint src --cache",
@ -16,7 +13,7 @@
"@rspc/client": "=0.0.0-main-799eec5d", "@rspc/client": "=0.0.0-main-799eec5d",
"@rspc/react": "=0.0.0-main-799eec5d", "@rspc/react": "=0.0.0-main-799eec5d",
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@tanstack/react-query": "^4.12.0", "@tanstack/react-query": "^4.35",
"@zxcvbn-ts/core": "^2.1.0", "@zxcvbn-ts/core": "^2.1.0",
"@zxcvbn-ts/language-common": "^2.0.1", "@zxcvbn-ts/language-common": "^2.0.1",
"@zxcvbn-ts/language-en": "^2.1.0", "@zxcvbn-ts/language-en": "^2.1.0",
@ -29,7 +26,7 @@
"@types/react": "^18.0.21", "@types/react": "^18.0.21",
"scripts": "*", "scripts": "*",
"tsconfig": "*", "tsconfig": "*",
"typescript": "^5.0.4" "typescript": "^5.2"
}, },
"peerDependencies": { "peerDependencies": {
"react": "^18.2.0" "react": "^18.2.0"


@ -2,7 +2,8 @@
"extends": "../config/base.tsconfig.json", "extends": "../config/base.tsconfig.json",
"compilerOptions": { "compilerOptions": {
"rootDir": "src", "rootDir": "src",
"declarationDir": "dist" "outDir": "./dist",
"emitDeclarationOnly": false
}, },
"include": ["src"] "include": ["src"]
} }


@ -11,19 +11,19 @@
"lint": "eslint . --cache" "lint": "eslint . --cache"
}, },
"devDependencies": { "devDependencies": {
"@typescript-eslint/eslint-plugin": "^5.59.6", "@typescript-eslint/eslint-plugin": "^6.7",
"@typescript-eslint/parser": "^5.59.6", "@typescript-eslint/parser": "^6.7",
"eslint": "^8.41.0", "eslint": "^8.50",
"eslint-config-next": "13.3.0", "eslint-config-next": "13.3.0",
"eslint-config-prettier": "^8.8.0", "eslint-config-prettier": "^9.0",
"eslint-config-turbo": "^1.9.8", "eslint-config-turbo": "^1.9.8",
"eslint-plugin-prettier": "^4.2.1", "eslint-plugin-prettier": "^5.0",
"eslint-plugin-react": "^7.32.2", "eslint-plugin-react": "^7.32.2",
"eslint-plugin-react-hooks": "^4.6.0", "eslint-plugin-react-hooks": "^4.6.0",
"eslint-plugin-tailwindcss": "^3.12.0", "eslint-plugin-tailwindcss": "^3.12.0",
"eslint-utils": "^3.0.0", "eslint-utils": "^3.0.0",
"regexpp": "^3.2.0", "regexpp": "^3.2.0",
"vite-plugin-html": "^3.2.0", "vite-plugin-html": "^3.2",
"vite-plugin-svgr": "^2.2.1" "vite-plugin-svgr": "^3.3"
} }
} }


@ -36,7 +36,6 @@
"class-variance-authority": "^0.5.3", "class-variance-authority": "^0.5.3",
"clsx": "^1.2.1", "clsx": "^1.2.1",
"@phosphor-icons/react": "^2.0.10", "@phosphor-icons/react": "^2.0.10",
"postcss": "^8.4.17",
"react": "^18.2.0", "react": "^18.2.0",
"react-dom": "^18.2.0", "react-dom": "^18.2.0",
"react-loading-icons": "^1.1.0", "react-loading-icons": "^1.1.0",
@ -48,20 +47,18 @@
"zod": "~3.22.2" "zod": "~3.22.2"
}, },
"devDependencies": { "devDependencies": {
"@babel/core": "^7.22.11", "@babel/core": "~7",
"@sd/config": "workspace:*", "@sd/config": "workspace:*",
"@storybook/types": "^7.0.24", "@storybook/types": "^7.0.24",
"@tailwindcss/typography": "^0.5.7", "@tailwindcss/typography": "^0.5.7",
"@types/node": "^18.15.1", "@types/node": "^18.17",
"@types/react": "^18.0.21", "@types/react": "^18.2.0",
"@types/react-dom": "^18.0.6", "@types/react-dom": "^18.2.0",
"autoprefixer": "^10.4.12", "autoprefixer": "^10.4.12",
"babel-loader": "^8.2.5", "sass": "^1.68",
"sass": "^1.55.0", "postcss": "^8.4",
"sass-loader": "^13.0.2",
"style-loader": "^3.3.1",
"tailwindcss": "^3.3.2", "tailwindcss": "^3.3.2",
"tailwindcss-animate": "^1.0.5", "tailwindcss-animate": "^1.0.5",
"typescript": "5.0.4" "typescript": "^5.2"
} }
} }

File diff suppressed because it is too large


@ -5,3 +5,4 @@ packages:
- 'interface' - 'interface'
- 'docs' - 'docs'
- 'crates/sync/example/web' - 'crates/sync/example/web'
- 'scripts'

scripts/.eslintrc.cjs

@ -0,0 +1,70 @@
module.exports = {
root: true,
env: {
node: true,
es2022: true,
browser: false,
commonjs: false,
'shared-node-browser': false,
},
rules: {
'no-void': [
'error',
{
allowAsStatement: true,
},
],
'no-proto': 'error',
'valid-jsdoc': 'off',
'import/order': [
'error',
{
alphabetize: {
order: 'asc',
},
'newlines-between': 'always',
},
],
'no-unused-vars': [
'error',
{ argsIgnorePattern: '^_', destructuredArrayIgnorePattern: '^_' },
],
'jsdoc/require-returns-check': 'off',
'jsdoc/require-param-description': 'off',
'jsdoc/require-returns-description': 'off',
'standard/no-callback-literal': 'off',
},
parser: '@babel/eslint-parser',
plugins: ['@babel'],
extends: [
'eslint:recommended',
'standard',
'plugin:import/recommended',
'plugin:prettier/recommended',
'plugin:jsdoc/recommended-typescript-flavor',
],
settings: {
jsdoc: {
mode: 'typescript',
tagNamePreference: {
typicalname: 'typicalname',
},
},
},
parserOptions: {
project: './tsconfig.json',
sourceType: 'module',
babelOptions: {
presets: [
[
'@babel/preset-env',
{
shippedProposals: true,
},
],
],
},
tsconfigRootDir: __dirname,
requireConfigFile: false,
},
}

scripts/.gitignore

@ -0,0 +1,2 @@
.tmp
node_modules


@ -1,197 +0,0 @@
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { env } from 'node:process';
import { extractTo } from 'archive-wasm/src/fs.mjs';
import {
getGh,
getGhArtifactContent,
getGhReleasesAssets,
getGhWorkflowRunArtifacts
} from './github.mjs';
import {
FFMPEG_SUFFFIX,
FFMPEG_WORKFLOW,
getConst,
getSuffix,
LIBHEIF_SUFFIX,
LIBHEIF_WORKFLOW,
PDFIUM_SUFFIX,
PROTOC_SUFFIX
} from './suffix.mjs';
import { which } from './which.mjs';
const noop = () => {};
const __debug = env.NODE_ENV === 'debug';
const __osType = os.type();
// Github repos
const PDFIUM_REPO = 'bblanchon/pdfium-binaries';
const PROTOBUF_REPO = 'protocolbuffers/protobuf';
const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive';
/**
* Download and extract protobuff compiler binary
* @param {string[]} machineId
* @param {string} framework
*/
export async function downloadProtc(machineId, framework) {
if (await which('protoc')) return;
console.log('Downloading protoc...');
const protocSuffix = getSuffix(PROTOC_SUFFIX, machineId);
if (protocSuffix == null) throw new Error('NO_PROTOC');
let found = false;
for await (const release of getGhReleasesAssets(PROTOBUF_REPO)) {
if (!protocSuffix.test(release.name)) continue;
try {
await extractTo(await getGh(release.downloadUrl), framework, {
chmod: 0o600,
overwrite: true
});
found = true;
break;
} catch (error) {
console.warn('Failed to download protoc, re-trying...');
if (__debug) console.error(error);
}
}
if (!found) throw new Error('NO_PROTOC');
// cleanup
await fs.unlink(path.join(framework, 'readme.txt')).catch(__debug ? console.error : noop);
}
/**
* Download and extract pdfium library for generating PDFs thumbnails
* @param {string[]} machineId
* @param {string} framework
*/
export async function downloadPDFium(machineId, framework) {
console.log('Downloading pdfium...');
const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId);
if (pdfiumSuffix == null) throw new Error('NO_PDFIUM');
let found = false;
for await (const release of getGhReleasesAssets(PDFIUM_REPO)) {
if (!pdfiumSuffix.test(release.name)) continue;
try {
await extractTo(await getGh(release.downloadUrl), framework, {
chmod: 0o600,
overwrite: true
});
found = true;
break;
} catch (error) {
console.warn('Failed to download pdfium, re-trying...');
if (__debug) console.error(error);
}
}
if (!found) throw new Error('NO_PDFIUM');
// cleanup
const cleanup = [
fs.rename(path.join(framework, 'LICENSE'), path.join(framework, 'LICENSE.pdfium')),
...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map((file) =>
fs.unlink(path.join(framework, file)).catch(__debug ? console.error : noop)
)
];
switch (__osType) {
case 'Linux':
cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.so'), 0o750));
break;
case 'Darwin':
cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.dylib'), 0o750));
break;
}
await Promise.all(cleanup);
}
/**
* Download and extract ffmpeg libs for video thumbnails
* @param {string[]} machineId
* @param {string} framework
* @param {string[]} branches
*/
export async function downloadFFMpeg(machineId, framework, branches) {
const workflow = getConst(FFMPEG_WORKFLOW, machineId);
if (workflow == null) {
console.log('Checking FFMPeg...');
if (await which('ffmpeg')) {
// TODO: check ffmpeg version match what we need
return;
} else {
throw new Error('NO_FFMPEG');
}
}
console.log('Downloading FFMPeg...');
const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId);
if (ffmpegSuffix == null) throw new Error('NO_FFMPEG');
let found = false;
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
if (!ffmpegSuffix.test(artifact.name)) continue;
try {
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id);
await extractTo(data, framework, {
chmod: 0o600,
recursive: true,
overwrite: true
});
found = true;
break;
} catch (error) {
console.warn('Failed to download FFMpeg, re-trying...');
if (__debug) console.error(error);
}
}
if (!found) throw new Error('NO_FFMPEG');
}
/**
* Download and extract libheif libs for heif thumbnails
* @param {string[]} machineId
* @param {string} framework
* @param {string[]} branches
*/
export async function downloadLibHeif(machineId, framework, branches) {
const workflow = getConst(LIBHEIF_WORKFLOW, machineId);
if (workflow == null) return;
console.log('Downloading LibHeif...');
const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId);
if (libHeifSuffix == null) throw new Error('NO_LIBHEIF');
let found = false;
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
if (!libHeifSuffix.test(artifact.name)) continue;
try {
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id);
await extractTo(data, framework, {
chmod: 0o600,
recursive: true,
overwrite: true
});
found = true;
break;
} catch (error) {
console.warn('Failed to download LibHeif, re-trying...');
if (__debug) console.error(error);
}
}
if (!found) throw new Error('NO_LIBHEIF');
}


@ -1,86 +0,0 @@
import { exec as execCb } from 'node:child_process';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { env } from 'node:process';
import { promisify } from 'node:util';
const __debug = env.NODE_ENV === 'debug';
const exec = promisify(execCb);
/**
* @param {string} repoPath
* @returns {string?}
*/
async function getRemoteBranchName(repoPath) {
let branchName;
try {
branchName = (await exec('git symbolic-ref --short HEAD', { cwd: repoPath })).stdout.trim();
if (!branchName) throw 'Empty local branch name';
} catch (error) {
if (__debug) {
console.warn(`Failed to read git local branch name`);
console.error(error);
}
return null;
}
let remoteBranchName;
try {
remoteBranchName = (
await exec(`git for-each-ref --format="%(upstream:short)" refs/heads/${branchName}`, {
cwd: repoPath
})
).stdout.trim();
const [remote, branch] = remoteBranchName.split('/');
if (!branch) throw 'Empty remote branch name';
remoteBranchName = branch;
} catch (error) {
if (__debug) {
console.warn(`Failed to read git remote branch name`);
console.error(error);
}
return null;
}
return remoteBranchName;
}
// https://stackoverflow.com/q/3651860#answer-67151923
const REF_REGEX = /ref:\s+refs\/heads\/(?<branch>[^\s\x00-\x1F\:\?\[\\\^\~]+)/;
const GITHUB_REF_REGEX = /^refs\/heads\//;
/**
* @param {string} repoPath
* @returns {Promise<string[]>}
*/
export async function getGitBranches(repoPath) {
const branches = ['main', 'master'];
if (env.GITHUB_HEAD_REF) {
branches.unshift(env.GITHUB_HEAD_REF);
} else if (env.GITHUB_REF) {
branches.unshift(env.GITHUB_REF.replace(GITHUB_REF_REGEX, ''));
}
const remoteBranchName = await getRemoteBranchName(repoPath);
if (remoteBranchName) {
branches.unshift(remoteBranchName);
} else {
let head;
try {
head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' });
} catch (error) {
if (__debug) {
console.warn(`Failed to read git HEAD file`);
console.error(error);
}
return branches;
}
const match = REF_REGEX.exec(head);
if (match?.groups?.branch) branches.unshift(match.groups.branch);
}
return branches;
}


@ -1,60 +0,0 @@
import { exec as execCb } from 'node:child_process';
import * as os from 'node:os';
import { env } from 'node:process';
import { promisify } from 'node:util';
const __debug = env.NODE_ENV === 'debug';
let libc = 'glibc';
if (os.type() === 'Linux') {
try {
const exec = promisify(execCb);
if ((await exec('ldd /bin/ls')).stdout.includes('musl')) {
libc = 'musl';
}
} catch (error) {
if (__debug) {
console.warn(`Failed to check libc type`);
console.error(error);
}
}
}
const OS_TYPE = {
darwin: 'Darwin',
windows: 'Windows_NT',
linux: 'Linux'
};
export function getMachineId() {
let machineId;
/**
* Possible TARGET_TRIPLE:
* x86_64-apple-darwin
* aarch64-apple-darwin
* x86_64-pc-windows-msvc
* aarch64-pc-windows-msvc
* x86_64-unknown-linux-gnu
* x86_64-unknown-linux-musl
* aarch64-unknown-linux-gnu
* aarch64-unknown-linux-musl
* armv7-unknown-linux-gnueabihf
*/
if (env.TARGET_TRIPLE) {
const target = env.TARGET_TRIPLE.split('-');
const osType = OS_TYPE[target[2]];
if (!osType) throw new Error(`Unknown OS type: ${target[2]}`);
if (!target[0]) throw new Error(`Unknown machine type: ${target[0]}`);
machineId = [osType, target[0]];
if (machineId[0] === 'Linux') machineId.push(target[3].includes('musl') ? 'musl' : 'glibc');
} else {
// Current machine identifiers
machineId = [os.type(), os.machine()];
if (machineId[0] === 'Linux') machineId.push(libc);
}
return machineId;
}

scripts/package.json

@ -0,0 +1,42 @@
{
"name": "@sd/scripts",
"private": true,
"main": "./preprep.mjs",
"type": "module",
"scripts": {
"prep": "node preprep.mjs",
"tauri": "node tauri.mjs",
"lint": "eslint --cache",
"typecheck": "tsc"
},
"prettier": {
"semi": false,
"endOfLine": "lf",
"printWidth": 99,
"singleQuote": true,
"arrowParens": "avoid",
"trailingComma": "es5"
},
"dependencies": {
"@iarna/toml": "^2.2.5",
"archive-wasm": "^1.5.3",
"mustache": "^4.2.0",
"semver": "^7.5.0",
"undici": "^5.25.4"
},
"devDependencies": {
"@babel/core": "~7",
"@babel/eslint-parser": "~7",
"@babel/eslint-plugin": "~7",
"@types/mustache": "^4.2.3",
"@types/node": "^18.17",
"@typescript-eslint/eslint-plugin": "^6.7",
"@typescript-eslint/parser": "^6.7",
"eslint": "^8.50",
"eslint-config-prettier": "^9.0",
"eslint-config-standard": "^17.1",
"eslint-plugin-jsdoc": "^46.8",
"eslint-plugin-prettier": "^5.0",
"typescript": "^5.2"
}
}


@ -1,229 +1,156 @@
import { exec as _exec } from 'node:child_process'; import * as fs from 'node:fs/promises'
import * as fs from 'node:fs/promises'; import * as path from 'node:path'
import * as os from 'node:os'; import { env, exit, umask } from 'node:process'
import * as path from 'node:path'; import { fileURLToPath } from 'node:url'
import { env, umask } from 'node:process';
import { fileURLToPath } from 'node:url';
import { promisify } from 'node:util';
import mustache from 'mustache';
import { downloadFFMpeg, downloadLibHeif, downloadPDFium, downloadProtc } from './deps.mjs'; import * as _mustache from 'mustache'
import { getGitBranches } from './git.mjs';
import { getMachineId } from './machineId.mjs';
import { which } from './which.mjs';
umask(0o026); import { downloadFFMpeg, downloadLibHeif, downloadPDFium, downloadProtc } from './utils/deps.mjs'
import { getGitBranches } from './utils/git.mjs'
import { getMachineId } from './utils/machineId.mjs'
import {
setupMacOsFramework,
symlinkSharedLibsMacOS,
symlinkSharedLibsLinux,
} from './utils/shared.mjs'
import { which } from './utils/which.mjs'
if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) { if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) {
console.error('Bash for windows is not supported, please execute this from Powershell or CMD'); console.error(
process.exit(255); 'Bash for windows is not supported, please interact with this repo from Powershell or CMD'
)
exit(255)
} }
const exec = promisify(_exec); // @ts-expect-error
const mustache = /** @type {import("mustache")} */ (_mustache.default)
const __debug = env.NODE_ENV === 'debug'; // Limit file permissions
const __filename = fileURLToPath(import.meta.url); umask(0o026)
const __dirname = path.dirname(__filename);
const __debug = env.NODE_ENV === 'debug'
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
// NOTE: Must point to package root path // NOTE: Must point to package root path
const __root = path.resolve(path.join(__dirname, '..')); const __root = path.resolve(path.join(__dirname, '..'))
const bugWarn =
	'This is probably a bug, please open an issue with your system info at: ' +
'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
// Current machine identifiers // Current machine identifiers
const machineId = getMachineId(); const machineId = getMachineId()
// Basic dependency check // Basic dependency check
if ( if ((await Promise.all([which('cargo'), which('rustc'), which('pnpm')])).some(found => !found)) {
(await Promise.all([which('cargo'), which('rustc'), which('pnpm'), which('node')])).some(
(found) => !found
)
) {
console.error(`Basic dependencies missing. console.error(`Basic dependencies missing.
Make sure you have rust, node.js and pnpm installed: Make sure you have rust and pnpm installed:
https://rustup.rs https://rustup.rs
https://nodejs.org/en/download
https://pnpm.io/installation https://pnpm.io/installation
Also that you have run the setup script: Also that you have run the setup script:
packages/scripts/${machineId[0] === 'Windows_NT' ? 'setup.ps1' : 'setup.sh'} packages/scripts/${machineId[0] === 'Windows_NT' ? 'setup.ps1' : 'setup.sh'}
`); `)
} }
// Accepted git branches for querying for artifacts (current, main, master) // Directory where the native deps will be downloaded
const branches = await getGitBranches(__root); const nativeDeps = path.join(__root, 'apps', '.deps')
await fs.rm(nativeDeps, { force: true, recursive: true })
// Create the basic target directory hierarchy
const framework = path.join(__root, 'target', 'Frameworks');
await fs.rm(framework, { force: true, recursive: true });
await Promise.all( await Promise.all(
['bin', 'lib', 'include'].map((dir) => ['bin', 'lib', 'include'].map(dir =>
fs.mkdir(path.join(framework, dir), { mode: 0o750, recursive: true }) fs.mkdir(path.join(nativeDeps, dir), { mode: 0o750, recursive: true })
) )
); )
// Accepted git branches for querying for artifacts (current, main, master)
const branches = await getGitBranches(__root)
// Download all necessary external dependencies // Download all necessary external dependencies
await Promise.all([ await Promise.all([
downloadProtc(machineId, framework).catch((e) => { downloadProtc(machineId, nativeDeps).catch(e => {
console.error( console.error(
'Failed to download protoc, this is required for Spacedrive to compile. ' + 'Failed to download protobuf compiler, this is required to build Spacedrive. ' +
'Please install it with your system package manager' 'Please install it with your system package manager'
); )
throw e; throw e
}), }),
downloadPDFium(machineId, framework).catch((e) => { downloadPDFium(machineId, nativeDeps).catch(e => {
console.warn( console.warn(
'Failed to download pdfium lib. ' + 'Failed to download pdfium lib. ' +
"This is optional, but if one isn't configured Spacedrive won't be able to generate thumbnails for PDF files" "This is optional, but if one isn't present Spacedrive won't be able to generate thumbnails for PDF files"
); )
if (__debug) console.error(e); if (__debug) console.error(e)
}), }),
downloadFFMpeg(machineId, framework, branches).catch((e) => { downloadFFMpeg(machineId, nativeDeps, branches).catch(e => {
console.error( console.error(`Failed to download ffmpeg. ${bugWarn}`)
'Failed to download ffmpeg. This is probably a bug, please open a issue with you system info at: ' + throw e
'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
);
throw e;
}), }),
downloadLibHeif(machineId, framework, branches).catch((e) => { downloadLibHeif(machineId, nativeDeps, branches).catch(e => {
console.error( console.error(`Failed to download libheif. ${bugWarn}`)
'Failed to download libheif. This is probably a bug, please open a issue with you system info at: ' + throw e
'https://github.com/spacedriveapp/spacedrive/issues/new/choose' }),
); ]).catch(e => {
throw e; if (__debug) console.error(e)
}) exit(1)
]).catch((e) => { })
if (__debug) console.error(e);
process.exit(1); // Extra OS specific setup
}); try {
if (machineId[0] === 'Linux') {
console.log(`Symlink shared libs...`)
symlinkSharedLibsLinux(__root, nativeDeps).catch(e => {
console.error(`Failed to symlink shared libs. ${bugWarn}`)
throw e
})
} else if (machineId[0] === 'Darwin') {
console.log(`Setup Framework...`)
await setupMacOsFramework(nativeDeps).catch(e => {
console.error(`Failed to setup Framework. ${bugWarn}`)
throw e
})
		// This is still required due to how the ffmpeg-sys-next build script works
console.log(`Symlink shared libs...`)
await symlinkSharedLibsMacOS(nativeDeps).catch(e => {
console.error(`Failed to symlink shared libs. ${bugWarn}`)
throw e
})
}
} catch (error) {
if (__debug) console.error(error)
exit(1)
}
// Generate .cargo/config.toml // Generate .cargo/config.toml
console.log('Generating cargo config...'); console.log('Generating cargo config...')
try { try {
await fs.writeFile( await fs.writeFile(
path.join(__root, '.cargo', 'config.toml'), path.join(__root, '.cargo', 'config.toml'),
mustache mustache
.render( .render(
await fs.readFile(path.join(__root, '.cargo', 'config.toml.mustache'), { await fs.readFile(path.join(__root, '.cargo', 'config.toml.mustache'), {
encoding: 'utf8' encoding: 'utf8',
}), }),
{ {
ffmpeg: machineId[0] === 'Linux' ? false : framework.replaceAll('\\', '\\\\'), isWin: machineId[0] === 'Windows_NT',
isMacOS: machineId[0] === 'Darwin',
isLinux: machineId[0] === 'Linux',
// Escape windows path separator to be compatible with TOML parsing
protoc: path protoc: path
.join( .join(
framework, nativeDeps,
'bin', 'bin',
machineId[0] === 'Windows_NT' ? 'protoc.exe' : 'protoc' machineId[0] === 'Windows_NT' ? 'protoc.exe' : 'protoc'
) )
.replaceAll('\\', '\\\\'), .replaceAll('\\', '\\\\'),
projectRoot: __root.replaceAll('\\', '\\\\'), nativeDeps: nativeDeps.replaceAll('\\', '\\\\'),
isWin: machineId[0] === 'Windows_NT',
isMacOS: machineId[0] === 'Darwin',
isLinux: machineId[0] === 'Linux'
} }
) )
.replace(/\n\n+/g, '\n'), .replace(/\n\n+/g, '\n'),
{ mode: 0o751, flag: 'w+' } { mode: 0o751, flag: 'w+' }
); )
} catch (error) { } catch (error) {
console.error( console.error(`Failed to generate .cargo/config.toml. ${bugWarn}`)
'Failed to generate .cargo/config.toml, please open an issue on: ' + if (__debug) console.error(error)
'https://github.com/spacedriveapp/spacedrive/issues/new/choose' exit(1)
);
if (__debug) console.error(error);
process.exit(1);
}
if (machineId[0] === 'Linux') {
// Setup Linux libraries
const libDir = path.join(__root, 'target', 'lib');
await fs.rm(libDir, { force: true, recursive: true });
await fs.mkdir(libDir, { recursive: true, mode: 0o751 });
await fs.symlink(path.join(framework, 'lib'), path.join(__root, 'target', 'lib', 'spacedrive'));
} else if (machineId[0] === 'Darwin') {
// Setup macOS Frameworks
try {
console.log('Setup Frameworks & Sign libraries...');
const ffmpegFramework = path.join(framework, 'FFMpeg.framework');
// Move pdfium License to FFMpeg.framework
await fs.rename(
path.join(framework, 'LICENSE.pdfium'),
path.join(
ffmpegFramework,
'Resources',
'English.lproj',
'Documentation',
'LICENSE.pdfium'
)
);
// Move include files to FFMpeg.framework
const include = path.join(framework, 'include');
const headers = path.join(ffmpegFramework, 'Headers');
const includeFiles = await fs.readdir(include, { recursive: true, withFileTypes: true });
const moveIncludes = includeFiles
.filter(
(entry) =>
(entry.isFile() || entry.isSymbolicLink()) && !entry.name.endsWith('.proto')
)
.map(async (entry) => {
const file = path.join(entry.path, entry.name);
const newFile = path.resolve(headers, path.relative(include, file));
await fs.mkdir(path.dirname(newFile), { mode: 0o751, recursive: true });
await fs.rename(file, newFile);
});
// Move libs to FFMpeg.framework
const lib = path.join(framework, 'lib');
const libraries = path.join(ffmpegFramework, 'Libraries');
const libFiles = await fs.readdir(lib, { recursive: true, withFileTypes: true });
const moveLibs = libFiles
.filter(
(entry) =>
(entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib')
)
.map(async (entry) => {
const file = path.join(entry.path, entry.name);
const newFile = path.resolve(libraries, path.relative(lib, file));
await fs.mkdir(path.dirname(newFile), { mode: 0o751, recursive: true });
await fs.rename(file, newFile);
});
await Promise.all([...moveIncludes, ...moveLibs]);
// Symlink headers
const headerFiles = await fs.readdir(headers, { recursive: true, withFileTypes: true });
const linkHeaders = headerFiles
.filter((entry) => entry.isFile() || entry.isSymbolicLink())
.map(async (entry) => {
const file = path.join(entry.path, entry.name);
const link = path.resolve(include, path.relative(headers, file));
const linkDir = path.dirname(link);
await fs.mkdir(linkDir, { mode: 0o751, recursive: true });
await fs.symlink(path.relative(linkDir, file), link);
});
// Symlink libraries
const libraryFiles = await fs.readdir(libraries, { recursive: true, withFileTypes: true });
const linkLibs = libraryFiles
.filter(
(entry) =>
(entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib')
)
.map(async (entry) => {
const file = path.join(entry.path, entry.name);
const link = path.resolve(lib, path.relative(libraries, file));
const linkDir = path.dirname(link);
await fs.mkdir(linkDir, { mode: 0o751, recursive: true });
await fs.symlink(path.relative(linkDir, file), link);
if (entry.isFile()) {
// Sign the lib with the local machine certificate (Required for it to work on macOS 13+)
await exec(`codesign -s "${env.APPLE_SIGNING_IDENTITY || '-'}" -f "${file}"`);
}
});
await Promise.all([...linkHeaders, ...linkLibs]);
} catch (error) {
console.error(
'Failed to configure required Frameworks.This is probably a bug, please open a issue with you system info at: ' +
'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
);
if (__debug) console.error(error);
process.exit(1);
}
} }


@ -32,6 +32,12 @@ script_failure() {
trap 'script_failure ${LINENO:-}' ERR trap 'script_failure ${LINENO:-}' ERR
case "${OSTYPE:-}" in
'msys' | 'mingw' | 'cygwin')
err 'Bash for windows is not supported, please interact with this repo from Powershell or CMD'
;;
esac
if [ "${CI:-}" != "true" ]; then if [ "${CI:-}" != "true" ]; then
echo 'Spacedrive Development Environment Setup' echo 'Spacedrive Development Environment Setup'
echo 'To set up your machine for Spacedrive development, this script will install some required dependencies with your system package manager' echo 'To set up your machine for Spacedrive development, this script will install some required dependencies with your system package manager'
@ -106,7 +112,7 @@ case "$(uname)" in
echo echo
fi fi
;; ;;
"Linux") # https://github.com/tauri-apps/tauri-docs/blob/dev/docs/guides/getting-started/prerequisites.md "Linux") # https://github.com/tauri-apps/tauri-docs/blob/dev/docs/guides/getting-started/prerequisites.md#setting-up-linux
if has apt-get; then if has apt-get; then
echo "Detected apt!" echo "Detected apt!"
echo "Installing dependencies with apt..." echo "Installing dependencies with apt..."

scripts/tauri.mjs

@ -0,0 +1,139 @@
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { env, exit, umask, platform } from 'node:process'
import { fileURLToPath } from 'node:url'
import * as toml from '@iarna/toml'
import { patchTauri } from './utils/patchTauri.mjs'
import spawn from './utils/spawn.mjs'
if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) {
console.error(
'Bash for windows is not supported, please interact with this repo from Powershell or CMD'
)
exit(255)
}
// Limit file permissions
umask(0o026)
const __filename = fileURLToPath(import.meta.url)
const __dirname = path.dirname(__filename)
const [_, __, ...args] = process.argv
// NOTE: Must point to package root path
const __root = path.resolve(path.join(__dirname, '..'))
// Location for desktop app
const desktopApp = path.join(__root, 'apps', 'desktop')
// Location of the native dependencies
const nativeDeps = path.join(__root, 'apps', '.deps')
// Files to be removed when script finish executing
const __cleanup = /** @type {string[]} */ ([])
const cleanUp = () => Promise.all(__cleanup.map(file => fs.unlink(file).catch(() => {})))
process.on('SIGINT', cleanUp)
// Check if file/dir exists
const exists = (/** @type {string} */ path) =>
fs
.access(path, fs.constants.R_OK)
.then(() => true)
.catch(() => false)
// Export environment variables defined in cargo.toml
const cargoConfig = await fs
.readFile(path.resolve(__root, '.cargo', 'config.toml'), { encoding: 'binary' })
.then(toml.parse)
if (cargoConfig.env && typeof cargoConfig.env === 'object')
for (const [name, value] of Object.entries(cargoConfig.env)) if (!env[name]) env[name] = value
// Default command
if (args.length === 0) args.push('build')
let code = 0
try {
switch (args[0]) {
case 'dev': {
__cleanup.push(...(await patchTauri(__root, nativeDeps, args)))
break
}
case 'build': {
if (!env.NODE_OPTIONS || !env.NODE_OPTIONS.includes('--max_old_space_size')) {
env.NODE_OPTIONS = `--max_old_space_size=4096 ${env.NODE_OPTIONS ?? ''}`
}
__cleanup.push(...(await patchTauri(__root, nativeDeps, args)))
switch (process.platform) {
case 'darwin': {
// Configure DMG background
env.BACKGROUND_FILE = path.resolve(
desktopApp,
'src-tauri',
'dmg-background.png'
)
env.BACKGROUND_FILE_NAME = path.basename(env.BACKGROUND_FILE)
env.BACKGROUND_CLAUSE = `set background picture of opts to file ".background:${env.BACKGROUND_FILE_NAME}"`
if (!(await exists(env.BACKGROUND_FILE)))
console.warn(
`WARNING: DMG background file not found at ${env.BACKGROUND_FILE}`
)
break
}
case 'linux':
// Cleanup appimage bundle to avoid build_appimage.sh failing
await fs.rm(path.join(__root, 'target', 'release', 'bundle', 'appimage'), {
recursive: true,
force: true,
})
break
}
}
}
await spawn('pnpm', ['exec', 'tauri', ...args], desktopApp).catch(async error => {
if (args[0] === 'build' || platform === 'linux') {
			// Work around appimage bundling not working sometimes
const appimageDir = path.join(__root, 'target', 'release', 'bundle', 'appimage')
if (
(await exists(path.join(appimageDir, 'build_appimage.sh'))) &&
(await fs.readdir(appimageDir).then(f => f.every(f => !f.endsWith('.AppImage'))))
) {
// Remove AppDir to allow build_appimage to rebuild it
await fs.rm(path.join(appimageDir, 'spacedrive.AppDir'), {
recursive: true,
force: true,
})
return spawn('bash', ['build_appimage.sh'], appimageDir).catch(exitCode => {
code = exitCode
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`)
})
}
}
console.error(
`tauri ${args[0]} failed with exit code ${typeof error === 'number' ? error : 1}`
)
console.warn(
`If you got an error related to FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm prep\``
)
throw error
})
} catch (error) {
if (typeof error === 'number') {
code = error
} else {
if (error instanceof Error) console.error(error)
code = 1
}
} finally {
cleanUp()
exit(code)
}
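scripts/utils/spawn.mjs is not part of this excerpt; below is a minimal sketch consistent with how tauri.mjs consumes it above (resolves on exit code 0, rejects with the numeric exit code), not the actual implementation — the real helper may also handle details such as Windows command shims.

import { spawn as spawnChild } from 'node:child_process'

/**
 * @param {string} command
 * @param {string[]} args
 * @param {string} cwd
 * @returns {Promise<void>}
 */
export default function spawn(command, args, cwd) {
	return new Promise((resolve, reject) => {
		const child = spawnChild(command, args, { cwd, stdio: 'inherit' })
		child.once('error', reject)
		// Reject with the exit code so callers can do `.catch(exitCode => ...)` as tauri.mjs does
		child.once('exit', code => (code === 0 ? resolve() : reject(code ?? 1)))
	})
}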

scripts/tsconfig.json

@ -0,0 +1,33 @@
{
"compilerOptions": {
"lib": ["esnext"],
"noEmit": true,
"outDir": "src",
"strict": true,
"checkJs": true,
"allowJs": true,
"module": "esnext",
"target": "esnext",
"declaration": true,
"incremental": true,
"skipLibCheck": true,
"removeComments": false,
"noUnusedLocals": true,
"isolatedModules": true,
"esModuleInterop": false,
"disableSizeLimit": true,
"moduleResolution": "node",
"noImplicitReturns": true,
"resolveJsonModule": true,
"noUnusedParameters": true,
"experimentalDecorators": true,
"useDefineForClassFields": true,
"noUncheckedIndexedAccess": true,
"exactOptionalPropertyTypes": true,
"forceConsistentCasingInFileNames": true,
"noPropertyAccessFromIndexSignature": false
},
"include": ["./**/*.mjs"],
"exclude": ["node_modules"],
"$schema": "https://json.schemastore.org/tsconfig"
}


@ -4,78 +4,69 @@ export const PROTOC_SUFFIX = {
i386: 'linux-x86_32', i386: 'linux-x86_32',
i686: 'linux-x86_32', i686: 'linux-x86_32',
x86_64: 'linux-x86_64', x86_64: 'linux-x86_64',
arm64: 'linux-aarch_64', aarch64: 'linux-aarch_64',
aarch64: 'linux-aarch_64'
}, },
Darwin: { Darwin: {
x86_64: 'osx-x86_64', x86_64: 'osx-x86_64',
arm64: 'osx-aarch_64',
aarch64: 'osx-aarch_64' aarch64: 'osx-aarch_64',
}, },
Windows_NT: { Windows_NT: {
i386: 'win32', i386: 'win32',
i686: 'win32', i686: 'win32',
x86_64: 'win64' x86_64: 'win64',
} },
}; }
export const PDFIUM_SUFFIX = { export const PDFIUM_SUFFIX = {
Linux: { Linux: {
x86_64: { x86_64: {
musl: 'linux-musl-x64', musl: 'linux-musl-x64',
glibc: 'linux-x64' glibc: 'linux-x64',
}, },
arm64: 'linux-arm64', aarch64: 'linux-arm64',
aarch64: 'linux-arm64'
}, },
Darwin: { Darwin: {
x86_64: 'mac-x64', x86_64: 'mac-x64',
arm64: 'mac-arm64', aarch64: 'mac-arm64',
aarch64: 'mac-arm64'
}, },
Windows_NT: { Windows_NT: {
x86_64: 'win-x64', x86_64: 'win-x64',
arm64: 'win-arm64', aarch64: 'win-arm64',
aarch64: 'win-arm64' },
} }
};
export const FFMPEG_SUFFFIX = { export const FFMPEG_SUFFFIX = {
Darwin: { Darwin: {
x86_64: 'x86_64', x86_64: 'x86_64',
arm64: 'arm64', aarch64: 'arm64',
aarch64: 'arm64'
}, },
Windows_NT: { Windows_NT: {
x86_64: 'x86_64' x86_64: 'x86_64',
} },
}; }
export const FFMPEG_WORKFLOW = { export const FFMPEG_WORKFLOW = {
Darwin: 'ffmpeg-macos.yml', Darwin: 'ffmpeg-macos.yml',
Windows_NT: 'ffmpeg-windows.yml' Windows_NT: 'ffmpeg-windows.yml',
}; }
export const LIBHEIF_SUFFIX = { export const LIBHEIF_SUFFIX = {
Linux: { Linux: {
x86_64: { x86_64: {
musl: 'x86_64-linux-musl', musl: 'x86_64-linux-musl',
glibc: 'x86_64-linux-gnu' glibc: 'x86_64-linux-gnu',
},
arm64: {
musl: 'aarch64-linux-musl',
glibc: 'aarch64-linux-gnu'
}, },
aarch64: { aarch64: {
musl: 'aarch64-linux-musl', musl: 'aarch64-linux-musl',
glibc: 'aarch64-linux-gnu' glibc: 'aarch64-linux-gnu',
} },
} },
}; }
export const LIBHEIF_WORKFLOW = { export const LIBHEIF_WORKFLOW = {
Linux: 'libheif-linux.yml' Linux: 'libheif-linux.yml',
}; }
/** /**
* @param {Record<string, unknown>} constants * @param {Record<string, unknown>} constants
@ -84,15 +75,15 @@ export const LIBHEIF_WORKFLOW = {
*/ */
export function getConst(constants, identifiers) { export function getConst(constants, identifiers) {
/** @type {string | Record<string, unknown>} */ /** @type {string | Record<string, unknown>} */
let constant = constants; let constant = constants
for (const id of identifiers) { for (const id of identifiers) {
constant = /** @type {string | Record<string, unknown>} */ (constant[id]); constant = /** @type {string | Record<string, unknown>} */ (constant[id])
if (!constant) return null; if (!constant) return null
if (typeof constant !== 'object') break; if (typeof constant !== 'object') break
} }
return typeof constant === 'string' ? constant : null; return typeof constant === 'string' ? constant : null
} }
/** /**
@ -101,6 +92,6 @@ export function getConst(constants, identifiers) {
* @returns {RegExp?} * @returns {RegExp?}
*/ */
export function getSuffix(suffixes, identifiers) { export function getSuffix(suffixes, identifiers) {
const suffix = getConst(suffixes, identifiers); const suffix = getConst(suffixes, identifiers)
return suffix ? new RegExp(`${suffix}(\\.[^\\.]+)*$`) : null; return suffix ? new RegExp(`${suffix}(\\.[^\\.]+)*$`) : null
} }
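To make the lookup helpers concrete, a small usage sketch; the asset file name in the comment is illustrative, and the import assumes this module now lives at scripts/utils/consts.mjs, as the new deps.mjs imports suggest.

import { getSuffix, PDFIUM_SUFFIX, PROTOC_SUFFIX } from './consts.mjs'

// ['Darwin', 'aarch64'] is the shape returned by getMachineId()
console.log(getSuffix(PDFIUM_SUFFIX, ['Darwin', 'aarch64']))
// -> /mac-arm64(\.[^\.]+)*$/ which matches names like "pdfium-mac-arm64.tgz"

// Linux needs the third (libc) identifier to pick between musl and glibc builds
console.log(getSuffix(PDFIUM_SUFFIX, ['Linux', 'x86_64', 'musl']))
// -> /linux-musl-x64(\.[^\.]+)*$/

// Missing entries resolve to null so callers can bail out (e.g. no protoc build for Windows on ARM)
console.log(getSuffix(PROTOC_SUFFIX, ['Windows_NT', 'aarch64'])) // -> null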

scripts/utils/deps.mjs

@ -0,0 +1,198 @@
import * as fs from 'node:fs/promises'
import * as os from 'node:os'
import * as path from 'node:path'
import { env } from 'node:process'
import { extractTo } from 'archive-wasm/src/fs.mjs'
import {
FFMPEG_SUFFFIX,
FFMPEG_WORKFLOW,
getConst,
getSuffix,
LIBHEIF_SUFFIX,
LIBHEIF_WORKFLOW,
PDFIUM_SUFFIX,
PROTOC_SUFFIX,
} from './consts.mjs'
import {
getGh,
getGhArtifactContent,
getGhReleasesAssets,
getGhWorkflowRunArtifacts,
} from './github.mjs'
import { which } from './which.mjs'
const noop = () => {}
const __debug = env.NODE_ENV === 'debug'
const __osType = os.type()
// Github repos
const PDFIUM_REPO = 'bblanchon/pdfium-binaries'
const PROTOBUF_REPO = 'protocolbuffers/protobuf'
const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive'
/**
 * Download and extract protobuf compiler binary
* @param {string[]} machineId
* @param {string} nativeDeps
*/
export async function downloadProtc(machineId, nativeDeps) {
if (await which('protoc')) return
console.log('Downloading protoc...')
const protocSuffix = getSuffix(PROTOC_SUFFIX, machineId)
if (protocSuffix == null) throw new Error('NO_PROTOC')
let found = false
for await (const release of getGhReleasesAssets(PROTOBUF_REPO)) {
if (!protocSuffix.test(release.name)) continue
try {
await extractTo(await getGh(release.downloadUrl), nativeDeps, {
chmod: 0o600,
overwrite: true,
})
found = true
break
} catch (error) {
console.warn('Failed to download protoc, re-trying...')
if (__debug) console.error(error)
}
}
if (!found) throw new Error('NO_PROTOC')
// cleanup
await fs.unlink(path.join(nativeDeps, 'readme.txt')).catch(__debug ? console.error : noop)
}
/**
 * Download and extract pdfium library for generating PDF thumbnails
* @param {string[]} machineId
* @param {string} nativeDeps
*/
export async function downloadPDFium(machineId, nativeDeps) {
console.log('Downloading pdfium...')
const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId)
if (pdfiumSuffix == null) throw new Error('NO_PDFIUM')
let found = false
for await (const release of getGhReleasesAssets(PDFIUM_REPO)) {
if (!pdfiumSuffix.test(release.name)) continue
try {
await extractTo(await getGh(release.downloadUrl), nativeDeps, {
chmod: 0o600,
overwrite: true,
})
found = true
break
} catch (error) {
console.warn('Failed to download pdfium, re-trying...')
if (__debug) console.error(error)
}
}
if (!found) throw new Error('NO_PDFIUM')
// cleanup
const cleanup = [
fs.rename(path.join(nativeDeps, 'LICENSE'), path.join(nativeDeps, 'LICENSE.pdfium')),
...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map(file =>
fs.unlink(path.join(nativeDeps, file)).catch(__debug ? console.error : noop)
),
]
switch (__osType) {
case 'Linux':
cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.so'), 0o750))
break
case 'Darwin':
cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.dylib'), 0o750))
break
}
await Promise.all(cleanup)
}
/**
* Download and extract ffmpeg libs for video thumbnails
* @param {string[]} machineId
* @param {string} nativeDeps
* @param {string[]} branches
*/
export async function downloadFFMpeg(machineId, nativeDeps, branches) {
const workflow = getConst(FFMPEG_WORKFLOW, machineId)
if (workflow == null) {
console.log('Checking FFMPeg...')
if (await which('ffmpeg')) {
// TODO: check ffmpeg version match what we need
return
} else {
throw new Error('NO_FFMPEG')
}
}
console.log('Downloading FFMPeg...')
const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId)
if (ffmpegSuffix == null) throw new Error('NO_FFMPEG')
let found = false
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
if (!ffmpegSuffix.test(artifact.name)) continue
try {
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id)
await extractTo(data, nativeDeps, {
chmod: 0o600,
recursive: true,
overwrite: true,
})
found = true
break
} catch (error) {
console.warn('Failed to download FFMpeg, re-trying...')
if (__debug) console.error(error)
}
}
if (!found) throw new Error('NO_FFMPEG')
}
/**
* Download and extract libheif libs for heif thumbnails
* @param {string[]} machineId
* @param {string} nativeDeps
* @param {string[]} branches
*/
export async function downloadLibHeif(machineId, nativeDeps, branches) {
const workflow = getConst(LIBHEIF_WORKFLOW, machineId)
if (workflow == null) return
console.log('Downloading LibHeif...')
const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId)
if (libHeifSuffix == null) throw new Error('NO_LIBHEIF')
let found = false
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
if (!libHeifSuffix.test(artifact.name)) continue
try {
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id)
await extractTo(data, nativeDeps, {
chmod: 0o600,
recursive: true,
overwrite: true,
})
found = true
break
} catch (error) {
console.warn('Failed to download LibHeif, re-trying...')
if (__debug) console.error(error)
}
}
if (!found) throw new Error('NO_LIBHEIF')
}

scripts/utils/git.mjs

@ -0,0 +1,87 @@
import { exec as execCb } from 'node:child_process'
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { env } from 'node:process'
import { promisify } from 'node:util'
const __debug = env.NODE_ENV === 'debug'
const exec = promisify(execCb)
/**
* @param {string} repoPath
* @returns {Promise<string?>}
*/
async function getRemoteBranchName(repoPath) {
let branchName
try {
branchName = (await exec('git symbolic-ref --short HEAD', { cwd: repoPath })).stdout.trim()
if (!branchName) throw new Error('Empty local branch name')
} catch (error) {
if (__debug) {
console.warn(`Failed to read git local branch name`)
console.error(error)
}
return null
}
let remoteBranchName
try {
remoteBranchName = (
await exec(`git for-each-ref --format="%(upstream:short)" refs/heads/${branchName}`, {
cwd: repoPath,
})
).stdout.trim()
const [_, branch] = remoteBranchName.split('/')
if (!branch) throw new Error('Empty remote branch name')
remoteBranchName = branch
} catch (error) {
if (__debug) {
console.warn(`Failed to read git remote branch name`)
console.error(error)
}
return null
}
return remoteBranchName
}
// https://stackoverflow.com/q/3651860#answer-67151923
// eslint-disable-next-line no-control-regex
const REF_REGEX = /ref:\s+refs\/heads\/(?<branch>[^\s\x00-\x1F:?[\\^~]+)/
const GITHUB_REF_REGEX = /^refs\/heads\//
/**
* @param {string} repoPath
* @returns {Promise<string[]>}
*/
export async function getGitBranches(repoPath) {
const branches = ['main', 'master']
if (env.GITHUB_HEAD_REF) {
branches.unshift(env.GITHUB_HEAD_REF)
} else if (env.GITHUB_REF) {
branches.unshift(env.GITHUB_REF.replace(GITHUB_REF_REGEX, ''))
}
const remoteBranchName = await getRemoteBranchName(repoPath)
if (remoteBranchName) {
branches.unshift(remoteBranchName)
} else {
let head
try {
head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' })
} catch (error) {
if (__debug) {
console.warn(`Failed to read git HEAD file`)
console.error(error)
}
return branches
}
const match = REF_REGEX.exec(head)
if (match?.groups?.branch) branches.unshift(match.groups.branch)
}
return branches
}
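A short sketch of the branch priority this helper yields; the repository path and branch name are hypothetical, only the ordering (CI ref or tracked remote branch first, then the main/master fallbacks) follows from the code above.

import { getGitBranches } from './git.mjs'

// On a checkout whose HEAD tracks origin/tauri-updater, with no GITHUB_* env set:
const branches = await getGitBranches('/path/to/spacedrive')
console.log(branches) // ['tauri-updater', 'main', 'master']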


@ -1,35 +1,36 @@
import * as fs from 'node:fs/promises'; import * as fs from 'node:fs/promises'
import { dirname, join as joinPath, posix as path } from 'node:path'; import { dirname, join as joinPath, posix as path } from 'node:path'
import { env } from 'node:process'; import { env } from 'node:process'
import { setTimeout } from 'node:timers/promises'; import { setTimeout } from 'node:timers/promises'
import { fileURLToPath } from 'node:url'; import { fileURLToPath } from 'node:url'
import { extract } from 'archive-wasm';
const __debug = env.NODE_ENV === 'debug'; import { fetch, Headers } from 'undici'
const __offline = env.OFFLINE === 'true';
const __filename = fileURLToPath(import.meta.url); const __debug = env.NODE_ENV === 'debug'
const __dirname = dirname(__filename); const __offline = env.OFFLINE === 'true'
const cacheDir = joinPath(__dirname, '.tmp'); const __filename = fileURLToPath(import.meta.url)
await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 }); const __dirname = dirname(__filename)
const cacheDir = joinPath(__dirname, '.tmp')
await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 })
// Note: Trailing slashes are important to correctly append paths // Note: Trailing slashes are important to correctly append paths
const GH = 'https://api.github.com/repos/'; const GH = 'https://api.github.com/repos/'
const NIGTHLY = 'https://nightly.link/'; const NIGTHLY = 'https://nightly.link/'
// Github routes // Github routes
const RELEASES = 'releases'; const RELEASES = 'releases'
const WORKFLOWS = 'actions/workflows'; const WORKFLOWS = 'actions/workflows'
const ARTIFACTS = 'actions/artifacts'; const ARTIFACTS = 'actions/artifacts'
// Default GH headers // Default GH headers
const GH_HEADERS = new Headers({ const GH_HEADERS = new Headers({
'Accept': 'application/vnd.github+json', Accept: 'application/vnd.github+json',
'X-GitHub-Api-Version': '2022-11-28' 'X-GitHub-Api-Version': '2022-11-28',
}); })
// Load github auth token if available // Load github auth token if available
if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN) if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN)
GH_HEADERS.append('Authorization', `Bearer ${env.GITHUB_TOKEN}`); GH_HEADERS.append('Authorization', `Bearer ${env.GITHUB_TOKEN}`)
/** /**
* @param {string} resource * @param {string} resource
@ -38,69 +39,69 @@ if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN)
*/ */
async function getCache(resource, headers) { async function getCache(resource, headers) {
/** @type {Buffer | undefined} */ /** @type {Buffer | undefined} */
let data; let data
/** @type {[string, string] | undefined} */ /** @type {[string, string] | undefined} */
let header; let header
// Don't cache in CI // Don't cache in CI
if (env.CI === 'true') return null; if (env.CI === 'true') return null
if (headers) if (headers)
resource += Array.from(headers.entries()) resource += Array.from(headers.entries())
.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since') .filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
.flat() .flat()
.join(':'); .join(':')
try { try {
const cache = JSON.parse( const cache = JSON.parse(
await fs.readFile(joinPath(cacheDir, Buffer.from(resource).toString('base64url')), { await fs.readFile(joinPath(cacheDir, Buffer.from(resource).toString('base64url')), {
encoding: 'utf8' encoding: 'utf8',
}) })
); )
if (cache && typeof cache === 'object') { if (cache && typeof cache === 'object') {
if (cache.etag && typeof cache.etag === 'string') { if (cache.etag && typeof cache.etag === 'string') {
header = ['If-None-Match', cache.etag]; header = ['If-None-Match', cache.etag]
} else if (cache.modifiedSince && typeof cache.modifiedSince === 'string') { } else if (cache.modifiedSince && typeof cache.modifiedSince === 'string') {
header = ['If-Modified-Since', cache.modifiedSince]; header = ['If-Modified-Since', cache.modifiedSince]
} }
if (cache.data && typeof cache.data === 'string') if (cache.data && typeof cache.data === 'string')
data = Buffer.from(cache.data, 'base64'); data = Buffer.from(cache.data, 'base64')
} }
} catch (error) { } catch (error) {
if (__debug) { if (__debug) {
console.warn(`CACHE MISS: ${resource}`); console.warn(`CACHE MISS: ${resource}`)
console.error(error); console.error(error)
} }
} }
return data ? { data, header } : null; return data ? { data, header } : null
} }
/** /**
* @param {Response} response * @param {import('undici').Response} response
* @param {string} resource * @param {string} resource
* @param {Buffer} [cachedData] * @param {Buffer} [cachedData]
* @param {Headers} [headers] * @param {Headers} [headers]
* @returns {Promise<Buffer>} * @returns {Promise<Buffer>}
*/ */
async function setCache(response, resource, cachedData, headers) { async function setCache(response, resource, cachedData, headers) {
const data = Buffer.from(await response.arrayBuffer()); const data = Buffer.from(await response.arrayBuffer())
// Don't cache in CI // Don't cache in CI
if (env.CI === 'true') return data; if (env.CI === 'true') return data
const etag = response.headers.get('ETag') || undefined; const etag = response.headers.get('ETag') || undefined
const modifiedSince = response.headers.get('Last-Modified') || undefined; const modifiedSince = response.headers.get('Last-Modified') || undefined
if (headers) if (headers)
resource += Array.from(headers.entries()) resource += Array.from(headers.entries())
.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since') .filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
.flat() .flat()
.join(':'); .join(':')
if (response.status === 304 || (response.ok && data.length === 0)) { if (response.status === 304 || (response.ok && data.length === 0)) {
// Cache hit // Cache hit
if (!cachedData) throw new Error('Empty cache hit ????'); if (!cachedData) throw new Error('Empty cache hit ????')
return cachedData; return cachedData
} }
try { try {
@ -109,18 +110,18 @@ async function setCache(response, resource, cachedData, headers) {
JSON.stringify({ JSON.stringify({
etag, etag,
modifiedSince, modifiedSince,
data: data.toString('base64') data: data.toString('base64'),
}), }),
{ mode: 0o640, flag: 'w+' } { mode: 0o640, flag: 'w+' }
); )
} catch (error) { } catch (error) {
if (__debug) { if (__debug) {
console.warn(`CACHE WRITE FAIL: ${resource}`); console.warn(`CACHE WRITE FAIL: ${resource}`)
console.error(error); console.error(error)
} }
} }
return data; return data
} }
/** /**
@ -130,30 +131,30 @@ async function setCache(response, resource, cachedData, headers) {
* @returns {Promise<Buffer>} * @returns {Promise<Buffer>}
*/ */
export async function get(resource, headers, preferCache) { export async function get(resource, headers, preferCache) {
if (headers == null) headers = new Headers(); if (headers == null) headers = new Headers()
if (resource instanceof URL) resource = resource.toString(); if (resource instanceof URL) resource = resource.toString()
const cache = await getCache(resource, headers); const cache = await getCache(resource, headers)
if (__offline) { if (__offline) {
if (cache?.data == null) if (cache?.data == null)
throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`); throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`)
return cache.data; return cache.data
} }
if (preferCache && cache?.data != null) return cache.data; if (preferCache && cache?.data != null) return cache.data
if (cache?.header) headers.append(...cache.header); if (cache?.header) headers.append(...cache.header)
const response = await fetch(resource, { headers }); const response = await fetch(resource, { headers })
if (!response.ok) { if (!response.ok) {
if (cache?.data) { if (cache?.data) {
if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`); if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`)
return cache.data; return cache.data
} }
throw new Error(response.statusText); throw new Error(response.statusText)
} }
return await setCache(response, resource, cache?.data, headers); return await setCache(response, resource, cache?.data, headers)
} }
// Header name Description // Header name Description
@ -163,8 +164,8 @@ export async function get(resource, headers, preferCache) {
// x-ratelimit-reset The time at which the current rate limit window resets in UTC epoch seconds. // x-ratelimit-reset The time at which the current rate limit window resets in UTC epoch seconds.
const RATE_LIMIT = { const RATE_LIMIT = {
reset: 0, reset: 0,
remaining: Infinity remaining: Infinity,
}; }
/** /**
* Get resource from a Github route with some pre-defined parameters * Get resource from a Github route with some pre-defined parameters
@ -172,52 +173,52 @@ const RATE_LIMIT = {
* @returns {Promise<Buffer>} * @returns {Promise<Buffer>}
*/ */
export async function getGh(route) { export async function getGh(route) {
route = new URL(route, GH).toString(); route = new URL(route, GH).toString()
const cache = await getCache(route); const cache = await getCache(route)
if (__offline) { if (__offline) {
if (cache?.data == null) if (cache?.data == null)
throw new Error(`OFFLINE MODE: Cache for request ${route} doesn't exist`); throw new Error(`OFFLINE MODE: Cache for request ${route} doesn't exist`)
return cache?.data; return cache?.data
} }
if (RATE_LIMIT.remaining === 0) { if (RATE_LIMIT.remaining === 0) {
if (cache?.data) return cache.data; if (cache?.data) return cache.data
console.warn( console.warn(
`RATE LIMIT: Waiting ${RATE_LIMIT.reset} seconds before contacting Github again... [CTRL+C to cancel]` `RATE LIMIT: Waiting ${RATE_LIMIT.reset} seconds before contacting Github again... [CTRL+C to cancel]`
); )
await setTimeout(RATE_LIMIT.reset * 1000)
}
const headers = new Headers(GH_HEADERS)
if (cache?.header) headers.append(...cache.header)
const response = await fetch(route, { method: 'GET', headers })
const rateReset = Number.parseInt(response.headers.get('x-ratelimit-reset') ?? '')
const rateRemaining = Number.parseInt(response.headers.get('x-ratelimit-remaining') ?? '')
if (!(Number.isNaN(rateReset) || Number.isNaN(rateRemaining))) {
const reset = rateReset - Date.now() / 1000
if (reset > RATE_LIMIT.reset) RATE_LIMIT.reset = reset
if (rateRemaining < RATE_LIMIT.remaining) {
RATE_LIMIT.remaining = rateRemaining
if (__debug) {
console.warn(`Github remaining requests: ${RATE_LIMIT.remaining}`)
await setTimeout(5000)
}
}
}
if (!response.ok) {
if (cache?.data) {
if (__debug) console.warn(`CACHE HIT due to fail: ${route} ${response.statusText}`)
return cache.data
}
if (response.status === 403 && RATE_LIMIT.remaining === 0) return await getGh(route)
throw new Error(response.statusText)
}
return await setCache(response, route, cache?.data)
}
/**
@ -225,17 +226,17 @@ export async function getGh(route) {
* @yields {{name: string, downloadUrl: string}}
*/
export async function* getGhReleasesAssets(repo) {
let page = 0
while (true) {
// "${_gh_url}/protocolbuffers/protobuf/releases?page=${_page}&per_page=100"
const releases = JSON.parse(
(await getGh(path.join(repo, `${RELEASES}?page=${page++}&per_page=100`))).toString(
'utf8'
)
)
if (!Array.isArray(releases)) throw new Error(`Error: ${JSON.stringify(releases)}`)
if (releases.length === 0) return
for (const release of /** @type {unknown[]} */ (releases)) {
if (
@ -246,9 +247,9 @@ export async function* getGhReleasesAssets(repo) {
Array.isArray(release.assets)
)
)
throw new Error(`Invalid release: ${release}`)
if ('prerelease' in release && release.prerelease) continue
for (const asset of /** @type {unknown[]} */ (release.assets)) {
if (
@ -261,9 +262,9 @@ export async function* getGhReleasesAssets(repo) {
typeof asset.browser_download_url === 'string'
)
)
throw new Error(`Invalid release.asset: ${asset}`)
yield { name: asset.name, downloadUrl: asset.browser_download_url }
}
}
}
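A minimal usage sketch for the generator above; the './github.mjs' import path and the repo slug are assumptions for illustration, not part of this commit:

// Hypothetical caller of getGhReleasesAssets; import path and repo slug are placeholders.
import { getGhReleasesAssets } from './github.mjs'

for await (const asset of getGhReleasesAssets('protocolbuffers/protobuf')) {
	// Each yielded item pairs the asset name with its browser download URL
	if (asset.name.endsWith('.zip')) {
		console.log(asset.name, asset.downloadUrl)
		break
	}
}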
@ -276,11 +277,11 @@ export async function* getGhReleasesAssets(repo) {
* @yields {{ id: number, name: string }}
*/
export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
if (!branch) branch = 'main'
if (typeof branch === 'string') branch = [branch]
if (!(branch instanceof Set)) branch = new Set(branch)
let page = 0
while (true) {
const workflow = /** @type {unknown} */ (
JSON.parse(
@ -295,7 +296,7 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
)
).toString('utf8')
)
)
if (
!(
workflow &&
@ -304,9 +305,9 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
Array.isArray(workflow.workflow_runs)
)
)
throw new Error(`Error: ${JSON.stringify(workflow)}`)
if (workflow.workflow_runs.length === 0) return
for (const run of /** @type {unknown[]} */ (workflow.workflow_runs)) {
if (
@ -319,13 +320,13 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
typeof run.artifacts_url === 'string'
)
)
throw new Error(`Invalid Workflow run: ${run}`)
if (!branch.has(run.head_branch)) continue
const response = /** @type {unknown} */ (
JSON.parse((await getGh(run.artifacts_url)).toString('utf8'))
)
if (
!(
@ -335,7 +336,7 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
Array.isArray(response.artifacts)
)
)
throw new Error(`Error: ${JSON.stringify(response)}`)
for (const artifact of /** @type {unknown[]} */ (response.artifacts)) {
if (
@ -348,9 +349,9 @@ export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
typeof artifact.name === 'string'
)
)
throw new Error(`Invalid artifact: ${artifact}`)
yield { id: artifact.id, name: artifact.name }
}
}
}
@ -366,11 +367,11 @@ export async function getGhArtifactContent(repo, id) {
if (GH_HEADERS.has('Authorization')) {
try {
// "${_gh_url}/${_sd_gh_path}/actions/artifacts/${_artifact_id}/zip"
return await getGh(path.join(repo, ARTIFACTS, id.toString(), 'zip'))
} catch (error) {
if (__debug) {
console.warn('Failed to download artifact from github, fallback to nightly.link')
console.error(error)
}
}
}
@ -381,5 +382,5 @@ export async function getGhArtifactContent(repo, id) {
* Use it when running in environments that are not authenticated with github
* "https://nightly.link/${_sd_gh_path}/actions/artifacts/${_artifact_id}.zip"
*/
return await get(new URL(path.join(repo, ARTIFACTS, `${id}.zip`), NIGTHLY), null, true)
}
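A brief sketch of how the workflow-artifact helpers above might be chained by a caller; the import path, repo slug, workflow file name, and artifact filter are placeholder assumptions:

// Hypothetical caller; './github.mjs', the repo slug, workflow name and target filter are placeholders.
import * as fs from 'node:fs/promises'
import { getGhArtifactContent, getGhWorkflowRunArtifacts } from './github.mjs'

for await (const artifact of getGhWorkflowRunArtifacts('spacedriveapp/native-deps', 'release.yml')) {
	if (!artifact.name.includes('x86_64-apple-darwin')) continue
	// getGhArtifactContent resolves to the artifact zip contents (used as a Buffer elsewhere in this file)
	const zip = await getGhArtifactContent('spacedriveapp/native-deps', artifact.id)
	await fs.writeFile(`${artifact.name}.zip`, zip)
	break
}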
@ -0,0 +1,68 @@
import { exec as execCb } from 'node:child_process'
import * as os from 'node:os'
import { env } from 'node:process'
import { promisify } from 'node:util'
const __debug = env.NODE_ENV === 'debug'
/** @type {'musl' | 'glibc'} */
let libc = 'glibc'
if (os.type() === 'Linux') {
try {
const exec = promisify(execCb)
if ((await exec('ldd /bin/ls')).stdout.includes('musl')) {
libc = 'musl'
}
} catch (error) {
if (__debug) {
console.warn(`Failed to check libc type`)
console.error(error)
}
}
}
/** @type {Record<string, string>} */
const OS_TYPE = {
darwin: 'Darwin',
windows: 'Windows_NT',
linux: 'Linux',
}
/** @returns {['Darwin' | 'Windows_NT', 'x86_64' | 'aarch64'] | ['Linux', 'x86_64' | 'aarch64', 'musl' | 'glibc']} */
export function getMachineId() {
let _os, _arch
let _libc = libc
/**
* Supported TARGET_TRIPLE:
* x86_64-apple-darwin
* aarch64-apple-darwin
* x86_64-pc-windows-msvc
* aarch64-pc-windows-msvc
* x86_64-unknown-linux-gnu
* x86_64-unknown-linux-musl
* aarch64-unknown-linux-gnu
* aarch64-unknown-linux-musl
*/
if (env.TARGET_TRIPLE) {
const target = env.TARGET_TRIPLE.split('-')
_os = OS_TYPE[target[2] ?? '']
_arch = target[0]
if (_os === 'Linux') _libc = target[3]?.includes('musl') ? 'musl' : 'glibc'
} else {
// Current machine identifiers
_os = os.type()
_arch = os.machine()
if (_arch === 'arm64') _arch = 'aarch64'
}
if (_arch !== 'x86_64' && _arch !== 'aarch64') throw new Error(`Unsupported architecture`)
if (_os === 'Linux') {
return [_os, _arch, _libc]
} else if (_os !== 'Darwin' && _os !== 'Windows_NT') {
throw new Error(`Unsupported OS`)
}
return [_os, _arch]
}
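A quick sketch of how getMachineId() might be consumed to rebuild a Rust target triple; the './machine-id.mjs' import path and the OS-to-triple mapping are assumptions for illustration:

// Hypothetical consumer; import path and the mapping back to a triple are assumptions.
import { getMachineId } from './machine-id.mjs'

const [osType, arch, libc] = getMachineId()
const triple =
	osType === 'Darwin'
		? `${arch}-apple-darwin`
		: osType === 'Windows_NT'
			? `${arch}-pc-windows-msvc`
			: `${arch}-unknown-linux-${libc === 'musl' ? 'musl' : 'gnu'}`
console.log(`Building for ${triple}`)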
@ -0,0 +1,142 @@
import { exec as _exec } from 'node:child_process'
import * as fs from 'node:fs/promises'
import * as os from 'node:os'
import * as path from 'node:path'
import { env } from 'node:process'
import { promisify } from 'node:util'
import * as semver from 'semver'
import { copyLinuxLibs, copyWindowsDLLs } from './shared.mjs'
const exec = promisify(_exec)
const __debug = env.NODE_ENV === 'debug'
/**
* @param {string} nativeDeps
* @returns {Promise<string?>}
*/
export async function tauriUpdaterKey(nativeDeps) {
if (env.TAURI_PRIVATE_KEY) return null
// pnpm exec tauri signer generate -w
const privateKeyPath = path.join(nativeDeps, 'tauri.key')
const publicKeyPath = path.join(nativeDeps, 'tauri.key.pub')
const readKeys = () =>
Promise.all([
fs.readFile(publicKeyPath, { encoding: 'utf-8' }),
fs.readFile(privateKeyPath, { encoding: 'utf-8' }),
])
let privateKey, publicKey
try {
;[publicKey, privateKey] = await readKeys()
if (!(publicKey && privateKey)) throw new Error('Empty keys')
} catch (err) {
if (__debug) {
console.warn('Failed to read tauri updater keys')
console.error(err)
}
const quote = os.type() === 'Windows_NT' ? '"' : "'"
await exec(`pnpm exec tauri signer generate --ci -w ${quote}${privateKeyPath}${quote}`)
;[publicKey, privateKey] = await readKeys()
if (!(publicKey && privateKey)) throw new Error('Empty keys')
}
env.TAURI_PRIVATE_KEY = privateKey
return publicKey
}
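A small sketch of how tauriUpdaterKey might be called from a build script; the './tauri.mjs' import path and the nativeDeps path are placeholders:

// Hypothetical call site; import path and nativeDeps path are placeholders.
// tauriUpdaterKey returns null when TAURI_PRIVATE_KEY is already set; otherwise it
// reads or generates an ad-hoc key pair and exports the private key via the environment.
import { tauriUpdaterKey } from './tauri.mjs'

const pubKey = await tauriUpdaterKey('/path/to/native-deps')
if (pubKey != null) {
	// The public key is what ends up in the patched config as tauri.updater.pubkey (see patchTauri below)
	console.log('Using ad-hoc updater public key:', pubKey.slice(0, 20))
}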
/**
* @param {string} root
* @param {string} nativeDeps
* @param {string[]} args
* @returns {Promise<string[]>}
*/
export async function patchTauri(root, nativeDeps, args) {
if (args.findIndex(e => e === '-c' || e === '--config') !== -1) {
throw new Error('Custom tauri build config is not supported.')
}
// Location for desktop app tauri code
const tauriRoot = path.join(root, 'apps', 'desktop', 'src-tauri')
const osType = os.type()
const resources =
osType === 'Linux'
? await copyLinuxLibs(root, nativeDeps)
: osType === 'Windows_NT'
? await copyWindowsDLLs(root, nativeDeps)
: { files: [], toClean: [] }
const tauriPatch = {
tauri: {
bundle: {
macOS: {
minimumSystemVersion: '',
},
resources: resources.files,
},
updater: /** @type {{ pubkey?: string }} */ ({}),
},
}
const tauriConfig = await fs
.readFile(path.join(tauriRoot, 'tauri.conf.json'), 'utf-8')
.then(JSON.parse)
if (args[0] === 'build') {
if (tauriConfig?.tauri?.updater?.active) {
const pubKey = await tauriUpdaterKey(nativeDeps)
if (pubKey != null) tauriPatch.tauri.updater.pubkey = pubKey
}
}
if (osType === 'Darwin') {
// ARM64 support was added in macOS 11, but we need at least 11.2 due to our ffmpeg build
const macOSArm64MinimumVersion = '11.2'
let macOSMinimumVersion = tauriConfig?.tauri?.bundle?.macOS?.minimumSystemVersion
const targets = args
.filter((_, index, args) => {
if (index === 0) return false
const previous = args[index - 1]
return previous === '-t' || previous === '--target'
})
.flatMap(target => target.split(','))
if (
(targets.includes('aarch64-apple-darwin') ||
(targets.length === 0 && process.arch === 'arm64')) &&
(macOSMinimumVersion == null ||
semver.lt(
/** @type {import('semver').SemVer} */ (semver.coerce(macOSMinimumVersion)),
/** @type {import('semver').SemVer} */ (
semver.coerce(macOSArm64MinimumVersion)
)
))
) {
macOSMinimumVersion = macOSArm64MinimumVersion
console.log(
`aarch64-apple-darwin target detected, setting minimum system version to ${macOSMinimumVersion}`
)
}
if (macOSMinimumVersion) {
env.MACOSX_DEPLOYMENT_TARGET = macOSMinimumVersion
tauriPatch.tauri.bundle.macOS.minimumSystemVersion = macOSMinimumVersion
} else {
throw new Error('No minimum macOS version detected, please review tauri.conf.json')
}
}
const tauriPatchConf = path.join(tauriRoot, 'tauri.conf.patch.json')
await fs.writeFile(tauriPatchConf, JSON.stringify(tauriPatch, null, 2))
// Modify args to load patched tauri config
args.splice(1, 0, '-c', tauriPatchConf)
// Files to be removed
return [tauriPatchConf, ...resources.toClean]
}
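A sketch of how a wrapper script might drive patchTauri; the paths, import locations, and the pnpm invocation are assumptions, since the real driver script is not part of this excerpt:

// Hypothetical driver; root/nativeDeps paths and import locations are placeholders.
// patchTauri mutates `args` in place to inject `-c tauri.conf.patch.json` and
// returns the temporary files that should be removed once the build is done.
import * as fs from 'node:fs/promises'
import { patchTauri } from './tauri.mjs'
import spawn from './spawn.mjs'

const root = '/path/to/spacedrive'
const nativeDeps = '/path/to/native-deps'
const args = ['build', '--target', 'aarch64-apple-darwin']
const toClean = await patchTauri(root, nativeDeps, args)
try {
	await spawn('pnpm', ['exec', 'tauri', ...args], root)
} finally {
	await Promise.all(toClean.map(file => fs.unlink(file).catch(() => {})))
}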
scripts/utils/shared.mjs Normal file
@ -0,0 +1,200 @@
import { exec as execCb } from 'node:child_process'
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { env } from 'node:process'
import { promisify } from 'node:util'
const exec = promisify(execCb)
const signId = env.APPLE_SIGNING_IDENTITY || '-'
/**
* @param {string} origin
* @param {string} target
* @param {boolean} [rename]
*/
async function link(origin, target, rename) {
const parent = path.dirname(target)
await fs.mkdir(parent, { recursive: true, mode: 0o751 })
await (rename ? fs.rename(origin, target) : fs.symlink(path.relative(parent, origin), target))
}
/**
* Move headers and dylibs of external deps to our framework
* @param {string} nativeDeps
*/
export async function setupMacOsFramework(nativeDeps) {
// External deps
const lib = path.join(nativeDeps, 'lib')
const include = path.join(nativeDeps, 'include')
// Framework
const framework = path.join(nativeDeps, 'FFMpeg.framework')
const headers = path.join(framework, 'Headers')
const libraries = path.join(framework, 'Libraries')
const documentation = path.join(framework, 'Resources', 'English.lproj', 'Documentation')
// Move files
await Promise.all([
// Move pdfium license to framework
fs.rename(
path.join(nativeDeps, 'LICENSE.pdfium'),
path.join(documentation, 'LICENSE.pdfium')
),
// Move dylibs to framework
fs.readdir(lib, { recursive: true, withFileTypes: true }).then(files =>
// Wait for every dylib move to finish before the outer Promise.all resolves
Promise.all(
files
.filter(
entry =>
(entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib')
)
.map(entry => {
const file = path.join(entry.path, entry.name)
const newFile = path.resolve(libraries, path.relative(lib, file))
return link(file, newFile, true)
})
)
),
// Move headers to framework
fs.readdir(include, { recursive: true, withFileTypes: true }).then(files =>
// Likewise, wait for every header move to finish
Promise.all(
files
.filter(
entry =>
(entry.isFile() || entry.isSymbolicLink()) &&
!entry.name.endsWith('.proto')
)
.map(entry => {
const file = path.join(entry.path, entry.name)
const newFile = path.resolve(headers, path.relative(include, file))
return link(file, newFile, true)
})
)
),
])
}
/**
* Symlink shared libs paths for Linux
* @param {string} root
* @param {string} nativeDeps
* @returns {Promise<void>}
*/
export async function symlinkSharedLibsLinux(root, nativeDeps) {
// rpath=${ORIGIN}/../lib/spacedrive
const targetLib = path.join(root, 'target', 'lib')
const targetRPath = path.join(targetLib, 'spacedrive')
await fs.unlink(targetRPath).catch(() => {})
await fs.mkdir(targetLib, { recursive: true })
await link(path.join(nativeDeps, 'lib'), targetRPath)
}
/**
* Symlink shared libs paths for macOS
* @param {string} nativeDeps
*/
export async function symlinkSharedLibsMacOS(nativeDeps) {
// External deps
const lib = path.join(nativeDeps, 'lib')
const include = path.join(nativeDeps, 'include')
// Framework
const framework = path.join(nativeDeps, 'FFMpeg.framework')
const headers = path.join(framework, 'Headers')
const libraries = path.join(framework, 'Libraries')
// Link files
await Promise.all([
// Link header files
fs.readdir(headers, { recursive: true, withFileTypes: true }).then(files =>
Promise.all(
files
.filter(entry => entry.isFile() || entry.isSymbolicLink())
.map(entry => {
const file = path.join(entry.path, entry.name)
return link(file, path.resolve(include, path.relative(headers, file)))
})
)
),
// Link dylibs
fs.readdir(libraries, { recursive: true, withFileTypes: true }).then(files =>
Promise.all(
files
.filter(
entry =>
(entry.isFile() || entry.isSymbolicLink()) &&
entry.name.endsWith('.dylib')
)
.map(entry => {
const file = path.join(entry.path, entry.name)
/** @type {Promise<unknown>[]} */
const actions = [
link(file, path.resolve(lib, path.relative(libraries, file))),
]
// Sign dylib (Required for it to work on macOS 13+)
if (entry.isFile())
actions.push(exec(`codesign -s "${signId}" -f "${file}"`))
return actions.length > 1 ? Promise.all(actions) : actions[0]
})
)
),
])
}
/**
* Copy Windows DLLs for tauri build
* @param {string} root
* @param {string} nativeDeps
* @returns {Promise<{files: string[], toClean: string[]}>}
*/
export async function copyWindowsDLLs(root, nativeDeps) {
const tauriSrc = path.join(root, 'apps', 'desktop', 'src-tauri')
const files = await Promise.all(
await fs.readdir(path.join(nativeDeps, 'bin'), { withFileTypes: true }).then(files =>
files
.filter(entry => entry.isFile() && entry.name.endsWith(`.dll`))
.map(async entry => {
await fs.copyFile(
path.join(entry.path, entry.name),
path.join(tauriSrc, entry.name)
)
return entry.name
})
)
)
return { files, toClean: files.map(file => path.join(tauriSrc, file)) }
}
/**
* Copy Linux shared libs for tauri build
* @param {string} root
* @param {string} nativeDeps
* @returns {Promise<{files: string[], toClean: string[]}>}
*/
export async function copyLinuxLibs(root, nativeDeps) {
// rpath=${ORIGIN}/../lib/spacedrive
const tauriSrc = path.join(root, 'apps', 'desktop', 'src-tauri')
const files = await fs
.readdir(path.join(nativeDeps, 'lib'), { withFileTypes: true })
.then(files =>
Promise.all(
files
.filter(
entry =>
(entry.isFile() || entry.isSymbolicLink()) &&
(entry.name.endsWith('.so') || entry.name.includes('.so.'))
)
.map(async entry => {
await fs.copyFile(
path.join(entry.path, entry.name),
path.join(tauriSrc, entry.name)
)
return entry.name
})
)
)
return {
files,
toClean: files.map(file => path.join(tauriSrc, file)),
}
}
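For illustration, the shape of the value the copy helpers return and how it is meant to be used; the paths and library names below are made-up placeholders:

// Illustration only: copyLinuxLibs/copyWindowsDLLs return value. Paths and names are placeholders.
import * as fs from 'node:fs/promises'
import { copyLinuxLibs } from './shared.mjs'

const resources = await copyLinuxLibs('/path/to/spacedrive', '/path/to/native-deps')
// resources.files   -> e.g. ['libavcodec.so.60', ...]  (file names, fed to the tauri bundle resources)
// resources.toClean -> absolute paths of the temporary copies placed inside src-tauri
// ...run the tauri build here, then remove the temporary copies:
await Promise.all(resources.toClean.map(file => fs.unlink(file)))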
scripts/utils/spawn.mjs Normal file
@ -0,0 +1,33 @@
import { spawn } from 'node:child_process'
/**
* @param {string} command
* @param {string[]} args
* @param {string} [cwd]
* @returns {Promise<void>}
*/
export default function (command, args, cwd) {
if (typeof command !== 'string' || command.length === 0)
throw new Error('Command must be a string and not empty')
if (args == null) args = []
else if (!Array.isArray(args) || args.some(arg => typeof arg !== 'string'))
throw new Error('Args must be an array of strings')
return new Promise((resolve, reject) => {
const child = spawn(command, args, { cwd, shell: true, stdio: 'inherit' })
process.on('SIGTERM', () => child.kill('SIGTERM'))
process.on('SIGINT', () => child.kill('SIGINT'))
process.on('SIGBREAK', () => child.kill('SIGBREAK'))
process.on('SIGHUP', () => child.kill('SIGHUP'))
child.on('error', reject)
child.on('exit', (code, signal) => {
if (code === null) code = signal === 'SIGINT' ? 0 : 1
if (code === 0) {
resolve()
} else {
reject(code)
}
})
})
}
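A minimal usage sketch for the spawn wrapper above; the command, arguments, and cwd are placeholders:

// Hypothetical usage; command, args and cwd are placeholders. The promise resolves on
// exit code 0 and rejects with the non-zero code (or the spawn error) otherwise.
import spawn from './spawn.mjs'

try {
	await spawn('pnpm', ['exec', 'tauri', 'build'], '/path/to/spacedrive')
} catch (code) {
	console.error(`tauri build failed with exit code ${code}`)
	process.exit(typeof code === 'number' ? code : 1)
}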
scripts/utils/which.mjs Normal file
@ -0,0 +1,41 @@
import { exec as execCb } from 'node:child_process'
import * as fs from 'node:fs/promises'
import * as os from 'node:os'
import * as path from 'node:path'
import { env } from 'node:process'
import { promisify } from 'node:util'
const exec = promisify(execCb)
/**
* @param {string} progName
* @returns {Promise<boolean>}
*/
async function where(progName) {
// Reject paths
if (/[\\]/.test(progName)) return false
try {
await exec(`where "${progName}"`)
} catch {
return false
}
return true
}
/**
* @param {string} progName
* @returns {Promise<boolean>}
*/
export async function which(progName) {
return os.type() === 'Windows_NT'
? where(progName)
: Promise.any(
Array.from(new Set(env.PATH?.split(':'))).map(dir =>
fs.access(path.join(dir, progName), fs.constants.X_OK)
)
).then(
() => true,
() => false
)
}
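And a sketch of how which() might gate an external tool check; 'protoc' is just an example program name:

// Hypothetical check for an external tool; the program name is an example.
import { which } from './which.mjs'

if (!(await which('protoc'))) {
	console.error('protoc not found in PATH, please install protobuf before continuing')
	process.exit(1)
}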
@ -1,41 +0,0 @@
import { exec as execCb } from 'node:child_process';
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { env } from 'node:process';
import { promisify } from 'node:util';
const exec = promisify(execCb);
/**
* @param {string} progName
* @returns {Promise<boolean>}
*/
async function where(progName) {
// Reject paths
if (/[\\]/.test(progName)) return false;
try {
await exec(`where "${progName}"`);
} catch {
return false;
}
return true;
}
/**
* @param {string} progName
* @returns {Promise<boolean>}
*/
export async function which(progName) {
return os.type() === 'Windows_NT'
? where(progName)
: Promise.any(
Array.from(new Set(env.PATH?.split(':'))).map((dir) =>
fs.access(path.join(dir, progName), fs.constants.X_OK)
)
).then(
() => true,
() => false
);
}
@ -2,7 +2,7 @@
"$schema": "https://turborepo.org/schema.json", "$schema": "https://turborepo.org/schema.json",
"pipeline": { "pipeline": {
"build": { "build": {
"inputs": ["!src-tauri/**"], "inputs": ["**/*.ts", "!src-tauri/**", "!node_modules/**"],
"dependsOn": ["^build"], "dependsOn": ["^build"],
"outputs": ["dist/**"] "outputs": ["dist/**"]
}, },