[ENG-265] Improve setup scripts (#1368)

* Setup script revamp

* Move toml to dev dep + CI runs the postinstall in debug mode

* Fix windows CI

* chmod +x scripts/setup.sh

* Fix eslint and ts CI

* Remove binstall + Fix trying to read input in CI

* Doesn't need to check pnpm and rust in CI

* Run postinstall script for Clippy CI

* Attempt to fix windows CI not running postinstall
 - Ignore cache when running postinstall on CI

* committed generated config.toml by mistake

* Pass GITHUB_TOKEN to `pnpm i`

* Update archive-wasm + Increase minimum node version to 18.17

* CI: Move rust setup after post-install script

* Revert: CI: Move rust setup after post-install script

* Fix CI, generate dummy cargo config.toml to fix prisma generation

* Fix windows CI

* CI: Fix wrong command

---------

Co-authored-by: Utku <74243531+utkubakir@users.noreply.github.com>
This commit is contained in:
Vítor Vasconcellos 2023-09-28 07:03:46 -03:00 committed by GitHub
parent fe1350d70d
commit bd0a7ff434
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
30 changed files with 1376 additions and 738 deletions

View file

@ -1,3 +0,0 @@
[alias]
prisma = "run -p prisma-cli --bin prisma --"
prisma-sync = "run -p prisma-cli --bin sync --"

View file

@ -0,0 +1,32 @@
[env]
{{#protoc}}
PROTOC = "{{{protoc}}}"
{{/protoc}}
{{#ffmpeg}}
FFMPEG_DIR = "{{{ffmpeg}}}"
{{/ffmpeg}}
{{#isMacOS}}
[target.x86_64-apple-darwin]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
[target.aarch64-apple-darwin]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
{{/isMacOS}}
{{#isWin}}
[target.x86_64-pc-windows-msvc]
rustflags = ["-L", "{{{projectRoot}}}\\target\\Frameworks\\lib"]
{{/isWin}}
{{#isLinux}}
[target.x86_64-unknown-linux-gnu]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
[target.aarch64-unknown-linux-gnu]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
{{/isLinux}}
[alias]
prisma = "run -p prisma-cli --bin prisma --"
prisma-sync = "run -p prisma-cli --bin sync --"

View file

@ -5,6 +5,10 @@ inputs:
description: Github token
required: false
default: ''
ignorePostInstall:
description: Don't run post install
required: false
default: 'false'
runs:
using: 'composite'
steps:
@ -22,4 +26,8 @@ runs:
- name: Install pnpm deps
shell: ${{ runner.os == 'Windows' && 'powershell' || 'bash' }}
env:
NODE_ENV: debug
GITHUB_TOKEN: ${{ inputs.token }}
IGNORE_POSTINSTALL: ${{ inputs.ignorePostInstall }}
run: pnpm i --frozen-lockfile

View file

@ -26,6 +26,10 @@ runs:
prefix-key: 'v0-rust-deps'
shared-key: ${{ inputs.targets }}
- name: Cargo config.toml
shell: bash
run: echo '{}' | npx -y mustache - .cargo/config.toml.mustache .cargo/config.toml
- name: Restore cached Prisma codegen
id: cache-prisma-restore
uses: actions/cache/restore@v3

View file

@ -48,18 +48,18 @@ runs:
targets: ${{ inputs.targets }}
save-cache: ${{ inputs.save-cache }}
- name: Run 'setup-system.sh' script
- name: Run setup.sh script
shell: bash
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
run: ./.github/scripts/setup-system.sh ${{ inputs.setup-arg }}
run: ./scripts/setup.sh ${{ inputs.setup-arg }}
env:
TARGET: ${{ inputs.targets }}
GITHUB_TOKEN: ${{ inputs.token }}
APPLE_SIGNING_IDENTITY: ${{ env.APPLE_SIGNING_IDENTITY }}
- name: Run 'setup-system.ps1' script
- name: Run setup.ps1 script
shell: powershell
if: ${{ runner.os == 'Windows' }}
run: ./.github/scripts/setup-system.ps1
run: ./scripts/setup.ps1
env:
GITHUB_TOKEN: ${{ inputs.token }}

View file

@ -1,428 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
if [ "${CI:-}" = "true" ]; then
set -x
fi
# Force xz to use multithreaded extraction
export XZ_OPT='-T0'
SYSNAME="$(uname)"
FFMPEG_VERSION='6.0'
# err: print each message argument on its own line to stderr, then abort.
# Usage: err <line1> [<line2> ...]
err() {
	for _line in "$@"; do
		# Fix: echo the current line ("$_line"), not the whole argument list
		# ("$@"), which previously printed all messages once per argument.
		echo "$_line" >&2
	done
	exit 1
}
# has: succeed iff the single named command is resolvable in PATH.
# Usage: has <command>
has() {
	[ "$#" -eq 1 ] || err "Usage: has <command>"
	command -v "$1" >/dev/null 2>&1
}
_gh_url="https://api.github.com/repos"
_sd_gh_path='spacedriveapp/spacedrive'
# gh_curl: fetch a GitHub API URL with the standard API headers.
# Usage: gh_curl <api_route>
# Sends Accept + API-version headers; adds a Bearer token when
# GITHUB_TOKEN is set to avoid anonymous rate limits.
gh_curl() {
	if [ "$#" -ne 1 ]; then
		err "Usage: gh_curl <api_route>"
	fi
	url="$1"
	# Required headers for GitHub API
	# NOTE: "set --" rewrites the positional parameters to build curl's
	# argument list portably (POSIX sh has no arrays); $url was saved above
	# because this clobbers "$1".
	set -- -LSs -H "Accept: application/vnd.github+json" -H "X-GitHub-Api-Version: 2022-11-28"
	# Add authorization header if GITHUB_TOKEN is set, to avoid being rate limited
	if [ -n "${GITHUB_TOKEN:-}" ]; then
		set -- "$@" -H "Authorization: Bearer $GITHUB_TOKEN"
	fi
	curl "$@" "$url"
}
# script_failure: ERR-trap handler; report the failing line number when the
# shell provides one, then abort via err.
script_failure() {
	case "${1:-}" in
	'') _line='(unknown)' ;;
	*) _line="on line $1" ;;
	esac
	err "An error occurred $_line." "Setup failed."
}
trap 'script_failure ${LINENO:-}' ERR
echo "Setting up this system for Spacedrive development."
echo
# Change CWD to the directory of this script
CDPATH='' cd -- "$(dirname -- "$0")"
_script_path="$(pwd -P)"
_cargo_config="${_script_path}/../../.cargo"
rm -rf "$_cargo_config/config"
if ! has cargo; then
err 'Rust was not found.' \
"Ensure the 'rustc' and 'cargo' binaries are in your \$PATH." \
'https://rustup.rs'
fi
if [ "${CI:-}" != "true" ] && [ "${spacedrive_skip_pnpm_check:-}" != "true" ]; then
echo "checking for pnpm..."
if ! has pnpm; then
err 'pnpm was not found.' \
"Ensure the 'pnpm' command is in your \$PATH." \
'You must use pnpm for this project; yarn and npm are not allowed.' \
'https://pnpm.io/installation'
else
echo "found pnpm!"
fi
else
echo "Skipping pnpm check."
fi
if [ "${CI:-}" != "true" ]; then
echo "Installing Rust tools"
cargo install cargo-watch
fi
echo
if [ "${1:-}" = "mobile" ]; then
echo "Setting up for mobile development."
# iOS targets
if [ "$SYSNAME" = "Darwin" ]; then
echo "Checking for Xcode..."
if ! /usr/bin/xcodebuild -version >/dev/null; then
err "Xcode was not detected." \
"Please ensure Xcode is installed and try again."
fi
echo "Installing iOS targets for Rust..."
rustup target add aarch64-apple-ios
rustup target add aarch64-apple-ios-sim
rustup target add x86_64-apple-ios # for CI
fi
# Android requires python
if ! command -v python3 >/dev/null; then
err 'python3 was not found.' \
'This is required for Android mobile development.' \
"Ensure 'python3' is available in your \$PATH and try again."
fi
# Android targets
echo "Setting up Android targets for Rust..."
rustup target add armv7-linux-androideabi # for arm
rustup target add aarch64-linux-android # for arm64
rustup target add i686-linux-android # for x86
rustup target add x86_64-linux-android # for x86_64
rustup target add x86_64-unknown-linux-gnu # for linux-x86-64
rustup target add aarch64-apple-darwin # for darwin arm64 (if you have an M1 Mac)
rustup target add x86_64-apple-darwin # for darwin x86_64 (if you have an Intel Mac)
rustup target add x86_64-pc-windows-gnu # for win32-x86-64-gnu
rustup target add x86_64-pc-windows-msvc # for win32-x86-64-msvc
echo "Done setting up mobile targets."
echo
fi
if [ "$SYSNAME" = "Linux" ]; then
if has apt-get; then
echo "Detected apt!"
echo "Installing dependencies with apt..."
# Tauri dependencies
DEBIAN_TAURI_DEPS="libwebkit2gtk-4.0-dev build-essential curl wget libssl-dev libgtk-3-dev libayatana-appindicator3-dev librsvg2-dev patchelf"
# FFmpeg dependencies
DEBIAN_FFMPEG_DEPS="libheif-dev libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev libavutil-dev libswscale-dev libswresample-dev ffmpeg"
# Webkit2gtk requires gstreamer plugins for video playback to work
DEBIAN_VIDEO_DEPS="gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-libav gstreamer1.0-pipewire gstreamer1.0-plugins-bad gstreamer1.0-plugins-base gstreamer1.0-plugins-good gstreamer1.0-plugins-ugly gstreamer1.0-pulseaudio gstreamer1.0-vaapi libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev libgstreamer-plugins-bad1.0-dev"
# Bindgen dependencies - it's used by a dependency of Spacedrive
DEBIAN_BINDGEN_DEPS="pkg-config clang"
# Protobuf compiler
DEBIAN_LIBP2P_DEPS="protobuf-compiler"
sudo apt-get -y update
sudo apt-get -y install ${SPACEDRIVE_CUSTOM_APT_FLAGS:-} $DEBIAN_TAURI_DEPS $DEBIAN_FFMPEG_DEPS $DEBIAN_BINDGEN_DEPS $DEBIAN_LIBP2P_DEPS $DEBIAN_VIDEO_DEPS
elif has pacman; then
echo "Detected pacman!"
echo "Installing dependencies with pacman..."
# Tauri deps https://tauri.studio/guides/getting-started/setup/linux#1-system-dependencies
ARCH_TAURI_DEPS="webkit2gtk base-devel curl wget openssl appmenu-gtk-module gtk3 libappindicator-gtk3 librsvg libvips patchelf"
# Webkit2gtk requires gstreamer plugins for video playback to work
ARCH_VIDEO_DEPS="gst-libav gst-plugins-bad gst-plugins-base gst-plugins-good gst-plugins-ugly gst-plugin-pipewire gstreamer-vaapi"
# FFmpeg dependencies
ARCH_FFMPEG_DEPS="libheif ffmpeg"
# Bindgen dependencies - it's used by a dependency of Spacedrive
ARCH_BINDGEN_DEPS="clang"
# Protobuf compiler - https://github.com/archlinux/svntogit-packages/blob/packages/protobuf/trunk/PKGBUILD provides `libprotoc`
ARCH_LIBP2P_DEPS="protobuf"
sudo pacman -Sy --needed $ARCH_TAURI_DEPS $ARCH_FFMPEG_DEPS $ARCH_BINDGEN_DEPS $ARCH_LIBP2P_DEPS $ARCH_VIDEO_DEPS
elif has dnf; then
echo "Detected dnf!"
echo "Installing dependencies with dnf..."
# `webkit2gtk4.0-devel` also provides `webkit2gtk3-devel`, it's just under a different package in fedora versions >= 37.
# https://koji.fedoraproject.org/koji/packageinfo?tagOrder=-blocked&packageID=26162#taglist
# https://packages.fedoraproject.org/pkgs/webkitgtk/webkit2gtk4.0-devel/fedora-38.html#provides
FEDORA_37_TAURI_WEBKIT="webkit2gtk4.0-devel"
FEDORA_36_TAURI_WEBKIT="webkit2gtk3-devel"
# Tauri dependencies
# openssl is manually declared here as I don't think openssl and openssl-devel are actually dependent on each other
# openssl also has a habit of being missing from some of my fresh Fedora installs - i've had to install it at least twice
FEDORA_TAURI_DEPS="openssl openssl-devel curl wget libappindicator-gtk3 librsvg2-devel patchelf"
# required for building the openssl-sys crate
FEDORA_OPENSSL_SYS_DEPS="perl-FindBin perl-File-Compare perl-IPC-Cmd perl-File-Copy"
# FFmpeg dependencies
FEDORA_FFMPEG_DEPS="libheif-devel ffmpeg ffmpeg-devel"
# Webkit2gtk requires gstreamer plugins for video playback to work
FEDORA_VIDEO_DEPS="gstreamer1-devel gstreamer1-plugins-base-devel gstreamer1-plugins-good gstreamer1-plugins-good-gtk gstreamer1-plugins-good-extras gstreamer1-plugins-ugly-free gstreamer1-plugins-bad-free gstreamer1-plugins-bad-free-devel gstreamer1-plugins-bad-free-extras"
# Bindgen dependencies - it's used by a dependency of Spacedrive
FEDORA_BINDGEN_DEPS="clang clang-devel"
# Protobuf compiler
FEDORA_LIBP2P_DEPS="protobuf-compiler"
if ! sudo dnf install $FEDORA_37_TAURI_WEBKIT && ! sudo dnf install $FEDORA_36_TAURI_WEBKIT; then
err 'We were unable to install the webkit2gtk4.0-devel/webkit2gtk3-devel package.' \
'Please open an issue if you feel that this is incorrect.' \
'https://github.com/spacedriveapp/spacedrive/issues'
fi
if ! sudo dnf install $FEDORA_FFMPEG_DEPS; then
err 'We were unable to install the FFmpeg and FFmpeg-devel packages.' \
'This is likely because the RPM Fusion free repository is not enabled.' \
'https://docs.fedoraproject.org/en-US/quick-docs/setup_rpmfusion'
fi
sudo dnf group install "C Development Tools and Libraries"
sudo dnf install $FEDORA_TAURI_DEPS $FEDORA_BINDGEN_DEPS $FEDORA_LIBP2P_DEPS $FEDORA_VIDEO_DEPS
else
err "Your Linux distro '$(lsb_release -s -d)' is not supported by this script." \
'We would welcome a PR or some help adding your OS to this script:' \
'https://github.com/spacedriveapp/spacedrive/issues'
fi
elif [ "$SYSNAME" = "Darwin" ]; then
# Location for installing script dependencies
_deps_dir="${_script_path}/deps"
mkdir -p "$_deps_dir"
PATH="${_deps_dir}:$PATH"
export PATH
_arch="$(uname -m)"
# Symlink original macOS utils to avoid problems on system where the user has installed GNU utils
ln -fs "/usr/bin/tar" "${_deps_dir}/tar"
if ! has jq; then
echo "Download jq build..."
# Determine the machine's architecture
case "$_arch" in
x86_64)
_jq_url='https://packages.macports.org/jq/jq-1.6_4.darwin_19.x86_64.tbz2'
_oniguruma6_url='https://packages.macports.org/oniguruma6/oniguruma6-6.9.8_0.darwin_19.x86_64.tbz2'
;;
arm64)
_jq_url='https://packages.macports.org/jq/jq-1.6_4.darwin_20.arm64.tbz2'
_oniguruma6_url='https://packages.macports.org/oniguruma6/oniguruma6-6.9.8_0.darwin_20.arm64.tbz2'
;;
*)
err "Unsupported architecture: $_arch"
;;
esac
# Download the latest jq binary and deps from macports
curl -LSs "$_jq_url" | tar -xjOf - ./opt/local/bin/jq >"${_deps_dir}/jq"
curl -LSs "$_oniguruma6_url" | tar -xjOf - ./opt/local/lib/libonig.5.dylib >"${_deps_dir}/libonig.5.dylib"
# Make the binaries executable
chmod +x "$_deps_dir"/*
# Make jq look for deps in the same directory
install_name_tool -change '/opt/local/lib/libonig.5.dylib' '@executable_path/libonig.5.dylib' "${_deps_dir}/jq"
fi
# Create frameworks directory to put Spacedrive dependencies
_frameworks_dir="${_script_path}/../../target/Frameworks"
rm -rf "$_frameworks_dir"
mkdir -p "${_frameworks_dir}/"{bin,lib,include}
_frameworks_dir="$(CDPATH='' cd -- "$_frameworks_dir" && pwd -P)"
exec 3>&1 # Copy stdout to fd 3.
echo "Download ffmpeg build..."
_page=1
while [ $_page -gt 0 ]; do
_success=$(gh_curl "${_gh_url}/${_sd_gh_path}/actions/workflows/ffmpeg-macos.yml/runs?page=${_page}&per_page=100&status=success" \
| jq -r '. as $raw | .workflow_runs | if length == 0 then error("Error: \($raw)") else .[] | select(.head_branch == "main") | .artifacts_url end' \
| while IFS= read -r _artifacts_url; do
if _artifact_path="$(
gh_curl "$_artifacts_url" \
| jq --arg version "$FFMPEG_VERSION" --arg arch "$(
if [ "${TARGET:-}" = 'aarch64-apple-darwin' ]; then
echo 'arm64'
else
echo "$_arch"
fi
)" -r \
'. as $raw | .artifacts | if length == 0 then error("Error: \($raw)") else .[] | select(.name == "ffmpeg-\($version)-\($arch)") | "suites/\(.workflow_run.id)/artifacts/\(.id)" end'
)"; then
if {
gh_curl "${_gh_url}/${_sd_gh_path}/actions/artifacts/$(echo "$_artifact_path" | awk -F/ '{print $4}')/zip" \
| tar -xOf- | tar -xJf- -C "$_frameworks_dir"
} 2>/dev/null; then
printf 'yes'
exit
else
# nightly.link is a workaround for the lack of a public GitHub API to download artifacts from a workflow run
# https://github.com/actions/upload-artifact/issues/51
# Use it when running in environments that are not authenticated with GitHub
if curl -LSs "https://nightly.link/${_sd_gh_path}/${_artifact_path}" | tar -xOf- | tar -xJf- -C "$_frameworks_dir"; then
printf 'yes'
exit
fi
fi
echo "Failed to ffmpeg artifiact release, trying again in 1sec..." >&3
sleep 1
fi
done)
if [ "${_success:-}" = 'yes' ]; then
break
fi
_page=$((_page + 1))
echo "ffmpeg artifact not found, trying again in 1sec..."
sleep 1
done
# Sign and Symlink the FFMpeg.framework libs to the lib directory
for _lib in "${_frameworks_dir}/FFMpeg.framework/Libraries/"*; do
if [ -f "$_lib" ]; then
# Sign the lib with the local machine certificate (Required for it to work on macOS 13+)
if ! codesign -s "${APPLE_SIGNING_IDENTITY:--}" -f "$_lib" 1>/dev/null 2>&1; then
err "Failed to sign: ${_lib#"$_frameworks_dir"}" \
'Please open an issue on https://github.com/spacedriveapp/spacedrive/issues'
fi
fi
_lib="${_lib#"${_frameworks_dir}/FFMpeg.framework/Libraries/"}"
ln -s "../FFMpeg.framework/Libraries/${_lib}" "${_frameworks_dir}/lib/${_lib}"
done
# Symlink the FFMpeg.framework headers to the include directory
for _header in "${_frameworks_dir}/FFMpeg.framework/Headers/"*; do
_header="${_header#"${_frameworks_dir}/FFMpeg.framework/Headers/"}"
ln -s "../FFMpeg.framework/Headers/${_header}" "${_frameworks_dir}/include/${_header}"
done
# Workaround while https://github.com/tauri-apps/tauri/pull/3934 is not merged
echo "Download patched tauri cli.js build..."
(
case "$_arch" in
x86_64)
_artifact_id="866514594"
;;
arm64)
_artifact_id="866514593"
;;
*)
err "Unsupported architecture: $_arch"
;;
esac
if ! {
gh_curl "${_gh_url}/${_sd_gh_path}/actions/artifacts/${_artifact_id}/zip" \
| tar -xf- -C "${_frameworks_dir}/bin"
} 2>/dev/null; then
# nightly.link is a workaround for the lack of a public GitHub API to download artifacts from a workflow run
# https://github.com/actions/upload-artifact/issues/51
# Use it when running in environments that are not authenticated with GitHub
curl -LSs "https://nightly.link/${_sd_gh_path}/actions/artifacts/${_artifact_id}.zip" \
| tar -xf- -C "${_frameworks_dir}/bin"
fi
)
echo "Download protobuf build"
_page=1
while [ $_page -gt 0 ]; do
_success=$(gh_curl "${_gh_url}/protocolbuffers/protobuf/releases?page=${_page}&per_page=100" \
| jq --arg arch "$(
if [ "$_arch" = 'arm64' ]; then
echo 'aarch_64'
else
echo 'x86_64'
fi
)" -r \
'. as $raw | if length == 0 then error("Error: \($raw)") else .[] | select(.prerelease | not) | .assets[] | select(.name | endswith("osx-\($arch).zip")) | .browser_download_url end' \
| while IFS= read -r _asset_url; do
if curl -LSs "${_asset_url}" | tar -xf - -C "$_frameworks_dir"; then
printf 'yes'
exit
fi
echo "Failed to download protobuf release, trying again in 1sec..." >&3
sleep 1
done)
if [ "${_success:-}" = 'yes' ]; then
break
fi
_page=$((_page + 1))
echo "protobuf release not found, trying again in 1sec..."
sleep 1
done
# Ensure all binaries are executable
chmod +x "$_frameworks_dir"/bin/*
cat <<EOF >"${_cargo_config}/config"
[env]
PROTOC = "${_frameworks_dir}/bin/protoc"
FFMPEG_DIR = "${_frameworks_dir}"
[target.aarch64-apple-darwin]
rustflags = ["-L", "${_frameworks_dir}/lib"]
[target.x86_64-apple-darwin]
rustflags = ["-L", "${_frameworks_dir}/lib"]
$(cat "${_cargo_config}/config.toml")
EOF
else
err "Your OS ($SYSNAME) is not supported by this script." \
'We would welcome a PR or some help adding your OS to this script.' \
'https://github.com/spacedriveapp/spacedrive/issues'
fi
echo "Your machine has been successfully set up for Spacedrive development."

View file

@ -7,8 +7,8 @@ on:
push:
paths:
- 'Cargo.lock'
- '.github/scripts/setup-system.sh'
- '.github/scripts/setup-system.ps1'
- './scripts/setup.sh'
- './scripts/setup.ps1'
- '.github/workflows/cache-factory.yaml'
- '.github/actions/**/*.yml'
- '.github/actions/**/*.yaml'

View file

@ -24,6 +24,7 @@ jobs:
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
ignorePostInstall: true
- name: Perform typechecks
run: pnpm typecheck
@ -39,6 +40,7 @@ jobs:
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
ignorePostInstall: true
- name: Perform linting
run: pnpm lint
@ -142,6 +144,17 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup shared libraries
if: steps.filter.outputs.changes == 'true'
env:
NODE_ENV: debug
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
pushd ..
npm i archive-wasm mustache
popd
node scripts/post-install.mjs
- name: Run Clippy
if: steps.filter.outputs.changes == 'true'
uses: actions-rs/clippy-check@v1

View file

@ -128,17 +128,17 @@ jobs:
with:
xcode-version: latest-stable
- name: Setup Node.js, pnpm and dependencies
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup System and Rust
uses: ./.github/actions/setup-system
with:
token: ${{ secrets.GITHUB_TOKEN }}
setup-arg: mobile
- name: Setup Node.js, pnpm and dependencies
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Generate iOS Project
working-directory: ./apps/mobile
run: pnpm expo prebuild --platform ios --no-install

View file

@ -80,11 +80,6 @@ jobs:
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
- name: Setup Node.js, pnpm and dependencies
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup System and Rust
uses: ./.github/actions/setup-system
env:
@ -93,6 +88,11 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
targets: ${{ matrix.settings.target }}
- name: Setup Node.js, pnpm and dependencies
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Build
run: |
pnpm desktop build --ci -v --target ${{ matrix.settings.target }} --bundles ${{ matrix.settings.bundles }}

2
.gitignore vendored
View file

@ -78,5 +78,7 @@ dev.db-journal
sd_init.json
spacedrive
scripts/.tmp
.cargo/config
.cargo/config.toml
.github/scripts/deps

1
.npmrc
View file

@ -7,3 +7,4 @@ strict-peer-dependencies=false
node-linker=hoisted
auto-install-peers=true
max-old-space-size=4096
enable-pre-post-scripts=true

View file

@ -39,9 +39,9 @@ To make changes locally, follow these steps:
1. Clone the repository: `git clone https://github.com/spacedriveapp/spacedrive`
2. Navigate to the project directory: `cd spacedrive`
3. For Linux or MacOS users, run: `./.github/scripts/setup-system.sh`
3. For Linux or MacOS users, run: `./scripts/setup.sh`
- This will install FFmpeg and any other required dependencies for Spacedrive to build.
4. For Windows users, run the following command in PowerShell: `.\.github\scripts\setup-system.ps1`
4. For Windows users, run the following command in PowerShell: `.\scripts\setup.ps1`
- This will install pnpm, LLVM, FFmpeg, and any other required dependencies for Spacedrive to build.
5. Install dependencies: `pnpm i`
6. Prepare the build: `pnpm prep` (This will run all necessary codegen and build required dependencies)
@ -70,7 +70,7 @@ To run the landing page:
If you encounter any issues, ensure that you are using the following versions of Rust, Node and Pnpm:
- Rust version: **1.70.0**
- Node version: **18**
- Node version: **18.17**
- Pnpm version: **8.0.0**
After cleaning out your build artifacts using `pnpm clean`, `git clean`, or `cargo clean`, it is necessary to re-run the setup script.
@ -82,7 +82,7 @@ Make sure to read the [guidelines](https://spacedrive.com/docs/developers/prereq
To run the mobile app:
- Install [Android Studio](https://developer.android.com/studio) for Android and [Xcode](https://apps.apple.com/au/app/xcode/id497799835) for iOS development.
- Run `./.github/scripts/setup-system.sh mobile`
- Run `./scripts/setup.sh mobile`
- This will set up most of the dependencies required to build the mobile app.
- Make sure you have [NDK 23.1.7779620 and CMake](https://developer.android.com/studio/projects/install-ndk#default-version) installed in Android Studio.
- Run the following commands:

View file

@ -1,9 +0,0 @@
const path = require('node:path');
const platform = /^(msys|cygwin)$/.test(process.env.OSTYPE ?? '') ? 'win32' : process.platform;
module.exports = {
platform,
workspace: path.resolve(__dirname, '../../../../'),
setupScript: `.github/scripts/${platform === 'win32' ? 'setup-system.ps1' : 'setup-system.sh'}`
};

View file

@ -1,72 +0,0 @@
const fs = require('node:fs');
const path = require('node:path');
const toml = require('@iarna/toml');
const { platform, workspace, setupScript } = require('./const.js');
const cargoConfig = path.resolve(workspace, '.cargo/config');
const cargoConfigTempl = path.resolve(workspace, '.cargo/config.toml');
/**
 * Copy every FFmpeg DLL from `${FFMPEG_DIR}/bin` to where the desktop app
 * expects them.
 * @param {string} FFMPEG_DIR - root of the FFmpeg distribution.
 * @param {boolean} [dev=false] - copy into target/debug (dev run) instead of
 *   apps/desktop/src-tauri (bundled build).
 * @returns {string[]} file names of the DLLs that were copied.
 */
module.exports.setupFFMpegDlls = function setupDlls(FFMPEG_DIR, dev = false) {
	const sourceDir = path.join(FFMPEG_DIR, 'bin');
	const dlls = fs.readdirSync(sourceDir).filter((name) => name.endsWith('.dll'));

	const targetDir = dev
		? path.join(workspace, 'target/debug')
		: path.join(workspace, 'apps/desktop/src-tauri');
	// target/debug may not exist before the first cargo build.
	if (dev) fs.mkdirSync(targetDir, { recursive: true });

	// Copy all DLLs from the $FFMPEG_DIR/bin to targetDir
	for (const dll of dlls)
		fs.copyFileSync(path.join(sourceDir, dll), path.join(targetDir, dll));

	return dlls;
};
/**
 * Resolve platform build env vars (PROTOC, FFMPEG_DIR) on macOS/Windows,
 * validate that the setup script actually produced them, and persist them
 * into the generated cargo config. Exits the process on validation failure.
 * @returns {{PROTOC?: string, FFMPEG_DIR?: string}} the resolved env
 *   (empty object on platforms other than darwin/win32).
 */
module.exports.setupPlatformEnv = function setupEnv() {
	const env = {};
	if (platform === 'darwin' || platform === 'win32') {
		env.PROTOC = path.join(
			workspace,
			'target/Frameworks/bin',
			platform === 'win32' ? 'protoc.exe' : 'protoc'
		);
		env.FFMPEG_DIR = path.join(workspace, 'target/Frameworks');
		// Check if env.PROTOC is not empty and that the value is a valid path pointing to an existing file
		if (!(env.PROTOC && fs.existsSync(env.PROTOC) && fs.statSync(env.PROTOC).isFile())) {
			console.error(`The path to protoc is invalid: ${env.PROTOC}`);
			console.error(`Did you ran the setup script: ${setupScript}?`);
			process.exit(1);
		}
		// Check if env.FFMPEG_DIR is not empty and that the value is a valid path pointing to an existing directory
		if (
			!(
				env.FFMPEG_DIR &&
				fs.existsSync(env.FFMPEG_DIR) &&
				fs.statSync(env.FFMPEG_DIR).isDirectory()
			)
		) {
			console.error(`The path to ffmpeg is invalid: ${env.FFMPEG_DIR}`);
			console.error(`Did you ran the setup script: ${setupScript}?`);
			process.exit(1);
		}
		// Update cargo config with the new env variables
		// NOTE(review): 'binary' (latin1) encoding presumably avoids mangling
		// the template on round-trip — confirm before changing to 'utf8'.
		const cargoConf = toml.parse(fs.readFileSync(cargoConfigTempl, { encoding: 'binary' }));
		cargoConf.env = {
			...(cargoConf.env ?? {}),
			...(env ?? {}),
			PROTOC: env.PROTOC,
			FFMPEG_DIR: env.FFMPEG_DIR
		};
		fs.writeFileSync(cargoConfig, toml.stringify(cargoConf));
	}
	return env;
};

View file

@ -1,10 +1,18 @@
const fs = require('node:fs');
const path = require('node:path');
const toml = require('@iarna/toml');
const semver = require('semver');
const { spawn } = require('./spawn.js');
const { platform, workspace, setupScript } = require('./const.js');
const { setupFFMpegDlls, setupPlatformEnv } = require('./env.js');
const workspace = path.resolve(__dirname, '../../../../')
const cargoConfig = toml.parse(
fs.readFileSync(path.resolve(workspace, '.cargo/config.toml'), { encoding: 'binary' })
);
if (cargoConfig.env && typeof cargoConfig.env === 'object')
for (const [name, value] of Object.entries(cargoConfig.env))
if (!process.env[name]) process.env[name] = value;
const toRemove = [];
const [_, __, ...args] = process.argv;
@ -17,11 +25,14 @@ const tauriConf = JSON.parse(
switch (args[0]) {
case 'dev': {
const env = setupPlatformEnv();
if (platform === 'win32') setupFFMpegDlls(env.FFMPEG_DIR, true);
if (process.platform === 'win32') setupFFMpegDlls(true);
break;
}
case 'build': {
if (!process.env.NODE_OPTIONS || !process.env.NODE_OPTIONS.includes('--max_old_space_size')) {
process.env.NODE_OPTIONS = `--max_old_space_size=4096 ${process.env.NODE_OPTIONS ?? ''}`;
}
if (args.findIndex((e) => e === '-c' || e === '--config') !== -1) {
throw new Error('Custom tauri build config is not supported.');
}
@ -34,13 +45,11 @@ switch (args[0]) {
})
.flatMap((target) => target.split(','));
const env = setupPlatformEnv();
const tauriPatch = {
tauri: { bundle: { macOS: {} } }
};
switch (platform) {
switch (process.platform) {
case 'darwin': {
// Workaround while https://github.com/tauri-apps/tauri/pull/3934 is not merged
const cliNode =
@ -51,7 +60,7 @@ switch (args[0]) {
`Tauri cli patch not found at ${path.relative(
workspace,
tauriCliPatch
)}. Did you run the setup script: ${setupScript}?`
)}. Did you run \`pnpm i\`?`
);
}
const tauriBin = path.join(
@ -113,7 +122,7 @@ switch (args[0]) {
}
case 'win32':
// Point tauri to the ffmpeg DLLs
tauriPatch.tauri.bundle.resources = setupFFMpegDlls(env.FFMPEG_DIR);
tauriPatch.tauri.bundle.resources = setupFFMpegDlls();
toRemove.push(
...tauriPatch.tauri.bundle.resources.map((file) =>
path.join(workspace, 'apps/desktop/src-tauri', file)
@ -136,7 +145,7 @@ spawn('pnpm', ['tauri', ...args])
code = exitCode;
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`);
console.error(
`If you got an error related to FFMpeg or Protoc/Protobuf you may need to run ${setupScript}`
`If you got an error related to FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm i\``
);
})
.finally(() => {
@ -147,3 +156,22 @@ spawn('pnpm', ['tauri', ...args])
process.exit(code);
});
/**
 * Copy every FFmpeg DLL from `${FFMPEG_DIR}/bin` next to the desktop binary.
 * @param {boolean} [dev=false] - copy into target/debug (dev run) instead of
 *   apps/desktop/src-tauri (bundled build).
 * @returns {string[]} file names of the DLLs that were copied.
 * @throws {Error} when the FFMPEG_DIR env var is not set.
 */
function setupFFMpegDlls(dev = false) {
	const ffmpegDir = process.env.FFMPEG_DIR;
	if (!ffmpegDir) throw new Error('Missing envvar FFMPEG_DIR');

	const sourceDir = path.join(ffmpegDir, 'bin');
	const dlls = fs.readdirSync(sourceDir).filter((name) => name.endsWith('.dll'));

	const targetDir = dev
		? path.join(workspace, 'target/debug')
		: path.join(workspace, 'apps/desktop/src-tauri');
	// target/debug may not exist before the first cargo build.
	if (dev) fs.mkdirSync(targetDir, { recursive: true });

	// Copy all DLLs from the $FFMPEG_DIR/bin to targetDir
	for (const dll of dlls)
		fs.copyFileSync(path.join(sourceDir, dll), path.join(targetDir, dll));

	return dlls;
}

View file

@ -65,7 +65,7 @@ RUN wget -qO- https://sh.rustup.rs | sh -s -- -yq --profile minimal
ENV PATH="/root/.cargo/bin:$PATH"
RUN --mount=type=cache,target=/var/cache/apt --mount=type=cache,target=/var/lib/apt \
env CI=true .github/scripts/setup-system.sh
env CI=true ./scripts/setup.sh
RUN --mount=type=cache,target=/root/.cache/prisma/binaries/cli/ \
pnpm prep

View file

@ -4,7 +4,6 @@ use crate::{
};
use std::{
ffi::OsStr,
fs::Metadata,
path::{Path, PathBuf, MAIN_SEPARATOR_STR},
time::SystemTime,
@ -132,6 +131,7 @@ pub struct FilePathMetadata {
pub fn path_is_hidden(path: &Path, metadata: &Metadata) -> bool {
#[cfg(target_family = "unix")]
{
use std::ffi::OsStr;
let _ = metadata; // just to avoid warnings on Linux
if path
.file_name()

View file

@ -19,7 +19,7 @@ This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/instal
**For Linux or MacOS users run:**
```shell
./.github/scripts/setup-system.sh
./scripts/setup.sh
```
This will install FFmpeg and any other required dependencies for Spacedrive to build.
@ -27,7 +27,7 @@ This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/instal
**...or for Windows users run using PowerShell:**
```shell
.\.github\scripts\setup-system.ps1
.\scripts\setup.ps1
```
_This will install pnpm, LLVM, FFmpeg and any other required dependencies for Spacedrive to build. Ensure you run it like documented above as it expects it is executed from the root of the repository._
@ -58,7 +58,7 @@ This project uses [Cargo](https://doc.rust-lang.org/cargo/getting-started/instal
To run mobile app
1. Install [Android Studio](https://developer.android.com/studio) for Android and [Xcode](https://apps.apple.com/au/app/xcode/id497799835) for IOS development
2. `./.github/scripts/setup-system.sh mobile`
2. `./scripts/setup.sh mobile`
_This should set up most of the dependencies for the mobile app to build._
3. You must also ensure you have [NDK 23.1.7779620 and CMake](https://developer.android.com/studio/projects/install-ndk#default-version) in Android Studio
4. `pnpm mobile android` - runs on Android Emulator

View file

@ -1,7 +1,9 @@
{
"private": true,
"scripts": {
"prep": "pnpm gen:prisma && pnpm codegen",
"postinstall": "pnpm exec node scripts/post-install.mjs",
"prep": "pnpm gen:prisma",
"postprep": "pnpm codegen",
"build": "turbo run build",
"landing-web": "turbo run dev --parallel --filter=@sd/landing --filter=@sd/web",
"gen:migrations": "cd core && cargo prisma migrate dev",
@ -19,7 +21,7 @@
"storybook": "pnpm --filter @sd/storybook -- ",
"prisma": "cd core && cargo prisma",
"dev:web": "turbo run dev --filter @sd/web --filter @sd/server",
"bootstrap:desktop": "cargo clean && ./.github/scripts/setup-system.sh && pnpm i && pnpm prep && pnpm desktop dev",
"bootstrap:desktop": "cargo clean && ./scripts/setup.sh && pnpm i && pnpm prep && pnpm desktop dev",
"codegen": "cargo test -p sd-core api::tests::test_and_export_rspc_bindings -- --exact",
"typecheck": "pnpm -r typecheck",
"lint": "turbo run lint",
@ -35,9 +37,11 @@
"@babel/plugin-syntax-import-assertions": "^7.22.5",
"@cspell/dict-rust": "^2.0.1",
"@cspell/dict-typescript": "^2.0.2",
"@storybook/react-vite": "^7.0.20",
"@ianvs/prettier-plugin-sort-imports": "^4.1.0",
"@storybook/react-vite": "^7.0.20",
"archive-wasm": "^1.5.1",
"cspell": "^6.31.1",
"mustache": "^4.2.0",
"prettier": "^3.0.3",
"prettier-plugin-tailwindcss": "^0.5.3",
"rimraf": "^4.4.1",
@ -54,9 +58,26 @@
"pnpm": ">=8.0.0",
"npm": "pnpm",
"yarn": "pnpm",
"node": ">=18.0.0"
"node": ">=18.17"
},
"eslintConfig": {
"root": true
"root": true,
"overrides": [
{
"files": [
"*.mjs"
],
"env": {
"node": true,
"es2022": true,
"browser": false,
"commonjs": false,
"shared-node-browser": false
},
"parserOptions": {
"sourceType": "module"
}
}
]
}
}

View file

@ -26,9 +26,15 @@ importers:
'@storybook/react-vite':
specifier: ^7.0.20
version: 7.0.20(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4)(vite@4.3.9)
archive-wasm:
specifier: ^1.5.1
version: 1.5.1
cspell:
specifier: ^6.31.1
version: 6.31.1
mustache:
specifier: ^4.2.0
version: 4.2.0
prettier:
specifier: ^3.0.3
version: 3.0.3
@ -5809,7 +5815,7 @@ packages:
magic-string: 0.27.0
react-docgen-typescript: 2.2.2(typescript@5.0.4)
typescript: 5.0.4
vite: 4.3.9(less@4.2.0)
vite: 4.3.9(@types/node@18.15.1)
/@jridgewell/gen-mapping@0.3.3:
resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==}
@ -8859,7 +8865,7 @@ packages:
remark-slug: 6.1.0
rollup: 3.28.1
typescript: 5.0.4
vite: 4.3.9(less@4.2.0)
vite: 4.3.9(@types/node@18.15.1)
transitivePeerDependencies:
- supports-color
@ -9459,7 +9465,7 @@ packages:
react: 18.2.0
react-docgen: 6.0.0-alpha.3
react-dom: 18.2.0(react@18.2.0)
vite: 4.3.9(less@4.2.0)
vite: 4.3.9(@types/node@18.15.1)
transitivePeerDependencies:
- '@preact/preset-vite'
- supports-color
@ -11170,6 +11176,11 @@ packages:
/aproba@2.0.0:
resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==}
/archive-wasm@1.5.1:
resolution: {integrity: sha512-jETuTnp7lcJ4OQhqvyE5PHw8izUWDArj/TPzPL2hu4ylGwc9coIOT214uRTETF3uoQwPPJcV8GTa14yfTqrEhg==}
engines: {node: '>=18'}
dev: true
/are-we-there-yet@2.0.0:
resolution: {integrity: sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==}
engines: {node: '>=10'}
@ -18502,6 +18513,11 @@ packages:
/ms@2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
/mustache@4.2.0:
resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==}
hasBin: true
dev: true
/mv@2.1.1:
resolution: {integrity: sha512-at/ZndSy3xEGJ8i0ygALh8ru9qy7gWW1cmkaqBN29JmMlIvM//MEO9y1sk/avxuwnPcfhkejkLsuPxH81BrkSg==}
engines: {node: '>=0.8.0'}
@ -23525,7 +23541,6 @@ packages:
rollup: 3.28.1
optionalDependencies:
fsevents: 2.3.3
dev: true
/vite@4.3.9(less@4.2.0):
resolution: {integrity: sha512-qsTNZjO9NoJNW7KnOrgYwczm0WctJ8m/yqYAMAK9Lxt4SoySUfS5S8ia9K7JHpa3KEeMfyF8LoJ3c5NeBJy6pg==}

203
scripts/deps.mjs Normal file
View file

@ -0,0 +1,203 @@
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { env } from 'node:process';
import { extractTo } from 'archive-wasm/src/fs.mjs';
import {
getGh,
getGhArtifactContent,
getGhReleasesAssets,
getGhWorkflowRunArtifacts
} from './github.mjs';
import {
FFMPEG_SUFFFIX,
FFMPEG_WORKFLOW,
getConst,
getSuffix,
PDFIUM_SUFFIX,
PROTOC_SUFFIX,
TAURI_CLI_SUFFIX
} from './suffix.mjs';
import { which } from './which.mjs';
// No-op callback used to swallow errors from best-effort cleanup operations
const noop = () => {};
// Verbose logging is enabled by running with NODE_ENV=debug
const __debug = env.NODE_ENV === 'debug';
// OS name as reported by Node ('Linux', 'Darwin', 'Windows_NT', ...)
const __osType = os.type();
// Github repos
const PDFIUM_REPO = 'bblanchon/pdfium-binaries';
const PROTOBUF_REPO = 'protocolbuffers/protobuf';
const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive';
/**
 * Download and extract the protobuf compiler binary into `framework`.
 * Skipped entirely when a `protoc` executable is already reachable via $PATH.
 * @param {string[]} machineId - [os.type(), os.machine(), (libc)] identifier tuple
 * @param {string} framework - Target directory for the extracted files
 */
export async function downloadProtc(machineId, framework) {
	if (await which('protoc')) return;

	console.log('Downloading protoc...');

	const assetPattern = getSuffix(PROTOC_SUFFIX, machineId);
	if (assetPattern == null) throw new Error('NO_PROTOC');

	let extracted = false;
	for await (const asset of getGhReleasesAssets(PROTOBUF_REPO)) {
		if (!assetPattern.test(asset.name)) continue;
		try {
			await extractTo(await getGh(asset.downloadUrl), framework, {
				chmod: 0o600,
				overwrite: true
			});
			extracted = true;
			break;
		} catch (error) {
			console.warn('Failed to download protoc, re-trying...');
			if (__debug) console.error(error);
		}
	}
	if (!extracted) throw new Error('NO_PROTOC');

	// Best-effort removal of the readme bundled inside the protoc archive
	await fs.unlink(path.join(framework, 'readme.txt')).catch(__debug ? console.error : noop);
}
/**
 * Download and extract the pdfium library, used to generate PDF thumbnails.
 * @param {string[]} machineId - [os.type(), os.machine(), (libc)] identifier tuple
 * @param {string} framework - Target directory for the extracted files
 */
export async function downloadPDFium(machineId, framework) {
	console.log('Downloading pdfium...');
	const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId);
	if (pdfiumSuffix == null) throw new Error('NO_PDFIUM');
	let found = false;
	// Scan stable releases for an asset matching this machine's suffix
	for await (const release of getGhReleasesAssets(PDFIUM_REPO)) {
		if (!pdfiumSuffix.test(release.name)) continue;
		try {
			await extractTo(await getGh(release.downloadUrl), framework, {
				chmod: 0o600,
				overwrite: true
			});
			found = true;
			break;
		} catch (error) {
			console.warn('Failed to download pdfium, re-trying...');
			if (__debug) console.error(error);
		}
	}
	if (!found) throw new Error('NO_PDFIUM');
	// cleanup: keep the license (renamed so it can't clash with other deps'
	// licenses), drop build metadata files that came along in the archive
	const cleanup = [
		fs.rename(path.join(framework, 'LICENSE'), path.join(framework, 'LICENSE.pdfium')),
		...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map((file) =>
			fs.unlink(path.join(framework, file)).catch(__debug ? console.error : noop)
		)
	];
	// The shared library must be executable-readable for the linker/loader
	switch (__osType) {
		case 'Linux':
			cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.so'), 0o750));
			break;
		case 'Darwin':
			cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.dylib'), 0o750));
			break;
	}
	await Promise.all(cleanup);
}
/**
 * Download and extract ffmpeg libs for video thumbnails.
 * On platforms with no custom ffmpeg workflow, falls back to requiring a
 * system-installed `ffmpeg` on $PATH.
 * @param {string[]} machineId - [os.type(), os.machine(), (libc)] identifier tuple
 * @param {string} framework - Target directory for the extracted files
 * @param {string[]} branches - Git branches whose workflow artifacts may be used
 */
export async function downloadFFMpeg(machineId, framework, branches) {
	const workflow = getConst(FFMPEG_WORKFLOW, machineId);
	if (workflow == null) {
		// No prebuilt ffmpeg workflow for this platform: require system ffmpeg
		console.log('Checking FFMPeg...');
		if (await which('ffmpeg')) {
			// TODO: check ffmpeg version match what we need
			return;
		} else {
			throw new Error('NO_FFMPEG');
		}
	}
	console.log('Downloading FFMPeg...');
	const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId);
	if (ffmpegSuffix == null) throw new Error('NO_FFMPEG');
	let found = false;
	// Search recent successful workflow runs on the accepted branches
	for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
		if (!ffmpegSuffix.test(artifact.name)) continue;
		try {
			const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id);
			await extractTo(
				data,
				framework,
				{
					chmod: 0o600,
					recursive: true,
					overwrite: true
				}
			);
			found = true;
			break;
		} catch (error) {
			console.warn('Failed to download FFMpeg, re-trying...');
			if (__debug) console.error(error);
		}
	}
	if (!found) throw new Error('NO_FFMPEG');
}
/**
 * Workaround while https://github.com/tauri-apps/tauri/pull/3934 is not available in a Tauri stable release.
 * Downloads a patched tauri CLI build from this repo's own workflow artifacts into `framework`/bin.
 * @param {string[]} machineId - [os.type(), os.machine(), (libc)] identifier tuple
 * @param {string} framework - Target directory; the CLI lands in its `bin` subdirectory
 * @param {string[]} branches - Git branches whose workflow artifacts may be used
 */
export async function downloadPatchedTauriCLI(machineId, framework, branches) {
	// Fix: user-facing message previously read "Dowloading"
	console.log('Downloading patched tauri CLI...');
	const tauriCliSuffix = getSuffix(TAURI_CLI_SUFFIX, machineId);
	if (tauriCliSuffix == null) throw new Error('NO_TAURI_CLI');
	let found = false;
	for await (const artifact of getGhWorkflowRunArtifacts(
		SPACEDRIVE_REPO,
		'tauri-patched-cli-js.yml',
		branches
	)) {
		if (!tauriCliSuffix.test(artifact.name)) continue;
		try {
			await extractTo(
				await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id),
				path.join(framework, 'bin'),
				{
					// CLI must be executable, unlike the data archives above
					chmod: 0o700,
					overwrite: true
				}
			);
			found = true;
			break;
		} catch (error) {
			console.warn('Failed to download patched tauri cli.js, re-trying...');
			if (__debug) console.error(error);
		}
	}
	if (!found) throw new Error('NO_TAURI_CLI');
}

24
scripts/git.mjs Normal file
View file

@ -0,0 +1,24 @@
import * as fs from 'node:fs/promises';
import * as path from 'node:path';

// Matches the symbolic-ref line of a `.git/HEAD` file, e.g. "ref: refs/heads/main".
// Fix: the branch capture must be non-whitespace (\S+); the previous \s+ could
// never match a branch name, so the current branch was never detected.
const REF_REGEX = /^ref:\s+refs\/heads\/(?<branch>\S+)/;

/**
 * List the git branches that should be searched for CI artifacts.
 * Always includes 'main' and 'master'; when `repoPath` is a git checkout with a
 * symbolic HEAD, the currently checked-out branch is prepended.
 * @param {string} repoPath - Repository root (its `.git/HEAD` is read directly)
 * @returns {Promise<string[]>}
 */
export async function getGitBranches(repoPath) {
	const branches = ['main', 'master'];

	let head;
	try {
		head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' });
	} catch {
		// Not a git checkout (or HEAD unreadable): fall back to the defaults
		return branches;
	}

	const match = REF_REGEX.exec(head);
	if (match?.groups?.branch) branches.unshift(match.groups.branch);

	return branches;
}

385
scripts/github.mjs Normal file
View file

@ -0,0 +1,385 @@
import * as fs from 'node:fs/promises';
import { dirname, join as joinPath, posix as path } from 'node:path';
import { env } from 'node:process';
import { setTimeout } from 'node:timers/promises';
import { fileURLToPath } from 'node:url';
import { extract } from 'archive-wasm';
// Verbose logging is enabled by running with NODE_ENV=debug
const __debug = env.NODE_ENV === 'debug';
// OFFLINE=true forbids network access; only the on-disk cache is used
const __offline = env.OFFLINE === 'true';
const __filename = fileURLToPath(import.meta.url);
const __dirname = dirname(__filename);
// On-disk HTTP response cache lives next to this script
const cacheDir = joinPath(__dirname, '.tmp');
await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 });
// Note: Trailing slashes are important to correctly append paths
const GH = 'https://api.github.com/repos/';
const NIGTHLY = 'https://nightly.link/';
// Github routes
const RELEASES = 'releases';
const WORKFLOWS = 'actions/workflows';
const ARTIFACTS = 'actions/artifacts';
// Default GH headers
const GH_HEADERS = new Headers({
	'Accept': 'application/vnd.github+json',
	'X-GitHub-Api-Version': '2022-11-28'
});
// Load github auth token if available (raises API rate limits and enables
// direct artifact downloads)
if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN)
	GH_HEADERS.append('Authorization', `Bearer ${env.GITHUB_TOKEN}`);
/**
 * Look up a previously cached response for `resource` on disk.
 * The cache key incorporates all request headers except the conditional ones.
 * @param {string} resource
 * @param {Headers} [headers]
 * @returns {Promise<null | {data: Buffer, header: [string, string] | undefined}>}
 *          Cached body plus an optional conditional header to revalidate with,
 *          or null when nothing usable is cached (always null in CI).
 */
async function getCache(resource, headers) {
	// Don't cache in CI
	if (env.CI === 'true') return null;

	// Fold non-conditional headers into the cache key
	if (headers) {
		const keyParts = [];
		for (const [name, value] of headers.entries()) {
			if (name === 'If-None-Match' || name === 'If-Modified-Since') continue;
			keyParts.push(name, value);
		}
		resource += keyParts.join(':');
	}

	/** @type {[string, string] | undefined} */
	let conditionalHeader;
	/** @type {Buffer | undefined} */
	let cachedData;
	try {
		const cacheFile = joinPath(cacheDir, Buffer.from(resource).toString('base64url'));
		const entry = JSON.parse(await fs.readFile(cacheFile, { encoding: 'utf8' }));
		if (entry && typeof entry === 'object') {
			// Prefer ETag revalidation, fall back to Last-Modified
			if (entry.etag && typeof entry.etag === 'string') {
				conditionalHeader = ['If-None-Match', entry.etag];
			} else if (entry.modifiedSince && typeof entry.modifiedSince === 'string') {
				conditionalHeader = ['If-Modified-Since', entry.modifiedSince];
			}
			if (entry.data && typeof entry.data === 'string')
				cachedData = Buffer.from(entry.data, 'base64');
		}
	} catch (error) {
		if (__debug) {
			console.warn(`CACHE MISS: ${resource}`);
			console.error(error);
		}
	}

	return cachedData ? { data: cachedData, header: conditionalHeader } : null;
}
/**
 * Persist a fetched response to the on-disk cache and return its body.
 * On a 304 (or an empty-bodied OK) the previously cached body is returned instead.
 * @param {Response} response
 * @param {string} resource - Cache key base (same value passed to getCache)
 * @param {Buffer} [cachedData] - Body from a previous getCache hit, required for 304s
 * @param {Headers} [headers] - Must match the headers passed to getCache for the key to line up
 * @returns {Promise<Buffer>}
 */
async function setCache(response, resource, cachedData, headers) {
	const data = Buffer.from(await response.arrayBuffer());
	// Don't cache in CI
	if (env.CI === 'true') return data
	// Validators used for conditional revalidation on the next request
	const etag = response.headers.get('ETag') || undefined;
	const modifiedSince = response.headers.get('Last-Modified') || undefined;
	// Fold non-conditional headers into the cache key (mirrors getCache)
	if (headers)
		resource += Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':');
	if (response.status === 304 || (response.ok && data.length === 0)) {
		// Cache hit
		if (!cachedData) throw new Error('Empty cache hit ????');
		return cachedData;
	}
	// Cache writes are best-effort; failures only surface in debug mode
	try {
		await fs.writeFile(
			joinPath(cacheDir, Buffer.from(resource).toString('base64url')),
			JSON.stringify({
				etag,
				modifiedSince,
				data: data.toString('base64')
			}),
			{ mode: 0o640, flag: 'w+' }
		);
	} catch (error) {
		if (__debug) {
			console.warn(`CACHE WRITE FAIL: ${resource}`);
			console.error(error);
		}
	}
	return data;
}
/**
 * Fetch a resource with transparent on-disk caching and conditional revalidation.
 * @param {URL | string} resource
 * @param {Headers?} [headers]
 * @param {boolean} [preferCache] - When true, return a cache hit without revalidating
 * @returns {Promise<Buffer>}
 */
export async function get(resource, headers, preferCache) {
	if (resource instanceof URL) resource = resource.toString();
	if (headers == null) headers = new Headers();

	const cached = await getCache(resource, headers);

	// Offline mode: the cache is the only allowed source
	if (__offline) {
		if (cached?.data == null)
			throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`);
		return cached.data;
	}

	if (preferCache && cached?.data != null) return cached.data;

	// Attach the conditional header so the server can answer 304
	if (cached?.header) headers.append(...cached.header);

	const response = await fetch(resource, { headers });
	if (response.ok) return await setCache(response, resource, cached?.data, headers);

	// Network failure: serve stale cache when available, otherwise propagate
	if (cached?.data) {
		if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`);
		return cached.data;
	}
	throw new Error(response.statusText);
}
// Header name	Description
// x-ratelimit-limit	The maximum number of requests you're permitted to make per hour.
// x-ratelimit-remaining	The number of requests remaining in the current rate limit window.
// x-ratelimit-used	The number of requests you've made in the current rate limit window.
// x-ratelimit-reset	The time at which the current rate limit window resets in UTC epoch seconds.
// Module-level rate-limit state shared by every getGh call
const RATE_LIMIT = {
	reset: 0, // seconds until the current rate-limit window resets
	remaining: Infinity // requests left in the current window
};
/**
 * Get resource from a Github route with some pre-defined parameters.
 * Honours the on-disk cache, Github's rate limits, and offline mode.
 * @param {string} route - Route relative to the Github repos API base
 * @returns {Promise<Buffer>}
 */
export async function getGh(route) {
	route = new URL(route, GH).toString();
	const cache = await getCache(route);
	if (__offline) {
		if (cache?.data == null)
			throw new Error(`OFFLINE MODE: Cache for request ${route} doesn't exist`);
		return cache?.data;
	}
	// Out of quota: serve from cache if possible, otherwise wait for the window to reset
	if (RATE_LIMIT.remaining === 0) {
		if (cache?.data) return cache.data;
		console.warn(
			`RATE LIMIT: Waiting ${RATE_LIMIT.reset} seconds before contacting Github again... [CTRL+C to cancel]`
		);
		await setTimeout(RATE_LIMIT.reset * 1000);
	}
	const headers = new Headers(GH_HEADERS);
	if (cache?.header) headers.append(...cache.header);
	const response = await fetch(route, { method: 'GET', headers });
	// Track Github's rate-limit headers to throttle subsequent requests
	const rateReset = Number.parseInt(response.headers.get('x-ratelimit-reset') ?? '');
	const rateRemaining = Number.parseInt(response.headers.get('x-ratelimit-remaining') ?? '');
	if (!(Number.isNaN(rateReset) || Number.isNaN(rateRemaining))) {
		// Convert the UTC-epoch reset timestamp to a relative wait in seconds
		const reset = rateReset - Date.now() / 1000;
		if (reset > RATE_LIMIT.reset) RATE_LIMIT.reset = reset;
		if (rateRemaining < RATE_LIMIT.remaining) {
			RATE_LIMIT.remaining = rateRemaining;
			if (__debug) {
				console.warn(`Github remaining requests: ${RATE_LIMIT.remaining}`);
				await setTimeout(5000);
			}
		}
	}
	if (!response.ok) {
		if (cache?.data) {
			if (__debug) console.warn(`CACHE HIT due to fail: ${route} ${response.statusText}`);
			return cache.data;
		}
		// 403 with exhausted quota: retry (the recursive call waits for the reset above)
		if (response.status === 403 && RATE_LIMIT.remaining === 0) return await getGh(route);
		throw new Error(response.statusText);
	}
	return await setCache(response, route, cache?.data);
}
/**
 * Iterate the assets of every stable (non-prerelease) release of a Github repo.
 * @param {string} repo - "owner/name" repository path
 * @yields {{name: string, downloadUrl: string}}
 */
export async function* getGhReleasesAssets(repo) {
	// Fix: Github REST pagination is 1-indexed; page=0 is served as page 1,
	// so starting at 0 yielded the first page's assets twice.
	let page = 1;
	while (true) {
		// "${_gh_url}/protocolbuffers/protobuf/releases?page=${_page}&per_page=100"
		const releases = JSON.parse(
			(await getGh(path.join(repo, `${RELEASES}?page=${page++}&per_page=100`))).toString(
				'utf8'
			)
		);
		if (!Array.isArray(releases)) throw new Error(`Error: ${JSON.stringify(releases)}`);
		// An empty page means we've walked past the last release
		if (releases.length === 0) return;
		for (const release of /** @type {unknown[]} */ (releases)) {
			if (
				!(
					release &&
					typeof release === 'object' &&
					'assets' in release &&
					Array.isArray(release.assets)
				)
			)
				throw new Error(`Invalid release: ${release}`);
			if ('prerelease' in release && release.prerelease) continue;
			for (const asset of /** @type {unknown[]} */ (release.assets)) {
				if (
					!(
						asset &&
						typeof asset === 'object' &&
						'name' in asset &&
						typeof asset.name === 'string' &&
						'browser_download_url' in asset &&
						typeof asset.browser_download_url === 'string'
					)
				)
					throw new Error(`Invalid release.asset: ${asset}`);
				yield { name: asset.name, downloadUrl: asset.browser_download_url };
			}
		}
	}
}
/**
 * Iterate the artifacts of successful runs of a Github Actions workflow,
 * restricted to runs on the given branch(es).
 * @param {string} repo - "owner/name" repository path
 * @param {string} yaml - Workflow file name (e.g. "ci.yml")
 * @param {string | Array.<string> | Set.<string>} [branch] - Accepted head branches (default 'main')
 * @yields {{ id: number, name: string }}
 */
export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
	if (!branch) branch = 'main';
	if (typeof branch === 'string') branch = [branch];
	if (!(branch instanceof Set)) branch = new Set(branch);
	// Fix: Github REST pagination is 1-indexed; page=0 is served as page 1,
	// so starting at 0 processed the first page of runs twice.
	let page = 1;
	while (true) {
		const workflow = /** @type {unknown} */ (
			JSON.parse(
				(
					await getGh(
						path.join(
							repo,
							WORKFLOWS,
							yaml,
							`runs?page=${page++}&per_page=100&status=success`
						)
					)
				).toString('utf8')
			)
		);
		if (
			!(
				workflow &&
				typeof workflow === 'object' &&
				'workflow_runs' in workflow &&
				Array.isArray(workflow.workflow_runs)
			)
		)
			throw new Error(`Error: ${JSON.stringify(workflow)}`);
		// An empty page means we've walked past the last run
		if (workflow.workflow_runs.length === 0) return;
		for (const run of /** @type {unknown[]} */ (workflow.workflow_runs)) {
			if (
				!(
					run &&
					typeof run === 'object' &&
					'head_branch' in run &&
					typeof run.head_branch === 'string' &&
					'artifacts_url' in run &&
					typeof run.artifacts_url === 'string'
				)
			)
				throw new Error(`Invalid Workflow run: ${run}`);
			if (!branch.has(run.head_branch)) continue;
			const response = /** @type {unknown} */ (
				JSON.parse((await getGh(run.artifacts_url)).toString('utf8'))
			);
			if (
				!(
					response &&
					typeof response === 'object' &&
					'artifacts' in response &&
					Array.isArray(response.artifacts)
				)
			)
				throw new Error(`Error: ${JSON.stringify(response)}`);
			for (const artifact of /** @type {unknown[]} */ (response.artifacts)) {
				if (
					!(
						artifact &&
						typeof artifact === 'object' &&
						'id' in artifact &&
						typeof artifact.id === 'number' &&
						'name' in artifact &&
						typeof artifact.name === 'string'
					)
				)
					throw new Error(`Invalid artifact: ${artifact}`);
				yield { id: artifact.id, name: artifact.name };
			}
		}
	}
}
/**
 * Fetch the zipped content of a Github Actions artifact.
 * @param {string} repo - "owner/name" repository path
 * @param {number} id - Artifact id
 * @returns {Promise<Buffer>}
 */
export async function getGhArtifactContent(repo, id) {
	// Artifacts can only be downloaded directly from Github with authorized requests
	if (GH_HEADERS.has('Authorization')) {
		try {
			// "${_gh_url}/${_sd_gh_path}/actions/artifacts/${_artifact_id}/zip"
			return await getGh(path.join(repo, ARTIFACTS, String(id), 'zip'));
		} catch (error) {
			if (__debug) {
				console.warn('Failed to download artifact from github, fallback to nightly.link');
				console.error(error);
			}
		}
	}

	// nightly.link works around the lack of a public GitHub API to download
	// artifacts from a workflow run (https://github.com/actions/upload-artifact/issues/51).
	// Used when running in environments that are not authenticated with github.
	// "https://nightly.link/${_sd_gh_path}/actions/artifacts/${_artifact_id}.zip"
	const fallbackUrl = new URL(path.join(repo, ARTIFACTS, `${id}.zip`), NIGTHLY);
	return await get(fallbackUrl, null, true);
}

13
scripts/musl.mjs Normal file
View file

@ -0,0 +1,13 @@
import { exec as execCb } from 'node:child_process';
import { promisify } from 'node:util';

const exec = promisify(execCb);

/**
 * Detect whether the host's C library is musl (e.g. Alpine Linux) by
 * inspecting the dynamic loader referenced by /bin/ls.
 * @returns {Promise<boolean>} true when musl is detected; false otherwise,
 * including when `ldd` is unavailable or fails.
 */
export async function isMusl() {
	let lddOutput = '';
	try {
		({ stdout: lddOutput } = await exec('ldd /bin/ls'));
	} catch {
		// ldd missing or errored: assume a non-musl libc
	}
	return lddOutput.includes('musl');
}

232
scripts/post-install.mjs Normal file
View file

@ -0,0 +1,232 @@
import { exec as _exec } from 'node:child_process';
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { env, umask } from 'node:process';
import { fileURLToPath } from 'node:url';
import { promisify } from 'node:util';
import mustache from 'mustache';
import { downloadFFMpeg, downloadPatchedTauriCLI, downloadPDFium, downloadProtc } from './deps.mjs';
import { getGitBranches } from './git.mjs';
import { isMusl } from './musl.mjs';
import { which } from './which.mjs';
// New files are group-readable only, others get nothing
umask(0o026);
// Escape hatch for environments that manage dependencies themselves
if (env.IGNORE_POSTINSTALL === 'true') process.exit(0);
if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) {
	console.error('Bash for windows is not supported, please execute this from Powershell or CMD');
	process.exit(255);
}
const exec = promisify(_exec);
// Verbose logging is enabled by running with NODE_ENV=debug
const __debug = env.NODE_ENV === 'debug';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
// NOTE: Must point to package root path
const __root = path.resolve(path.join(__dirname, '..'));
// Current machine identifiers: [os.type(), os.machine(), (libc flavor on Linux)]
const machineId = [os.type(), os.machine()];
if (machineId[0] === 'Linux') machineId.push((await isMusl()) ? 'musl' : 'glibc');
// Basic dependency check
// NOTE(review): this only warns and then continues; confirm the script is
// meant to proceed when basic dependencies are missing.
// NOTE(review): the message points at packages/scripts/ but the setup scripts
// appear to live under scripts/ — confirm the path in this string.
if (
	(await Promise.all([which('cargo'), which('rustc'), which('pnpm'), which('node')])).some(
		(found) => !found
	)
) {
	console.error(`Basic dependencies missing.
Make sure you have rust, node.js and pnpm installed:
https://rustup.rs
https://nodejs.org/en/download
https://pnpm.io/installation
Also that you have run the setup script:
packages/scripts/${machineId[0] === 'Windows_NT' ? 'setup.ps1' : 'setup.sh'}
`);
}
// Accepted git branches for querying for artifacts (current, main, master)
const branches = await getGitBranches(__root);
// Create the basic target directory hierarchy (wiping any previous downloads)
const framework = path.join(__root, 'target', 'Frameworks');
await fs.rm(framework, { force: true, recursive: true });
await Promise.all(
	['bin', 'lib', 'include'].map((dir) =>
		fs.mkdir(path.join(framework, dir), { mode: 0o750, recursive: true })
	)
);
// Download all necessary external dependencies in parallel.
// protoc and ffmpeg are hard requirements (their catch rethrows); pdfium is
// optional and only degrades PDF thumbnailing when missing.
// Fix: corrected user-facing typos ("open a issue with you system info").
const deps = [
	downloadProtc(machineId, framework).catch((e) => {
		console.error(
			'Failed to download protoc, this is required for Spacedrive to compile. ' +
				'Please install it with your system package manager'
		);
		throw e;
	}),
	downloadPDFium(machineId, framework).catch((e) => {
		console.warn(
			'Failed to download pdfium lib. ' +
				"This is optional, but if one isn't configured Spacedrive won't be able to generate thumbnails for PDF files"
		);
		if (__debug) console.error(e);
	}),
	downloadFFMpeg(machineId, framework, branches).catch((e) => {
		console.error(
			'Failed to download ffmpeg. This is probably a bug, please open an issue with your system info at: ' +
				'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
		);
		throw e;
	})
];
// The patched tauri CLI is only needed on macOS
if (machineId[0] === 'Darwin')
	deps.push(
		downloadPatchedTauriCLI(machineId, framework, branches).catch((e) => {
			console.error(
				'Failed to download patched tauri CLI. This is probably a bug, please open an issue with your system info at: ' +
					'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
			);
			throw e;
		})
	);
await Promise.all(deps).catch((e) => {
	if (__debug) console.error(e);
	process.exit(1);
});
// Generate .cargo/config.toml from the mustache template committed in .cargo/
console.log('Generating cargo config...');
try {
	await fs.writeFile(
		path.join(__root, '.cargo', 'config.toml'),
		mustache
			.render(
				await fs.readFile(path.join(__root, '.cargo', 'config.toml.mustache'), {
					encoding: 'utf8'
				}),
				{
					// Backslashes are doubled so Windows paths survive TOML string escaping
					ffmpeg: machineId[0] === 'Linux' ? false : framework.replaceAll('\\', '\\\\'),
					protoc: path
						.join(
							framework,
							'bin',
							machineId[0] === 'Windows_NT' ? 'protoc.exe' : 'protoc'
						)
						.replaceAll('\\', '\\\\'),
					projectRoot: __root.replaceAll('\\', '\\\\'),
					isWin: machineId[0] === 'Windows_NT',
					isMacOS: machineId[0] === 'Darwin',
					isLinux: machineId[0] === 'Linux'
				}
			)
			// Collapse the blank lines left behind by unrendered mustache sections
			.replace(/\n\n+/g, '\n'),
		// NOTE(review): 0o751 sets the execute bit on a config file — confirm intended
		{ mode: 0o751, flag: 'w+' }
	);
} catch (error) {
	console.error(
		'Failed to generate .cargo/config.toml, please open an issue on: ' +
			'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
	);
	if (__debug) console.error(error);
	process.exit(1);
}
// Setup macOS Frameworks: reorganize the downloaded ffmpeg/pdfium files into
// FFMpeg.framework's layout, symlink them back into include/lib, and codesign
// the dylibs (required for them to load on macOS 13+).
// Fix: corrected user-facing typos in the error message ("Frameworks.This",
// "open a issue with you system info").
if (machineId[0] === 'Darwin') {
	try {
		console.log('Setup Frameworks & Sign libraries...');
		const ffmpegFramework = path.join(framework, 'FFMpeg.framework');
		// Move pdfium License to FFMpeg.framework
		await fs.rename(
			path.join(framework, 'LICENSE.pdfium'),
			path.join(
				ffmpegFramework,
				'Resources',
				'English.lproj',
				'Documentation',
				'LICENSE.pdfium'
			)
		);
		// Move include files (except protobuf .proto files) to FFMpeg.framework/Headers
		const include = path.join(framework, 'include');
		const headers = path.join(ffmpegFramework, 'Headers');
		const includeFiles = await fs.readdir(include, { recursive: true, withFileTypes: true });
		const moveIncludes = includeFiles
			.filter(
				(entry) =>
					(entry.isFile() || entry.isSymbolicLink()) && !entry.name.endsWith('.proto')
			)
			.map(async (entry) => {
				const file = path.join(entry.path, entry.name);
				const newFile = path.resolve(headers, path.relative(include, file));
				await fs.mkdir(path.dirname(newFile), { mode: 0o751, recursive: true });
				await fs.rename(file, newFile);
			});
		// Move dylibs to FFMpeg.framework/Libraries
		const lib = path.join(framework, 'lib');
		const libraries = path.join(ffmpegFramework, 'Libraries');
		const libFiles = await fs.readdir(lib, { recursive: true, withFileTypes: true });
		const moveLibs = libFiles
			.filter(
				(entry) =>
					(entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib')
			)
			.map(async (entry) => {
				const file = path.join(entry.path, entry.name);
				const newFile = path.resolve(libraries, path.relative(lib, file));
				await fs.mkdir(path.dirname(newFile), { mode: 0o751, recursive: true });
				await fs.rename(file, newFile);
			});
		await Promise.all([...moveIncludes, ...moveLibs]);
		// Symlink headers back into include/ so build scripts keep working
		const headerFiles = await fs.readdir(headers, { recursive: true, withFileTypes: true });
		const linkHeaders = headerFiles
			.filter((entry) => entry.isFile() || entry.isSymbolicLink())
			.map(async (entry) => {
				const file = path.join(entry.path, entry.name);
				const link = path.resolve(include, path.relative(headers, file));
				const linkDir = path.dirname(link);
				await fs.mkdir(linkDir, { mode: 0o751, recursive: true });
				await fs.symlink(path.relative(linkDir, file), link);
			});
		// Symlink libraries back into lib/ and codesign the real files
		const libraryFiles = await fs.readdir(libraries, { recursive: true, withFileTypes: true });
		const linkLibs = libraryFiles
			.filter(
				(entry) =>
					(entry.isFile() || entry.isSymbolicLink()) && entry.name.endsWith('.dylib')
			)
			.map(async (entry) => {
				const file = path.join(entry.path, entry.name);
				const link = path.resolve(lib, path.relative(libraries, file));
				const linkDir = path.dirname(link);
				await fs.mkdir(linkDir, { mode: 0o751, recursive: true });
				await fs.symlink(path.relative(linkDir, file), link);
				if (entry.isFile()) {
					// Sign the lib with the local machine certificate (Required for it to work on macOS 13+)
					await exec(`codesign -s "${env.APPLE_SIGNING_IDENTITY || '-'}" -f "${file}"`);
				}
			});
		await Promise.all([...linkHeaders, ...linkLibs]);
	} catch (error) {
		console.error(
			'Failed to configure required Frameworks. This is probably a bug, please open an issue with your system info at: ' +
				'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
		);
		if (__debug) console.error(error);
		process.exit(1);
	}
}

View file

@ -64,65 +64,11 @@ function Add-DirectoryToPath($directory) {
Reset-Path
}
$ghUrl = 'https://api.github.com/repos'
$sdGhPath = 'spacedriveapp/spacedrive'
function Invoke-RestMethodGithub {
[CmdletBinding()]
param (
[Parameter(Mandatory = $true)]
[string]$Uri,
[string]$Method = 'GET',
[string]$OutFile = $null,
[hashtable]$Headers = @{},
[string]$UserAgent = 'PowerShell'
)
$headers.Add('Accept', 'application/vnd.github+json')
$headers.Add('X-GitHub-Api-Version', '2022-11-28')
if (![string]::IsNullOrEmpty($env:GITHUB_TOKEN)) {
$headers.Add('Authorization', "Bearer $($env:GITHUB_TOKEN)")
}
$params = @{
Uri = $Uri
Method = $Method
OutFile = $OutFile
Headers = $Headers
UserAgent = $UserAgent
}
Invoke-RestMethod @params
}
function DownloadArtifact {
param (
[Parameter(Mandatory = $true)]
[ValidateNotNullOrEmpty()]
[string]$ArtifactPath,
[string]$OutFile
)
try {
Invoke-RestMethodGithub -Uri "$ghUrl/$sdGhPath/actions/artifacts/$($($ArtifactPath -split '/')[3])/zip" -OutFile $OutFile
} catch {
# nightly.link is a workaround for the lack of a public GitHub API to download artifacts from a workflow run
# https://github.com/actions/upload-artifact/issues/51
# Use it when running in environments that are not authenticated with GitHub
Write-Host 'Failed to download artifact from Github, falling back to nightly.link' -ForegroundColor Yellow
Invoke-RestMethodGithub -Uri "https://nightly.link/${sdGhPath}/${ArtifactPath}" -OutFile $OutFile
}
}
# Reset PATH to ensure the script doesn't have stale Path entries
Reset-Path
# Get temp folder
$temp = [System.IO.Path]::GetTempPath()
# Get project dir (get grandparent dir from script location: <PROJECT_ROOT>\.github\scripts)
$projectRoot = Split-Path -Path (Split-Path -Path $PSScriptRoot -Parent) -Parent
# Get project dir (get grandparent dir from script location: <PROJECT_ROOT>\scripts\setup.ps1)
$projectRoot = Split-Path -Path $PSScriptRoot -Parent
$packageJson = Get-Content -Raw -Path "$projectRoot\package.json" | ConvertFrom-Json
# Valid winget exit status
@ -132,13 +78,6 @@ $wingetValidExit = 0, -1978335189, -1978335153, -1978335135
# See https://github.com/spacedriveapp/spacedrive/issues/677
$llvmVersion = [Version]'15.0.7'
$ffmpegVersion = '6.0'
# Change CWD to project root
Set-Location $projectRoot
Remove-Item -Force -ErrorAction SilentlyContinue -Path "$projectRoot\.cargo\config"
Remove-Item -Force -ErrorAction SilentlyContinue -Path "$projectRoot\target\Frameworks" -Recurse
Write-Host 'Spacedrive Development Environment Setup' -ForegroundColor Magenta
Write-Host @"
@ -149,9 +88,7 @@ To set up your machine for Spacedrive development, this script will do the follo
4) Install Rust tools
5) Install Strawberry perl (used by to build the openssl-sys crate)
6) Install Node.js, npm and pnpm
7) Install LLVM $llvmVersion (compiler for ffmpeg-rust)
8) Download the protbuf compiler
9) Download a compatible ffmpeg build
7) Install LLVM $llvmVersion (compiler for ffmpeg-sys-next crate)
"@
# Install System dependencies (GitHub Actions already has all of those installed)
@ -306,127 +243,13 @@ https://learn.microsoft.com/windows/package-manager/winget/
}
}
# Create target folder, continue if already exists
New-Item -Force -ErrorAction SilentlyContinue -ItemType Directory -Path "$projectRoot\target\Frameworks" | Out-Null
# --
Write-Host
Write-Host 'Retrieving protobuf build...' -ForegroundColor Yellow
$filename = $null
$downloadUri = $null
$releasesUri = "${ghUrl}/protocolbuffers/protobuf/releases"
$filenamePattern = '*-win64.zip'
$releases = Invoke-RestMethodGithub -Uri $releasesUri
for ($i = 0; $i -lt $releases.Count; $i++) {
$release = $releases[$i]
foreach ($asset in $release.assets) {
if ($asset.name -like $filenamePattern) {
$filename = $asset.name
$downloadUri = $asset.browser_download_url
$i = $releases.Count
break
}
}
if ($LASTEXITCODE -ne 0) {
Exit-WithError "Something went wrong, exit code: $LASTEXITCODE"
}
if (-not ($filename -and $downloadUri)) {
Exit-WithError "Couldn't find a protobuf compiler installer"
}
Write-Host "Dowloading protobuf zip from ${downloadUri}..." -ForegroundColor Yellow
Invoke-RestMethodGithub -Uri $downloadUri -OutFile "$temp\protobuf.zip"
Write-Host 'Expanding protobuf zip...' -ForegroundColor Yellow
Expand-Archive "$temp\protobuf.zip" "$projectRoot\target\Frameworks" -Force
Remove-Item -Force -ErrorAction SilentlyContinue -Path "$temp\protobuf.zip"
# --
Write-Host "Retrieving ffmpeg-${ffmpegVersion} build..." -ForegroundColor Yellow
$page = 1
while ($page -gt 0) {
$success = ''
Invoke-RestMethodGithub -Uri `
"${ghUrl}/${sdGhPath}/actions/workflows/ffmpeg-windows.yml/runs?page=$page&per_page=100&status=success" `
| ForEach-Object {
if (-not $_.workflow_runs) {
Exit-WithError "Error: $_"
}
$_.workflow_runs | ForEach-Object {
$artifactPath = (
(Invoke-RestMethodGithub -Uri ($_.artifacts_url | Out-String) -Method Get).artifacts `
| Where-Object {
$_.name -eq "ffmpeg-${ffmpegVersion}-x86_64"
} | ForEach-Object {
$id = $_.id
$workflowRunId = $_.workflow_run.id
"suites/${workflowRunId}/artifacts/${id}"
} | Select-Object -First 1
)
try {
if ([string]::IsNullOrEmpty($artifactPath)) {
throw 'Empty argument'
}
# Download and extract the artifact
Write-Host "Dowloading ffmpeg-${ffmpegVersion} zip from artifact ${artifactPath}..." -ForegroundColor Yellow
DownloadArtifact -ArtifactPath $artifactPath -OutFile "$temp/ffmpeg.zip"
Write-Host "Expanding ffmpeg-${ffmpegVersion} zip..." -ForegroundColor Yellow
Expand-Archive "$temp/ffmpeg.zip" "$projectRoot\target\Frameworks" -Force
Remove-Item -Force -ErrorAction SilentlyContinue -Path "$temp/ffmpeg.zip"
$success = 'yes'
break
} catch {
$errorMessage = $_.Exception.Message
Write-Host "Error: $errorMessage" -ForegroundColor Red
Write-Host 'Failed to download ffmpeg artifact release, trying again in 1sec...'
Start-Sleep -Seconds 1
continue
}
}
}
if ($success -eq 'yes') {
break
}
$page++
Write-Output 'ffmpeg artifact not found, trying again in 1sec...'
Start-Sleep -Seconds 1
}
if ($success -ne 'yes') {
Exit-WithError 'Failed to download ffmpeg files'
}
# Generate .cargo/config: pin PROTOC and FFMPEG_DIR to the freshly downloaded
# Frameworks and add the linker search path, then append the contents of the
# committed .cargo/config.toml (cargo aliases) so they are preserved.
# `-replace '\\', '\\'` doubles every backslash so Windows paths survive TOML
# basic-string escaping.
@(
	'[env]',
	"PROTOC = `"$("$projectRoot\target\Frameworks\bin\protoc" -replace '\\', '\\')`"",
	"FFMPEG_DIR = `"$("$projectRoot\target\Frameworks" -replace '\\', '\\')`"",
	'',
	'[target.x86_64-pc-windows-msvc]',
	"rustflags = [`"-L`", `"$("$projectRoot\target\Frameworks\lib" -replace '\\', '\\')`"]",
	'',
	(Get-Content "$projectRoot\.cargo\config.toml" -Encoding utf8)
) | Out-File -Force -Encoding utf8 -FilePath "$projectRoot\.cargo\config"
# Interactive wrap-up — skipped in CI so the job does not hang on Read-Host.
if (-not $env:CI) {
	Write-Host
	Write-Host 'Your machine has been setup for Spacedrive development!' -ForegroundColor Green
	Write-Host 'You will need to re-run this script if there are rust dependencies changes or you use `pnpm clean` or `cargo clean`!' -ForegroundColor Red
	Write-Host
	Read-Host 'Press Enter to continue'
}
# Surface any failure exit code from the last native command as a script error.
if ($LASTEXITCODE -ne 0) {
	Exit-WithError "Something went wrong, exit code: $LASTEXITCODE"
}

212
scripts/setup.sh Executable file
View file

@ -0,0 +1,212 @@
#!/usr/bin/env bash
# Spacedrive development environment setup script.
# Fail fast: exit on error, on unset variables, and on pipeline failures.
set -euo pipefail
# Trace every command in CI logs to make failures easier to debug.
if [ "${CI:-}" = "true" ]; then
	set -x
fi
# Print each argument on its own line to stderr, then abort the script with
# exit status 1.
# Fix: the loop body echoed "$@" (the entire argument list) once per argument,
# producing N copies of the full message; it must echo the current line.
err() {
	for _line in "$@"; do
		echo "$_line" >&2
	done
	exit 1
}
# Succeed only when every named program can be resolved with `command -v`;
# return 1 as soon as one is missing.
has() {
	while [ "$#" -gt 0 ]; do
		command -v "$1" 1>/dev/null 2>&1 || return 1
		shift
	done
}
# ERR-trap handler: report the failing line number when known, then abort via err.
script_failure() {
	_line="${1:+on line $1}"
	err "An error occurred ${_line:-(unknown)}." "Setup failed."
}
# Report the failing line number whenever any command errors out (set -e).
trap 'script_failure ${LINENO:-}' ERR
# Interactive preamble: prompt and verify local tooling only outside of CI.
if [ "${CI:-}" != "true" ]; then
	echo 'Spacedrive Development Environment Setup'
	echo 'To set up your machine for Spacedrive development, this script will install some required dependencies with your system package manager'
	echo
	echo 'Press Enter to continue'
	read -r
	# pnpm and a Rust toolchain are hard requirements for the repo.
	if ! has pnpm; then
		err 'pnpm was not found.' \
			"Ensure the 'pnpm' command is in your \$PATH." \
			'You must use pnpm for this project; yarn and npm are not allowed.' \
			'https://pnpm.io/installation'
	fi
	if ! has rustc cargo; then
		err 'Rust was not found.' \
			"Ensure the 'rustc' and 'cargo' binaries are in your \$PATH." \
			'https://rustup.rs'
	fi
	echo "Installing Rust tools..."
	cargo install cargo-watch
	echo
fi
# Install rust deps for android
# Passing "mobile" as the first argument also sets up cross-compilation targets.
if [ "${1:-}" = "mobile" ]; then
	MOBILE=1
	# Android requires python
	if ! { has python3 || { has python && python -c 'import sys; exit(0 if sys.version_info[0] == 3 else 1)'; }; }; then
		err 'python3 was not found.' \
			'This is required for Android mobile development.' \
			"Ensure 'python3' is available in your \$PATH and try again."
	fi
	# Android targets
	echo "Installing Android targets for Rust..."
	rustup target add armv7-linux-androideabi # for arm
	rustup target add aarch64-linux-android # for arm64
	rustup target add i686-linux-android # for x86
	rustup target add x86_64-linux-android # for x86_64
	# NOTE(review): the desktop targets below are presumably needed to build the
	# host-side tooling alongside the mobile targets — confirm before removing.
	rustup target add x86_64-unknown-linux-gnu # for linux-x86-64
	rustup target add aarch64-apple-darwin # for darwin arm64 (if you have an M1 Mac)
	rustup target add x86_64-apple-darwin # for darwin x86_64 (if you have an Intel Mac)
	rustup target add x86_64-pc-windows-gnu # for win32-x86-64-gnu
	rustup target add x86_64-pc-windows-msvc # for win32-x86-64-msvc
	echo
else
	MOBILE=0
fi
# Install system deps
# Per-OS system dependency installation. Fixes in this revision:
#  - duplicated `sudo sudo dnf` in the dnf group install fallback
#  - package-name typo: `streamer1-plugins-bad-free-extras` was missing the
#    leading "g" (gstreamer1-plugins-bad-free-extras)
case "$(uname)" in
	"Darwin")
		# Install rust deps for iOS
		if [ $MOBILE -eq 1 ]; then
			echo "Checking for Xcode..."
			if ! /usr/bin/xcodebuild -version >/dev/null; then
				err "Xcode was not detected." \
					"Please ensure Xcode is installed and try again."
			fi
			echo "Installing iOS targets for Rust..."
			rustup target add aarch64-apple-ios
			rustup target add aarch64-apple-ios-sim
			rustup target add x86_64-apple-ios # for CI
			echo
		fi
		;;
	"Linux") # https://github.com/tauri-apps/tauri-docs/blob/dev/docs/guides/getting-started/prerequisites.md
		if has apt-get; then
			echo "Detected apt!"
			echo "Installing dependencies with apt..."
			# Tauri dependencies
			set -- build-essential curl wget file patchelf libssl-dev libgtk-3-dev librsvg2-dev \
				libwebkit2gtk-4.0-dev libayatana-appindicator3-dev
			# FFmpeg dependencies
			set -- "$@" ffmpeg libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev \
				libavutil-dev libswscale-dev libswresample-dev
			# Webkit2gtk requires gstreamer plugins for video playback to work
			set -- "$@" gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-libav \
				gstreamer1.0-pipewire gstreamer1.0-plugins-bad gstreamer1.0-plugins-base \
				gstreamer1.0-plugins-good gstreamer1.0-plugins-ugly gstreamer1.0-pulseaudio \
				gstreamer1.0-vaapi libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
				libgstreamer-plugins-bad1.0-dev
			# Bindgen dependencies - it's used by a dependency of Spacedrive
			set -- "$@" pkg-config clang
			sudo apt-get -y update
			sudo apt-get -y install "$@"
		elif has pacman; then
			echo "Detected pacman!"
			echo "Installing dependencies with pacman..."
			# Tauri dependencies
			set -- base-devel curl wget file patchelf openssl gtk3 librsvg webkit2gtk libayatana-appindicator
			# FFmpeg dependencies
			set -- "$@" libheif ffmpeg
			# Webkit2gtk requires gstreamer plugins for video playback to work
			set -- "$@" gst-libav gst-plugins-bad gst-plugins-base gst-plugins-good gst-plugins-ugly \
				gst-plugin-pipewire gstreamer-vaapi
			# Bindgen dependencies - it's used by a dependency of Spacedrive
			set -- "$@" pkgconf clang
			sudo pacman -Sy --needed "$@"
		elif has dnf; then
			echo "Detected dnf!"
			echo "Installing dependencies with dnf..."
			# For Enterprise Linux, you also need "Development Tools" instead of "C Development Tools and Libraries"
			if ! { sudo dnf group install "C Development Tools and Libraries" || sudo dnf group install "Development Tools"; }; then
				err 'We were unable to install the "C Development Tools and Libraries"/"Development Tools" package.' \
					'Please open an issue if you feel that this is incorrect.' \
					'https://github.com/spacedriveapp/spacedrive/issues'
			fi
			# For Fedora 36 and below, and all Enterprise Linux Distributions, you need to install webkit2gtk3-devel instead of webkit2gtk4.0-devel
			if ! { sudo dnf install webkit2gtk4.0-devel || sudo dnf install webkit2gtk3-devel; }; then
				err 'We were unable to install the webkit2gtk4.0-devel/webkit2gtk3-devel package.' \
					'Please open an issue if you feel that this is incorrect.' \
					'https://github.com/spacedriveapp/spacedrive/issues'
			fi
			# Tauri dependencies
			# openssl is manually declared here as i don't think openssl and openssl-devel are actually dependant on eachother
			# openssl also has a habit of being missing from some of my fresh Fedora installs - i've had to install it at least twice
			set -- openssl openssl-devel curl wget file patchelf libappindicator-gtk3-devel librsvg2-devel
			# Webkit2gtk requires gstreamer plugins for video playback to work
			set -- "$@" gstreamer1-devel gstreamer1-plugins-base-devel \
				gstreamer1-plugins-good gstreamer1-plugins-good-gtk \
				gstreamer1-plugins-good-extras gstreamer1-plugins-ugly-free \
				gstreamer1-plugins-bad-free gstreamer1-plugins-bad-free-devel \
				gstreamer1-plugins-bad-free-extras
			# Bindgen dependencies - it's used by a dependency of Spacedrive
			set -- "$@" clang pkgconf clang-devel
			sudo dnf install "$@"
			# FFmpeg dependencies
			if ! sudo dnf install libheif-devel ffmpeg ffmpeg-devel; then
				err 'We were unable to install the FFmpeg and FFmpeg-devel packages.' \
					'This is likely because the RPM Fusion free repository is not enabled.' \
					'https://docs.fedoraproject.org/en-US/quick-docs/setup_rpmfusion'
			fi
			# required for building the openssl-sys crate
			# perl-FindBin perl-File-Compare perl-IPC-Cmd perl-File-Copy
		else
			if has lsb_release; then
				_distro="'$(lsb_release -s -d)' "
			fi
			err "Your Linux distro ${_distro:-}is not supported by this script." \
				'We would welcome a PR or some help adding your OS to this script:' \
				'https://github.com/spacedriveapp/spacedrive/issues'
		fi
		;;
	*)
		err "Your OS ($(uname)) is not supported by this script." \
			'We would welcome a PR or some help adding your OS to this script.' \
			'https://github.com/spacedriveapp/spacedrive/issues'
		;;
esac
echo 'Your machine has been setup for Spacedrive development!'

93
scripts/suffix.mjs Normal file
View file

@ -0,0 +1,93 @@
// Suffixes
// Lookup tables mapping OS type (as reported by os.type()) and CPU architecture
// to the release-asset name suffix for each tool. They are traversed with
// getConst/getSuffix using an [osType, arch, ...] identifier chain.

// protoc release archive suffixes.
export const PROTOC_SUFFIX = {
	Linux: {
		i386: 'linux-x86_32',
		i686: 'linux-x86_32',
		x86_64: 'linux-x86_64',
		arm64: 'linux-aarch_64',
		aarch64: 'linux-aarch_64'
	},
	Darwin: {
		x86_64: 'osx-x86_64',
		arm64: 'osx-aarch_64',
		aarch64: 'osx-aarch_64'
	},
	Windows_NT: {
		i386: 'win32',
		i686: 'win32',
		x86_64: 'win64'
	}
};

// pdfium binary suffixes. Linux x86_64 additionally distinguishes the libc
// flavor (musl vs glibc), adding one more level to the identifier chain.
export const PDFIUM_SUFFIX = {
	Linux: {
		x86_64: {
			musl: 'linux-musl-x64',
			glibc: 'linux-x64'
		},
		arm64: 'linux-arm64',
		aarch64: 'linux-arm64'
	},
	Darwin: {
		x86_64: 'mac-x64',
		arm64: 'mac-arm64',
		aarch64: 'mac-arm64'
	},
	Windows_NT: {
		x86_64: 'win-x64',
		arm64: 'win-arm64',
		aarch64: 'win-arm64'
	}
};

// ffmpeg build suffixes; only macOS and Windows builds are produced.
// NOTE(review): "SUFFFIX" is misspelled, but this is an exported name —
// renaming it would break importers. Fix repo-wide (with all call sites) if desired.
export const FFMPEG_SUFFFIX = {
	Darwin: {
		x86_64: 'x86_64',
		arm64: 'arm64',
		aarch64: 'arm64'
	},
	Windows_NT: {
		x86_64: 'x86_64'
	}
};

// GitHub Actions workflow file that produces the ffmpeg build for each OS.
export const FFMPEG_WORKFLOW = {
	Darwin: 'ffmpeg-macos.yml',
	Windows_NT: 'ffmpeg-windows.yml'
};

// tauri-cli target-triple suffixes (only needed on macOS).
export const TAURI_CLI_SUFFIX = {
	Darwin: {
		x86_64: 'x86_64-apple-darwin',
		arm64: 'aarch64-apple-darwin',
		aarch64: 'aarch64-apple-darwin'
	}
};
/**
 * Walk `constants` along the chain of `identifiers` and return the string found
 * at the end of the path. Traversal stops early as soon as a non-object value
 * is reached; returns null when the path is missing or does not end in a string.
 *
 * @param {Record<string, unknown>} constants
 * @param {string[]} identifiers
 * @returns {string?}
 */
export function getConst(constants, identifiers) {
	/** @type {string | Record<string, unknown>} */
	let node = constants;
	for (const key of identifiers) {
		const next = /** @type {string | Record<string, unknown>} */ (node[key]);
		if (!next) return null;
		node = next;
		if (typeof node !== 'object') break;
	}
	return typeof node === 'string' ? node : null;
}
/**
 * Resolve the asset suffix for the given identifier chain and wrap it in a
 * RegExp matching that suffix at the end of a file name, allowing trailing
 * extensions (e.g. ".zip", ".tar.xz").
 *
 * @param {Record<string, unknown>} suffixes
 * @param {string[]} identifiers
 * @returns {RegExp?}
 */
export function getSuffix(suffixes, identifiers) {
	const suffix = getConst(suffixes, identifiers);
	if (!suffix) return null;
	return new RegExp(`${suffix}(\\.[^\\.]+)*$`);
}

41
scripts/which.mjs Normal file
View file

@ -0,0 +1,41 @@
import { exec as execCb } from 'node:child_process';
import * as fs from 'node:fs/promises';
import * as os from 'node:os';
import * as path from 'node:path';
import { env } from 'node:process';
import { promisify } from 'node:util';
const exec = promisify(execCb);

/**
 * Check whether a program is available on Windows via the `where` utility.
 *
 * Fix: the guard claimed to "Reject paths" but only matched backslashes.
 * Forward slashes are also path separators on Windows, and an embedded `"`
 * would escape the quoted interpolation into the shell command below, so all
 * three are rejected up front.
 *
 * @param {string} progName
 * @returns {Promise<boolean>}
 */
async function where(progName) {
	// Reject paths and quote characters — only bare program names are queried
	if (/[\\/"]/.test(progName)) return false;
	try {
		await exec(`where "${progName}"`);
	} catch {
		return false;
	}
	return true;
}
/**
 * Cross-platform check for whether `progName` is executable from $PATH.
 * On Windows it delegates to the `where` utility; elsewhere it probes every
 * unique $PATH entry for an executable file with that name.
 *
 * @param {string} progName
 * @returns {Promise<boolean>}
 */
export async function which(progName) {
	if (os.type() === 'Windows_NT') return where(progName);

	const dirs = [...new Set(env.PATH?.split(':'))];
	const probes = dirs.map((dir) => fs.access(path.join(dir, progName), fs.constants.X_OK));
	try {
		// Resolves as soon as any directory holds an executable match.
		await Promise.any(probes);
		return true;
	} catch {
		return false;
	}
}