[ENG-1054] libheif for Linux (plus fixes) + Webp update (#1405)

* Move postinstall script to a preprep script
 - Fix libheif crate failing to build with our libheif
 - Rework CI for the postinstall-to-preprep change

* Linux heif build script + Update webp

* Fix ctrl+c/ctrl+v bug

* Improve libheif linux script
 - Add support for linux aarch64
 - Add CI workflow to build libheif for linux
 - Some other misc fixes

* Fix libheif CI requiring sudo

* Fix wrong path for libheif build.rs override in Windows

* Fix wrong path manipulations in libheif build script

* 🤦

* Use ubuntu-latest in libheif action
 - Specify glibc version in target triple to support old distros (sketched below)
 - Fix libheif artifact publishing
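
The glibc pinning above leans on zig's cross-compilation triples, which can encode a minimum glibc version. A rough sketch, where hello.c is just a stand-in input:

    # Targeting glibc 2.23 keeps the produced binaries loadable on older distros.
    zig cc -s -target x86_64-linux-gnu.2.23 -o hello hello.c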

* Fix artifact upload path again

* Add musl support for libheif
 - Remove unused files from libheif artifact
 - Add setup logic for libheif in postinstall script

* Build libheif for linux as a shared lib

* Fix meson not building the correct arch
 - Add logic to get the git branch from GitHub's CI env vars

* libheif finally works on linux
 - Make the spacedrive binary's rpath point to where the AppImage and deb packages expect our libs to be (see the check below)
 - Add some logic to tauri.js to convince tauri to bundle our shared libs
 - Work around the AppImage bundling step sometimes breaking
 - Add logic to handle sigint in tauri.js to ensure we clean up after ourselves
 - Rename postinstall.mjs to setup.mjs
 - Add logic to setup.mjs to point our dev build to our shared libs in linux
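
A quick way to sanity-check the rpath change, assuming the usual release binary path:

    # The generated cargo config adds "-C link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive",
    # so the binary's dynamic section should carry that runpath.
    readelf -d target/release/spacedrive | grep -iE 'rpath|runpath'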

* Fix Windows desktop dev
 - Rename setup.mjs to preprep.mjs

* test cache-factory

* Fix preprep script not parsing the cross-compilation target triple and always using the host info to download dependencies
 - Fix action env vars not being correctly passed
 - Remove possibility to pass multiple targets to the Rust action

* Don't compile mobile crates on desktop targets

* Remove cache-factory pull_request trigger

* remove patched tauri cli

* Use git plumbing commands to get the remote branch name (sketched below)
 - Fall back to reading .git/HEAD if the remote name was not retrieved
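
Roughly the sequence scripts/git.mjs now runs:

    # Resolve the local branch, then ask git for its upstream (remote-tracking) name.
    branch="$(git symbolic-ref --short HEAD)"
    git for-each-ref --format="%(upstream:short)" "refs/heads/${branch}"
    # If either step comes back empty, fall back to parsing .git/HEAD for the ref.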

* fix type

---------

Co-authored-by: Brendan Allan <brendonovich@outlook.com>
Vítor Vasconcellos 2023-10-03 14:31:33 -03:00 committed by GitHub
parent 8af1019850
commit d75a6e9b12
37 changed files with 715 additions and 275 deletions


@ -10,21 +10,41 @@ FFMPEG_DIR = "{{{ffmpeg}}}"
[target.x86_64-apple-darwin]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
[target.x86_64-apple-darwin.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"]
rustc-link-lib = ["heif"]
[target.aarch64-apple-darwin]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
[target.aarch64-apple-darwin.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"]
rustc-link-lib = ["heif"]
{{/isMacOS}}
{{#isWin}}
[target.x86_64-pc-windows-msvc]
rustflags = ["-L", "{{{projectRoot}}}\\target\\Frameworks\\lib"]
[target.x86_64-pc-windows-msvc.heif]
rustc-link-search = ["{{{projectRoot}}}\\target\\Frameworks\\lib"]
rustc-link-lib = ["heif"]
{{/isWin}}
{{#isLinux}}
[target.x86_64-unknown-linux-gnu]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"]
[target.x86_64-unknown-linux-gnu.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"]
rustc-link-lib = ["heif"]
[target.aarch64-unknown-linux-gnu]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib"]
rustflags = ["-L", "{{{projectRoot}}}/target/Frameworks/lib", "-C", "link-arg=-Wl,-rpath=${ORIGIN}/../lib/spacedrive"]
[target.aarch64-unknown-linux-gnu.heif]
rustc-link-search = ["{{{projectRoot}}}/target/Frameworks/lib"]
rustc-link-lib = ["heif"]
{{/isLinux}}
[alias]


@ -5,10 +5,6 @@ inputs:
description: Github token
required: false
default: ''
ignorePostInstall:
description: Don't run post install
required: false
default: 'false'
runs:
using: 'composite'
steps:
@ -29,5 +25,4 @@ runs:
env:
NODE_ENV: debug
GITHUB_TOKEN: ${{ inputs.token }}
IGNORE_POSTINSTALL: ${{ inputs.ignorePostInstall }}
run: pnpm i --frozen-lockfile


@ -1,8 +1,8 @@
name: Setup Rust and Prisma
description: Setup Rust and Prisma
inputs:
targets:
description: Comma-separated list of target triples to install for this toolchain
target:
description: toolchain target triple
required: false
save-cache:
description: Whether to save the Rust cache
@ -15,7 +15,7 @@ runs:
id: toolchain
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ inputs.targets }}
target: ${{ inputs.target }}
toolchain: stable
components: clippy, rustfmt
@ -24,7 +24,7 @@ runs:
with:
save-if: ${{ inputs.save-cache }}
prefix-key: 'v0-rust-deps'
shared-key: ${{ inputs.targets }}
shared-key: ${{ inputs.target }}
- name: Cargo config.toml
shell: bash


@ -5,8 +5,8 @@ inputs:
description: Github token
required: false
default: ''
targets:
description: Comma-separated list of target triples to install for this toolchain
target:
description: toolchain target triple
required: false
setup-arg:
description: Argument for the system setup script
@ -45,21 +45,26 @@ runs:
- name: Setup Rust and Dependencies
uses: ./.github/actions/setup-rust
with:
targets: ${{ inputs.targets }}
target: ${{ inputs.target }}
save-cache: ${{ inputs.save-cache }}
- name: Run setup.sh script
shell: bash
if: ${{ runner.os == 'Linux' || runner.os == 'macOS' }}
run: ./scripts/setup.sh ${{ inputs.setup-arg }}
env:
TARGET: ${{ inputs.targets }}
GITHUB_TOKEN: ${{ inputs.token }}
APPLE_SIGNING_IDENTITY: ${{ env.APPLE_SIGNING_IDENTITY }}
- name: Run setup.ps1 script
shell: powershell
if: ${{ runner.os == 'Windows' }}
run: ./scripts/setup.ps1
- name: Setup shared libraries
shell: bash
env:
TARGET_TRIPLE: ${{ inputs.target }}
GITHUB_TOKEN: ${{ inputs.token }}
run: |
pushd ..
npm i archive-wasm mustache
popd
node scripts/preprep.mjs


@ -1,7 +1,7 @@
ARG FAKE_DEPS="gettext-runtime libiconv ncurses" \
FFMPEG_DEPS="brotli bzip2 dav1d libde265 libjxl libopus libpng libvorbis libvpx-devel openjpeg \
soxr xz zimg" \
LIBWEBP_VERSION=1.3.1 \
LIBWEBP_VERSION=1.3.2 \
FFMPEG_VERSION=6.0 \
LIBHEIF_VERSION=1.16.2


@ -1,7 +1,7 @@
#!/bin/bash
SCRIPT_REPO="https://github.com/madler/zlib.git"
SCRIPT_TAG="v1.2.13"
SCRIPT_TAG="v1.3"
ffbuild_dockerbuild() {
git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" zlib


@ -1,7 +1,7 @@
#!/bin/bash
SCRIPT_REPO="https://github.com/strukturag/libde265.git"
SCRIPT_TAG="v1.0.11"
SCRIPT_TAG="v1.0.12"
ffbuild_dockerbuild() {
git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libde265


@ -1,7 +1,7 @@
#!/bin/bash
SCRIPT_REPO="https://github.com/webmproject/libwebp.git"
SCRIPT_TAG="1.3.1"
SCRIPT_TAG="1.3.2"
ffbuild_dockerbuild() {
git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libwebp

.github/scripts/libheif-linux.sh vendored Executable file

@ -0,0 +1,321 @@
#!/usr/bin/env bash
set -euo pipefail
case "${1:-}" in
'' | x86_64-linux-gnu)
export TARGET_TRIPLE='x86_64-linux-gnu.2.23'
;;
aarch64-linux-gnu)
export TARGET_TRIPLE='aarch64-linux-gnu.2.23'
;;
x86_64-linux-musl)
export TARGET_TRIPLE='x86_64-linux-musl'
;;
aarch64-linux-musl)
export TARGET_TRIPLE='aarch64-linux-musl'
;;
*)
echo "Unsupported target triple '${1}'"
exit 1
;;
esac
# Change CWD to script dir
CDPATH='' cd "$(dirname "$0")"
echo "Install required build dependencies..."
apt-get update -yqq
apt-get install -yqq -o=Dpkg::Use-Pty=0 ninja-build cmake curl nasm pkg-config xz-utils patch python3
echo "Configure sysroot and prefix..."
mkdir -p "./src/prefix/bin" "./src/sysroot/bin"
_prefix="$(CDPATH='' cd ./src/prefix && pwd)"
_sysroot="$(CDPATH='' cd ./src/sysroot && pwd)"
# Configure PATH to use our sysroot bin
export PATH="${_sysroot}/bin:$PATH"
# Configure pkgconfig to look for our built libs
export PKG_CONFIG_LIBDIR="${_prefix}/lib/pkgconfig:${_prefix}/share/pkgconfig"
# Download zig to use as a C/C++ cross compiler
echo "Download zig..."
curl -LSs "https://ziglang.org/download/0.11.0/zig-linux-$(uname -m)-0.11.0.tar.xz" \
| tar -xJf- --strip-component 1 -C "$_sysroot"
mv "${_sysroot}/zig" "${_sysroot}/bin/zig"
# Create scripts for some zig internal commands, because cmake doesn't allow passing arguments to tools
for _arg in ar ranlib; do
cat <<EOF >"${_sysroot}/bin/${_arg}"
#!/usr/bin/env bash
exec zig $_arg "\$@"
EOF
chmod +x "${_sysroot}/bin/${_arg}"
done
echo "Download meson..."
mkdir -p ./src/meson
curl -LSs 'https://github.com/mesonbuild/meson/archive/refs/tags/1.2.1.tar.gz' \
| tar -xzf- --strip-component 1 -C ./src/meson
pushd ./src/meson
# Patch meson to support zig as a C/C++ compiler
curl -LSs 'https://github.com/mesonbuild/meson/pull/12293.patch' | patch -p1
# Install meson binary
./packaging/create_zipapp.py --outfile "${_sysroot}/bin/meson" --compress
popd
cat <<EOF >./src/cross.meson
[binaries]
c = ['zig', 'cc', '-s', '-target', '$TARGET_TRIPLE']
cpp = ['zig', 'c++', '-s', '-target', '$TARGET_TRIPLE']
ar = ['zig', 'ar']
ranlib = ['zig', 'ranlib']
lib = ['zig', 'lib']
dlltool = ['zig', 'dlltool']
[properties]
sys_root = '${_sysroot}'
pkg_config_libdir = ['${_prefix}/lib/pkgconfig', '${_prefix}/share/pkgconfig']
EOF
case "$TARGET_TRIPLE" in
x86_64-*)
cat <<EOF >>./src/cross.meson
[host_machine]
system = 'linux'
cpu_family = 'x86_64'
cpu = 'x86_64'
endian = 'little'
EOF
;;
aarch64-*)
cat <<EOF >>./src/cross.meson
[host_machine]
system = 'linux'
cpu_family = 'aarch64'
cpu = 'arm64'
endian = 'little'
EOF
;;
*)
echo "Unsupported target triple '${1}'"
exit 1
;;
esac
cat <<EOF >./src/toolchain.cmake
set(CMAKE_SYSTEM_NAME Linux)
set(CMAKE_SYSTEM_PROCESSOR x86_64)
set(triple $TARGET_TRIPLE)
set(CMAKE_CROSSCOMPILING TRUE)
set_property(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS FALSE)
# Do a no-op access on the CMAKE_TOOLCHAIN_FILE variable so that CMake will not
# issue a warning on it being unused.
if (CMAKE_TOOLCHAIN_FILE)
endif()
set(CMAKE_C_COMPILER zig cc -s -target $TARGET_TRIPLE)
set(CMAKE_CXX_COMPILER zig c++ -s -target $TARGET_TRIPLE)
set(CMAKE_RANLIB ranlib)
set(CMAKE_C_COMPILER_RANLIB ranlib)
set(CMAKE_CXX_COMPILER_RANLIB ranlib)
set(CMAKE_AR ar)
set(CMAKE_C_COMPILER_AR ar)
set(CMAKE_CXX_COMPILER_AR ar)
set(CMAKE_FIND_ROOT_PATH ${_prefix} ${_sysroot})
set(CMAKE_SYSTEM_PREFIX_PATH /)
if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
set(CMAKE_INSTALL_PREFIX "${_prefix}" CACHE PATH
"Install path prefix, prepended onto install directories." FORCE)
endif()
# To find programs to execute during CMake run time with find_program(), e.g.
# 'git' or so, we allow looking into system paths.
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
if (NOT CMAKE_FIND_ROOT_PATH_MODE_LIBRARY)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
endif()
if (NOT CMAKE_FIND_ROOT_PATH_MODE_INCLUDE)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
endif()
if (NOT CMAKE_FIND_ROOT_PATH_MODE_PACKAGE)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
endif()
# TODO: CMake appends <sysroot>/usr/include to implicit includes; switching to use usr/include will make this redundant.
if ("\${CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES}" STREQUAL "")
set(CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES "${_prefix}/include")
endif()
if ("\${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES}" STREQUAL "")
set(CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES "${_prefix}/include")
endif()
EOF
# --
echo "Download zlib..."
mkdir -p ./src/zlib/build
curl -LSs 'https://github.com/madler/zlib/archive/refs/tags/v1.3.tar.gz' \
| tar -xzf- --strip-component 1 -C ./src/zlib
pushd ./src/zlib/build
echo "Build zlib..."
cmake \
-GNinja \
-DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=Off \
-DCMAKE_POSITION_INDEPENDENT_CODE=On \
-DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=On \
-DCMAKE_INSTALL_PREFIX="$_prefix" \
..
ninja -j"$(nproc)" zlibstatic
# Stub .so files so install doesn't fail
touch libz.so.1.3 libz.so.1 libz.so
ninja install
# Remove stub .so files
rm "${_prefix}"/lib/{libz.so.1.3,libz.so.1,libz.so}
popd
# --
echo "Download dav1d..."
mkdir -p ./src/dav1d/build
curl -LSs 'https://code.videolan.org/videolan/dav1d/-/archive/1.2.1/dav1d-1.2.1.tar.gz' \
| tar -xzf- --strip-component 1 -C ./src/dav1d
pushd ./src/dav1d/build
echo "Build dav1d..."
meson setup \
--cross-file=../../cross.meson \
-Denable_docs=false \
-Denable_tools=false \
-Denable_tests=false \
-Denable_examples=false \
--prefix="$_prefix" \
--buildtype=release \
--default-library=static \
..
ninja -j"$(nproc)"
ninja install
popd
# --
echo "Download libde265..."
mkdir -p ./src/libde265/build
curl -#LSs 'https://github.com/strukturag/libde265/archive/refs/tags/v1.0.12.tar.gz' \
| tar -xzf- --strip-component 1 -C ./src/libde265
pushd ./src/libde265/build
echo "Build libde265..."
cmake \
-GNinja \
-DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=Off \
-DCMAKE_POSITION_INDEPENDENT_CODE=On \
-DCMAKE_INSTALL_PREFIX="$_prefix" \
-DENABLE_SDL=Off \
-DENABLE_DECODER=Off \
-DENABLE_ENCODER=Off \
..
ninja -j"$(nproc)"
ninja install
popd
# --
echo "Download libwebp..."
mkdir -p ./src/libwebp/build
curl -#LSs 'https://github.com/webmproject/libwebp/archive/refs/tags/v1.3.2.tar.gz' \
| tar -xzf- --strip-component 1 -C ./src/libwebp
pushd ./src/libwebp/build
echo "Build libwebp..."
cmake \
-GNinja \
-DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=Off \
-DCMAKE_POSITION_INDEPENDENT_CODE=On \
-DCMAKE_INSTALL_PREFIX="$_prefix" \
-DWEBP_LINK_STATIC=On \
-DWEBP_BUILD_CWEBP=Off \
-DWEBP_BUILD_DWEBP=Off \
-DWEBP_BUILD_GIF2WEBP=Off \
-DWEBP_BUILD_IMG2WEBP=Off \
-DWEBP_BUILD_VWEBP=Off \
-DWEBP_BUILD_WEBPINFO=Off \
-DWEBP_BUILD_WEBPMUX=Off \
-DWEBP_BUILD_EXTRAS=Off \
-DWEBP_BUILD_ANIM_UTILS=Off \
..
ninja -j"$(nproc)"
ninja install
popd
# --
echo "Download libheif..."
mkdir -p ./src/libheif/build
curl -#LSs 'https://github.com/strukturag/libheif/archive/refs/tags/v1.16.2.tar.gz' \
| tar -xzf- --strip-component 1 -C ./src/libheif
pushd ./src/libheif/build
echo "Build libheif..."
cmake \
-GNinja \
-DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
-DCMAKE_BUILD_TYPE=Release \
-DBUILD_SHARED_LIBS=On \
-DCMAKE_POSITION_INDEPENDENT_CODE=On \
-DCMAKE_INSTALL_PREFIX="$_prefix" \
-DBUILD_TESTING=OFF \
-DWITH_DAV1D=ON \
-DWITH_DAV1D_PLUGIN=OFF \
-DWITH_LIBDE265=ON \
-DWITH_LIBDE265_PLUGIN=OFF \
-DWITH_LIBSHARPYUV=ON \
-DWITH_FUZZERS=OFF \
-DWITH_EXAMPLES=OFF \
-DWITH_UNCOMPRESSED_CODEC=ON \
-DWITH_REDUCED_VISIBILITY=ON \
-DWITH_DEFLATE_HEADER_COMPRESSION=ON \
-DENABLE_PLUGIN_LOADING=OFF \
-DENABLE_MULTITHREADING_SUPPORT=ON \
..
ninja -j"$(nproc)"
ninja install
popd


@ -74,7 +74,7 @@ jobs:
uses: ./.github/actions/setup-system
with:
token: ${{ secrets.GITHUB_TOKEN }}
targets: ${{ matrix.settings.target }}
target: ${{ matrix.settings.target }}
save-cache: 'true'
- name: Clippy


@ -24,7 +24,6 @@ jobs:
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
ignorePostInstall: true
- name: Perform typechecks
run: pnpm typecheck
@ -40,7 +39,6 @@ jobs:
uses: ./.github/actions/setup-pnpm
with:
token: ${{ secrets.GITHUB_TOKEN }}
ignorePostInstall: true
- name: Perform linting
run: pnpm lint
@ -144,17 +142,6 @@ jobs:
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Setup shared libraries
if: steps.filter.outputs.changes == 'true'
env:
NODE_ENV: debug
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
pushd ..
npm i archive-wasm mustache
popd
node scripts/post-install.mjs
- name: Run Clippy
if: steps.filter.outputs.changes == 'true'
uses: actions-rs/clippy-check@v1


@ -3,13 +3,13 @@ name: Build ffmpeg macos
on:
push:
paths:
- '.github/workflows/ffmpeg.yml'
- '.github/workflows/ffmpeg-macos.yml'
- '.github/scripts/ffmpeg-macos/**'
branches:
- main
pull_request:
paths:
- '.github/workflows/ffmpeg.yml'
- '.github/workflows/ffmpeg-macos.yml'
- '.github/scripts/ffmpeg-macos/**'
workflow_dispatch:

.github/workflows/libheif-linux.yml vendored Normal file

@ -0,0 +1,64 @@
name: Build libheif for linux
on:
push:
paths:
- '.github/workflows/libheif-linux.yml'
- '.github/scripts/libheif-linux.sh'
branches:
- main
pull_request:
paths:
- '.github/workflows/libheif-linux.yml'
- '.github/scripts/libheif-linux.sh'
workflow_dispatch:
# Cancel previous runs of the same workflow on the same branch.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-libheif-linux:
strategy:
fail-fast: true
matrix:
settings:
- host: ubuntu-latest
target: x86_64-linux-gnu
- host: ubuntu-latest
target: aarch64-linux-gnu
- host: ubuntu-latest
target: x86_64-linux-musl
- host: ubuntu-latest
target: aarch64-linux-musl
name: Build LibHeif Linux ${{ matrix.settings.target }}
runs-on: ${{ matrix.settings.host }}
defaults:
run:
shell: bash
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Build libheif
run: |
set -euxo pipefail
sudo .github/scripts/libheif-linux.sh ${{ matrix.settings.target }}
sudo chown -R "$(id -u):$(id -g)" .github/scripts/src/prefix
# Remove unneeded files
rm -rf .github/scripts/src/prefix/{share,lib/{cmake,pkgconfig}}
find .github/scripts/src/prefix -empty -delete
find .github/scripts/src/prefix -name 'libheif.so*' -exec realpath -s --relative-to=.github/scripts/src/prefix {} + \
| xargs env XZ_OPT='-T0 -9' tar -cJf "libheif-${{ matrix.settings.target }}.tar.xz" -C .github/scripts/src/prefix include/libheif
- name: Publish libheif
uses: actions/upload-artifact@v3
with:
name: libheif-${{ matrix.settings.target }}
path: libheif-${{ matrix.settings.target }}.tar.xz
if-no-files-found: error


@ -86,7 +86,7 @@ jobs:
APPLE_SIGNING_IDENTITY: ${{ secrets.APPLE_SIGNING_IDENTITY }}
with:
token: ${{ secrets.GITHUB_TOKEN }}
targets: ${{ matrix.settings.target }}
target: ${{ matrix.settings.target }}
- name: Setup Node.js, pnpm and dependencies
uses: ./.github/actions/setup-pnpm


@ -1,69 +0,0 @@
# Copyright 2019-2023 Tauri Programme within The Commons Conservancy
# SPDX-License-Identifier: Apache-2.0
# SPDX-License-Identifier: MIT
name: publish cli.js
env:
DEBUG: napi:*
APP_NAME: cli
MACOSX_DEPLOYMENT_TARGET: '10.13'
on:
workflow_dispatch:
defaults:
run:
working-directory: tooling/cli/node/
jobs:
build:
strategy:
fail-fast: true
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
architecture: x64
build: |
yarn build:release --features rustls
strip -x *.node
- host: macos-latest
target: aarch64-apple-darwin
build: |
yarn build:release --features rustls --target=aarch64-apple-darwin
strip -x *.node
name: stable - ${{ matrix.settings.target }} - node@16
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
with:
repository: 'dceddia/tauri'
ref: 'dd32f97335a6105b134e70dad02d269e49a75b56'
- name: Setup node
uses: actions/setup-node@v3
with:
node-version: 16
check-latest: true
architecture: ${{ matrix.settings.architecture }}
- name: Install
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.settings.target }}
toolchain: stable
- name: Install dependencies
run: yarn install --ignore-scripts --frozen-lockfile --registry https://registry.npmjs.org --network-timeout 300000
- name: Build
run: ${{ matrix.settings.build }}
shell: bash
- name: Upload artifact
uses: actions/upload-artifact@v3
with:
name: bindings-${{ matrix.settings.target }}
path: tooling/cli/node/${{ env.APP_NAME }}.*.node
if-no-files-found: error


@ -52,11 +52,11 @@ To quickly run only the desktop app after `prep`, you can use:
If necessary, the webview devtools can be opened automatically by passing the following environment variable before starting the desktop app:
- \[Bash]: `export SD_DEVTOOLS=1`
- \[Bash]: `export SD_DEVTOOLS=1`
- \[Powershell]: `$env:SD_DEVTOOLS=1`
- \[Powershell]: `$env:SD_DEVTOOLS=1`
Also, the react-devtools can be launched using `pnpm dlx react-devtools`.
Also, the react-devtools can be launched using `pnpm dlx react-devtools`.
However, it must be executed before starting the desktop app for it to connect.
To run the web app:

Cargo.lock generated

@ -3777,9 +3777,9 @@ dependencies = [
[[package]]
name = "libwebp-sys"
version = "0.9.2"
version = "0.9.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a5df1e76f0acef0058aa2164ccf74e610e716e7f9eeb3ee2283de7d43659d823"
checksum = "3e0df0a0f9444d52aee6335cd724d21a2ee3285f646291799a72be518ec8ee3c"
dependencies = [
"cc",
"glob",
@ -9094,9 +9094,9 @@ dependencies = [
[[package]]
name = "webp"
version = "0.2.5"
version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "12ff0ebb440d1db63b778cb609db8a8abfda825a7841664a76a70b628502c7e1"
checksum = "4bb5d8e7814e92297b0e1c773ce43d290bef6c17452dafd9fc49e5edb5beba71"
dependencies = [
"image",
"libwebp-sys",


@ -2,3 +2,7 @@
# will have compiled files and executables
/target/
WixTools
*.dll
*.dll.*
*.so
*.so.*


@ -23,9 +23,11 @@ const tauriConf = JSON.parse(
fs.readFileSync(path.resolve(__dirname, '..', 'tauri.conf.json'), 'utf-8')
);
const framework = path.join(workspace, 'target/Frameworks');
switch (args[0]) {
case 'dev': {
if (process.platform === 'win32') setupFFMpegDlls(true);
if (process.platform === 'win32') setupSharedLibs('dll', path.join(framework, 'bin'), true);
break;
}
case 'build': {
@ -51,40 +53,11 @@ switch (args[0]) {
.flatMap((target) => target.split(','));
const tauriPatch = {
tauri: { bundle: { macOS: {} } }
tauri: { bundle: { macOS: {}, resources: [] } }
};
switch (process.platform) {
case 'darwin': {
// Workaround while https://github.com/tauri-apps/tauri/pull/3934 is not merged
const cliNode =
process.arch === 'arm64' ? 'cli.darwin-arm64.node' : 'cli.darwin-x64.node';
const tauriCliPatch = path.join(workspace, 'target/Frameworks/bin/', cliNode);
if (!fs.existsSync(tauriCliPatch)) {
throw new Error(
`Tauri cli patch not found at ${path.relative(
workspace,
tauriCliPatch
)}. Did you run \`pnpm i\`?`
);
}
const tauriBin = path.join(
workspace,
'node_modules/@tauri-apps',
cliNode.replace(/\.[^.]+$/, '').replace(/\./g, '-'),
cliNode
);
if (!fs.existsSync(tauriBin)) {
throw new Error('tauri bin not found at ${tauriBin}. Did you run `pnpm i`?');
}
console.log(
`WORKAROUND tauri-apps/tauri#3933: Replace ${path.relative(
workspace,
tauriBin
)} -> ${path.relative(workspace, tauriCliPatch)}`
);
fs.copyFileSync(tauriCliPatch, tauriBin);
// ARM64 support was added in macOS 11, but we need at least 11.2 due to our ffmpeg build
let macOSMinimumVersion = tauriConf?.tauri?.bundle?.macOS?.minimumSystemVersion;
let macOSArm64MinimumVersion = '11.2';
@ -125,17 +98,30 @@ switch (args[0]) {
break;
}
case 'linux':
fs.rmSync(path.join(workspace, 'target/release/bundle/appimage'), {
recursive: true,
force: true
});
// Point tauri to the ffmpeg DLLs
tauriPatch.tauri.bundle.resources.push(
...setupSharedLibs('so', path.join(framework, 'lib'))
);
break;
case 'win32':
// Point tauri to the ffmpeg DLLs
tauriPatch.tauri.bundle.resources = setupFFMpegDlls();
toRemove.push(
...tauriPatch.tauri.bundle.resources.map((file) =>
path.join(workspace, 'apps/desktop/src-tauri', file)
)
tauriPatch.tauri.bundle.resources.push(
...setupSharedLibs('dll', path.join(framework, 'bin'))
);
break;
}
toRemove.push(
...tauriPatch.tauri.bundle.resources.map((file) =>
path.join(workspace, 'apps/desktop/src-tauri', file)
)
);
const tauriPatchConf = path.resolve(__dirname, '..', 'tauri.conf.patch.json');
fs.writeFileSync(tauriPatchConf, JSON.stringify(tauriPatch, null, 2));
@ -144,9 +130,34 @@ switch (args[0]) {
}
}
process.on('SIGINT', () => {
for (const file of toRemove)
try {
fs.unlinkSync(file);
} catch (e) {}
});
let code = 0;
spawn('pnpm', ['exec', 'tauri', ...args])
.catch((exitCode) => {
if (args[0] === 'build' || process.platform === 'linux') {
// Work around appimage bundling not working sometimes
appimageDir = path.join(workspace, 'target/release/bundle/appimage');
appDir = path.join(appimageDir, 'spacedrive.AppDir');
if (
fs.existsSync(path.join(appimageDir, 'build_appimage.sh')) &&
fs.existsSync(appDir) &&
!fs.readdirSync(appimageDir).filter((file) => file.endsWith('.AppImage')).length
) {
process.chdir(appimageDir);
fs.rmSync(appDir, { recursive: true, force: true });
return spawn('bash', ['build_appimage.sh']).catch((exitCode) => {
code = exitCode;
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`);
});
}
}
code = exitCode;
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`);
console.error(
@ -162,10 +173,10 @@ spawn('pnpm', ['exec', 'tauri', ...args])
process.exit(code);
});
function setupFFMpegDlls(dev = false) {
if (!process.env.FFMPEG_DIR) throw new Error('Missing envvar FFMPEG_DIR');
const ffmpegBinDir = path.join(process.env.FFMPEG_DIR, 'bin');
const ffmpegDlls = fs.readdirSync(ffmpegBinDir).filter((file) => file.endsWith('.dll'));
function setupSharedLibs(sufix, binDir, dev = false) {
const sharedLibs = fs
.readdirSync(binDir)
.filter((file) => file.endsWith(`.${sufix}`) || file.includes(`.${sufix}.`));
let targetDir = path.join(workspace, 'apps/desktop/src-tauri');
if (dev) {
@ -174,9 +185,9 @@ function setupFFMpegDlls(dev = false) {
fs.mkdirSync(targetDir, { recursive: true });
}
// Copy all DLLs from the $FFMPEG_DIR/bin to targetDir
for (const dll of ffmpegDlls)
fs.copyFileSync(path.join(ffmpegBinDir, dll), path.join(targetDir, dll));
// Copy all shared libs to targetDir
for (const dll of sharedLibs)
fs.copyFileSync(path.join(binDir, dll), path.join(targetDir, dll));
return ffmpegDlls;
return sharedLibs;
}


@ -1,3 +1,5 @@
#![cfg(target_os = "android")]
use std::panic;
use jni::{


@ -1,3 +1,5 @@
#![cfg(target_os = "ios")]
use std::{
ffi::{CStr, CString},
os::raw::{c_char, c_void},


@ -69,7 +69,7 @@ thiserror = "1.0.48"
include_dir = { version = "0.7.3", features = ["glob"] }
async-trait = "^0.1.73"
image = "0.24.7"
webp = "0.2.5"
webp = "0.2.6"
tracing = { workspace = true }
tracing-subscriber = { workspace = true, features = ["env-filter"] }
async-stream = "0.3.5"


@ -14,7 +14,7 @@ ffmpeg-sys-next = "6.0.1"
tracing = { workspace = true }
thiserror = "1.0.48"
webp = "0.2.5"
webp = "0.2.6"
tokio = { workspace = true, features = ["fs", "rt"] }
[dev-dependencies]


@ -16,9 +16,7 @@ heif = ["dep:libheif-rs", "dep:libheif-sys"]
image = "0.24.7"
thiserror = "1.0.48"
resvg = "0.35.0"
[target.'cfg(any(not(any(target_os = "linux", target_os = "windows")), heif_images))'.dependencies]
# both of these added *default* bindgen features in 0.22.0 and 2.0.0+1.16.2 respectively
# this broke builds as we build our own libheif, so I disabled their default features
libheif-rs = { version = "0.22.0", default-features = false, optional = true }
libheif-sys = { version = "2.0.0+1.16.2", default-features = false, optional = true }
libheif-sys = { version = "2.0.0", default-features = false, optional = true }


@ -1,19 +1,13 @@
/// The size of 1MiB in bytes
const MIB: u64 = 1_048_576;
#[cfg(all(
feature = "heif",
any(not(any(target_os = "linux", target_os = "windows")), heif_images)
))]
#[cfg(feature = "heif")]
pub const HEIF_EXTENSIONS: [&str; 7] = ["heif", "heifs", "heic", "heics", "avif", "avci", "avcs"];
/// The maximum file size that an image can be in order to have a thumbnail generated.
///
/// This value is in MiB.
#[cfg(all(
feature = "heif",
any(not(any(target_os = "linux", target_os = "windows")), heif_images)
))]
#[cfg(feature = "heif")]
pub const HEIF_MAXIMUM_FILE_SIZE: u64 = MIB * 32;
pub const SVG_EXTENSIONS: [&str; 2] = ["svg", "svgz"];


@ -4,10 +4,7 @@ pub type Result<T> = std::result::Result<T, Error>;
#[derive(thiserror::Error, Debug)]
pub enum Error {
#[cfg(all(
feature = "heif",
any(not(any(target_os = "linux", target_os = "windows")), heif_images)
))]
#[cfg(feature = "heif")]
#[error("error with libheif: {0}")]
LibHeif(#[from] libheif_rs::HeifError),


@ -11,10 +11,7 @@ use std::{
path::Path,
};
#[cfg(all(
feature = "heif",
any(not(any(target_os = "linux", target_os = "windows")), heif_images)
))]
#[cfg(feature = "heif")]
use crate::heif::HeifHandler;
pub fn format_image(path: impl AsRef<Path>) -> Result<DynamicImage> {
@ -29,10 +26,7 @@ pub fn format_image(path: impl AsRef<Path>) -> Result<DynamicImage> {
fn match_to_handler(ext: &OsStr) -> Box<dyn ImageHandler> {
let mut handler: Box<dyn ImageHandler> = Box::new(GenericHandler {});
#[cfg(all(
feature = "heif",
any(not(any(target_os = "linux", target_os = "windows")), heif_images)
))]
#[cfg(feature = "heif")]
if consts::HEIF_EXTENSIONS
.iter()
.map(OsString::from)


@ -24,10 +24,7 @@ mod consts;
mod error;
mod formatter;
mod generic;
#[cfg(all(
feature = "heif",
any(not(any(target_os = "linux", target_os = "windows")), heif_images)
))]
#[cfg(feature = "heif")]
mod heif;
mod svg;


@ -1,7 +1,7 @@
{
"private": true,
"scripts": {
"postinstall": "pnpm exec node scripts/post-install.mjs",
"preprep": "pnpm exec node scripts/preprep.mjs",
"prep": "pnpm gen:prisma",
"postprep": "pnpm codegen",
"build": "turbo run build",
@ -40,7 +40,7 @@
"@cspell/dict-typescript": "^2.0.2",
"@ianvs/prettier-plugin-sort-imports": "^4.1.0",
"@storybook/react-vite": "^7.0.20",
"archive-wasm": "^1.5.1",
"archive-wasm": "^1.5.3",
"cspell": "^6.31.1",
"mustache": "^4.2.0",
"prettier": "^3.0.3",


@ -27,8 +27,8 @@ importers:
specifier: ^7.0.20
version: 7.0.20(react-dom@18.2.0)(react@18.2.0)(typescript@5.0.4)(vite@4.3.9)
archive-wasm:
specifier: ^1.5.1
version: 1.5.1
specifier: ^1.5.3
version: 1.5.3
cspell:
specifier: ^6.31.1
version: 6.31.1
@ -5812,7 +5812,7 @@ packages:
magic-string: 0.27.0
react-docgen-typescript: 2.2.2(typescript@5.0.4)
typescript: 5.0.4
vite: 4.3.9(@types/node@18.15.1)
vite: 4.3.9(less@4.2.0)
/@jridgewell/gen-mapping@0.3.3:
resolution: {integrity: sha512-HLhSWOLRi875zjjMG/r+Nv0oCW8umGb0BgEhyX3dDX3egwZtB8PqLnjz3yedt8R5StBrzcg4aBpnh8UA9D1BoQ==}
@ -8862,7 +8862,7 @@ packages:
remark-slug: 6.1.0
rollup: 3.28.1
typescript: 5.0.4
vite: 4.3.9(@types/node@18.15.1)
vite: 4.3.9(less@4.2.0)
transitivePeerDependencies:
- supports-color
@ -9462,7 +9462,7 @@ packages:
react: 18.2.0
react-docgen: 6.0.0-alpha.3
react-dom: 18.2.0(react@18.2.0)
vite: 4.3.9(@types/node@18.15.1)
vite: 4.3.9(less@4.2.0)
transitivePeerDependencies:
- '@preact/preset-vite'
- supports-color
@ -11179,8 +11179,8 @@ packages:
/aproba@2.0.0:
resolution: {integrity: sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==}
/archive-wasm@1.5.1:
resolution: {integrity: sha512-jETuTnp7lcJ4OQhqvyE5PHw8izUWDArj/TPzPL2hu4ylGwc9coIOT214uRTETF3uoQwPPJcV8GTa14yfTqrEhg==}
/archive-wasm@1.5.3:
resolution: {integrity: sha512-dxAKM63Y+1dXYIH7t3rgIj1/w/q0CdujmW3WIoIJVFdgAMhAdTcmbkPdw/Gj9xZ2J0DcdW5fZOukYEIrN6DYQg==}
engines: {node: '>=18'}
dev: true
@ -23523,6 +23523,7 @@ packages:
rollup: 3.28.1
optionalDependencies:
fsevents: 2.3.3
dev: true
/vite@4.3.9(less@4.2.0):
resolution: {integrity: sha512-qsTNZjO9NoJNW7KnOrgYwczm0WctJ8m/yqYAMAK9Lxt4SoySUfS5S8ia9K7JHpa3KEeMfyF8LoJ3c5NeBJy6pg==}


@ -15,9 +15,10 @@ import {
FFMPEG_WORKFLOW,
getConst,
getSuffix,
LIBHEIF_SUFFIX,
LIBHEIF_WORKFLOW,
PDFIUM_SUFFIX,
PROTOC_SUFFIX,
TAURI_CLI_SUFFIX
PROTOC_SUFFIX
} from './suffix.mjs';
import { which } from './which.mjs';
@ -160,40 +161,37 @@ export async function downloadFFMpeg(machineId, framework, branches) {
}
/**
* Workaround while https://github.com/tauri-apps/tauri/pull/3934 is not available in a Tauri stable release
* Download and extract libheif libs for heif thumbnails
* @param {string[]} machineId
* @param {string} framework
* @param {string[]} branches
*/
export async function downloadPatchedTauriCLI(machineId, framework, branches) {
console.log('Dowloading patched tauri CLI...');
export async function downloadLibHeif(machineId, framework, branches) {
const workflow = getConst(LIBHEIF_WORKFLOW, machineId);
if (workflow == null) return;
const tauriCliSuffix = getSuffix(TAURI_CLI_SUFFIX, machineId);
if (tauriCliSuffix == null) throw new Error('NO_TAURI_CLI');
console.log('Downloading LibHeif...');
const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId);
if (libHeifSuffix == null) throw new Error('NO_LIBHEIF');
let found = false;
for await (const artifact of getGhWorkflowRunArtifacts(
SPACEDRIVE_REPO,
'tauri-patched-cli-js.yml',
branches
)) {
if (!tauriCliSuffix.test(artifact.name)) continue;
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
if (!libHeifSuffix.test(artifact.name)) continue;
try {
await extractTo(
await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id),
path.join(framework, 'bin'),
{
chmod: 0o700,
overwrite: true
}
);
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id);
await extractTo(data, framework, {
chmod: 0o600,
recursive: true,
overwrite: true
});
found = true;
break;
} catch (error) {
console.warn('Failed to download patched tauri cli.js, re-trying...');
console.warn('Failed to download LibHeif, re-trying...');
if (__debug) console.error(error);
}
}
if (!found) throw new Error('NO_TAURI_CLI');
if (!found) throw new Error('NO_LIBHEIF');
}


@ -1,7 +1,54 @@
import { exec as execCb } from 'node:child_process';
import * as fs from 'node:fs/promises';
import * as path from 'node:path';
import { env } from 'node:process';
import { promisify } from 'node:util';
const REF_REGEX = /ref:\s+refs\/heads\/(?<branch>\s+)/;
const __debug = env.NODE_ENV === 'debug';
const exec = promisify(execCb);
/**
* @param {string} repoPath
* @returns {string?}
*/
async function getRemoteBranchName(repoPath) {
let branchName;
try {
branchName = (await exec('git symbolic-ref --short HEAD', { cwd: repoPath })).stdout.trim();
if (!branchName) throw 'Empty local branch name';
} catch (error) {
if (__debug) {
console.warn(`Failed to read git local branch name`);
console.error(error);
}
return null;
}
let remoteBranchName;
try {
remoteBranchName = (
await exec(`git for-each-ref --format="%(upstream:short)" refs/heads/${branchName}`, {
cwd: repoPath
})
).stdout.trim();
const [remote, branch] = remoteBranchName.split('/');
if (!branch) throw 'Empty remote branch name';
remoteBranchName = branch;
} catch (error) {
if (__debug) {
console.warn(`Failed to read git remote branch name`);
console.error(error);
}
return null;
}
return remoteBranchName;
}
// https://stackoverflow.com/q/3651860#answer-67151923
const REF_REGEX = /ref:\s+refs\/heads\/(?<branch>[^\s\x00-\x1F\:\?\[\\\^\~]+)/;
const GITHUB_REF_REGEX = /^refs\/heads\//;
/**
* @param {string} repoPath
@ -10,15 +57,30 @@ const REF_REGEX = /ref:\s+refs\/heads\/(?<branch>\s+)/;
export async function getGitBranches(repoPath) {
const branches = ['main', 'master'];
let head;
try {
head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' });
} catch {
return branches;
if (env.GITHUB_HEAD_REF) {
branches.unshift(env.GITHUB_HEAD_REF);
} else if (env.GITHUB_REF) {
branches.unshift(env.GITHUB_REF.replace(GITHUB_REF_REGEX, ''));
}
const match = REF_REGEX.exec(head);
if (match?.groups?.branch) branches.unshift(match.groups.branch);
const remoteBranchName = await getRemoteBranchName(repoPath);
if (remoteBranchName) {
branches.unshift(remoteBranchName);
} else {
let head;
try {
head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' });
} catch (error) {
if (__debug) {
console.warn(`Failed to read git HEAD file`);
console.error(error);
}
return branches;
}
const match = REF_REGEX.exec(head);
if (match?.groups?.branch) branches.unshift(match.groups.branch);
}
return branches;
}

scripts/machineId.mjs Normal file

@ -0,0 +1,60 @@
import { exec as execCb } from 'node:child_process';
import * as os from 'node:os';
import { env } from 'node:process';
import { promisify } from 'node:util';
const __debug = env.NODE_ENV === 'debug';
let libc = 'glibc';
if (os.type() === 'Linux') {
try {
const exec = promisify(execCb);
if ((await exec('ldd /bin/ls')).stdout.includes('musl')) {
libc = 'musl';
}
} catch (error) {
if (__debug) {
console.warn(`Failed to check libc type`);
console.error(error);
}
}
}
const OS_TYPE = {
darwin: 'Darwin',
windows: 'Windows_NT',
linux: 'Linux'
};
export function getMachineId() {
let machineId;
/**
* Possible TARGET_TRIPLE:
* x86_64-apple-darwin
* aarch64-apple-darwin
* x86_64-pc-windows-msvc
* aarch64-pc-windows-msvc
* x86_64-unknown-linux-gnu
* x86_64-unknown-linux-musl
* aarch64-unknown-linux-gnu
* aarch64-unknown-linux-musl
* armv7-unknown-linux-gnueabihf
*/
if (env.TARGET_TRIPLE) {
const target = env.TARGET_TRIPLE.split('-');
const osType = OS_TYPE[target[2]];
if (!osType) throw new Error(`Unknown OS type: ${target[2]}`);
if (!target[0]) throw new Error(`Unknown machine type: ${target[0]}`);
machineId = [osType, target[0]];
if (machineId[0] === 'Linux') machineId.push(target[3].includes('musl') ? 'musl' : 'glibc');
} else {
// Current machine identifiers
machineId = [os.type(), os.machine()];
if (machineId[0] === 'Linux') machineId.push(libc);
}
return machineId;
}


@ -1,13 +0,0 @@
import { exec as execCb } from 'node:child_process';
import { promisify } from 'node:util';
const exec = promisify(execCb);
/** @returns {Promise<boolean>} */
export async function isMusl() {
try {
return (await exec('ldd /bin/ls')).stdout.includes('musl');
} catch {
return false;
}
}


@ -7,15 +7,13 @@ import { fileURLToPath } from 'node:url';
import { promisify } from 'node:util';
import mustache from 'mustache';
import { downloadFFMpeg, downloadPatchedTauriCLI, downloadPDFium, downloadProtc } from './deps.mjs';
import { downloadFFMpeg, downloadLibHeif, downloadPDFium, downloadProtc } from './deps.mjs';
import { getGitBranches } from './git.mjs';
import { isMusl } from './musl.mjs';
import { getMachineId } from './machineId.mjs';
import { which } from './which.mjs';
umask(0o026);
if (env.IGNORE_POSTINSTALL === 'true') process.exit(0);
if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) {
console.error('Bash for windows is not supported, please execute this from Powershell or CMD');
process.exit(255);
@ -31,8 +29,7 @@ const __dirname = path.dirname(__filename);
const __root = path.resolve(path.join(__dirname, '..'));
// Current machine identifiers
const machineId = [os.type(), os.machine()];
if (machineId[0] === 'Linux') machineId.push((await isMusl()) ? 'musl' : 'glibc');
const machineId = getMachineId();
// Basic dependeny check
if (
@ -64,7 +61,7 @@ await Promise.all(
);
// Download all necessary external dependencies
const deps = [
await Promise.all([
downloadProtc(machineId, framework).catch((e) => {
console.error(
'Failed to download protoc, this is required for Spacedrive to compile. ' +
@ -85,21 +82,15 @@ const deps = [
'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
);
throw e;
}),
downloadLibHeif(machineId, framework, branches).catch((e) => {
console.error(
'Failed to download libheif. This is probably a bug, please open a issue with you system info at: ' +
'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
);
throw e;
})
];
if (machineId[0] === 'Darwin')
deps.push(
downloadPatchedTauriCLI(machineId, framework, branches).catch((e) => {
console.error(
'Failed to download patched tauri CLI. This is probably a bug, please open a issue with you system info at: ' +
'https://github.com/spacedriveapp/spacedrive/issues/new/choose'
);
throw e;
})
);
await Promise.all(deps).catch((e) => {
]).catch((e) => {
if (__debug) console.error(e);
process.exit(1);
});
@ -141,8 +132,14 @@ try {
process.exit(1);
}
// Setup macOS Frameworks
if (machineId[0] === 'Darwin') {
if (machineId[0] === 'Linux') {
// Setup Linux libraries
const libDir = path.join(__root, 'target', 'lib');
await fs.rm(libDir, { force: true, recursive: true });
await fs.mkdir(libDir, { recursive: true, mode: 0o751 });
await fs.symlink(path.join(framework, 'lib'), path.join(__root, 'target', 'lib', 'spacedrive'));
} else if (machineId[0] === 'Darwin') {
// Setup macOS Frameworks
try {
console.log('Setup Frameworks & Sign libraries...');
const ffmpegFramework = path.join(framework, 'FFMpeg.framework');


@ -46,7 +46,7 @@ if [ "${CI:-}" != "true" ]; then
'https://pnpm.io/installation'
fi
if ! has rustc cargo; then
if ! has rustup rustc cargo; then
err 'Rust was not found.' \
"Ensure the 'rustc' and 'cargo' binaries are in your \$PATH." \
'https://rustup.rs'
@ -127,7 +127,7 @@ case "$(uname)" in
libgstreamer-plugins-bad1.0-dev
# Bindgen dependencies - it's used by a dependency of Spacedrive
set -- "$@" pkg-config clang
set -- "$@" llvm-dev libclang-dev clang
sudo apt-get -y update
sudo apt-get -y install "$@"
@ -139,14 +139,14 @@ case "$(uname)" in
set -- base-devel curl wget file patchelf openssl gtk3 librsvg webkit2gtk libayatana-appindicator
# FFmpeg dependencies
set -- "$@" libheif ffmpeg
set -- "$@" ffmpeg
# Webkit2gtk requires gstreamer plugins for video playback to work
set -- "$@" gst-libav gst-plugins-bad gst-plugins-base gst-plugins-good gst-plugins-ugly \
gst-plugin-pipewire gstreamer-vaapi
# Bindgen dependencies - it's used by a dependency of Spacedrive
set -- "$@" pkgconf clang
set -- "$@" clang
sudo pacman -Sy --needed "$@"
elif has dnf; then
@ -180,12 +180,12 @@ case "$(uname)" in
streamer1-plugins-bad-free-extras
# Bindgen dependencies - it's used by a dependency of Spacedrive
set -- "$@" clang pkgconf clang-devel
set -- "$@" clang clang-devel
sudo dnf install "$@"
# FFmpeg dependencies
if ! sudo dnf install libheif-devel ffmpeg ffmpeg-devel; then
if ! sudo dnf install ffmpeg ffmpeg-devel; then
err 'We were unable to install the FFmpeg and FFmpeg-devel packages.' \
'This is likely because the RPM Fusion free repository is not enabled.' \
'https://docs.fedoraproject.org/en-US/quick-docs/setup_rpmfusion'


@ -56,14 +56,27 @@ export const FFMPEG_WORKFLOW = {
Windows_NT: 'ffmpeg-windows.yml'
};
export const TAURI_CLI_SUFFIX = {
Darwin: {
x86_64: 'x86_64-apple-darwin',
arm64: 'aarch64-apple-darwin',
aarch64: 'aarch64-apple-darwin'
export const LIBHEIF_SUFFIX = {
Linux: {
x86_64: {
musl: 'x86_64-linux-musl',
glibc: 'x86_64-linux-gnu'
},
arm64: {
musl: 'aarch64-linux-musl',
glibc: 'aarch64-linux-gnu'
},
aarch64: {
musl: 'aarch64-linux-musl',
glibc: 'aarch64-linux-gnu'
}
}
};
export const LIBHEIF_WORKFLOW = {
Linux: 'libheif-linux.yml'
};
/**
* @param {Record<string, unknown>} constants
* @param {string[]} identifiers