[ENG-1184, ENG-1286, ENG-1330] Rework native dependencies (+ deb fixes) (#1685)

* Almost working

* Downgrade libplacebo
 - FFMpeg 6.0 uses some now removed deprecated functions

* Use -Oz for zimg

* Fix CI script to run the new ffmpeg build script

* Fix heif step name + Ignore docker cache while building in CI

* Fix Opencl build on linux

* Fix adding incorrect -target argument to linker
 - Update zig for windows target

* Disable opengl for ffmpeg, it only uses it as an outdev, not for processing
 - Disable opengl and directx for libplacebo, ffmpeg only supports vulkan when using it
 - Add WIN32_LEAN_AND_MEAN to global cflags to optimize windows api usage
 - Fix 99-heif.sh incorrect bsdtar flag

* Remove WIN32_LEAN_AND_MEAN from global CFLAGS as that was breaking OpenCL build
 - Fix Dockerfile step for cleaning up the out dir
 - Improve licensing handling

* x86_64 windows and linux builds are working

* Fix aarch64 build for windows and linux

* Fix symbol visibility in linux builds
 - Fix soxr failing to download due to sourcefourge
 - Only patch zimg on windows targets
 - Tell cmake to hide libheif symbols

* Fix Linux .so rpath
 - Add lzo dependency
 - Publish source for the built libs
 - Add warning for missing nasm in tauri.mjs
 - Remove ffmpeg install from setup.sh
 - Add download logic for our linux ffmpeg bundle in preprep.mjs

* Remove jobs, docker doesn't support this

* Fix typing

* Change ffmpeg references to native deps
 - Rename FFMpeg.framework to Spacedrive.framework
 - Centralize the macOS native deps build with the windows and linux one
 - Change the preprep script to only download our native deps
 - Remove old macOS ffmpeg build scripts

* Compress native deps before creating github artifact
 - The zip implementation for github artifact does not maintain symlinks and permissions
 - Remove conditional protoc, it is now always included

* Don't strip dylibs, it was breaking them
 - Only download macOS Framework for darwin targets
 - Fix preprep script
 - Improve README.md for native-deps
 - Fix not finding native-deps src

* Attempt to fix macOS dylib

* Fix macOS dylibs
 - Replace lld.ld64 with apple's own linker
 - Add stages for building apple's compiler tools to use instead of LLVM ones

* Ensure sourced file exists

* All targets should build now
 - Fix environment sourcing in build.sh
 - Some minor improvements to cc.sh
 - Fix incorrect flag in zlib.sh
 - Improve how -f[...] flags are passed to compiler and linker
 - Add more stack hardening flags

* We now can support macOS 11.0 on arm64

* Improve macOS Framework generation
 - Remove installed unused deps
 - Improve cleanup and organization logic in Dockerfile last step
 - Move libav* .dll.a to .lib to fix missing files in windows target
 - Remove apple tools from /srv folder after installation to prevent their files from being copied by other stage steps
 - Create all the necessary symlinks for the macOS targets while building
 - Remove symlink logic for macOS target from preprep.mjs

* Remove native-deps from spacedrive repo
 - It now resides in https://github.com/spacedriveapp/native-deps
 - Modify preprep script to download native-deps from new location
 - Remove Github API code from scripts (not needed anymore)
 - Add flock.mjs to allow running tauri.mjs cleanup as soon as cargo finishes building in linux

* Handle flock not present in system
 - Allow macOS to try using flock

* Fix preprep on macOS

* Add script that patch deb to fix errors and warnings raised by lintian

* Fix ctrl+c/ctrl+v typo

* Remove gstreamer1.0-gtk3 from deb dependencies

* eval is evil

* Handle tauri build release with an explicit target in fix-deb.sh

* Preserve environment variables when re-executing fix-deb with sudo

* Only execute fix-deb.sh when building a deb bundle

* Improvements fix-deb.sh

* Improve setup.sh (Add experimental alpine support)
This commit is contained in:
Vítor Vasconcellos 2023-11-17 16:20:14 -03:00 committed by GitHub
parent 0457643179
commit 2e6e00bc6d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
63 changed files with 548 additions and 3378 deletions

View file

@ -1,10 +1,6 @@
[env]
{{#protoc}}
PROTOC = { force = true, value = "{{{protoc}}}" }
{{/protoc}}
{{^isLinux}}
FFMPEG_DIR = { force = true, value = "{{{nativeDeps}}}" }
{{/isLinux}}
OPENSSL_STATIC = { force = true, value = "1" }
OPENSSL_NO_VENDOR = { force = true, value = "0" }
OPENSSL_RUST_USE_NASM = { force = true, value = "1" }

View file

@ -1,87 +0,0 @@
# Cross-compiles FFMpeg.framework for macOS (x86_64 and arm64) inside an
# osxcross-based Linux container. Sources are fetched with ADD + tar;
# pre-built macOS dependencies come from the osxcross MacPorts wrapper.
# Version pins and dependency lists shared by every stage below.
ARG FAKE_DEPS="gettext-runtime libiconv ncurses" \
FFMPEG_DEPS="brotli bzip2 dav1d libde265 libjxl libopus libpng libvorbis libvpx-devel openjpeg \
soxr xz zimg" \
LIBWEBP_VERSION=1.3.2 \
FFMPEG_VERSION=6.0 \
LIBHEIF_VERSION=1.16.2

FROM vvasconcellos/osxcross:12.3-564e2b9-8 as base

SHELL ["/bin/bash", "-eux", "-o", "pipefail", "-c"]

WORKDIR /srv

# Fetch and unpack libwebp sources into /srv/libwebp
ARG LIBWEBP_VERSION
ADD "https://github.com/webmproject/libwebp/archive/refs/tags/v${LIBWEBP_VERSION}.tar.gz" ./
RUN tar -xf "v${LIBWEBP_VERSION}.tar.gz" && rm "v${LIBWEBP_VERSION}.tar.gz" \
  && \
  mv "/srv/libwebp-${LIBWEBP_VERSION}" /srv/libwebp

# Fetch and unpack libheif sources into /srv/libheif
ARG LIBHEIF_VERSION
ADD "https://github.com/strukturag/libheif/releases/download/v${LIBHEIF_VERSION}/libheif-${LIBHEIF_VERSION}.tar.gz" ./
RUN tar -xf "libheif-${LIBHEIF_VERSION}.tar.gz" && rm "libheif-${LIBHEIF_VERSION}.tar.gz" \
  && \
  mv "/srv/libheif-${LIBHEIF_VERSION}" /srv/libheif

# Fetch ffmpeg sources and apply macports patches needed for the macOS build
ARG FFMPEG_VERSION
ADD "https://ffmpeg.org/releases/ffmpeg-${FFMPEG_VERSION}.tar.xz" ./
RUN tar -xf "ffmpeg-${FFMPEG_VERSION}.tar.xz" && rm "ffmpeg-${FFMPEG_VERSION}.tar.xz" \
  && \
  mv "/srv/ffmpeg-${FFMPEG_VERSION}" /srv/ffmpeg \
  && \
  cd /srv/ffmpeg \
  && \
  for patch in \
  'https://github.com/macports/macports-ports/raw/0e62a6d66fbaa7faf7b4eb9029647d3d5651fb2e/multimedia/ffmpeg6/files/patch-libavcodec-audiotoolboxenc.c.diff' \
  'https://github.com/macports/macports-ports/raw/0e62a6d66fbaa7faf7b4eb9029647d3d5651fb2e/multimedia/ffmpeg6/files/patch-avutil-builtin-available.diff' \
  'https://github.com/macports/macports-ports/raw/0e62a6d66fbaa7faf7b4eb9029647d3d5651fb2e/multimedia/ffmpeg6/files/patch-libavcodec-profvidworkflow.diff' \
  ; do curl -LSs "$patch" | patch -p0; done

# ---
# Intel build stage
FROM base as x86_64

# Fake Install macOS dependencies not required to build ffmpeg
ARG FAKE_DEPS
# hadolint ignore=SC2086
RUN osxcross-macports fake-install $FAKE_DEPS

# Install macOS dependencies required to build ffmpeg
ARG FFMPEG_DEPS
# hadolint ignore=SC2086
RUN --mount=type=cache,id=macports-x86_64,target=/opt/osxcross/macports/cache \
  osxcross-macports install $FFMPEG_DEPS

# Build ffmpeg
RUN --mount=src=build.sh,dst=/srv/build.sh /srv/build.sh x86_64 "$MACOSX_SDK"

# ---
# Apple Silicon build stage
FROM base as aarch64

# gas-preprocessor is required to assemble ffmpeg's arm asm with clang
# https://ffmpeg.org/pipermail/ffmpeg-user/2016-January/030202.html
ADD https://raw.githubusercontent.com/yuvi/gas-preprocessor/master/gas-preprocessor.pl /usr/local/bin/
RUN chmod +x /usr/local/bin/gas-preprocessor.pl

# Update min macOS version for arm64
# libbrotli macports precompiled binaries are only available for macOS 11.2+
ENV OSX_VERSION_MIN="11.2" \
  MACOSX_DEPLOYMENT_TARGET="11.2"

# Fake Install macOS dependencies not required to build ffmpeg
ARG FAKE_DEPS
# hadolint ignore=SC2086
RUN osxcross-macports fake-install --arm64 $FAKE_DEPS

# Install macOS dependencies required to build ffmpeg
ARG FFMPEG_DEPS
# hadolint ignore=SC2086
RUN --mount=type=cache,id=macports-arm64,target=/opt/osxcross/macports/cache \
  osxcross-macports install --arm64 $FFMPEG_DEPS

# Build ffmpeg
RUN --mount=src=build.sh,dst=/srv/build.sh /srv/build.sh aarch64 "$MACOSX_SDK"

# ---
# Export stage: only the finished frameworks end up in the build output
FROM scratch
COPY --from=x86_64 /FFMpeg.framework /ffmpeg/x86_64/FFMpeg.framework
COPY --from=aarch64 /FFMpeg.framework /ffmpeg/aarch64/FFMpeg.framework

View file

@ -1,25 +0,0 @@
# FFMpeg.framework
## Build instructions
To build `FFMpeg.framework` a `docker` or `podman` installation is required.
It is recommended to enable [`BuildKit`](https://docs.docker.com/build/buildkit/#getting-started) in docker.
Just run the following inside this directory:
```sh
$> docker build -o . .
```
or
```sh
$> podman build -o . .
```
After some time (it takes around 15min in Github CI) a directory named `ffmpeg` will show up with both an `x86_64` and an `arm64` directory inside,
both will have a `FFMpeg.framework` for their respective architecture.
### How does the build process work?
The `FFMpeg.framework` is built inside an Alpine Linux container that contains a copy of [`osxcross`](https://github.com/tpoechtrager/osxcross), which is a cross toolchain that enables building native macOS binaries on Linux. Most of the build process is similar to how you would do it in macOS. The main advantage of using `osxcross` is that it handles the configuration for both `x86` and `arm64` and all the required compiling tools without the need for Xcode and with a more direct and easier management of macOS SDKs. Any required macOS dependencies are handled by a MacPorts-compatible package manager.

View file

@ -1,386 +0,0 @@
#!/usr/bin/env bash
# This script builds ffmpeg for macOS using osxcross.
# This script is heavily influenced by:
# https://github.com/FFmpeg/FFmpeg/blob/ea3d24bbe3c58b171e55fe2151fc7ffaca3ab3d2/configure
# https://github.com/GerardSoleCa/macports-ports/blob/6f646dfaeb58ccb4a8b877df1ae4eecc4650fac7/multimedia/ffmpeg-upstream/Portfile
# https://github.com/arthenica/ffmpeg-kit/blob/47f85fa9ea3f8c34f3c817b87d8667b61b87d0bc/scripts/apple/ffmpeg.sh
# https://github.com/zimbatm/ffmpeg-static/blob/3206c0d74cd129c2ddfc3e928dcd3ea317d54857/build.sh

set -e          # exit immediate if an error occurs in a pipeline
set -E          # make commands inherit ERR trap
set -u          # don't allow not set variables to be utilized
set -o pipefail # trace ERR through pipes
set -o errtrace # trace ERR through 'time command' and other functions

# Usage: build.sh <target-arch> <macos-version>
if [ "$#" -ne 2 ]; then
  echo "Usage: $0 <target-arch> <macos-version>" >&2
  exit 1
fi

# MACOSX_DEPLOYMENT_TARGET must come from the environment (set by the Dockerfile)
if [ -z "$MACOSX_DEPLOYMENT_TARGET" ]; then
  echo "You must set MACOSX_DEPLOYMENT_TARGET first." >&2
  exit 1
fi

ARCH="$1"
MACOS_VERSION="$2"

# The positional parameters are re-used as a growing list of extra
# per-architecture ffmpeg configure flags from here on.
set -- # Clear command line arguments
if [ "$ARCH" = "x86_64" ]; then
  TARGET_CPU="x86_64"
  TARGET_ARCH="x86_64"
  set -- --enable-x86asm
elif [ "$ARCH" = "aarch64" ]; then
  TARGET_CPU="armv8"
  TARGET_ARCH="aarch64"
  set -- --enable-neon --enable-asm
else
  echo "Unsupported architecture: $ARCH" >&2
  exit 1
fi

# Get darwin version and build compiler triple
# (derived from the osxcross clang symlink name, e.g. aarch64-apple-darwin21.4)
DARWIN_VERSION="$(basename "$(realpath "$(command -v "oa64-clang")")" | awk -F- '{print $3}')"
TRIPLE="${ARCH}-apple-${DARWIN_VERSION}"

# Check macOS clang exists
CC="${TRIPLE}-clang"
if ! command -v "$CC" 2>/dev/null; then
  echo "$CC not found" >&2
  exit 1
fi

# Get osxcross root directory
_osxcross_root="$(dirname "$(dirname "$(command -v "$CC")")")"

# Check macports root exists
_macports_root="${_osxcross_root}/macports/pkgs/opt/local"
if ! [ -d "$_macports_root" ]; then
  echo "macports root not found: $_macports_root" >&2
  exit 1
fi
# Expose the macports prefix at its canonical macOS location
ln -s "$_macports_root" /opt/local

# Check SDK exists
_sdk="${_osxcross_root}/SDK/MacOSX${MACOS_VERSION}.sdk"
if ! [ -d "$_sdk" ]; then
  echo "Invalid MacOS version: $MACOS_VERSION" >&2
  exit 1
fi

# Gather all SDK libs (one "<name>.dylib" per line, sorted and de-duplicated);
# used later to decide which dependencies can be relinked against /usr/lib.
# NOTE(review): variable name "_skd_libs" looks like a typo of "_sdk_libs",
# but it is referenced with this spelling further down, so it is kept as-is.
_skd_libs="$(
  while IFS= read -r -d '' _lib; do
    _lib="${_lib#"${_sdk}/usr/lib/"}"
    _lib="${_lib%.*}"
    printf '%s.dylib\n' "$_lib"
  done < <(find "${_sdk}/usr/lib" \( -name '*.tbd' -o -name '*.dylib' \) -print0) \
    | sort -u
)"

# Export the cross-toolchain binaries for autotools-style builds.
setup_cross_env() {
  export CC
  export LD="${TRIPLE}-ld"
  export AR="${TRIPLE}-ar"
  export CXX="${TRIPLE}-clang++"
  export STRIP="${TRIPLE}-strip"
  export CMAKE="${TRIPLE}-cmake"
  export RANLIB="${TRIPLE}-ranlib"
  export PKG_CONFIG="${TRIPLE}-pkg-config"
}
# Change cwd to libwebp source root
CDPATH='' cd -- /srv/libwebp

# Configure libwebp
# (built in a subshell so the exported cross-env vars don't leak into the
# later cmake/ffmpeg configure steps, which set their own toolchain flags)
(
  setup_cross_env

  ./autogen.sh
  ./configure \
    --host="$TRIPLE" \
    --prefix="/opt/local" \
    --disable-shared \
    --enable-static \
    --with-sysroot="${_sdk}" \
    --with-pic \
    --enable-everything \
    --disable-sdl \
    --disable-png \
    --disable-jpeg \
    --disable-tiff \
    --disable-gif

  # Build libwebp
  make -j"$(nproc)" install
)
# Create a tmp TARGET_DIR
# (staging prefix: libheif and ffmpeg install here, then everything is
# post-processed into the .framework at the end of the script)
TARGET_DIR="$(mktemp -d -t target-XXXXXXXXXX)"

# Change cwd to libheif source root
mkdir -p /srv/libheif/build
CDPATH='' cd -- /srv/libheif/build

# Configure libheif (shared lib, plugins disabled, codecs linked in statically)
"${TRIPLE}-cmake" \
  -GNinja \
  -DCMAKE_BUILD_TYPE=Release \
  -DCMAKE_INSTALL_PREFIX="${TARGET_DIR}" \
  -DCMAKE_INSTALL_BINDIR="${TARGET_DIR}/bin" \
  -DCMAKE_INSTALL_LIBDIR="${TARGET_DIR}/lib" \
  -DCMAKE_TOOLCHAIN_FILE="${_osxcross_root}/toolchain.cmake" \
  -DLIBSHARPYUV_INCLUDE_DIR="${_macports_root}/include/webp" \
  -DBUILD_TESTING=OFF \
  -DBUILD_SHARED_LIBS=ON \
  -DWITH_DAV1D=ON \
  -DWITH_DAV1D_PLUGIN=OFF \
  -DWITH_LIBDE265=ON \
  -DWITH_LIBDE265_PLUGIN=OFF \
  -DWITH_LIBSHARPYUV=ON \
  -DWITH_FUZZERS=OFF \
  -DWITH_EXAMPLES=OFF \
  -DWITH_UNCOMPRESSED_CODEC=ON \
  -DWITH_REDUCED_VISIBILITY=ON \
  -DWITH_DEFLATE_HEADER_COMPRESSION=ON \
  -DENABLE_PLUGIN_LOADING=OFF \
  -DENABLE_MULTITHREADING_SUPPORT=ON \
  ..

# Build libheif
ninja -j"$(nproc)" install
# Change cwd to ffmpeg source root
CDPATH='' cd -- /srv/ffmpeg

# Save FFmpeg version (the VERSION file contents, whitespace-trimmed)
FFMPEG_VERSION="$(xargs printf '%s' <VERSION)"

# Configure FFMpeg.
# NOTICE: This isn't autotools
# TODO: Metal suport is disabled because no open source toolchain is available for it
# TODO: Maybe try macOS own metal compiler under darling? https://github.com/darlinghq/darling/issues/326
# NOTE(review): --arch is passed twice (ARCH and TARGET_ARCH); both expand to
# the same value for every supported target, the later one wins.
# The trailing "$@" appends the per-architecture flags chosen at the top
# of the script (x86asm vs neon/asm).
./configure \
  --nm="${TRIPLE}-nm" \
  --ar="${TRIPLE}-ar" \
  --as="$CC" \
  --ld="$CC" \
  --cc="$CC" \
  --cxx="${TRIPLE}-clang++" \
  --arch="${ARCH}" \
  --objcc="$CC" \
  --strip="${TRIPLE}-strip" \
  --dep-cc="$CC" \
  --sysroot="$_sdk" \
  --cross-prefix="${TRIPLE}-" \
  --ranlib="${TRIPLE}-ranlib" \
  --prefix="${TARGET_DIR}" \
  --arch="${TARGET_ARCH}" \
  --cpu="${TARGET_CPU}" \
  --target-os=darwin \
  --pkg-config="${TRIPLE}-pkg-config" \
  --pkg-config-flags="--static" \
  --extra-ldflags="-Bstatic -headerpad_max_install_names" \
  --extra-ldexeflags="-Bstatic" \
  --extra-cflags=-DLIBTWOLAME_STATIC \
  --extra-cxxflags="-xc++-header" \
  --disable-static \
  --disable-debug \
  --disable-doc \
  --disable-htmlpages \
  --disable-txtpages \
  --disable-manpages \
  --disable-podpages \
  --disable-indevs \
  --disable-outdevs \
  --disable-parser=avs2 \
  --disable-parser=avs3 \
  --disable-postproc \
  --disable-programs \
  --disable-libwebp \
  --disable-sdl2 \
  --disable-metal \
  --disable-network \
  --disable-openssl \
  --disable-schannel \
  --disable-securetransport \
  --disable-xlib \
  --disable-libxcb \
  --disable-libxcb-shm \
  --disable-libxcb-xfixes \
  --disable-libxcb-shape \
  --disable-libv4l2 \
  --disable-v4l2-m2m \
  --disable-vulkan \
  --disable-cuda-llvm \
  --disable-w32threads \
  --disable-xmm-clobber-test \
  --disable-neon-clobber-test \
  --enable-appkit \
  --enable-audiotoolbox \
  --enable-avcodec \
  --enable-avfilter \
  --enable-avformat \
  --enable-avfoundation \
  --enable-bzlib \
  --enable-coreimage \
  --enable-cross-compile \
  --enable-gpl \
  --enable-gray \
  --enable-iconv \
  --enable-inline-asm \
  --enable-libdav1d \
  --enable-libjxl \
  --enable-libopenjpeg \
  --enable-libopus \
  --enable-libsoxr \
  --enable-libvorbis \
  --enable-libvpx \
  --enable-libzimg \
  --enable-lto \
  --enable-lzma \
  --enable-opencl \
  --enable-opengl \
  --enable-optimizations \
  --enable-pic \
  --enable-postproc \
  --enable-pthreads \
  --enable-small \
  --enable-shared \
  --enable-swscale \
  --enable-version3 \
  --enable-videotoolbox \
  --enable-zlib \
  "$@"

# Build FFMpeg
make -j"$(nproc)" install
# Create FFMpeg.framework
# https://developer.apple.com/library/archive/documentation/MacOSX/Conceptual/BPFrameworks/Concepts/FrameworkAnatomy.html

# Create the framework basic directory structure
_framework="FFMpeg.framework"
mkdir -p "/${_framework}/Versions/A/"{Headers,Resources,Libraries}

# Copy licenses to Framework
_framework_docs="/${_framework}/Versions/A/Resources/English.lproj/Documentation"
mkdir -p "$_framework_docs"

# FFMpeg license (cwd is still /srv/ffmpeg at this point)
cp -avt "$_framework_docs" COPYING* LICENSE*

# Dependency licenses which are not covered by FFMpeg licenses
(cd "${_macports_root}/share/doc" \
  && cp -avt "$_framework_docs" --parents \
    zimg/COPYING \
    libvorbis/COPYING)
(cd /srv && cp -avt "$_framework_docs" --parents libwebp/COPYING)

# libvorbis, libogg share the same license
ln -s libvorbis "${_framework_docs}/libogg"

# Create required framework symlinks
ln -s A "/${_framework}/Versions/Current"
ln -s Versions/Current/Headers "/${_framework}/Headers"
ln -s Versions/Current/Resources "/${_framework}/Resources"
ln -s Versions/Current/Libraries "/${_framework}/Libraries"

# Framework Info.plist (based on macOS internal OpenGL.framework Info.plist)
cat <<EOF >"/${_framework}/Versions/Current/Resources/Info.plist"
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>CFBundleDevelopmentRegion</key>
<string>English</string>
<key>CFBundleExecutable</key>
<string>FFMpeg</string>
<key>CFBundleGetInfoString</key>
<string>FFMpeg ${FFMPEG_VERSION}</string>
<key>CFBundleIdentifier</key>
<string>com.spacedrive.ffmpeg</string>
<key>CFBundleInfoDictionaryVersion</key>
<string>6.0</string>
<key>CFBundleName</key>
<string>FFMpeg</string>
<key>CFBundlePackageType</key>
<string>FMWK</string>
<key>CFBundleShortVersionString</key>
<string>${FFMPEG_VERSION}</string>
<key>CFBundleSignature</key>
<string>????</string>
<key>CFBundleVersion</key>
<string>${FFMPEG_VERSION}</string>
</dict>
</plist>
EOF
# Process built libraries to be compatible with the Framework structure.
# Walks the dependency graph of every built .dylib (BFS over the positional
# parameters used as a work queue), copies transitively-needed macports libs
# in, and rewrites install names so the framework is self-contained.
cd "$TARGET_DIR/lib"

# Move all symlinks of built libraries to Framework
while IFS= read -r -d '' _lib; do
  # Copy symlinks to the output directory
  cp -Ppv "$_lib" "/${_framework}/Libraries/${_lib#./}"
  rm "$_lib"
done < <(find . -type l -print0)

# Populate queue with built libraries
set -- # Clear command line arguments
while IFS= read -r -d '' _lib; do
  set -- "$@" "${_lib#./}"
done < <(find . -name '*.dylib' -print0)

while [ $# -gt 0 ]; do
  # Loop through each of the library's dependencies
  # (otool -L: skip the first two lines, keep only the install-name column)
  for _dep in $("${TRIPLE}-otool" -L "$1" | tail -n+3 | awk '{print $1}'); do
    case "$_dep" in
      # Built libs inter dependency
      "${TARGET_DIR}/lib/"*)
        _linker_path="@loader_path/${_dep#"${TARGET_DIR}/lib/"}"
        ;;
      # Macports dependency (/opt/local/lib means it was installed by Macports)
      "@rpath/"* | /opt/local/lib/*)
        _dep_rel="${_dep#'@rpath/'}"
        _dep_rel="${_dep_rel#/opt/local/lib/}"
        # Check if the macports dependency is already included in the macOS SDK
        if [ -n "$(comm -12 <(printf "%s" "$_dep_rel") <(printf "%s" "$_skd_libs"))" ]; then
          # Relink libs already included in macOS SDK
          _linker_path="/usr/lib/${_dep_rel}"
        else
          _linker_path="@loader_path/${_dep_rel}"
          if ! [ -e "${_macports_root}/lib/${_dep_rel}" ]; then
            echo "Missing macports dependency: ${_dep_rel}"
            exit 1
          elif ! { [ -f "$_dep_rel" ] || [ -e "/${_framework}/Libraries/${_dep_rel}" ]; }; then
            # Copy dependency to the current directory if this is the first time we see it
            cp -Lpv "${_macports_root}/lib/${_dep_rel}" "./${_dep_rel}"
            # Add it to the queue to have its own dependencies processed
            set -- "$@" "$_dep_rel"
          fi
        fi
        ;;
      *) # Ignore system libraries
        continue
        ;;
    esac
    # Change the dependency linker path to make it compatible with an .app bundle
    "${TRIPLE}-install_name_tool" -change "$_dep" "$_linker_path" "$1"
  done

  # Update the library's own id
  "${TRIPLE}-install_name_tool" -id "@executable_path/../Frameworks/${_framework}/Libraries/${1}" "$1"

  # Copy the library to framework
  cp -Lpv "$1" "/${_framework}/Libraries/${1}"

  # Remove library from queue
  shift
done

# Copy all built headers to framework
cp -av "${TARGET_DIR}/include/"* "/${_framework}/Headers/"

# Strip all libraries
"${TRIPLE}-strip" -S "/${_framework}/Libraries/"*.dylib

View file

@ -1 +0,0 @@
dlls

View file

@ -1,200 +0,0 @@
# Builds the win64 FFmpeg DLL bundle. Each dependency is compiled in its own
# stage (layer-NN-<name>) so BuildKit can run them in parallel; the numbered
# merge stages (layer-10, layer-20, ...) fold the results back into one
# prefix before the next tier of dependencies builds on top of them.
ARG FFMPEG_VERSION=6.0

FROM vvasconcellos/base-win64:2023_06_10 AS base
ENV TARGET=win64

# Tier 10: toolchain headers/runtime (mingw-w64, mingw-std-threads)
FROM base AS layer-10-mingw
RUN --mount=src=scripts.d/10-mingw.sh,dst=/stage.sh run_stage /stage.sh

FROM base AS layer-10-mingw-std-threads
RUN --mount=src=scripts.d/10-mingw-std-threads.sh,dst=/stage.sh run_stage /stage.sh

FROM base AS layer-10
# mingw output is copied twice: once into / for immediate use by later
# stages, once into /opt/mingw so downstream merges can re-export it.
COPY --from=layer-10-mingw /opt/mingw/. /
COPY --from=layer-10-mingw /opt/mingw/. /opt/mingw
COPY --from=layer-10-mingw-std-threads $FFBUILD_PREFIX/. $FFBUILD_PREFIX

# Tier 20: base compression/text libs
FROM layer-10 AS layer-20-brotli
RUN --mount=src=scripts.d/20-brotli.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-10 AS layer-20-bzip2
RUN --mount=src=scripts.d/20-bzip2.sh,dst=/stage.sh --mount=src=patches/bzip2,dst=/patches run_stage /stage.sh

FROM layer-10 AS layer-20-iconv
RUN --mount=src=scripts.d/20-iconv.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-10 AS layer-20-zlib
RUN --mount=src=scripts.d/20-zlib.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-10 AS layer-20
COPY --from=layer-20-brotli $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-20-bzip2 $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-20-iconv $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-20-zlib $FFBUILD_PREFIX/. $FFBUILD_PREFIX

# Tier 25: libs that depend on tier 20
FROM layer-20 AS layer-25-libogg
RUN --mount=src=scripts.d/25-libogg.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-20 AS layer-25-xz
RUN --mount=src=scripts.d/25-xz.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-20 AS layer-25
COPY --from=layer-25-libogg $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-25-xz $FFBUILD_PREFIX/. $FFBUILD_PREFIX

# Tier 45
FROM layer-25 AS layer-45-libvorbis
RUN --mount=src=scripts.d/45-libvorbis.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-25 AS layer-45-opencl
RUN --mount=src=scripts.d/45-opencl.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-25 AS layer-45
COPY --from=layer-45-libvorbis $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-45-opencl $FFBUILD_PREFIX/. $FFBUILD_PREFIX

# Tier 50: codec and hardware-acceleration libraries
FROM layer-45 AS layer-50-amf
RUN --mount=src=scripts.d/50-amf.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-dav1d
RUN --mount=src=scripts.d/50-dav1d.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-ffnvcodec
RUN --mount=src=scripts.d/50-ffnvcodec.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-libde265
RUN --mount=src=scripts.d/50-libde265.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-libjxl
RUN --mount=src=scripts.d/50-libjxl.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-libopus
RUN --mount=src=scripts.d/50-libopus.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-libvpx
RUN --mount=src=scripts.d/50-libvpx.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-libwebp
RUN --mount=src=scripts.d/50-libwebp.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-onevpl
RUN --mount=src=scripts.d/50-onevpl.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-openal
RUN --mount=src=scripts.d/50-openal.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-openjpeg
RUN --mount=src=scripts.d/50-openjpeg.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-soxr
RUN --mount=src=scripts.d/50-soxr.sh,dst=/stage.sh run_stage /stage.sh

# Vulkan stack builds sequentially in a single stage (ordered sub-steps)
FROM layer-45 AS layer-50-vulkan
RUN --mount=src=scripts.d/50-vulkan/45-vulkan.sh,dst=/stage.sh run_stage /stage.sh
RUN --mount=src=scripts.d/50-vulkan/50-shaderc.sh,dst=/stage.sh run_stage /stage.sh
RUN --mount=src=scripts.d/50-vulkan/55-spirv-cross.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50-zimg
RUN --mount=src=scripts.d/50-zimg.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-45 AS layer-50
COPY --from=layer-50-amf $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-dav1d $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-dav1d /opt/dlls /opt/dlls
COPY --from=layer-50-ffnvcodec $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-libde265 $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-libjxl $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-libopus $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-libvpx $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-libwebp $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-onevpl $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-openal $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-openjpeg $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-soxr $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-vulkan $FFBUILD_PREFIX/. $FFBUILD_PREFIX
COPY --from=layer-50-zimg $FFBUILD_PREFIX/. $FFBUILD_PREFIX

# Tier 99: final consumers (libheif and ffmpeg itself)
FROM layer-50 AS layer-99-libheif
RUN --mount=src=scripts.d/99-libheif.sh,dst=/stage.sh run_stage /stage.sh

FROM layer-50 AS layer-99-ffmpeg
ARG FFMPEG_VERSION
ENV FFMPEG_VERSION="$FFMPEG_VERSION"
RUN --mount=src=scripts.d/99-ffmpeg.sh,dst=/stage.sh run_stage /stage.sh

# Final cleanup: collect DLLs, move import libs, strip, drop build leftovers
FROM layer-50 AS layer-99
COPY --from=layer-50 /opt/dlls /opt/dlls
COPY --from=layer-99-libheif /opt/dlls /opt/dlls
COPY --from=layer-99-ffmpeg /opt/dlls /opt/dlls
RUN rm -rf /opt/dlls/share /opt/dlls/lib/pkgconfig /opt/dlls/lib/cmake
RUN cd /opt/dlls/bin \
  && find . -name '*.lib' -exec install -Dt ../lib/ -m a-rwx,u+rw,g+r,o+r {} + \
  && find . -name '*.lib' -delete \
  && find . -name '*.dll' -exec x86_64-w64-mingw32-strip -s {} \;
# -delete on directories only succeeds for empty ones; || true tolerates the rest
RUN find /opt/dlls -type d -delete || true
RUN find /opt/dlls/lib \( -name '*dav1d*' -o -name '*.def' \) -delete || true

# Export stage: only the DLL bundle ends up in the build output
FROM scratch
COPY --from=layer-99 /opt/dlls /dlls

View file

@ -1,25 +0,0 @@
# FFMpeg DLLs for Windows
## Build instructions
To build the FFMpeg `DLLs` a `docker` or `podman` installation is required.
It is recommended to enable [`BuildKit`](https://docs.docker.com/build/buildkit/#getting-started) in docker.
Just run the following inside this directory:
```sh
$> docker build -o . .
```
or
```sh
$> podman build -o . .
```
After some time (it takes around 60min in Github CI) a directory named `dlls` will show up with the `DLLs` inside.
### How does the build process work?
This is a modified Dockerfile generated by using https://github.com/BtbN/FFmpeg-Builds
Thanks @BtbN for your great work

View file

@ -1,33 +0,0 @@
diff -u bzlib.h.bak bzlib.h
--- bzlib.h.bak 2010-09-11 01:08:42.000000000 +0200
+++ bzlib.h 2013-02-06 04:03:34.782383139 +0100
@@ -82,7 +82,7 @@
# undef small
# endif
# ifdef BZ_EXPORT
-# define BZ_API(func) WINAPI func
+# define BZ_API(func) func
# define BZ_EXTERN extern
# else
/* import windows dll dynamically */
diff -u Makefile.bak Makefile
--- Makefile.bak 2010-09-11 00:46:02.000000000 +0200
+++ Makefile 2013-02-06 04:04:07.210382474 +0100
@@ -15,13 +15,13 @@
SHELL=/bin/sh
# To assist in cross-compiling
-CC=gcc
-AR=ar
-RANLIB=ranlib
+CC?=gcc
+AR?=ar
+RANLIB?=ranlib
LDFLAGS=
BIGFILES=-D_FILE_OFFSET_BITS=64
-CFLAGS=-Wall -Winline -O2 -g $(BIGFILES)
+CFLAGS:=-Wall -Winline -O2 -g $(BIGFILES) $(CFLAGS)
# Where you want it installed when you do 'make install'
PREFIX=/usr/local

View file

@ -1,12 +0,0 @@
#!/bin/bash
# ffbuild stage: mingw-std-threads — a header-only std::thread shim for
# mingw; nothing is compiled, the headers are just copied into the prefix.
SCRIPT_REPO="https://github.com/meganz/mingw-std-threads.git"
SCRIPT_COMMIT="6c2061b7da41d6aa1b2162ff4383ec3ece864bc6"

ffbuild_dockerbuild() {
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" mingw-std-threads
  cd mingw-std-threads

  mkdir -p "$FFBUILD_PREFIX"/include
  cp *.h "$FFBUILD_PREFIX"/include
}

View file

@ -1,45 +0,0 @@
#!/bin/bash
# ffbuild stage: mingw-w64 headers + winpthreads, installed into the GCC
# sysroot (via DESTDIR=/opt/mingw so the Dockerfile can COPY the result).
SCRIPT_REPO="https://git.code.sf.net/p/mingw-w64/mingw-w64.git"
SCRIPT_COMMIT="9df2e604ddf16765410724716a8d1887ffc61fa9"

ffbuild_dockerbuild() {
  # Full clone wrapped in retry-tool: the sourceforge mirror is flaky
  retry-tool sh -c "rm -rf mingw && git clone '$SCRIPT_REPO' mingw"
  cd mingw
  git checkout "$SCRIPT_COMMIT"

  cd mingw-w64-headers

  # Headers must be configured without the cross-build flags from the env
  unset CFLAGS
  unset CXXFLAGS
  unset LDFLAGS
  unset PKG_CONFIG_LIBDIR

  GCC_SYSROOT="$(${FFBUILD_CROSS_PREFIX}gcc -print-sysroot)"

  local myconf=(
    --prefix="$GCC_SYSROOT/usr/$FFBUILD_TOOLCHAIN"
    --host="$FFBUILD_TOOLCHAIN"
    --with-default-win32-winnt="0x601"
    --with-default-msvcrt=ucrt
    --enable-idl
  )

  ./configure "${myconf[@]}"
  make -j"$(nproc)"
  make install DESTDIR="/opt/mingw"

  # Second sub-build: static, PIC winpthreads
  cd ../mingw-w64-libraries/winpthreads

  local myconf=(
    --prefix="$GCC_SYSROOT/usr/$FFBUILD_TOOLCHAIN"
    --host="$FFBUILD_TOOLCHAIN"
    --with-pic
    --disable-shared
    --enable-static
  )

  ./configure "${myconf[@]}"
  make -j"$(nproc)"
  make install DESTDIR="/opt/mingw"
}

View file

@ -1,24 +0,0 @@
#!/bin/bash
# ffbuild stage: brotli — static PIC library built with CMake/Ninja and
# installed into the shared ffbuild prefix.
SCRIPT_REPO="https://github.com/google/brotli.git"
SCRIPT_COMMIT="50ebce107f5b1eb36760c7ec2d4726ec56784373"

ffbuild_dockerbuild() {
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" brotli
  cd brotli

  mkdir build && cd build

  cmake -G Ninja \
    -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" \
    -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
    -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
    -DBUILD_SHARED_LIBS=OFF \
    -DBROTLI_DISABLE_TESTS=ON \
    -DBROTLI_BUNDLED_MODE=OFF \
    ..

  ninja -j"$(nproc)"
  ninja install
}

View file

@ -1,26 +0,0 @@
#!/bin/bash
# ffbuild stage: bzip2 — patched for cross-compilation (the upstream
# Makefile hard-codes gcc/ar/ranlib), then only libbz2.a is built and the
# header/archive are installed by hand (bzip2 has no install target for this).
SCRIPT_REPO="git://sourceware.org/git/bzip2.git"
SCRIPT_REF="bzip2-1.0.8"

ffbuild_dockerbuild() {
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_REF" bzip2
  cd bzip2

  # Makes CC/AR/RANLIB overridable and drops WINAPI from the export macro
  patch -p0 </patches/bzip2-1.0.8_brokenstuff.diff

  make \
    CC="${FFBUILD_CROSS_PREFIX}gcc" \
    AR="${FFBUILD_CROSS_PREFIX}ar" \
    LD="${FFBUILD_CROSS_PREFIX}ld" \
    CXX="${FFBUILD_CROSS_PREFIX}g++" \
    STRIP="${FFBUILD_CROSS_PREFIX}strip" \
    PREFIX="$FFBUILD_PREFIX" \
    RANLIB="${FFBUILD_CROSS_PREFIX}ranlib" \
    libbz2.a \
    -j"$(nproc)"

  install -m644 -D bzlib.h "${FFBUILD_PREFIX}/include/bzlib.h"
  install -m644 -D libbz2.a "${FFBUILD_PREFIX}/lib/libbz2.a"
}

View file

@ -1,25 +0,0 @@
#!/bin/bash
# ffbuild stage: GNU libiconv — static PIC build with extra encodings.
SCRIPT_REPO="https://git.savannah.gnu.org/git/libiconv.git"
SCRIPT_TAG="v1.17"

ffbuild_dockerbuild() {
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libiconv
  cd libiconv

  # Pull gnulib submodules; autogen must run with the host toolchain,
  # so the cross CC/CFLAGS are dropped for that step only
  retry-tool ./gitsub.sh pull
  (unset CC CFLAGS GMAKE && ./autogen.sh)

  local myconf=(
    --host="$FFBUILD_TOOLCHAIN"
    --prefix="$FFBUILD_PREFIX"
    --enable-extra-encodings
    --disable-shared
    --enable-static
    --with-pic
  )

  ./configure "${myconf[@]}"
  make -j"$(nproc)"
  make install
}

View file

@ -1,21 +0,0 @@
#!/bin/bash
# ffbuild stage: zlib — static build. zlib's configure is not autotools,
# so the cross compiler is passed through CC/AR env vars instead of --host.
SCRIPT_REPO="https://github.com/madler/zlib.git"
SCRIPT_TAG="v1.3"

ffbuild_dockerbuild() {
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" zlib
  cd zlib

  local myconf=(
    --prefix="$FFBUILD_PREFIX"
    --static
  )

  export CC="${FFBUILD_CROSS_PREFIX}gcc"
  export AR="${FFBUILD_CROSS_PREFIX}ar"

  ./configure "${myconf[@]}"
  make -j"$(nproc)"
  make install
}

View file

@ -1,23 +0,0 @@
#!/bin/bash
# ffbuild stage: libogg — static, position-independent autotools build
# installed into the shared ffbuild prefix.
SCRIPT_REPO="https://github.com/xiph/ogg.git"
SCRIPT_TAG="v1.3.5"

ffbuild_dockerbuild() {
  # Shallow checkout of the pinned release tag
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" ogg
  cd ogg

  # Generate the configure script from the autotools sources
  ./autogen.sh

  # Cross-compile a static PIC library for the target toolchain
  ./configure \
    --host="$FFBUILD_TOOLCHAIN" \
    --prefix="$FFBUILD_PREFIX" \
    --disable-shared \
    --enable-static \
    --with-pic

  make -j"$(nproc)"
  make install
}

View file

@ -1,32 +0,0 @@
#!/bin/bash
# ffbuild stage: xz/liblzma — static size-optimized library only; all the
# command-line tools, scripts and docs are disabled.
SCRIPT_REPO="https://github.com/tukaani-project/xz.git"
SCRIPT_TAG="v5.4.3"

ffbuild_dockerbuild() {
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" xz
  cd xz

  ./autogen.sh --no-po4a --no-doxygen

  local myconf=(
    --host="$FFBUILD_TOOLCHAIN"
    --prefix="$FFBUILD_PREFIX"
    --enable-small
    --disable-xz
    --disable-xzdec
    --disable-lzmadec
    --disable-lzmainfo
    --disable-lzma-links
    --disable-scripts
    --disable-doc
    --disable-symbol-versions
    --disable-shared
    --enable-static
    --with-pic
  )

  ./configure "${myconf[@]}"
  make -j"$(nproc)"
  make install
}

View file

@ -1,23 +0,0 @@
#!/bin/bash
# ffbuild stage: libvorbis — static autotools build (depends on the libogg
# stage already being merged into the prefix).
SCRIPT_REPO="https://github.com/xiph/vorbis.git"
SCRIPT_TAG="v1.3.7"

ffbuild_dockerbuild() {
  # Shallow checkout of the pinned release tag
  git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" vorbis
  cd vorbis

  # Generate the configure script from the autotools sources
  ./autogen.sh

  # Cross-compile a static library; the ogg self-tests are skipped
  ./configure \
    --host="$FFBUILD_TOOLCHAIN" \
    --prefix="$FFBUILD_PREFIX" \
    --disable-shared \
    --enable-static \
    --disable-oggtest

  make -j"$(nproc)"
  make install
}

View file

@ -1,46 +0,0 @@
#!/bin/bash
# Installs the Khronos OpenCL headers and builds a static ICD loader, then
# hand-writes an OpenCL.pc so ffmpeg's pkg-config probe can link against it.
SCRIPT_REPO="https://github.com/KhronosGroup/OpenCL-Headers.git"
SCRIPT_REPO2="https://github.com/KhronosGroup/OpenCL-ICD-Loader.git"
SCRIPT_TAG="v2023.04.17"
ffbuild_dockerbuild() {
    mkdir opencl && cd opencl
    # The headers repo is header-only: copy it straight into the prefix.
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" headers
    mkdir -p "$FFBUILD_PREFIX"/include/CL
    cp -r headers/CL/* "$FFBUILD_PREFIX"/include/CL/.
    # Build the ICD loader as a static PIC archive (OpenCLOn12 disabled).
    git-mini-clone "$SCRIPT_REPO2" "$SCRIPT_TAG" loader
    cd loader
    mkdir build && cd build
    cmake -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DOPENCL_ICD_LOADER_HEADERS_DIR="$FFBUILD_PREFIX"/include -DOPENCL_ICD_LOADER_BUILD_SHARED_LIBS=OFF \
        -DOPENCL_ICD_LOADER_DISABLE_OPENCLON12=ON -DOPENCL_ICD_LOADER_PIC=ON \
        -DOPENCL_ICD_LOADER_BUILD_TESTING=OFF -DBUILD_TESTING=OFF ..
    make -j"$(nproc)"
    make install
    # The loader does not ship a pkg-config file; synthesize one. The dummy
    # "9999" version just needs to satisfy any minimum-version check.
    echo "prefix=$FFBUILD_PREFIX" >OpenCL.pc
    echo "exec_prefix=\${prefix}" >>OpenCL.pc
    echo "libdir=\${exec_prefix}/lib" >>OpenCL.pc
    echo "includedir=\${prefix}/include" >>OpenCL.pc
    echo >>OpenCL.pc
    echo "Name: OpenCL" >>OpenCL.pc
    echo "Description: OpenCL ICD Loader" >>OpenCL.pc
    echo "Version: 9999" >>OpenCL.pc
    echo "Cflags: -I\${includedir}" >>OpenCL.pc
    # Platform-specific link lines: libdl on Linux, Win32 system libs on
    # Windows (where the static archive is named explicitly with -l:).
    if [[ $TARGET == linux* ]]; then
        echo "Libs: -L\${libdir} -lOpenCL" >>OpenCL.pc
        echo "Libs.private: -ldl" >>OpenCL.pc
    elif [[ $TARGET == win* ]]; then
        echo "Libs: -L\${libdir} -l:OpenCL.a" >>OpenCL.pc
        echo "Libs.private: -lole32 -lshlwapi -lcfgmgr32" >>OpenCL.pc
    fi
    mkdir -p "$FFBUILD_PREFIX"/lib/pkgconfig
    mv OpenCL.pc "$FFBUILD_PREFIX"/lib/pkgconfig/OpenCL.pc
}

View file

@ -1,12 +0,0 @@
#!/bin/bash

# AMD AMF is a header-only dependency: no compile step, just install the
# public headers under <prefix>/include/AMF.
SCRIPT_REPO="https://github.com/GPUOpen-LibrariesAndSDKs/AMF.git"
SCRIPT_COMMIT="bd5db31d3d8ea1fae7002dd903898599bdb7d97a"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" amf
    mkdir -p "$FFBUILD_PREFIX"/include
    cd amf
    mv amf/public/include "$FFBUILD_PREFIX"/include/AMF
}

View file

@ -1,33 +0,0 @@
#!/bin/bash
# Shared dav1d (AV1 decoder) build. Installed twice: into the build prefix
# (for linking) and into /opt/dlls (runtime files that get shipped).
SCRIPT_REPO="https://code.videolan.org/videolan/dav1d.git"
SCRIPT_TAG="1.2.1"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" dav1d
    cd dav1d
    mkdir build && cd build
    local myconf=(
        --cross-file=/cross.meson
        -Denable_docs=false
        -Denable_tools=false
        -Denable_tests=false
        -Denable_examples=false
        --prefix=/opt/dav1d
        --buildtype=release
        --default-library=shared
    )
    meson "${myconf[@]}" ..
    ninja -j"$(nproc)"
    ninja install
    # Rewrite the installed .pc to point at the real build prefix, then
    # mirror the staged files into it (-n: never overwrite existing files).
    sed -i "s@^prefix=/opt/dav1d\$@prefix=${FFBUILD_PREFIX}@" /opt/dav1d/lib/pkgconfig/dav1d.pc
    cp -nav /opt/dav1d/* "${FFBUILD_PREFIX}/"
    # Runtime-only copy: headers and pkg-config data are not shipped.
    mkdir -p /opt/dlls/
    cp -nav /opt/dav1d/* /opt/dlls/
    rm -r /opt/dlls/include /opt/dlls/lib/pkgconfig
}

View file

@ -1,11 +0,0 @@
#!/bin/bash
# ffnvcodec: NVIDIA codec API headers (NVENC/NVDEC/CUDA), header-only.
SCRIPT_REPO="https://github.com/FFmpeg/nv-codec-headers.git"
SCRIPT_COMMIT="4fd7be29a431441ca31b8db0155875ae2ff4ed47"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" ffnvcodec
    cd ffnvcodec
    # No compile step: the Makefile's install target only copies the headers
    # and the ffnvcodec.pc pkg-config file into the prefix.
    make PREFIX="$FFBUILD_PREFIX" install
}

View file

@ -1,24 +0,0 @@
#!/bin/bash

# Static libde265 (H.265/HEVC decoder) build, library only — the dec265 and
# sherlock265 example programs are disabled.
SCRIPT_REPO="https://github.com/strukturag/libde265.git"
SCRIPT_TAG="v1.0.12"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libde265
    cd libde265

    autoreconf -i

    ./configure \
        --host="$FFBUILD_TOOLCHAIN" \
        --prefix="$FFBUILD_PREFIX" \
        --disable-shared \
        --enable-static \
        --disable-dec265 \
        --disable-sherlock265

    make -j"$(nproc)"
    make install
}

View file

@ -1,57 +0,0 @@
#!/bin/bash
# Static libjxl (JPEG XL) build with pkg-config fixups for static linking.
SCRIPT_REPO="https://github.com/libjxl/libjxl.git"
SCRIPT_COMMIT="7263ec97397a8113cfa4012bf84ca4618198df3b"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" jxl
    cd jxl
    # Only the two vendored deps we need; brotli comes from the system.
    git submodule update --init --recursive --depth 1 --recommend-shallow third_party/{highway,skcms}
    mkdir build && cd build
    if [[ $TARGET == linux* ]]; then
        # our glibc is too old(<2.25), and their detection fails for some reason
        export CXXFLAGS="$CXXFLAGS -DVQSORT_GETRANDOM=0 -DVQSORT_SECURE_SEED=0"
    elif [[ $TARGET == win* ]]; then
        # Fix AVX2 related crash due to unaligned stack memory
        export CXXFLAGS="$CXXFLAGS -Wa,-muse-unaligned-vector-move"
        export CFLAGS="$CFLAGS -Wa,-muse-unaligned-vector-move"
    fi
    cmake -G Ninja \
        -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
        -DBUILD_SHARED_LIBS=OFF \
        -DJPEGXL_STATIC=OFF \
        -DJPEGXL_ENABLE_TOOLS=OFF \
        -DJPEGXL_ENABLE_VIEWERS=OFF \
        -DJPEGXL_EMSCRIPTEN=OFF \
        -DJPEGXL_ENABLE_DOXYGEN=OFF \
        -DBUILD_TESTING=OFF \
        -DJPEGXL_ENABLE_EXAMPLES=OFF \
        -DJPEGXL_ENABLE_MANPAGES=OFF \
        -DJPEGXL_ENABLE_JNI=OFF \
        -DJPEGXL_ENABLE_PLUGINS=OFF \
        -DJPEGXL_ENABLE_DEVTOOLS=OFF \
        -DJPEGXL_ENABLE_BENCHMARK=OFF \
        -DJPEGXL_BUNDLE_LIBPNG=OFF \
        -DJPEGXL_ENABLE_SJPEG=OFF \
        -DJPEGXL_FORCE_SYSTEM_BROTLI=ON \
        ..
    ninja -j"$(nproc)"
    ninja install
    # The generated .pc files miss the static-define cflag and the C++
    # runtime needed when consumers link the static archives; append them.
    echo "Cflags.private: -DJXL_STATIC_DEFINE=1" >>"${FFBUILD_PREFIX}"/lib/pkgconfig/libjxl.pc
    echo "Libs.private: -lstdc++" >>"${FFBUILD_PREFIX}"/lib/pkgconfig/libjxl.pc
    echo "Cflags.private: -DJXL_STATIC_DEFINE=1" >>"${FFBUILD_PREFIX}"/lib/pkgconfig/libjxl_threads.pc
    echo "Libs.private: -lstdc++" >>"${FFBUILD_PREFIX}"/lib/pkgconfig/libjxl_threads.pc
    # Extra Win32 system library required for the static link on Windows.
    if [[ $TARGET == win* ]]; then
        echo "Libs.private: -ladvapi32" >>"${FFBUILD_PREFIX}"/lib/pkgconfig/libjxl.pc
        echo "Libs.private: -ladvapi32" >>"${FFBUILD_PREFIX}"/lib/pkgconfig/libjxl_threads.pc
    fi
}

View file

@ -1,23 +0,0 @@
#!/bin/bash

# Static libopus build, codec library only (no demo/test programs).
SCRIPT_REPO="https://github.com/xiph/opus.git"
SCRIPT_TAG="v1.4"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" opus
    cd opus

    ./autogen.sh

    ./configure \
        --host="$FFBUILD_TOOLCHAIN" \
        --prefix="$FFBUILD_PREFIX" \
        --disable-shared \
        --enable-static \
        --disable-extra-programs

    make -j"$(nproc)"
    make install
}

View file

@ -1,53 +0,0 @@
#!/bin/bash

# Static libvpx (VP8/VP9) build. libvpx uses its own configure with custom
# target triples rather than an autoconf-style --host flag.
SCRIPT_REPO="https://chromium.googlesource.com/webm/libvpx"
SCRIPT_TAG="v1.13.0"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libvpx
    cd libvpx

    local myconf=(
        --disable-shared
        --enable-static
        --enable-pic
        --disable-examples
        --disable-tools
        --disable-docs
        --disable-unit-tests
        --enable-vp9-highbitdepth
        --prefix="$FFBUILD_PREFIX"
    )

    # Map our build target onto libvpx's triple naming; bail out on targets
    # we do not support.
    case "$TARGET" in
        win64) myconf+=(--target=x86_64-win64-gcc) ;;
        win32) myconf+=(--target=x86-win32-gcc) ;;
        linux64) myconf+=(--target=x86_64-linux-gcc) ;;
        linuxarm64) myconf+=(--target=arm64-linux-gcc) ;;
        *)
            echo "Unknown target"
            return 255
            ;;
    esac
    export CROSS="$FFBUILD_CROSS_PREFIX"

    ./configure "${myconf[@]}"
    make -j"$(nproc)"
    make install

    # Work around strip breaking LTO symbol index
    "$RANLIB" "$FFBUILD_PREFIX"/lib/libvpx.a
}

View file

@ -1,29 +0,0 @@
#!/bin/bash

# Static libwebp build: every codec feature on, every CLI tool and optional
# external image loader (SDL/PNG/JPEG/TIFF/GIF) off.
SCRIPT_REPO="https://github.com/webmproject/libwebp.git"
SCRIPT_TAG="1.3.2"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libwebp
    cd libwebp

    ./autogen.sh

    ./configure \
        --host="$FFBUILD_TOOLCHAIN" \
        --prefix="$FFBUILD_PREFIX" \
        --disable-shared \
        --enable-static \
        --with-pic \
        --enable-everything \
        --disable-sdl \
        --disable-png \
        --disable-jpeg \
        --disable-tiff \
        --disable-gif

    make -j"$(nproc)"
    make install
}

View file

@ -1,22 +0,0 @@
#!/bin/bash
# Static oneVPL dispatcher build (Intel Quick Sync / libvpl for ffmpeg).
SCRIPT_REPO="https://github.com/oneapi-src/oneVPL.git"
SCRIPT_TAG="v2023.3.0"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" onevpl
    cd onevpl
    mkdir build && cd build
    cmake -GNinja -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DCMAKE_INSTALL_BINDIR="$FFBUILD_PREFIX"/bin -DCMAKE_INSTALL_LIBDIR="$FFBUILD_PREFIX"/lib \
        -DBUILD_DISPATCHER=ON -DBUILD_DEV=ON \
        -DBUILD_PREVIEW=OFF -DBUILD_TOOLS=OFF -DBUILD_TOOLS_ONEVPL_EXPERIMENTAL=OFF -DINSTALL_EXAMPLE_CODE=OFF \
        -DBUILD_SHARED_LIBS=OFF -DBUILD_TESTS=OFF ..
    ninja -j"$(nproc)"
    ninja install
    # Drop installed config/examples clutter we don't ship.
    rm -rf "$FFBUILD_PREFIX"/{etc,share}
}

View file

@ -1,22 +0,0 @@
#!/bin/bash
# Static OpenAL Soft build, plus pkg-config fixups for static consumers.
SCRIPT_REPO="https://github.com/kcat/openal-soft.git"
SCRIPT_COMMIT="cd781b1511d437816aac65f89646bd80dbf7c040"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" openal
    cd openal
    mkdir cm_build && cd cm_build
    cmake -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DLIBTYPE=STATIC -DALSOFT_UTILS=OFF -DALSOFT_EXAMPLES=OFF ..
    make -j"$(nproc)"
    make install
    # Static OpenAL is C++ internally; consumers must pull in the runtime
    # (and COM on Windows), which the generated .pc does not declare.
    echo "Libs.private: -lstdc++" >>"$FFBUILD_PREFIX"/lib/pkgconfig/openal.pc
    if [[ $TARGET == win* ]]; then
        echo "Libs.private: -lole32" >>"$FFBUILD_PREFIX"/lib/pkgconfig/openal.pc
    fi
}

View file

@ -1,15 +0,0 @@
#!/bin/bash

# Static OpenJPEG (JPEG 2000) build, codec library only.
SCRIPT_REPO="https://github.com/uclouvain/openjpeg.git"
SCRIPT_COMMIT="1ee6d115e80036d1d38bad7f95a680bfc612c1bf"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" openjpeg
    cd openjpeg
    mkdir build && cd build

    cmake \
        -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DBUILD_SHARED_LIBS=OFF \
        -DBUILD_PKGCONFIG_FILES=ON \
        -DBUILD_CODEC=OFF \
        -DWITH_ASTYLE=OFF \
        -DBUILD_TESTING=OFF \
        ..

    make -j"$(nproc)"
    make install
}

View file

@ -1,16 +0,0 @@
#!/bin/bash
# Static libsoxr (resampler) build from the SourceForge release tarball.
ARTIFACT_URL='https://sourceforge.net/projects/soxr/files/soxr-0.1.3-Source.tar.xz'
ffbuild_dockerbuild() {
    # SourceForge mirrors are flaky: retry the download, wiping the target
    # dir first so each attempt starts from a clean slate.
    retry-tool sh -c "rm -rf soxr && mkdir -p soxr && curl -LSs '${ARTIFACT_URL}' | tar -xJf- --strip-components=1 -C soxr"
    cd soxr
    mkdir build && cd build
    cmake -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" -DWITH_OPENMP=ON -DBUILD_TESTS=OFF -DBUILD_EXAMPLES=OFF -DBUILD_SHARED_LIBS=OFF ..
    make -j"$(nproc)"
    make install
    # WITH_OPENMP=ON means static consumers must also link libgomp.
    echo "Libs.private: -lgomp" >>"$FFBUILD_PREFIX"/lib/pkgconfig/soxr.pc
}

View file

@ -1,25 +0,0 @@
#!/bin/bash
# Installs the Vulkan headers and synthesizes a vulkan.pc, since the
# headers-only package does not ship one.
SCRIPT_REPO="https://github.com/KhronosGroup/Vulkan-Headers.git"
SCRIPT_COMMIT="v1.3.252"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" vkheaders
    cd vkheaders
    mkdir build && cd build
    cmake -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" ..
    make -j"$(nproc)"
    make install
    # ${SCRIPT_COMMIT:1} strips the leading "v" from the tag for Version:.
    cat >"$FFBUILD_PREFIX"/lib/pkgconfig/vulkan.pc <<EOF
prefix=$FFBUILD_PREFIX
includedir=\${prefix}/include
Name: vulkan
Version: ${SCRIPT_COMMIT:1}
Description: Vulkan (Headers Only)
Cflags: -I\${includedir}
EOF
}

View file

@ -1,37 +0,0 @@
#!/bin/bash
# Static shaderc (GLSL -> SPIR-V compiler) build, with .pc fixups and
# removal of any shared-library artifacts per target.
SCRIPT_REPO="https://github.com/google/shaderc.git"
SCRIPT_TAG="v2023.4"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" shaderc
    cd shaderc
    # Fetch the pinned glslang/SPIRV-Tools/SPIRV-Headers dependencies.
    ./utils/git-sync-deps
    mkdir build && cd build
    cmake -GNinja -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DSHADERC_SKIP_TESTS=ON -DSHADERC_SKIP_EXAMPLES=ON -DSHADERC_SKIP_COPYRIGHT_CHECK=ON \
        -DENABLE_EXCEPTIONS=ON -DENABLE_CTEST=OFF -DENABLE_GLSLANG_BINARIES=OFF -DSPIRV_SKIP_EXECUTABLES=ON \
        -DSPIRV_TOOLS_BUILD_STATIC=ON -DBUILD_SHARED_LIBS=OFF ..
    ninja -j"$(nproc)"
    ninja install
    # for some reason, this does not get installed...
    cp libshaderc_util/libshaderc_util.a "$FFBUILD_PREFIX"/lib
    # Static C++ archives: make pkg-config pull in the C++ runtime, and
    # alias shaderc.pc to the combined (single-archive) flavor.
    echo "Libs: -lstdc++" >>"$FFBUILD_PREFIX"/lib/pkgconfig/shaderc_combined.pc
    echo "Libs: -lstdc++" >>"$FFBUILD_PREFIX"/lib/pkgconfig/shaderc_static.pc
    cp "$FFBUILD_PREFIX"/lib/pkgconfig/{shaderc_combined,shaderc}.pc
    # Drop shared/import libraries so only the static build can be linked.
    if [[ $TARGET == win* ]]; then
        rm -r "$FFBUILD_PREFIX"/bin "$FFBUILD_PREFIX"/lib/*.dll.a
    elif [[ $TARGET == linux* ]]; then
        rm -r "$FFBUILD_PREFIX"/bin "$FFBUILD_PREFIX"/lib/*.so*
    else
        echo "Unknown target"
        return 255
    fi
}

View file

@ -1,37 +0,0 @@
#!/bin/bash
# Static SPIRV-Cross build. A spirv-cross-c-shared.pc is synthesized by hand
# (NOTE(review): consumers appear to probe the "shared" name; here it is
# backed by the static archives — confirm against the ffmpeg/libplacebo
# pkg-config checks).
SCRIPT_REPO="https://github.com/KhronosGroup/SPIRV-Cross.git"
SCRIPT_TAG="sdk-1.3.250.0"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" spirv-cross
    cd spirv-cross
    # Scrape the ABI version out of CMakeLists.txt for the .pc Version field.
    VER_MAJ="$(grep 'set(spirv-cross-abi-major' CMakeLists.txt | sed -re 's/.* ([0-9]+)\)/\1/')"
    VER_MIN="$(grep 'set(spirv-cross-abi-minor' CMakeLists.txt | sed -re 's/.* ([0-9]+)\)/\1/')"
    VER_PCH="$(grep 'set(spirv-cross-abi-patch' CMakeLists.txt | sed -re 's/.* ([0-9]+)\)/\1/')"
    VER_FULL="$VER_MAJ.$VER_MIN.$VER_PCH"
    mkdir build && cd build
    cmake -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="$FFBUILD_PREFIX" \
        -DSPIRV_CROSS_SHARED=OFF -DSPIRV_CROSS_STATIC=ON -DSPIRV_CROSS_CLI=OFF -DSPIRV_CROSS_ENABLE_TESTS=OFF -DSPIRV_CROSS_FORCE_PIC=ON -DSPIRV_CROSS_ENABLE_CPP=OFF ..
    make -j"$(nproc)"
    make install
    cat >"$FFBUILD_PREFIX"/lib/pkgconfig/spirv-cross-c-shared.pc <<EOF
prefix=$FFBUILD_PREFIX
exec_prefix=\${prefix}
libdir=\${prefix}/lib
sharedlibdir=\${prefix}/lib
includedir=\${prefix}/include/spirv_cross
Name: spirv-cross-c-shared
Description: C API for SPIRV-Cross
Version: $VER_FULL
Requires:
Libs: -L\${libdir} -L\${sharedlibdir} -lspirv-cross-c -lspirv-cross-glsl -lspirv-cross-hlsl -lspirv-cross-reflect -lspirv-cross-msl -lspirv-cross-util -lspirv-cross-core -lstdc++
Cflags: -I\${includedir}
EOF
}

View file

@ -1,24 +0,0 @@
#!/bin/bash

# Static zimg (scaling/colorspace conversion library) build.
SCRIPT_REPO="https://github.com/sekrit-twc/zimg.git"
SCRIPT_COMMIT="332aaac5e99de46ddd5663092779742ec1958b11"

ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_COMMIT" zimg
    cd zimg
    # zimg vendors parts of its build via git submodules.
    git submodule update --init --recursive --depth=1

    ./autogen.sh

    ./configure \
        --host="$FFBUILD_TOOLCHAIN" \
        --prefix="$FFBUILD_PREFIX" \
        --disable-shared \
        --enable-static \
        --with-pic

    make -j"$(nproc)"
    make install
}

View file

@ -1,97 +0,0 @@
#!/bin/bash
# Shared FFmpeg build for the Windows x86_64 target: statically links all
# the dependencies built by the other scripts and installs the resulting
# DLLs into /opt/dlls for packaging.
SCRIPT_REPO='https://github.com/FFmpeg/FFmpeg.git'
SCRIPT_BRANCH="release/${FFMPEG_VERSION:-6.0}"
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_BRANCH" ffmpeg
    cd ffmpeg
    # Broken configs:
    # --enable-lto (Broken on Windows)
    ./configure \
        --cpu="x86_64" \
        --arch='x86_64' \
        --prefix="/opt/dlls" \
        --target-os=mingw32 \
        --pkg-config=pkg-config \
        --pkg-config-flags="--static" \
        --cross-prefix="$FFBUILD_CROSS_PREFIX" \
        --disable-static \
        --disable-debug \
        --disable-doc \
        --disable-htmlpages \
        --disable-txtpages \
        --disable-manpages \
        --disable-podpages \
        --disable-indevs \
        --disable-outdevs \
        --disable-parser=avs2 \
        --disable-parser=avs3 \
        --disable-postproc \
        --disable-programs \
        --disable-libwebp \
        --disable-sdl2 \
        --disable-metal \
        --disable-network \
        --disable-openssl \
        --disable-schannel \
        --disable-securetransport \
        --disable-xlib \
        --disable-libxcb \
        --disable-libxcb-shm \
        --disable-libxcb-xfixes \
        --disable-libxcb-shape \
        --disable-libv4l2 \
        --disable-v4l2-m2m \
        --disable-w32threads \
        --disable-xmm-clobber-test \
        --disable-neon-clobber-test \
        --enable-amf \
        --enable-avcodec \
        --enable-avfilter \
        --enable-avformat \
        --enable-bzlib \
        --enable-cuda-llvm \
        --enable-ffnvcodec \
        --enable-gpl \
        --enable-gray \
        --enable-iconv \
        --enable-inline-asm \
        --enable-libdav1d \
        --enable-libjxl \
        --enable-libopenjpeg \
        --enable-libopus \
        --enable-libshaderc \
        --enable-libsoxr \
        --enable-libvorbis \
        --enable-libvpl \
        --enable-libvpx \
        --enable-libzimg \
        --enable-lzma \
        --enable-openal \
        --enable-opencl \
        --enable-opengl \
        --enable-optimizations \
        --enable-pic \
        --enable-postproc \
        --enable-pthreads \
        --enable-shared \
        --enable-small \
        --enable-swscale \
        --enable-version3 \
        --enable-vulkan \
        --enable-zlib \
        --enable-cross-compile \
        --extra-cflags='-DLIBTWOLAME_STATIC' \
        --extra-cxxflags='' \
        --extra-ldflags='-pthread' \
        --extra-ldexeflags='' \
        --extra-libs='-lgomp -lstdc++'
    # V=1: verbose build output so CI logs show the full compile commands.
    make -j"$(nproc)" V=1
    make PREFIX="/opt/dlls" install
}

View file

@ -1,46 +0,0 @@
#!/bin/bash
# Shared libheif build for Windows, installed straight into /opt/dlls.
# Decoders (dav1d, libde265) are linked in statically; plugin loading off.
SCRIPT_REPO='https://github.com/strukturag/libheif.git'
SCRIPT_TAG='v1.16.2'
ffbuild_dockerbuild() {
    git-mini-clone "$SCRIPT_REPO" "$SCRIPT_TAG" libheif
    cd libheif
    mkdir build && cd build
    cmake \
        -GNinja \
        -DCMAKE_BUILD_TYPE=Release \
        -DCMAKE_INSTALL_PREFIX='/opt/dlls' \
        -DCMAKE_INSTALL_BINDIR='/opt/dlls/bin' \
        -DCMAKE_INSTALL_LIBDIR='/opt/dlls/lib' \
        -DCMAKE_TOOLCHAIN_FILE="$FFBUILD_CMAKE_TOOLCHAIN" \
        -DBUILD_TESTING=OFF \
        -DBUILD_SHARED_LIBS=ON \
        -DWITH_DAV1D=ON \
        -DWITH_DAV1D_PLUGIN=OFF \
        -DWITH_LIBDE265=ON \
        -DWITH_LIBDE265_PLUGIN=OFF \
        -DWITH_LIBSHARPYUV=ON \
        -DWITH_FUZZERS=OFF \
        -DWITH_EXAMPLES=OFF \
        -DWITH_UNCOMPRESSED_CODEC=ON \
        -DWITH_REDUCED_VISIBILITY=ON \
        -DWITH_DEFLATE_HEADER_COMPRESSION=ON \
        -DENABLE_PLUGIN_LOADING=OFF \
        -DENABLE_MULTITHREADING_SUPPORT=ON \
        ..
    ninja -j"$(nproc)"
    ninja install
    # Generate an MSVC-style import library (heif.lib) from the DLL so
    # non-MinGW consumers can link against it.
    cd /opt/dlls/bin
    "${FFBUILD_CROSS_PREFIX}gendef" libheif.dll
    "${FFBUILD_CROSS_PREFIX}dlltool" -m i386:x86-64 -d libheif.def -l heif.lib -D libheif.dll
    mv libheif.def heif.lib ../lib/
}

View file

@ -1,321 +0,0 @@
#!/usr/bin/env bash
set -euo pipefail
# Map the requested target (argv[1], default x86_64-linux-gnu) to the zig
# target triple used by the rest of this script. glibc targets pin
# glibc 2.23 for compatibility with older distros.
case "${1:-}" in
    '' | x86_64-linux-gnu)
        export TARGET_TRIPLE='x86_64-linux-gnu.2.23'
        ;;
    aarch64-linux-gnu)
        export TARGET_TRIPLE='aarch64-linux-gnu.2.23'
        ;;
    x86_64-linux-musl)
        export TARGET_TRIPLE='x86_64-linux-musl'
        ;;
    aarch64-linux-musl)
        export TARGET_TRIPLE='aarch64-linux-musl'
        ;;
    *)
        echo "Unsupported target triple '${1}'"
        exit 1
        ;;
esac
# Change CWD to script dir
CDPATH='' cd "$(dirname "$0")"
echo "Install required build dependencies..."
apt-get update -yqq
apt-get install -yqq -o=Dpkg::Use-Pty=0 ninja-build cmake curl nasm pkg-config xz-utils patch python3
echo "Configure sysroot and prefix..."
mkdir -p "./src/prefix/bin" "./src/sysroot/bin"
_prefix="$(CDPATH='' cd ./src/prefix && pwd)"
_sysroot="$(CDPATH='' cd ./src/sysroot && pwd)"
# Configure PATH to use our sysroot bin
export PATH="${_sysroot}/bin:$PATH"
# Configure pkgconfig to look for our built libs
export PKG_CONFIG_LIBDIR="${_prefix}/lib/pkgconfig:${_prefix}/share/pkgconfig"
# Download zig to use as a C/C++ cross compiler
echo "Download zig..."
curl -LSs "https://ziglang.org/download/0.11.0/zig-linux-$(uname -m)-0.11.0.tar.xz" \
    | tar -xJf- --strip-component 1 -C "$_sysroot"
mv "${_sysroot}/zig" "${_sysroot}/bin/zig"
# Create scripts for some zig internal commands, because cmake doesn't allow passing arguments to tools
for _arg in ar ranlib; do
    cat <<EOF >"${_sysroot}/bin/${_arg}"
#!/usr/bin/env bash
exec zig $_arg "\$@"
EOF
    chmod +x "${_sysroot}/bin/${_arg}"
done
echo "Download meson..."
mkdir -p ./src/meson
curl -LSs 'https://github.com/mesonbuild/meson/archive/refs/tags/1.2.1.tar.gz' \
    | tar -xzf- --strip-component 1 -C ./src/meson
pushd ./src/meson
# Patch meson to support zig as a C/C++ compiler
curl -LSs 'https://github.com/mesonbuild/meson/pull/12293.patch' | patch -p1
# Install meson binary
./packaging/create_zipapp.py --outfile "${_sysroot}/bin/meson" --compress
popd
# Write the meson cross file: zig acts as compiler, archiver and dlltool;
# pkg-config is restricted to libraries installed in our prefix.
cat <<EOF >./src/cross.meson
[binaries]
c = ['zig', 'cc', '-s', '-target', '$TARGET_TRIPLE']
cpp = ['zig', 'c++', '-s', '-target', '$TARGET_TRIPLE']
ar = ['zig', 'ar']
ranlib = ['zig', 'ranlib']
lib = ['zig', 'lib']
dlltool = ['zig', 'dlltool']
[properties]
sys_root = '${_sysroot}'
pkg_config_libdir = ['${_prefix}/lib/pkgconfig', '${_prefix}/share/pkgconfig']
EOF
# Append the [host_machine] section matching the target architecture to the
# meson cross file written above.
case "$TARGET_TRIPLE" in
    x86_64-*)
        cat <<EOF >>./src/cross.meson
[host_machine]
system = 'linux'
cpu_family = 'x86_64'
cpu = 'x86_64'
endian = 'little'
EOF
        ;;
    aarch64-*)
        cat <<EOF >>./src/cross.meson
[host_machine]
system = 'linux'
cpu_family = 'aarch64'
cpu = 'arm64'
endian = 'little'
EOF
        ;;
    *)
        echo "Unsupported target triple '${1}'"
        exit 1
        ;;
esac
# Write the CMake toolchain file that drives zig cc/c++ as the cross
# compiler for $TARGET_TRIPLE.
cat <<EOF >./src/toolchain.cmake
set(CMAKE_SYSTEM_NAME Linux)
# Derive the processor from the target triple (x86_64 or aarch64) instead of
# hard-coding x86_64, so architecture checks in dependent projects see the
# CPU actually being targeted.
set(CMAKE_SYSTEM_PROCESSOR ${TARGET_TRIPLE%%-*})
set(triple $TARGET_TRIPLE)
set(CMAKE_CROSSCOMPILING TRUE)
set_property(GLOBAL PROPERTY TARGET_SUPPORTS_SHARED_LIBS FALSE)
# Do a no-op access on the CMAKE_TOOLCHAIN_FILE variable so that CMake will not
# issue a warning on it being unused.
if (CMAKE_TOOLCHAIN_FILE)
endif()
set(CMAKE_C_COMPILER zig cc -s -target $TARGET_TRIPLE)
set(CMAKE_CXX_COMPILER zig c++ -s -target $TARGET_TRIPLE)
set(CMAKE_RANLIB ranlib)
set(CMAKE_C_COMPILER_RANLIB ranlib)
set(CMAKE_CXX_COMPILER_RANLIB ranlib)
set(CMAKE_AR ar)
set(CMAKE_C_COMPILER_AR ar)
set(CMAKE_CXX_COMPILER_AR ar)
set(CMAKE_FIND_ROOT_PATH ${_prefix} ${_sysroot})
set(CMAKE_SYSTEM_PREFIX_PATH /)
if(CMAKE_INSTALL_PREFIX_INITIALIZED_TO_DEFAULT)
set(CMAKE_INSTALL_PREFIX "${_prefix}" CACHE PATH
"Install path prefix, prepended onto install directories." FORCE)
endif()
# To find programs to execute during CMake run time with find_program(), e.g.
# 'git' or so, we allow looking into system paths.
set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
if (NOT CMAKE_FIND_ROOT_PATH_MODE_LIBRARY)
set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
endif()
if (NOT CMAKE_FIND_ROOT_PATH_MODE_INCLUDE)
set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
endif()
if (NOT CMAKE_FIND_ROOT_PATH_MODE_PACKAGE)
set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
endif()
# TODO: CMake appends <sysroot>/usr/include to implicit includes; switching to use usr/include will make this redundant.
if ("\${CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES}" STREQUAL "")
set(CMAKE_C_IMPLICIT_INCLUDE_DIRECTORIES "${_prefix}/include")
endif()
if ("\${CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES}" STREQUAL "")
set(CMAKE_CXX_IMPLICIT_INCLUDE_DIRECTORIES "${_prefix}/include")
endif()
EOF
# --
# Static zlib. Only the static target is compiled, but `ninja install` also
# expects the shared library files to exist, so they are stubbed out.
echo "Download zlib..."
mkdir -p ./src/zlib/build
curl -LSs 'https://github.com/madler/zlib/archive/refs/tags/v1.3.tar.gz' \
    | tar -xzf- --strip-component 1 -C ./src/zlib
pushd ./src/zlib/build
echo "Build zlib..."
cmake \
    -GNinja \
    -DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS=Off \
    -DCMAKE_POSITION_INDEPENDENT_CODE=On \
    -DCMAKE_SKIP_INSTALL_ALL_DEPENDENCY=On \
    -DCMAKE_INSTALL_PREFIX="$_prefix" \
    ..
ninja -j"$(nproc)" zlibstatic
# Stub .so files so install doesn't fail
touch libz.so.1.3 libz.so.1 libz.so
ninja install
# Remove stub .so files
rm "${_prefix}"/lib/{libz.so.1.3,libz.so.1,libz.so}
popd
# --
# Static dav1d (AV1 decoder), built with meson via the zig cross file.
echo "Download dav1d..."
mkdir -p ./src/dav1d/build
curl -LSs 'https://code.videolan.org/videolan/dav1d/-/archive/1.2.1/dav1d-1.2.1.tar.gz' \
    | tar -xzf- --strip-component 1 -C ./src/dav1d
pushd ./src/dav1d/build
echo "Build dav1d..."
meson setup \
    --cross-file=../../cross.meson \
    -Denable_docs=false \
    -Denable_tools=false \
    -Denable_tests=false \
    -Denable_examples=false \
    --prefix="$_prefix" \
    --buildtype=release \
    --default-library=static \
    ..
ninja -j"$(nproc)"
ninja install
popd
# --
# Static libde265 (HEVC decoder); decoder/encoder example programs and SDL
# support are disabled — only the library is needed by libheif.
echo "Download libde265..."
mkdir -p ./src/libde265/build
curl -#LSs 'https://github.com/strukturag/libde265/archive/refs/tags/v1.0.12.tar.gz' \
    | tar -xzf- --strip-component 1 -C ./src/libde265
pushd ./src/libde265/build
echo "Build libde265..."
cmake \
    -GNinja \
    -DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS=Off \
    -DCMAKE_POSITION_INDEPENDENT_CODE=On \
    -DCMAKE_INSTALL_PREFIX="$_prefix" \
    -DENABLE_SDL=Off \
    -DENABLE_DECODER=Off \
    -DENABLE_ENCODER=Off \
    ..
ninja -j"$(nproc)"
ninja install
popd
# --
# Static libwebp; all bundled CLI tools and extras are disabled (libheif
# only needs the libsharpyuv/libwebp libraries).
echo "Download libwebp..."
mkdir -p ./src/libwebp/build
curl -#LSs 'https://github.com/webmproject/libwebp/archive/refs/tags/v1.3.2.tar.gz' \
    | tar -xzf- --strip-component 1 -C ./src/libwebp
pushd ./src/libwebp/build
echo "Build libwebp..."
cmake \
    -GNinja \
    -DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS=Off \
    -DCMAKE_POSITION_INDEPENDENT_CODE=On \
    -DCMAKE_INSTALL_PREFIX="$_prefix" \
    -DWEBP_LINK_STATIC=On \
    -DWEBP_BUILD_CWEBP=Off \
    -DWEBP_BUILD_DWEBP=Off \
    -DWEBP_BUILD_GIF2WEBP=Off \
    -DWEBP_BUILD_IMG2WEBP=Off \
    -DWEBP_BUILD_VWEBP=Off \
    -DWEBP_BUILD_WEBPINFO=Off \
    -DWEBP_BUILD_WEBPMUX=Off \
    -DWEBP_BUILD_EXTRAS=Off \
    -DWEBP_BUILD_ANIM_UTILS=Off \
    ..
ninja -j"$(nproc)"
ninja install
popd
# --
# Shared libheif, statically linking the dav1d/libde265 decoders built
# above; plugin loading disabled so everything lives in one .so.
echo "Download libheif..."
mkdir -p ./src/libheif/build
curl -#LSs 'https://github.com/strukturag/libheif/archive/refs/tags/v1.16.2.tar.gz' \
    | tar -xzf- --strip-component 1 -C ./src/libheif
pushd ./src/libheif/build
echo "Build libheif..."
cmake \
    -GNinja \
    -DCMAKE_TOOLCHAIN_FILE=../../toolchain.cmake \
    -DCMAKE_BUILD_TYPE=Release \
    -DBUILD_SHARED_LIBS=On \
    -DCMAKE_POSITION_INDEPENDENT_CODE=On \
    -DCMAKE_INSTALL_PREFIX="$_prefix" \
    -DBUILD_TESTING=OFF \
    -DWITH_DAV1D=ON \
    -DWITH_DAV1D_PLUGIN=OFF \
    -DWITH_LIBDE265=ON \
    -DWITH_LIBDE265_PLUGIN=OFF \
    -DWITH_LIBSHARPYUV=ON \
    -DWITH_FUZZERS=OFF \
    -DWITH_EXAMPLES=OFF \
    -DWITH_UNCOMPRESSED_CODEC=ON \
    -DWITH_REDUCED_VISIBILITY=ON \
    -DWITH_DEFLATE_HEADER_COMPRESSION=ON \
    -DENABLE_PLUGIN_LOADING=OFF \
    -DENABLE_MULTITHREADING_SUPPORT=ON \
    ..
ninja -j"$(nproc)"
ninja install
popd

View file

@ -1,51 +0,0 @@
diff --git a/tools/osxcross-macports b/tools/osxcross-macports
index f008a8d..725fac1 100755
--- a/tools/osxcross-macports
+++ b/tools/osxcross-macports
@@ -306,20 +306,34 @@ getPkgUrl()
verboseMsg " $p"
done
- local pkg=$(echo "$pkgs" | \
- grep "$pkgname-$pkgversion" | grep $OSXVERSION | grep $ARCH | \
- uniq | tail -n1)
- if [ -z "$pkg" ]; then
+ local allpkgs="$pkgs"
+ if [ $ARCH != "i386-x86_64" ]; then
+ pkgs=$(echo "$pkgs" | grep -v universal)
+ fi
+
+ while [ -z "$pkg" ] && [ -n "$pkgs" ]; do
pkg=$(echo "$pkgs" | \
- grep "$pkgname-$pkgversion" | grep $OSXVERSION | grep "noarch" | \
+ grep "$pkgname-$pkgversion" | grep $OSXVERSION | grep $ARCH | \
uniq | tail -n1)
- fi
- if [ -z "$pkg" ]; then
- pkg=$(echo "$pkgs" | grep $OSXVERSION | grep $ARCH | uniq | tail -n1)
- fi
- if [ -z "$pkg" ]; then
- pkg=$(echo "$pkgs" | grep $OSXVERSION | grep "noarch" | uniq | tail -n1)
- fi
+ if [ -z "$pkg" ]; then
+ pkg=$(echo "$pkgs" | \
+ grep "$pkgname-$pkgversion" | grep $OSXVERSION | grep "noarch" | \
+ uniq | tail -n1)
+ fi
+ if [ -z "$pkg" ]; then
+ pkg=$(echo "$pkgs" | grep $OSXVERSION | grep $ARCH | uniq | tail -n1)
+ fi
+ if [ -z "$pkg" ]; then
+ pkg=$(echo "$pkgs" | grep $OSXVERSION | grep "noarch" | uniq | tail -n1)
+ fi
+
+ if [ $ARCH != "i386-x86_64" ]; then
+ pkgs="$allpkgs"
+ allpkgs=""
+ else
+ pkgs=""
+ fi
+ done
verboseMsg " selected: $pkg"

View file

@ -1,41 +0,0 @@
diff --git a/tools/osxcross-macports b/tools/osxcross-macports
index f008a8d..23dd68d 100755
--- a/tools/osxcross-macports
+++ b/tools/osxcross-macports
@@ -181,7 +181,7 @@ selectMirror()
}
function download()
-{
+(
local uri=$1
local filename
@@ -191,6 +191,18 @@ function download()
filename=$(basename $1)
fi
+ # Remove file if any error occurs during download
+ trap 'rm -f "$filename"' ERR
+
+ # Return if file already exists in cache
+ case "$filename" in
+ "$CACHE"*)
+ if [ -e "$filename" ]; then
+ return
+ fi
+ ;;
+ esac
+
if command -v curl &>/dev/null; then
## cURL ##
local curl_opts="-L -C - "
@@ -220,7 +232,7 @@ function download()
echo "Required dependency 'curl or wget' not installed" 1>&2
exit 1
fi
-}
+)
getFileStdout()
{

View file

@ -1,15 +0,0 @@
diff --git a/wrapper/target.cpp b/wrapper/target.cpp
index 82bf65c40..a81ce97b7 100644
--- a/wrapper/target.cpp
+++ b/wrapper/target.cpp
@@ -741,6 +741,10 @@ bool Target::setup() {
(stdlib == StdLib::libstdcxx && usegcclibs)) {
fargs.push_back("-nostdinc++");
fargs.push_back("-Qunused-arguments");
+
+ if ((SDKOSNum >= OSVersion(11, 1)) && (stdlib == StdLib::libcxx)) {
+ fargs.push_back("-lc++");
+ }
}
if (stdlib == StdLib::libstdcxx && usegcclibs && targetarch.size() < 2 &&

View file

@ -1,71 +0,0 @@
# Builds the osxcross macOS cross-compilation toolchain on Alpine.
ARG MACOS_VERSION=12.3 \
	MACOS_MIN_VERSION=10.15 \
	# aarch64 requires a higher min macOS version to build ffmpeg
	ARM64_MACOS_MIN_VERSION=11.0
FROM alpine:3.17 as base
WORKDIR /srv
# Host dependencies, required to build osxcross, gcc for macOS and ffmpeg. ~1GiB
# hadolint ignore=DL3018
RUN --mount=type=cache,target=/var/cache/apk ln -vs /var/cache/apk /etc/apk/cache && apk add --update \
	autoconf automake bash bsd-compat-headers build-base bzip2-dev clang15 cmake curl gettext gettext-dev git gmp-dev \
	libc++-dev libc-dev libtool libuuid libxml2-dev llvm15-dev llvm15-static meson mpc1-dev mpfr-dev musl-fts-dev nasm \
	ninja openssl openssl-dev perl python3 xz yasm zlib-dev
# Download osxcross, use a specific commit to avoid breaking changes and allow docker to cache it
ADD https://github.com/tpoechtrager/osxcross/archive/564e2b9.zip /srv/osxcross.zip
RUN unzip osxcross.zip && mv osxcross-* osxcross && rm osxcross.zip
WORKDIR /srv/osxcross/tarballs
# Download MacOS SDK
ARG MACOS_VERSION
ENV MACOSX_SDK="$MACOS_VERSION"
ADD "https://github.com/joseluisq/macosx-sdks/releases/download/${MACOS_VERSION}/MacOSX${MACOS_VERSION}.sdk.tar.xz" ./
# Set up osxcross environment variables
ARG MACOS_MIN_VERSION ARM64_MACOS_MIN_VERSION
ENV PATH="$PATH:/opt/osxcross/bin" \
	UNATTENDED=yes \
	OSXCROSS_MP_INC=1 \
	OSX_VERSION_MIN="$MACOS_MIN_VERSION" \
	MACOSX_DEPLOYMENT_TARGET="$MACOS_MIN_VERSION" \
	MACOSX_ARM64_DEPLOYMENT_TARGET="$ARM64_MACOS_MIN_VERSION"
WORKDIR /srv/osxcross
# Some important patches from unmerged PRs
# PR 180 code needed to be updated to work with the latest osxcross
# 181 is not related to the 181 PR. It's just custom code that needed to be patched after 180 and before 379
COPY 180.diff 181.diff 314.diff ./
RUN set -eux; for patch in *.diff; do patch -p1 < "$patch"; done
# Build osxcross
RUN set -eux; export TARGET_DIR=/opt/osxcross \
	&& \
	./build.sh \
	&& \
	./build_compiler_rt.sh \
	&& \
	# Ugly workaround for linker not finding the macOS SDK's Framework directory
	ln -fs "${TARGET_DIR}/SDK/MacOSX${MACOS_VERSION}.sdk/System" '/System' \
	&& \
	./cleanup.sh
WORKDIR /srv
# Setup macports
RUN osxcross-macports --help
# Setup meson cross-compilation toolchain file
RUN --mount=src=cross.meson.sh,dst=/srv/cross.meson.sh /srv/cross.meson.sh
LABEL org.opencontainers.image.title="osxcross" \
	# Version is macOS SDK version + osxcross commit hash
	org.opencontainers.image.version="12.3-564e2b9" \
	org.opencontainers.image.authors="Vítor Vasconcellos <vasconcellos.dev@gmail.com>, Spacedrive <support@spacedrive.com>" \
	org.opencontainers.image.revision="8" \
	org.opencontainers.image.licenses="GPL-2.0" \
	org.opencontainers.image.description="macOS cross toolchain configured inside Alpine Linux"

View file

@ -1,8 +0,0 @@
# macOS cross toolchain configured inside Alpine Linux
This container is based on Alpine 3.17, with the most common build dependencies installed, and a built version of [`osxcross`](https://github.com/tpoechtrager/osxcross) plus the macOS SDK 12.3 (Monterey), targeting a minimum compatibility of macOS 10.15 (Catalina) for x86_64 and macOS 11.0 (Big Sur) for arm64.
**Image Tag**: macOS SDK version + osxcross commit hash + revision
This container is currently available at:
https://hub.docker.com/r/vvasconcellos/osxcross.

View file

@ -1,61 +0,0 @@
#!/usr/bin/env bash

# Generates one Meson cross file per supported target CPU (x86_64, armv8)
# pointing at the osxcross clang toolchain, so Meson projects can be
# cross-compiled for macOS from this container.

set -e          # exit immediate if an error occurs in a pipeline
set -E          # make commands inherit ERR trap
set -u          # don't allow not set variables to be utilized
set -o pipefail # trace ERR through pipes
set -o errtrace # trace ERR through 'time command' and other functions

# osxcross CPU name -> architecture component of the compiler triple.
declare -A cpu_arch_mappings=(
	["x86_64"]="x86_64"
	["armv8"]="aarch64"
)

# Get darwin version and build compiler triple
DARWIN_VERSION="$(basename "$(realpath "$(command -v "oa64-clang")")" | awk -F- '{print $3}')"

for TARGET_CPU in "${!cpu_arch_mappings[@]}"; do
	TARGET_ARCH="${cpu_arch_mappings[$TARGET_CPU]}"
	TRIPLE="${TARGET_ARCH}-apple-${DARWIN_VERSION}"

	# Check macOS clang exists. Redirect stdout as well as stderr:
	# `command -v` prints the resolved path on stdout, which would
	# otherwise pollute the script's output.
	CC="${TRIPLE}-clang"
	if ! command -v "$CC" >/dev/null 2>&1; then
		echo "$CC not found" >&2
		exit 1
	fi

	# Get osxcross root directory
	_osxcross_root="$(dirname "$(dirname "$(command -v "$CC")")")"

	# Check SDK exists
	SYSROOT="${_osxcross_root}/SDK/MacOSX${MACOS_VERSION}.sdk"
	if ! [ -d "$SYSROOT" ]; then
		echo "Invalid MacOS version: $MACOS_VERSION" >&2
		exit 1
	fi

	# Configure Meson for osxcross
	# TODO: This should be in the base osxcross image
	cat <<EOF >"${_osxcross_root}/${TRIPLE}.meson"
[binaries]
c = '$CC'
cpp = '${TRIPLE}-clang++'
ld = '$CC'
ar = '${TRIPLE}-ar'
strip = '${TRIPLE}-strip'
cmake = '${TRIPLE}-cmake'
ranlib = '${TRIPLE}-ranlib'
pkgconfig = '${TRIPLE}-pkg-config'

[properties]
sys_root = '${SYSROOT}'

[host_machine]
system = 'darwin'
cpu_family = '$TARGET_ARCH'
cpu = '$TARGET_CPU'
endian = 'little'
EOF
done

View file

@ -1,66 +0,0 @@
# CI workflow: cross-compiles FFmpeg for macOS (x86_64 and arm64) inside a
# Docker buildx container and uploads the resulting tarballs as artifacts.
# Triggered on pushes/PRs touching this workflow or its build scripts.
name: Build ffmpeg macos
on:
push:
paths:
- '.github/workflows/ffmpeg-macos.yml'
- '.github/scripts/ffmpeg-macos/**'
branches:
- main
pull_request:
paths:
- '.github/workflows/ffmpeg-macos.yml'
- '.github/scripts/ffmpeg-macos/**'
workflow_dispatch:
# Cancel previous runs of the same workflow on the same branch.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# FFmpeg version passed to the Docker build and used in artifact names.
env:
FFMPEG_VERSION: '6.0'
jobs:
build-ffmpeg-macos:
name: Build ffmpeg for macos
runs-on: ubuntu-20.04
defaults:
run:
shell: bash
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
install: true
platforms: linux/amd64
driver-opts: |
image=moby/buildkit:master
network=host
- name: Build ffmpeg
run: |
set -euxo pipefail
cd .github/scripts/ffmpeg-macos
docker build --build-arg FFMPEG_VERSION=$FFMPEG_VERSION -o . .
export XZ_OPT='-T0 -9'
tar -cJf "ffmpeg-${FFMPEG_VERSION}-x86_64.tar.xz" -C ffmpeg/x86_64 .
tar -cJf "ffmpeg-${FFMPEG_VERSION}-arm64.tar.xz" -C ffmpeg/aarch64 .
rm -rf ffmpeg
- name: Publish ffmpeg x86_64
uses: actions/upload-artifact@v3
with:
name: ffmpeg-${{ env.FFMPEG_VERSION }}-x86_64
path: .github/scripts/ffmpeg-macos/ffmpeg-${{ env.FFMPEG_VERSION }}-x86_64.tar.xz
if-no-files-found: error
- name: Publish ffmpeg arm64
uses: actions/upload-artifact@v3
with:
name: ffmpeg-${{ env.FFMPEG_VERSION }}-arm64
path: .github/scripts/ffmpeg-macos/ffmpeg-${{ env.FFMPEG_VERSION }}-arm64.tar.xz
if-no-files-found: error

View file

@ -1,55 +0,0 @@
# CI workflow: cross-compiles FFmpeg DLLs for Windows x86_64 inside a Docker
# buildx container and uploads them as an artifact.
# Triggered on pushes/PRs touching this workflow or its build scripts.
name: Build ffmpeg windows
on:
push:
paths:
- '.github/workflows/ffmpeg-windows.yml'
- '.github/scripts/ffmpeg-windows/**'
branches:
- main
pull_request:
paths:
- '.github/workflows/ffmpeg-windows.yml'
- '.github/scripts/ffmpeg-windows/**'
workflow_dispatch:
# Cancel previous runs of the same workflow on the same branch.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
# FFmpeg version passed to the Docker build and used in the artifact name.
env:
FFMPEG_VERSION: '6.0'
jobs:
build-ffmpeg-windows:
name: Build ffmpeg for windows
runs-on: ubuntu-20.04
defaults:
run:
shell: bash
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
with:
install: true
platforms: linux/amd64
driver-opts: |
image=moby/buildkit:master
network=host
- name: Build ffmpeg
run: |
set -euxo pipefail
cd .github/scripts/ffmpeg-windows
docker build --build-arg FFMPEG_VERSION=$FFMPEG_VERSION -o . .
- name: Publish ffmpeg x86_64
uses: actions/upload-artifact@v3
with:
name: ffmpeg-${{ env.FFMPEG_VERSION }}-x86_64
path: .github/scripts/ffmpeg-windows/dlls/*
if-no-files-found: error

View file

@ -1,64 +0,0 @@
# CI workflow: builds libheif for four Linux targets (x86_64/aarch64, gnu/musl)
# via a matrix, prunes unneeded install files, and uploads a tar.xz per target.
name: Build libheif for linux
on:
push:
paths:
- '.github/workflows/libheif-linux.yml'
- '.github/scripts/libheif-linux.sh'
branches:
- main
pull_request:
paths:
- '.github/workflows/libheif-linux.yml'
- '.github/scripts/libheif-linux.sh'
workflow_dispatch:
# Cancel previous runs of the same workflow on the same branch.
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
jobs:
build-libheif-linux:
strategy:
fail-fast: true
matrix:
settings:
- host: ubuntu-latest
target: x86_64-linux-gnu
- host: ubuntu-latest
target: aarch64-linux-gnu
- host: ubuntu-latest
target: x86_64-linux-musl
- host: ubuntu-latest
target: aarch64-linux-musl
name: Build LibHeif Linux ${{ matrix.settings.target }}
runs-on: ${{ matrix.settings.host }}
defaults:
run:
shell: bash
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Build libheif
run: |
set -euxo pipefail
sudo .github/scripts/libheif-linux.sh ${{ matrix.settings.target }}
sudo chown -R "$(id -u):$(id -g)" .github/scripts/src/prefix
# Remove uneeded files
rm -rf .github/scripts/src/prefix/{share,lib/{cmake,pkgconfig}}
find .github/scripts/src/prefix -empty -delete
find .github/scripts/src/prefix -name 'libheif.so*' -exec realpath -s --relative-to=.github/scripts/src/prefix {} + \
| xargs env XZ_OPT='-T0 -9' tar -cJf "libheif-${{ matrix.settings.target }}.tar.xz" -C .github/scripts/src/prefix include/libheif
- name: Publish libheif
uses: actions/upload-artifact@v3
with:
name: libheif-${{ matrix.settings.target }}
path: libheif-${{ matrix.settings.target }}.tar.xz
if-no-files-found: error

View file

@ -38,8 +38,6 @@ jobs:
# bundles: deb # no appimage for now unfortunetly
# - host: ubuntu-20.04
# target: aarch64-unknown-linux-musl
# - host: ubuntu-20.04
# target: armv7-unknown-linux-gnueabihf
name: Desktop - Main ${{ matrix.settings.target }}
runs-on: ${{ matrix.settings.host }}
steps:

View file

@ -2,7 +2,7 @@
name = "sd-desktop"
version = "0.1.2"
description = "The universal file manager."
authors = ["Spacedrive Technology Inc."]
authors = ["Spacedrive Technology Inc <support@spacedrive.com>"]
default-run = "sd-desktop"
license = { workspace = true }
repository = { workspace = true }

View file

@ -15,7 +15,7 @@
"bundleMediaFramework": true
},
"active": true,
"targets": "all",
"targets": ["deb", "msi", "dmg", "updater"],
"identifier": "com.spacedrive.desktop",
"icon": [
"icons/32x32.png",
@ -27,19 +27,13 @@
"resources": [],
"externalBin": [],
"copyright": "Spacedrive Technology Inc.",
"shortDescription": "Spacedrive",
"longDescription": "A cross-platform universal file explorer, powered by an open-source virtual distributed filesystem.",
"shortDescription": "File explorer from the future.",
"longDescription": "Cross-platform universal file explorer, powered by an open-source virtual distributed filesystem.",
"deb": {
"depends": [
"ffmpeg",
"gstreamer1.0-plugins-bad",
"gstreamer1.0-plugins-ugly",
"gstreamer1.0-gtk3",
"gstreamer1.0-libav"
]
"depends": ["libc6"]
},
"macOS": {
"frameworks": ["../../.deps/FFMpeg.framework"],
"frameworks": ["../../.deps/Spacedrive.framework"],
"minimumSystemVersion": "10.15",
"exceptionDomain": "",
"entitlements": null

View file

@ -7,18 +7,18 @@ if [ "${CI:-}" = "true" ]; then
fi
if [ -z "${HOME:-}" ]; then
HOME="$(CDPATH='' cd -- "$(osascript -e 'set output to (POSIX path of (path to home folder))')" && pwd)"
HOME="$(CDPATH='' cd -- "$(osascript -e 'set output to (POSIX path of (path to home folder))')" && pwd -P)"
export HOME
fi
echo "Building 'sd-mobile-ios' library..."
__dirname="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd)"
__dirname="$(CDPATH='' cd -- "$(dirname -- "$0")" && pwd -P)"
# Ensure target dir exists
TARGET_DIRECTORY="${__dirname}/../../../../../target"
mkdir -p "$TARGET_DIRECTORY"
TARGET_DIRECTORY="$(CDPATH='' cd -- "$TARGET_DIRECTORY" && pwd)"
TARGET_DIRECTORY="$(CDPATH='' cd -- "$TARGET_DIRECTORY" && pwd -P)"
if [ "${CONFIGURATION:-}" != "Debug" ]; then
CARGO_FLAGS=--release

View file

@ -15,7 +15,7 @@ use tracing::error;
const BINDING_LOCATION: &str = ".";
#[cfg(unix)]
const BINDING_LOCATION: &str = if cfg!(target_os = "macos") {
"../Frameworks/FFMpeg.framework/Libraries"
"../Frameworks/Spacedrive.framework/Libraries"
} else {
"../lib/spacedrive"
};

172
scripts/fix-deb.sh Executable file
View file

@ -0,0 +1,172 @@
#!/usr/bin/env bash
# Post-process the tauri-generated .deb bundle so it better complies with
# Debian packaging conventions: adds changelog/copyright/manpage docs, fixes
# permissions and ownership, strips binaries, creates generic shared-lib
# symlinks and augments the control file with Section/Recommends/Suggests.
# Honors $TARGET to locate cross-compiled bundles; requires tar, curl, gzip
# and strip on $PATH.
set -eEuo pipefail
if [ "${CI:-}" = "true" ]; then
	set -x
fi
# Repackaging needs root so extracted files end up owned by root:root.
# Re-exec under sudo, remembering the invoking user's uid/gid so cleanup()
# can restore ownership of the final .deb.
if [ "$(id -u)" -ne 0 ]; then
	echo "This script requires root privileges." >&2
	exec sudo -E env _UID="$(id -u)" _GID="$(id -g)" "$0" "$@"
fi
echo "Fixing deb bundle..." >&2
umask 0
# Print each argument on its own stderr line, then abort.
err() {
	for _line in "$@"; do
		echo "$_line" >&2
	done
	exit 1
}
# Succeed only if every named program is available on $PATH.
has() {
	for prog in "$@"; do
		if ! command -v "$prog" 1>/dev/null 2>&1; then
			return 1
		fi
	done
}
if ! has tar curl gzip strip; then
	err 'Dependencies missing.' \
		"This script requires 'tar', 'curl', 'gzip' and 'strip' to be installed and available on \$PATH."
fi
# Go to script root
CDPATH='' cd -- "$(dirname "$0")"
_root="$(pwd -P)"
if [ -n "${TARGET:-}" ]; then
	cd "../target/${TARGET}/release/bundle/deb" || err 'Failed to find deb bundle'
else
	cd ../target/release/bundle/deb || err 'Failed to find deb bundle'
fi
# Find deb file with the highest version number, name format: spacedrive_<version>_<arch>.deb
_deb="$(find . -type f -name '*.deb' | sort -t '_' -k '2,2' -V | tail -n 1)"
# Clean up unused build artifacts (the unpacked bundle dir next to the .deb)
rm -rf "$(basename "$_deb" .deb)"
# Make a backup of deb
cp "$_deb" "$_deb.bak"
# Temporary directory
_tmp="$(mktemp -d)"
# On exit: drop the temp dir, restore the backup if anything failed, and
# return ownership of the .deb to the original (pre-sudo) user.
cleanup() {
	_err=$?
	rm -rf "$_tmp"
	# Restore backed up deb if something goes wrong
	if [ $_err -ne 0 ]; then
		mv "${_deb:?}.bak" "$_deb"
	fi
	# Ensure deb owner is the same as the user who ran the script
	chown "${_UID:-0}:${_GID:-0}" "$_deb" 2>/dev/null || true
	rm -f "${_deb:?}.bak"
	exit "$_err"
}
trap 'cleanup' EXIT
# Extract deb members (debian-binary, control.tar.gz, data.tar.gz) to tmp dir
ar x "$_deb" --output="$_tmp"
# Extract data.tar.gz
mkdir -p "${_tmp}/data"
tar -xzf "${_tmp}/data.tar.gz" -C "${_tmp}/data"
# Extract control.tar.gz
mkdir -p "${_tmp}/control"
tar -xzf "${_tmp}/control.tar.gz" -C "${_tmp}/control"
# Fix files owner
chown -R root:root "$_tmp"
# Create doc directory
mkdir -p "$_tmp"/data/usr/share/{doc/spacedrive,man/man1}
# Create changelog.gz
curl -LSs 'https://gist.githubusercontent.com/HeavenVolkoff/0993c42bdb0b952eb5bf765398e9b921/raw/changelog' \
	| gzip -9 >"${_tmp}/data/usr/share/doc/spacedrive/changelog.gz"
# Copy LICENSE to copyright
cp "${_root}/../LICENSE" "${_tmp}/data/usr/share/doc/spacedrive/copyright"
# Concatenate all bundled dependency licenses into one compressed file
# (fix: was previously written to the misspelled "thrid-party-licenses.gz")
(
	for _license in "${_root}"/../apps/.deps/licenses/*; do
		cat <<EOF
$(basename "$_license"):
$(cat "$_license")
===============================================================================
EOF
	done
) | gzip -9 >"${_tmp}/data/usr/share/doc/spacedrive/third-party-licenses.gz"
# Create manual page
curl -LSs 'https://gist.githubusercontent.com/HeavenVolkoff/0993c42bdb0b952eb5bf765398e9b921/raw/spacedrive.1' \
	| gzip -9 >"${_tmp}/data/usr/share/man/man1/spacedrive.1.gz"
# Fill the Categories entry in .desktop file
sed -i 's/^Categories=.*/Categories=System;FileTools;FileManager;/' "${_tmp}/data/usr/share/applications/spacedrive.desktop"
# Fix data permissions
find "${_tmp}/data" -type d -exec chmod 755 {} +
find "${_tmp}/data" -type f -exec chmod 644 {} +
# Fix main executable permission
chmod 755 "${_tmp}/data/usr/bin/spacedrive"
# Make generic named shared libs symlinks to the versioned ones
find "${_tmp}/data/usr/lib" -type f -name '*.so.*' -exec sh -euc \
	'for _lib in "$@"; do _link="$_lib" && while { _link="${_link%.*}" && [ "$_link" != "${_lib%.so*}" ]; }; do if [ -f "$_link" ]; then ln -sf "$(basename "$_lib")" "$_link"; fi; done; done' \
	sh {} +
# Strip all executables and shared libs
find "${_tmp}/data/usr/bin" "${_tmp}/data/usr/lib" -type f -exec strip --strip-unneeded {} +
# Add Section field to control file, if it doesn't exist
if ! grep -q '^Section:' "${_tmp}/control/control"; then
	echo 'Section: contrib/utils' >>"${_tmp}/control/control"
fi
# Add Recommends field to control file after Depends field
_recommends='gstreamer1.0-plugins-ugly'
if grep -q '^Recommends:' "${_tmp}/control/control"; then
	sed -i "s/^Recommends:.*/Recommends: ${_recommends}/" "${_tmp}/control/control"
else
	sed -i "/^Depends:/a Recommends: ${_recommends}" "${_tmp}/control/control"
fi
# Add Suggests field to control file after Recommends field
_suggests='gstreamer1.0-plugins-bad'
if grep -q '^Suggests:' "${_tmp}/control/control"; then
	sed -i "s/^Suggests:.*/Suggests: ${_suggests}/" "${_tmp}/control/control"
else
	sed -i "/^Recommends:/a Suggests: ${_suggests}" "${_tmp}/control/control"
fi
# Re-calculate md5sums
(cd "${_tmp}/data" && find . -type f -exec md5sum {} + >"${_tmp}/control/md5sums")
# Fix control files permission
find "${_tmp}/control" -type f -exec chmod 644 {} +
# Recompress data.tar.gz
tar -czf "${_tmp}/data.tar.gz" -C "${_tmp}/data" .
# Recompress control.tar.gz
tar -czf "${_tmp}/control.tar.gz" -C "${_tmp}/control" .
# Rebuild the deb: `ar r` replaces the existing members in place
ar rcs "$_deb" "${_tmp}/debian-binary" "${_tmp}/control.tar.gz" "${_tmp}/data.tar.gz"

69
scripts/preprep.mjs Normal file → Executable file
View file

@ -1,18 +1,17 @@
#!/usr/bin/env node
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { env, exit, umask } from 'node:process'
import { fileURLToPath } from 'node:url'
import { extractTo } from 'archive-wasm/src/fs.mjs'
import * as _mustache from 'mustache'
import { downloadFFMpeg, downloadLibHeif, downloadPDFium, downloadProtc } from './utils/deps.mjs'
import { getGitBranches } from './utils/git.mjs'
import { getConst, NATIVE_DEPS_URL, NATIVE_DEPS_ASSETS } from './utils/consts.mjs'
import { get } from './utils/fetch.mjs'
import { getMachineId } from './utils/machineId.mjs'
import {
setupMacOsFramework,
symlinkSharedLibsMacOS,
symlinkSharedLibsLinux,
} from './utils/shared.mjs'
import { symlinkSharedLibsMacOS, symlinkSharedLibsLinux } from './utils/shared.mjs'
import { which } from './utils/which.mjs'
if (/^(msys|mingw|cygwin)$/i.test(env.OSTYPE ?? '')) {
@ -57,43 +56,26 @@ packages/scripts/${machineId[0] === 'Windows_NT' ? 'setup.ps1' : 'setup.sh'}
// Directory where the native deps will be downloaded
const nativeDeps = path.join(__root, 'apps', '.deps')
await fs.rm(nativeDeps, { force: true, recursive: true })
await Promise.all(
['bin', 'lib', 'include'].map(dir =>
fs.mkdir(path.join(nativeDeps, dir), { mode: 0o750, recursive: true })
)
)
await fs.mkdir(nativeDeps, { mode: 0o750, recursive: true })
// Accepted git branches for querying for artifacts (current, main, master)
const branches = await getGitBranches(__root)
try {
console.log('Downloading Native dependencies...')
// Download all necessary external dependencies
await Promise.all([
downloadProtc(machineId, nativeDeps).catch(e => {
console.error(
'Failed to download protobuf compiler, this is required to build Spacedrive. ' +
'Please install it with your system package manager'
)
throw e
}),
downloadPDFium(machineId, nativeDeps).catch(e => {
console.warn(
'Failed to download pdfium lib. ' +
"This is optional, but if one isn't present Spacedrive won't be able to generate thumbnails for PDF files"
)
if (__debug) console.error(e)
}),
downloadFFMpeg(machineId, nativeDeps, branches).catch(e => {
console.error(`Failed to download ffmpeg. ${bugWarn}`)
throw e
}),
downloadLibHeif(machineId, nativeDeps, branches).catch(e => {
console.error(`Failed to download libheif. ${bugWarn}`)
throw e
}),
]).catch(e => {
const assetName = getConst(NATIVE_DEPS_ASSETS, machineId)
if (assetName == null) throw new Error('NO_ASSET')
const archiveData = await get(`${NATIVE_DEPS_URL}/${assetName}`)
await extractTo(archiveData, nativeDeps, {
chmod: 0o600,
recursive: true,
overwrite: true,
})
} catch (e) {
console.error(`Failed to download native dependencies. ${bugWarn}`)
if (__debug) console.error(e)
exit(1)
})
}
// Extra OS specific setup
try {
@ -104,14 +86,9 @@ try {
throw e
})
} else if (machineId[0] === 'Darwin') {
console.log(`Setup Framework...`)
await setupMacOsFramework(nativeDeps).catch(e => {
console.error(`Failed to setup Framework. ${bugWarn}`)
throw e
})
// This is still required due to how ffmpeg-sys-next builds script works
console.log(`Symlink shared libs...`)
await symlinkSharedLibsMacOS(nativeDeps).catch(e => {
await symlinkSharedLibsMacOS(__root, nativeDeps).catch(e => {
console.error(`Failed to symlink shared libs. ${bugWarn}`)
throw e
})

View file

@ -21,6 +21,14 @@ has() {
done
}
sudo() {
if [ "$(id -u)" -eq 0 ]; then
"$@"
else
env sudo "$@"
fi
}
script_failure() {
if [ -n "${1:-}" ]; then
_line="on line $1"
@ -58,9 +66,6 @@ if [ "${CI:-}" != "true" ]; then
'https://rustup.rs'
fi
echo "Installing Rust tools..."
cargo install cargo-watch
echo
fi
@ -134,19 +139,14 @@ case "$(uname)" in
set -- build-essential curl wget file patchelf openssl libssl-dev libgtk-3-dev librsvg2-dev \
libwebkit2gtk-4.0-dev libayatana-appindicator3-dev
# FFmpeg dependencies
set -- "$@" ffmpeg libavcodec-dev libavdevice-dev libavfilter-dev libavformat-dev \
libavutil-dev libswscale-dev libswresample-dev
# Webkit2gtk requires gstreamer plugins for video playback to work
set -- "$@" gstreamer1.0-alsa gstreamer1.0-gl gstreamer1.0-gtk3 gstreamer1.0-libav \
gstreamer1.0-pipewire gstreamer1.0-plugins-bad gstreamer1.0-plugins-base \
gstreamer1.0-plugins-good gstreamer1.0-plugins-ugly gstreamer1.0-pulseaudio \
gstreamer1.0-vaapi libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev \
libgstreamer-plugins-bad1.0-dev
set -- "$@" gstreamer1.0-plugins-good gstreamer1.0-plugins-ugly libgstreamer1.0-dev libgstreamer-plugins-base1.0-dev
# C/C++ build dependencies, required to build some *-sys crates
set -- "$@" llvm-dev libclang-dev clang nasm
set -- "$@" llvm-dev libclang-dev clang nasm perl
# React dependencies
set -- "$@" libvips42
sudo apt-get -y update
sudo apt-get -y install "$@"
@ -157,15 +157,11 @@ case "$(uname)" in
# Tauri dependencies
set -- base-devel curl wget file patchelf openssl gtk3 librsvg webkit2gtk libayatana-appindicator
# FFmpeg dependencies
set -- "$@" ffmpeg
# Webkit2gtk requires gstreamer plugins for video playback to work
set -- "$@" gst-libav gst-plugins-bad gst-plugins-base gst-plugins-good gst-plugins-ugly \
gst-plugin-pipewire gstreamer-vaapi
set -- "$@" gst-plugins-base gst-plugins-good gst-plugins-ugly
# C/C++ build dependencies, required to build some *-sys crates
set -- "$@" clang nasm
set -- "$@" clang nasm perl
# React dependencies
set -- "$@" libvips
@ -176,7 +172,7 @@ case "$(uname)" in
echo "Installing dependencies with dnf..."
# For Enterprise Linux, you also need "Development Tools" instead of "C Development Tools and Libraries"
if ! { sudo dnf group install "C Development Tools and Libraries" || sudo sudo dnf group install "Development Tools"; }; then
if ! { sudo dnf group install "C Development Tools and Libraries" || sudo dnf group install "Development Tools"; }; then
err 'We were unable to install the "C Development Tools and Libraries"/"Development Tools" package.' \
'Please open an issue if you feel that this is incorrect.' \
'https://github.com/spacedriveapp/spacedrive/issues'
@ -190,26 +186,38 @@ case "$(uname)" in
fi
# Tauri dependencies
set -- openssl curl wget file patchelf libappindicator-gtk3-devel librsvg2-devel
set -- openssl openssl-dev curl wget file patchelf libappindicator-gtk3-devel librsvg2-devel
# Webkit2gtk requires gstreamer plugins for video playback to work
set -- "$@" gstreamer1-devel gstreamer1-plugins-base-devel \
gstreamer1-plugins-good gstreamer1-plugins-good-gtk \
gstreamer1-plugins-good-extras gstreamer1-plugins-ugly-free \
gstreamer1-plugins-bad-free gstreamer1-plugins-bad-free-devel \
gstreamer1-plugins-bad-free-extras
set -- "$@" gstreamer1-devel gstreamer1-plugins-base-devel gstreamer1-plugins-good \
gstreamer1-plugins-good-extras gstreamer1-plugins-ugly-free
# C/C++ build dependencies, required to build some *-sys crates
set -- "$@" clang clang-devel nasm
set -- "$@" clang clang-devel nasm perl-core
# React dependencies
set -- "$@" vips
sudo dnf install "$@"
elif has apk; then
echo "Detected apk!"
echo "Installing dependencies with apk..."
echo "Alpine suport is experimental" >&2
# FFmpeg dependencies
if ! sudo dnf install ffmpeg ffmpeg-devel; then
err 'We were unable to install the FFmpeg and FFmpeg-devel packages.' \
'This is likely because the RPM Fusion free repository is not enabled.' \
'https://docs.fedoraproject.org/en-US/quick-docs/setup_rpmfusion'
fi
# Tauri dependencies
set -- build-base curl wget file patchelf openssl-dev gtk+3.0-dev librsvg-dev \
webkit2gtk-dev libayatana-indicator-dev
# Webkit2gtk requires gstreamer plugins for video playback to work
set -- "$@" gst-plugins-base-dev gst-plugins-good gst-plugins-ugly
# C/C++ build dependencies, required to build some *-sys crates
set -- "$@" llvm16-dev clang16 nasm perl
# React dependencies
set -- "$@" vips
sudo apk add "$@"
else
if has lsb_release; then
_distro="'$(lsb_release -s -d)' "
@ -226,4 +234,7 @@ case "$(uname)" in
;;
esac
echo "Installing Rust tools..."
cargo install cargo-watch
echo 'Your machine has been setup for Spacedrive development!'

119
scripts/tauri.mjs Normal file → Executable file
View file

@ -1,10 +1,14 @@
#!/usr/bin/env node
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { env, exit, umask, platform } from 'node:process'
import { setTimeout } from 'node:timers/promises'
import { fileURLToPath } from 'node:url'
import * as toml from '@iarna/toml'
import { waitLockUnlock } from './utils/flock.mjs'
import { patchTauri } from './utils/patchTauri.mjs'
import spawn from './utils/spawn.mjs'
@ -53,11 +57,38 @@ if (cargoConfig.env && typeof cargoConfig.env === 'object')
// Default command
if (args.length === 0) args.push('build')
const targets = args
.filter((_, index, args) => {
if (index === 0) return false
const previous = args[index - 1]
return previous === '-t' || previous === '--target'
})
.flatMap(target => target.split(','))
const bundles = args
.filter((_, index, args) => {
if (index === 0) return false
const previous = args[index - 1]
return previous === '-b' || previous === '--bundles'
})
.flatMap(target => target.split(','))
let code = 0
try {
switch (args[0]) {
case 'dev': {
__cleanup.push(...(await patchTauri(__root, nativeDeps, args)))
__cleanup.push(...(await patchTauri(__root, nativeDeps, targets, bundles, args)))
switch (process.platform) {
case 'darwin':
case 'linux':
void waitLockUnlock(path.join(__root, 'target', 'debug', '.cargo-lock')).then(
() => setTimeout(1000).then(cleanUp),
() => {}
)
break
}
break
}
case 'build': {
@ -65,68 +96,48 @@ try {
env.NODE_OPTIONS = `--max_old_space_size=4096 ${env.NODE_OPTIONS ?? ''}`
}
__cleanup.push(...(await patchTauri(__root, nativeDeps, args)))
__cleanup.push(...(await patchTauri(__root, nativeDeps, targets, bundles, args)))
switch (process.platform) {
case 'darwin': {
// Configure DMG background
env.BACKGROUND_FILE = path.resolve(
desktopApp,
'src-tauri',
'dmg-background.png'
if (process.platform === 'darwin') {
// Configure DMG background
env.BACKGROUND_FILE = path.resolve(desktopApp, 'src-tauri', 'dmg-background.png')
env.BACKGROUND_FILE_NAME = path.basename(env.BACKGROUND_FILE)
env.BACKGROUND_CLAUSE = `set background picture of opts to file ".background:${env.BACKGROUND_FILE_NAME}"`
if (!(await exists(env.BACKGROUND_FILE)))
console.warn(
`WARNING: DMG background file not found at ${env.BACKGROUND_FILE}`
)
env.BACKGROUND_FILE_NAME = path.basename(env.BACKGROUND_FILE)
env.BACKGROUND_CLAUSE = `set background picture of opts to file ".background:${env.BACKGROUND_FILE_NAME}"`
if (!(await exists(env.BACKGROUND_FILE)))
console.warn(
`WARNING: DMG background file not found at ${env.BACKGROUND_FILE}`
)
break
}
case 'linux':
// Cleanup appimage bundle to avoid build_appimage.sh failing
await fs.rm(path.join(__root, 'target', 'release', 'bundle', 'appimage'), {
recursive: true,
force: true,
})
break
break
}
}
}
await spawn('pnpm', ['exec', 'tauri', ...args], desktopApp).catch(async error => {
if (args[0] === 'build' || platform === 'linux') {
// Work around appimage buindling not working sometimes
const appimageDir = path.join(__root, 'target', 'release', 'bundle', 'appimage')
if (
(await exists(path.join(appimageDir, 'build_appimage.sh'))) &&
(await fs.readdir(appimageDir).then(f => f.every(f => !f.endsWith('.AppImage'))))
) {
// Remove AppDir to allow build_appimage to rebuild it
await fs.rm(path.join(appimageDir, 'spacedrive.AppDir'), {
recursive: true,
force: true,
})
return spawn('bash', ['build_appimage.sh'], appimageDir).catch(exitCode => {
code = exitCode
console.error(`tauri ${args[0]} failed with exit code ${exitCode}`)
})
await spawn('pnpm', ['exec', 'tauri', ...args], desktopApp)
if (args[0] === 'build' && bundles.some(bundle => bundle === 'deb' || bundle === 'all')) {
const linuxTargets = targets.filter(target => target.includes('-linux-'))
if (linuxTargets.length > 0)
for (const target of linuxTargets) {
env.TARGET = target
await spawn(path.join(__dirname, 'fix-deb.sh'), [], __dirname)
}
}
console.error(
`tauri ${args[0]} failed with exit code ${typeof error === 'number' ? error : 1}`
)
console.warn(
`If you got an error related to FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm prep\``
)
throw error
})
else if (process.platform === 'linux')
await spawn(path.join(__dirname, 'fix-deb.sh'), [], __dirname)
}
} catch (error) {
console.error(
`tauri ${args[0]} failed with exit code ${typeof error === 'number' ? error : 1}`
)
console.warn(
`If you got an error related to libav*/FFMpeg or Protoc/Protobuf you may need to re-run \`pnpm prep\``,
`If you got an error related to missing nasm you need to run ${
platform === 'win32' ? './scripts/setup.ps1' : './scripts/setup.sh'
}`
)
if (typeof error === 'number') {
code = error
} else {

View file

@ -1,71 +1,25 @@
// Suffixes
export const PROTOC_SUFFIX = {
Linux: {
i386: 'linux-x86_32',
i686: 'linux-x86_32',
x86_64: 'linux-x86_64',
aarch64: 'linux-aarch_64',
},
Darwin: {
x86_64: 'osx-x86_64',
export const NATIVE_DEPS_URL =
'https://github.com/spacedriveapp/native-deps/releases/latest/download'
aarch64: 'osx-aarch_64',
},
Windows_NT: {
i386: 'win32',
i686: 'win32',
x86_64: 'win64',
},
}
export const PDFIUM_SUFFIX = {
export const NATIVE_DEPS_ASSETS = {
Linux: {
x86_64: {
musl: 'linux-musl-x64',
glibc: 'linux-x64',
},
aarch64: 'linux-arm64',
},
Darwin: {
x86_64: 'mac-x64',
aarch64: 'mac-arm64',
},
Windows_NT: {
x86_64: 'win-x64',
aarch64: 'win-arm64',
},
}
export const FFMPEG_SUFFFIX = {
Darwin: {
x86_64: 'x86_64',
aarch64: 'arm64',
},
Windows_NT: {
x86_64: 'x86_64',
},
}
export const FFMPEG_WORKFLOW = {
Darwin: 'ffmpeg-macos.yml',
Windows_NT: 'ffmpeg-windows.yml',
}
export const LIBHEIF_SUFFIX = {
Linux: {
x86_64: {
musl: 'x86_64-linux-musl',
glibc: 'x86_64-linux-gnu',
musl: 'native-deps-x86_64-linux-musl.tar.xz',
glibc: 'native-deps-x86_64-linux-gnu.tar.xz',
},
aarch64: {
musl: 'aarch64-linux-musl',
glibc: 'aarch64-linux-gnu',
musl: 'native-deps-aarch64-linux-musl.tar.xz',
glibc: 'native-deps-aarch64-linux-gnu.tar.xz',
},
},
}
export const LIBHEIF_WORKFLOW = {
Linux: 'libheif-linux.yml',
Darwin: {
x86_64: 'native-deps-x86_64-darwin-apple.tar.xz',
aarch64: 'native-deps-aarch64-darwin-apple.tar.xz',
},
Windows_NT: {
x86_64: 'native-deps-x86_64-windows-gnu.tar.xz',
aarch64: 'native-deps-aarch64-windows-gnu.tar.xz',
},
}
/**
@ -85,13 +39,3 @@ export function getConst(constants, identifiers) {
return typeof constant === 'string' ? constant : null
}
/**
* @param {Record<string, unknown>} suffixes
* @param {string[]} identifiers
* @returns {RegExp?}
*/
export function getSuffix(suffixes, identifiers) {
const suffix = getConst(suffixes, identifiers)
return suffix ? new RegExp(`${suffix}(\\.[^\\.]+)*$`) : null
}

View file

@ -1,198 +0,0 @@
import * as fs from 'node:fs/promises'
import * as os from 'node:os'
import * as path from 'node:path'
import { env } from 'node:process'
import { extractTo } from 'archive-wasm/src/fs.mjs'
import {
FFMPEG_SUFFFIX,
FFMPEG_WORKFLOW,
getConst,
getSuffix,
LIBHEIF_SUFFIX,
LIBHEIF_WORKFLOW,
PDFIUM_SUFFIX,
PROTOC_SUFFIX,
} from './consts.mjs'
import {
getGh,
getGhArtifactContent,
getGhReleasesAssets,
getGhWorkflowRunArtifacts,
} from './github.mjs'
import { which } from './which.mjs'
// No-op callback used to swallow errors from best-effort cleanup steps
const noop = () => {}
// Extra diagnostics when NODE_ENV=debug
const __debug = env.NODE_ENV === 'debug'
const __osType = os.type()
// Github repos
const PDFIUM_REPO = 'bblanchon/pdfium-binaries'
const PROTOBUF_REPO = 'protocolbuffers/protobuf'
const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive'
/**
 * Download the protobuf compiler from its GitHub releases and unpack it
 * into the native deps directory. Does nothing when a `protoc` binary is
 * already reachable on $PATH.
 * @param {string[]} machineId
 * @param {string} nativeDeps
 */
export async function downloadProtc(machineId, nativeDeps) {
	if (await which('protoc')) return

	console.log('Downloading protoc...')

	const suffix = getSuffix(PROTOC_SUFFIX, machineId)
	if (suffix == null) throw new Error('NO_PROTOC')

	let downloaded = false
	for await (const asset of getGhReleasesAssets(PROTOBUF_REPO)) {
		if (!suffix.test(asset.name)) continue
		try {
			const archive = await getGh(asset.downloadUrl)
			await extractTo(archive, nativeDeps, {
				chmod: 0o600,
				overwrite: true,
			})
			downloaded = true
			break
		} catch (err) {
			console.warn('Failed to download protoc, re-trying...')
			if (__debug) console.error(err)
		}
	}
	if (!downloaded) throw new Error('NO_PROTOC')

	// cleanup: the release archive ships a readme we don't want to keep
	await fs.unlink(path.join(nativeDeps, 'readme.txt')).catch(__debug ? console.error : noop)
}
/**
 * Download the pdfium binary release (used for generating PDF thumbnails)
 * and unpack it into the native deps directory, then tidy up the archive's
 * metadata files and set library permissions per OS.
 * @param {string[]} machineId
 * @param {string} nativeDeps
 */
export async function downloadPDFium(machineId, nativeDeps) {
	console.log('Downloading pdfium...')

	const suffix = getSuffix(PDFIUM_SUFFIX, machineId)
	if (suffix == null) throw new Error('NO_PDFIUM')

	let downloaded = false
	for await (const asset of getGhReleasesAssets(PDFIUM_REPO)) {
		if (!suffix.test(asset.name)) continue
		try {
			const archive = await getGh(asset.downloadUrl)
			await extractTo(archive, nativeDeps, {
				chmod: 0o600,
				overwrite: true,
			})
			downloaded = true
			break
		} catch (err) {
			console.warn('Failed to download pdfium, re-trying...')
			if (__debug) console.error(err)
		}
	}
	if (!downloaded) throw new Error('NO_PDFIUM')

	// cleanup: rename the generic LICENSE and drop build metadata files
	const cleanup = [
		fs.rename(path.join(nativeDeps, 'LICENSE'), path.join(nativeDeps, 'LICENSE.pdfium')),
		...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map(file =>
			fs.unlink(path.join(nativeDeps, file)).catch(__debug ? console.error : noop)
		),
	]

	// Shared libraries need the executable bit on unix-like hosts
	if (__osType === 'Linux') {
		cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.so'), 0o750))
	} else if (__osType === 'Darwin') {
		cleanup.push(fs.chmod(path.join(nativeDeps, 'lib', 'libpdfium.dylib'), 0o750))
	}

	await Promise.all(cleanup)
}
/**
 * Download ffmpeg libs (used for video thumbnails) from this repo's CI
 * workflow artifacts. On platforms without a dedicated workflow, falls back
 * to requiring a system-wide `ffmpeg` on $PATH.
 * @param {string[]} machineId
 * @param {string} nativeDeps
 * @param {string[]} branches
 */
export async function downloadFFMpeg(machineId, nativeDeps, branches) {
	const workflow = getConst(FFMPEG_WORKFLOW, machineId)
	if (workflow == null) {
		console.log('Checking FFMPeg...')
		if (await which('ffmpeg')) {
			// TODO: check ffmpeg version match what we need
			return
		}
		throw new Error('NO_FFMPEG')
	}

	console.log('Downloading FFMPeg...')

	const suffix = getSuffix(FFMPEG_SUFFFIX, machineId)
	if (suffix == null) throw new Error('NO_FFMPEG')

	for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
		if (!suffix.test(artifact.name)) continue
		try {
			const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id)
			await extractTo(data, nativeDeps, {
				chmod: 0o600,
				recursive: true,
				overwrite: true,
			})
			// First matching artifact that extracts cleanly wins
			return
		} catch (err) {
			console.warn('Failed to download FFMpeg, re-trying...')
			if (__debug) console.error(err)
		}
	}

	throw new Error('NO_FFMPEG')
}
/**
 * Download libheif libs (used for heif thumbnails) from this repo's CI
 * workflow artifacts. Silently does nothing on platforms with no libheif
 * workflow configured.
 * @param {string[]} machineId
 * @param {string} nativeDeps
 * @param {string[]} branches
 */
export async function downloadLibHeif(machineId, nativeDeps, branches) {
	const workflow = getConst(LIBHEIF_WORKFLOW, machineId)
	if (workflow == null) return

	console.log('Downloading LibHeif...')

	const suffix = getSuffix(LIBHEIF_SUFFIX, machineId)
	if (suffix == null) throw new Error('NO_LIBHEIF')

	for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
		if (!suffix.test(artifact.name)) continue
		try {
			const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id)
			await extractTo(data, nativeDeps, {
				chmod: 0o600,
				recursive: true,
				overwrite: true,
			})
			// First matching artifact that extracts cleanly wins
			return
		} catch (err) {
			console.warn('Failed to download LibHeif, re-trying...')
			if (__debug) console.error(err)
		}
	}

	throw new Error('NO_LIBHEIF')
}

138
scripts/utils/fetch.mjs Normal file
View file

@ -0,0 +1,138 @@
import * as fs from 'node:fs/promises'
import { dirname, join as joinPath } from 'node:path'
import { env } from 'node:process'
import { fileURLToPath } from 'node:url'
import { fetch, Headers } from 'undici'
// Extra diagnostics when NODE_ENV=debug
const __debug = env.NODE_ENV === 'debug'
// NOTE(review): __offline is read from OFFLINE but not referenced in this
// part of the file — confirm it is used further down
const __offline = env.OFFLINE === 'true'
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)
// On-disk HTTP response cache lives next to this script in `.tmp`
const cacheDir = joinPath(__dirname, '.tmp')
await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 })
/**
 * Look up a cached HTTP response for `resource` in the on-disk cache.
 * Returns the cached body plus a conditional-request header
 * (`If-None-Match`/`If-Modified-Since`) to revalidate it, or null when
 * nothing usable is cached. Never reads the cache in CI.
 * @param {string} resource
 * @param {Headers} [headers]
 * @returns {Promise<null | {data: Buffer, header: [string, string] | undefined}>}
 */
async function getCache(resource, headers) {
	// Don't cache in CI
	if (env.CI === 'true') return null

	// The cache key folds in any custom headers, excluding the validators
	// this function itself produces
	if (headers) {
		const extra = Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':')
		resource += extra
	}

	/** @type {Buffer | undefined} */
	let data
	/** @type {[string, string] | undefined} */
	let header

	try {
		const cacheFile = joinPath(cacheDir, Buffer.from(resource).toString('base64url'))
		const entry = JSON.parse(await fs.readFile(cacheFile, { encoding: 'utf8' }))
		if (entry && typeof entry === 'object') {
			// Prefer ETag revalidation over Last-Modified
			if (entry.etag && typeof entry.etag === 'string') {
				header = ['If-None-Match', entry.etag]
			} else if (entry.modifiedSince && typeof entry.modifiedSince === 'string') {
				header = ['If-Modified-Since', entry.modifiedSince]
			}
			if (entry.data && typeof entry.data === 'string') {
				data = Buffer.from(entry.data, 'base64')
			}
		}
	} catch (err) {
		// Missing or corrupt cache entry — treat as a miss
		if (__debug) {
			console.warn(`CACHE MISS: ${resource}`)
			console.error(err)
		}
	}

	if (!data) return null
	return { data, header }
}
/**
 * Persist a fetched response to the on-disk cache and return its body.
 * A 304 response is resolved against `cachedData` instead.
 * @param {import('undici').Response} response
 * @param {string} resource
 * @param {Buffer} [cachedData] - Previously cached body, returned on cache hits
 * @param {Headers} [headers]
 * @returns {Promise<Buffer>}
 */
async function setCache(response, resource, cachedData, headers) {
	const data = Buffer.from(await response.arrayBuffer())

	// Don't cache in CI
	if (env.CI === 'true') return data

	// Must mirror the cache-key augmentation done in getCache.
	if (headers)
		resource += Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':')

	// 304 (or an ok response with an empty body) means the cached copy is valid.
	if (response.status === 304 || (response.ok && data.length === 0)) {
		// Cache hit
		if (!cachedData) throw new Error('Empty cache hit ????')
		return cachedData
	}

	// Store the body together with validators for future conditional requests.
	const entry = JSON.stringify({
		etag: response.headers.get('ETag') || undefined,
		modifiedSince: response.headers.get('Last-Modified') || undefined,
		data: data.toString('base64'),
	})
	try {
		await fs.writeFile(
			joinPath(cacheDir, Buffer.from(resource).toString('base64url')),
			entry,
			{ mode: 0o640, flag: 'w+' }
		)
	} catch (error) {
		// Failing to write the cache is non-fatal.
		if (__debug) {
			console.warn(`CACHE WRITE FAIL: ${resource}`)
			console.error(error)
		}
	}

	return data
}
/**
 * Fetch `resource`, transparently using and updating the on-disk cache.
 * @param {URL | string} resource
 * @param {Headers?} [headers]
 * @param {boolean} [preferCache] - Return a cached copy without revalidating when available
 * @returns {Promise<Buffer>}
 */
export async function get(resource, headers, preferCache) {
	if (resource instanceof URL) resource = resource.toString()
	if (headers == null) headers = new Headers()

	const cache = await getCache(resource, headers)
	if (__offline) {
		// Offline mode: the cache is the only possible source.
		if (cache?.data == null)
			throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`)
		return cache.data
	}
	if (preferCache && cache?.data != null) return cache.data

	// Attach the conditional header (If-None-Match / If-Modified-Since) if cached.
	if (cache?.header) headers.append(...cache.header)

	const response = await fetch(resource, { headers })
	if (response.ok) return await setCache(response, resource, cache?.data, headers)

	// Prefer a stale cached copy over failing outright.
	if (cache?.data) {
		if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`)
		return cache.data
	}
	throw new Error(response.statusText)
}

35
scripts/utils/flock.mjs Normal file
View file

@ -0,0 +1,35 @@
import { exec as execCb, execFile as execFileCb } from 'node:child_process'
import { setTimeout } from 'node:timers/promises'
import { promisify } from 'node:util'

import { which } from './which.mjs'
const exec = promisify(execCb)
/**
 * Wait for `file` to be locked by another process (via flock) and then wait
 * for that lock to be released again.
 * @param {string} file
 * @returns {Promise<void>}
 * @throws {Error} When the `flock` binary is not installed
 */
export async function waitLockUnlock(file) {
	if (!(await which('flock'))) throw new Error('flock is not installed')

	// Probe the lock without blocking. FIX: use execFile so `file` is passed as
	// an argv entry instead of being interpolated into a shell string, which
	// broke (and allowed shell injection) for paths containing quotes or
	// metacharacters.
	const execFile = promisify(execFileCb)
	const isUnlocked = () =>
		execFile('flock', ['-ns', file, '-c', 'true']).then(
			() => true,
			() => false
		)

	// Phase 1: poll until some other process acquires the lock.
	while (await isUnlocked()) await setTimeout(100)

	// Phase 2: poll until that lock is released.
	while (!(await isUnlocked())) await setTimeout(100)
}

View file

@ -1,87 +0,0 @@
import { exec as execCb } from 'node:child_process'
import * as fs from 'node:fs/promises'
import * as path from 'node:path'
import { env } from 'node:process'
import { promisify } from 'node:util'
const __debug = env.NODE_ENV === 'debug'
const exec = promisify(execCb)
/**
 * Extract the local branch component from git's `upstream:short` format
 * (`<remote>/<branch>`), e.g. `origin/feat/foo` -> `feat/foo`.
 * @param {string} upstream
 * @returns {string?} Null when there is no `<remote>/` prefix
 */
function branchFromUpstream(upstream) {
	const separator = upstream.indexOf('/')
	// FIX: the previous `split('/')` destructuring kept only the first path
	// segment, mangling branch names that contain slashes.
	return separator === -1 ? null : upstream.slice(separator + 1)
}

/**
 * Resolve the upstream branch tracked by the currently checked-out branch.
 * @param {string} repoPath
 * @returns {Promise<string?>} Null when it cannot be determined
 */
async function getRemoteBranchName(repoPath) {
	let branchName
	try {
		branchName = (await exec('git symbolic-ref --short HEAD', { cwd: repoPath })).stdout.trim()
		if (!branchName) throw new Error('Empty local branch name')
	} catch (error) {
		if (__debug) {
			console.warn(`Failed to read git local branch name`)
			console.error(error)
		}
		return null
	}

	let remoteBranchName
	try {
		remoteBranchName = (
			await exec(`git for-each-ref --format="%(upstream:short)" refs/heads/${branchName}`, {
				cwd: repoPath,
			})
		).stdout.trim()
		const branch = branchFromUpstream(remoteBranchName)
		if (!branch) throw new Error('Empty remote branch name')
		remoteBranchName = branch
	} catch (error) {
		if (__debug) {
			console.warn(`Failed to read git remote branch name`)
			console.error(error)
		}
		return null
	}

	return remoteBranchName
}
// Matches `ref: refs/heads/<branch>` inside a .git/HEAD file; the character
// class excludes bytes that are invalid in git ref names.
// https://stackoverflow.com/q/3651860#answer-67151923
// eslint-disable-next-line no-control-regex
const REF_REGEX = /ref:\s+refs\/heads\/(?<branch>[^\s\x00-\x1F:?[\\^~]+)/
// Strips the `refs/heads/` prefix from fully-qualified GitHub ref names.
const GITHUB_REF_REGEX = /^refs\/heads\//
/**
 * Build the ordered list of candidate branch names for the current checkout,
 * always ending with the `main`/`master` fallbacks.
 * @param {string} repoPath
 * @returns {Promise<string[]>}
 */
export async function getGitBranches(repoPath) {
	const branches = ['main', 'master']

	// In CI, prefer the PR head ref, then the push ref.
	if (env.GITHUB_HEAD_REF) branches.unshift(env.GITHUB_HEAD_REF)
	else if (env.GITHUB_REF) branches.unshift(env.GITHUB_REF.replace(GITHUB_REF_REGEX, ''))

	const remoteBranchName = await getRemoteBranchName(repoPath)
	if (remoteBranchName) {
		branches.unshift(remoteBranchName)
		return branches
	}

	// No upstream branch: fall back to parsing .git/HEAD directly.
	let head
	try {
		head = await fs.readFile(path.join(repoPath, '.git', 'HEAD'), { encoding: 'utf8' })
	} catch (error) {
		if (__debug) {
			console.warn(`Failed to read git HEAD file`)
			console.error(error)
		}
		return branches
	}

	const match = REF_REGEX.exec(head)
	if (match?.groups?.branch) branches.unshift(match.groups.branch)
	return branches
}

View file

@ -1,386 +0,0 @@
import * as fs from 'node:fs/promises'
import { dirname, join as joinPath, posix as path } from 'node:path'
import { env } from 'node:process'
import { setTimeout } from 'node:timers/promises'
import { fileURLToPath } from 'node:url'
import { fetch, Headers } from 'undici'
// Debug logging and offline mode are toggled via environment variables.
const __debug = env.NODE_ENV === 'debug'
const __offline = env.OFFLINE === 'true'
// Resolve a `.tmp` cache directory next to this script.
const __filename = fileURLToPath(import.meta.url)
const __dirname = dirname(__filename)
const cacheDir = joinPath(__dirname, '.tmp')
// Top-level await: ensure the cache dir exists before any request is made.
await fs.mkdir(cacheDir, { recursive: true, mode: 0o751 })
// Note: Trailing slashs are important to correctly append paths
const GH = 'https://api.github.com/repos/'
// nightly.link mirror, used to fetch workflow artifacts without authentication.
const NIGTHLY = 'https://nightly.link/'
// Github routes
const RELEASES = 'releases'
const WORKFLOWS = 'actions/workflows'
const ARTIFACTS = 'actions/artifacts'
// Default GH headers
const GH_HEADERS = new Headers({
	Accept: 'application/vnd.github+json',
	'X-GitHub-Api-Version': '2022-11-28',
})
// Load github auth token if available
if ('GITHUB_TOKEN' in env && env.GITHUB_TOKEN)
	GH_HEADERS.append('Authorization', `Bearer ${env.GITHUB_TOKEN}`)
/**
 * Read a previously cached response for `resource` from disk.
 * Returns the cached body plus a conditional request header to revalidate it.
 * @param {string} resource
 * @param {Headers} [headers]
 * @returns {Promise<null | {data: Buffer, header: [string, string] | undefined}>}
 */
async function getCache(resource, headers) {
	/** @type {Buffer | undefined} */
	let data
	/** @type {[string, string] | undefined} */
	let header
	// Don't cache in CI
	if (env.CI === 'true') return null
	// Non-conditional request headers participate in the cache key, so the same
	// URL fetched with different headers gets distinct cache entries.
	if (headers)
		resource += Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':')
	try {
		// Cache files are named by the base64url of the (augmented) resource key.
		const cache = JSON.parse(
			await fs.readFile(joinPath(cacheDir, Buffer.from(resource).toString('base64url')), {
				encoding: 'utf8',
			})
		)
		if (cache && typeof cache === 'object') {
			// Prefer ETag revalidation; fall back to Last-Modified.
			if (cache.etag && typeof cache.etag === 'string') {
				header = ['If-None-Match', cache.etag]
			} else if (cache.modifiedSince && typeof cache.modifiedSince === 'string') {
				header = ['If-Modified-Since', cache.modifiedSince]
			}
			if (cache.data && typeof cache.data === 'string')
				data = Buffer.from(cache.data, 'base64')
		}
	} catch (error) {
		// A missing or corrupt cache file is just a cache miss.
		if (__debug) {
			console.warn(`CACHE MISS: ${resource}`)
			console.error(error)
		}
	}
	return data ? { data, header } : null
}
/**
 * Persist a fetched response to the on-disk cache and return its body.
 * A 304 response is resolved against `cachedData` instead.
 * @param {import('undici').Response} response
 * @param {string} resource
 * @param {Buffer} [cachedData] - Previously cached body, returned on cache hits
 * @param {Headers} [headers]
 * @returns {Promise<Buffer>}
 */
async function setCache(response, resource, cachedData, headers) {
	const data = Buffer.from(await response.arrayBuffer())
	// Don't cache in CI
	if (env.CI === 'true') return data
	// Validators for future conditional requests.
	const etag = response.headers.get('ETag') || undefined
	const modifiedSince = response.headers.get('Last-Modified') || undefined
	// Must mirror the cache-key augmentation done in getCache.
	if (headers)
		resource += Array.from(headers.entries())
			.filter(([name]) => name !== 'If-None-Match' && name !== 'If-Modified-Since')
			.flat()
			.join(':')
	// 304 (or an ok response with an empty body) means the cached copy is valid.
	if (response.status === 304 || (response.ok && data.length === 0)) {
		// Cache hit
		if (!cachedData) throw new Error('Empty cache hit ????')
		return cachedData
	}
	try {
		await fs.writeFile(
			joinPath(cacheDir, Buffer.from(resource).toString('base64url')),
			JSON.stringify({
				etag,
				modifiedSince,
				data: data.toString('base64'),
			}),
			{ mode: 0o640, flag: 'w+' }
		)
	} catch (error) {
		// Failing to write the cache is non-fatal.
		if (__debug) {
			console.warn(`CACHE WRITE FAIL: ${resource}`)
			console.error(error)
		}
	}
	return data
}
/**
 * Fetch `resource`, transparently using and updating the on-disk cache.
 * @param {URL | string} resource
 * @param {Headers?} [headers]
 * @param {boolean} [preferCache] - Return a cached copy without revalidating when available
 * @returns {Promise<Buffer>}
 */
export async function get(resource, headers, preferCache) {
	if (headers == null) headers = new Headers()
	if (resource instanceof URL) resource = resource.toString()
	const cache = await getCache(resource, headers)
	if (__offline) {
		// Offline mode: the cache is the only possible source.
		if (cache?.data == null)
			throw new Error(`OFFLINE MODE: Cache for request ${resource} doesn't exist`)
		return cache.data
	}
	if (preferCache && cache?.data != null) return cache.data
	// Attach the conditional header (If-None-Match / If-Modified-Since) if cached.
	if (cache?.header) headers.append(...cache.header)
	const response = await fetch(resource, { headers })
	if (!response.ok) {
		// Prefer a stale cached copy over failing outright.
		if (cache?.data) {
			if (__debug) console.warn(`CACHE HIT due to fail: ${resource} ${response.statusText}`)
			return cache.data
		}
		throw new Error(response.statusText)
	}
	return await setCache(response, resource, cache?.data, headers)
}
// Header name Description
// x-ratelimit-limit The maximum number of requests you're permitted to make per hour.
// x-ratelimit-remaining The number of requests remaining in the current rate limit window.
// x-ratelimit-used The number of requests you've made in the current rate limit window.
// x-ratelimit-reset The time at which the current rate limit window resets in UTC epoch seconds.
// Module-level tracker of Github's rate limit, updated by getGh() from the
// response headers above. `reset` is a relative wait in seconds.
const RATE_LIMIT = {
	reset: 0,
	remaining: Infinity,
}
/**
 * Get resource from a Github route with some pre-defined parameters
 * @param {string} route
 * @returns {Promise<Buffer>}
 */
export async function getGh(route) {
	route = new URL(route, GH).toString()
	const cache = await getCache(route)
	if (__offline) {
		// Offline mode: the cache is the only possible source.
		if (cache?.data == null)
			throw new Error(`OFFLINE MODE: Cache for request ${route} doesn't exist`)
		return cache?.data
	}
	if (RATE_LIMIT.remaining === 0) {
		// Quota exhausted: serve from cache if possible, otherwise wait out the window.
		if (cache?.data) return cache.data
		console.warn(
			`RATE LIMIT: Waiting ${RATE_LIMIT.reset} seconds before contacting Github again... [CTRL+C to cancel]`
		)
		await setTimeout(RATE_LIMIT.reset * 1000)
	}
	const headers = new Headers(GH_HEADERS)
	// Attach the conditional header (If-None-Match / If-Modified-Since) if cached.
	if (cache?.header) headers.append(...cache.header)
	const response = await fetch(route, { method: 'GET', headers })
	// Update the shared rate-limit tracker from the response headers.
	const rateReset = Number.parseInt(response.headers.get('x-ratelimit-reset') ?? '')
	const rateRemaining = Number.parseInt(response.headers.get('x-ratelimit-remaining') ?? '')
	if (!(Number.isNaN(rateReset) || Number.isNaN(rateRemaining))) {
		// x-ratelimit-reset is a UTC epoch; convert to a relative wait in seconds.
		const reset = rateReset - Date.now() / 1000
		if (reset > RATE_LIMIT.reset) RATE_LIMIT.reset = reset
		if (rateRemaining < RATE_LIMIT.remaining) {
			RATE_LIMIT.remaining = rateRemaining
			if (__debug) {
				console.warn(`Github remaining requests: ${RATE_LIMIT.remaining}`)
				await setTimeout(5000)
			}
		}
	}
	if (!response.ok) {
		// Prefer a stale cached copy over failing outright.
		if (cache?.data) {
			if (__debug) console.warn(`CACHE HIT due to fail: ${route} ${response.statusText}`)
			return cache.data
		}
		// 403 with no quota left: retry; the guard at the top will wait for the reset.
		if (response.status === 403 && RATE_LIMIT.remaining === 0) return await getGh(route)
		throw new Error(response.statusText)
	}
	return await setCache(response, route, cache?.data)
}
/**
 * Iterate all assets of a repository's non-prerelease releases.
 * @param {string} repo
 * @yields {{name: string, downloadUrl: string}}
 */
export async function* getGhReleasesAssets(repo) {
	for (let page = 0; ; page++) {
		// "${_gh_url}/protocolbuffers/protobuf/releases?page=${_page}&per_page=100"
		const body = await getGh(path.join(repo, `${RELEASES}?page=${page}&per_page=100`))
		const releases = JSON.parse(body.toString('utf8'))
		if (!Array.isArray(releases)) throw new Error(`Error: ${JSON.stringify(releases)}`)
		// An empty page means every release has been listed.
		if (releases.length === 0) return

		for (const release of /** @type {unknown[]} */ (releases)) {
			const releaseIsValid =
				release &&
				typeof release === 'object' &&
				'assets' in release &&
				Array.isArray(release.assets)
			if (!releaseIsValid) throw new Error(`Invalid release: ${release}`)

			// Skip prereleases.
			if ('prerelease' in release && release.prerelease) continue

			for (const asset of /** @type {unknown[]} */ (release.assets)) {
				const assetIsValid =
					asset &&
					typeof asset === 'object' &&
					'name' in asset &&
					typeof asset.name === 'string' &&
					'browser_download_url' in asset &&
					typeof asset.browser_download_url === 'string'
				if (!assetIsValid) throw new Error(`Invalid release.asset: ${asset}`)

				yield { name: asset.name, downloadUrl: asset.browser_download_url }
			}
		}
	}
}
/**
 * Iterate the artifacts of all successful runs of a workflow, restricted to
 * the given branch(es).
 * @param {string} repo
 * @param {string} yaml - Workflow file name
 * @param {string | Array.<string> | Set.<string>} [branch] - Defaults to 'main'
 * @yields {{ id: number, name: string }}
 */
export async function* getGhWorkflowRunArtifacts(repo, yaml, branch) {
	// Normalize `branch` into a Set for O(1) membership checks.
	if (!branch) branch = 'main'
	if (typeof branch === 'string') branch = [branch]
	if (!(branch instanceof Set)) branch = new Set(branch)
	let page = 0
	while (true) {
		// Page through successful runs of the workflow, 100 at a time.
		const workflow = /** @type {unknown} */ (
			JSON.parse(
				(
					await getGh(
						path.join(
							repo,
							WORKFLOWS,
							yaml,
							`runs?page=${page++}&per_page=100&status=success`
						)
					)
				).toString('utf8')
			)
		)
		// Validate the response shape before trusting it.
		if (
			!(
				workflow &&
				typeof workflow === 'object' &&
				'workflow_runs' in workflow &&
				Array.isArray(workflow.workflow_runs)
			)
		)
			throw new Error(`Error: ${JSON.stringify(workflow)}`)
		// An empty page means every run has been listed.
		if (workflow.workflow_runs.length === 0) return
		for (const run of /** @type {unknown[]} */ (workflow.workflow_runs)) {
			if (
				!(
					run &&
					typeof run === 'object' &&
					'head_branch' in run &&
					typeof run.head_branch === 'string' &&
					'artifacts_url' in run &&
					typeof run.artifacts_url === 'string'
				)
			)
				throw new Error(`Invalid Workflow run: ${run}`)
			// Only consider runs from the requested branches.
			if (!branch.has(run.head_branch)) continue
			// Fetch this run's artifact listing.
			const response = /** @type {unknown} */ (
				JSON.parse((await getGh(run.artifacts_url)).toString('utf8'))
			)
			if (
				!(
					response &&
					typeof response === 'object' &&
					'artifacts' in response &&
					Array.isArray(response.artifacts)
				)
			)
				throw new Error(`Error: ${JSON.stringify(response)}`)
			for (const artifact of /** @type {unknown[]} */ (response.artifacts)) {
				if (
					!(
						artifact &&
						typeof artifact === 'object' &&
						'id' in artifact &&
						typeof artifact.id === 'number' &&
						'name' in artifact &&
						typeof artifact.name === 'string'
					)
				)
					throw new Error(`Invalid artifact: ${artifact}`)
				yield { id: artifact.id, name: artifact.name }
			}
		}
	}
}
/**
 * Download the zipped content of a workflow artifact.
 * @param {string} repo
 * @param {number} id
 * @returns {Promise<Buffer>}
 */
export async function getGhArtifactContent(repo, id) {
	// Artifacts can only be downloaded directly from Github with authorized requests
	if (GH_HEADERS.has('Authorization')) {
		try {
			// "${_gh_url}/${_sd_gh_path}/actions/artifacts/${_artifact_id}/zip"
			return await getGh(path.join(repo, ARTIFACTS, id.toString(), 'zip'))
		} catch (error) {
			if (__debug) {
				console.warn('Failed to download artifact from github, fallback to nightly.link')
				console.error(error)
			}
		}
	}

	// nightly.link works around the lack of a public GitHub API for downloading
	// workflow artifacts (https://github.com/actions/upload-artifact/issues/51),
	// so environments not authenticated with github can still fetch them.
	// "https://nightly.link/${_sd_gh_path}/actions/artifacts/${_artifact_id}.zip"
	const fallbackUrl = new URL(path.join(repo, ARTIFACTS, `${id}.zip`), NIGTHLY)
	return await get(fallbackUrl, null, true)
}

View file

@ -52,10 +52,12 @@ export async function tauriUpdaterKey(nativeDeps) {
/**
* @param {string} root
* @param {string} nativeDeps
* @param {string[]} targets
* @param {string[]} bundles
* @param {string[]} args
* @returns {Promise<string[]>}
*/
export async function patchTauri(root, nativeDeps, args) {
export async function patchTauri(root, nativeDeps, targets, bundles, args) {
if (args.findIndex(e => e === '-c' || e === '--config') !== -1) {
throw new Error('Custom tauri build config is not supported.')
}
@ -66,7 +68,7 @@ export async function patchTauri(root, nativeDeps, args) {
const osType = os.type()
const resources =
osType === 'Linux'
? await copyLinuxLibs(root, nativeDeps)
? await copyLinuxLibs(root, nativeDeps, args[0] === 'dev')
: osType === 'Windows_NT'
? await copyWindowsDLLs(root, nativeDeps)
: { files: [], toClean: [] }
@ -86,6 +88,12 @@ export async function patchTauri(root, nativeDeps, args) {
.readFile(path.join(tauriRoot, 'tauri.conf.json'), 'utf-8')
.then(JSON.parse)
if (bundles.length === 0) {
const defaultBundles = tauriConfig.tauri?.bundle?.targets
if (Array.isArray(defaultBundles)) bundles.push(...defaultBundles)
if (bundles.length === 0) bundles.push('all')
}
if (args[0] === 'build') {
if (tauriConfig?.tauri?.updater?.active) {
const pubKey = await tauriUpdaterKey(nativeDeps)
@ -94,19 +102,10 @@ export async function patchTauri(root, nativeDeps, args) {
}
if (osType === 'Darwin') {
// ARM64 support was added in macOS 11, but we need at least 11.2 due to our ffmpeg build
const macOSArm64MinimumVersion = '11.2'
const macOSArm64MinimumVersion = '11.0'
let macOSMinimumVersion = tauriConfig?.tauri?.bundle?.macOS?.minimumSystemVersion
const targets = args
.filter((_, index, args) => {
if (index === 0) return false
const previous = args[index - 1]
return previous === '-t' || previous === '--target'
})
.flatMap(target => target.split(','))
if (
(targets.includes('aarch64-apple-darwin') ||
(targets.length === 0 && process.arch === 'arm64')) &&

View file

@ -18,58 +18,6 @@ async function link(origin, target, rename) {
await (rename ? fs.rename(origin, target) : fs.symlink(path.relative(parent, origin), target))
}
/**
 * Move headers and dylibs of external deps to our framework
 * @param {string} nativeDeps
 */
export async function setupMacOsFramework(nativeDeps) {
	// External deps
	const lib = path.join(nativeDeps, 'lib')
	const include = path.join(nativeDeps, 'include')
	// Framework
	const framework = path.join(nativeDeps, 'FFMpeg.framework')
	const headers = path.join(framework, 'Headers')
	const libraries = path.join(framework, 'Libraries')
	const documentation = path.join(framework, 'Resources', 'English.lproj', 'Documentation')
	// Move files
	await Promise.all([
		// Move pdfium license to framework
		fs.rename(
			path.join(nativeDeps, 'LICENSE.pdfium'),
			path.join(documentation, 'LICENSE.pdfium')
		),
		// Move dylibs to framework.
		// FIX: the `.then` callback must return Promise.all of the mapped link
		// operations — previously it returned a plain array of pending promises,
		// so the outer Promise.all resolved before the moves finished (and their
		// failures went unobserved).
		fs.readdir(lib, { recursive: true, withFileTypes: true }).then(entries =>
			Promise.all(
				entries
					.filter(
						entry =>
							(entry.isFile() || entry.isSymbolicLink()) &&
							entry.name.endsWith('.dylib')
					)
					.map(entry => {
						const file = path.join(entry.path, entry.name)
						const newFile = path.resolve(libraries, path.relative(lib, file))
						return link(file, newFile, true)
					})
			)
		),
		// Move headers to framework (same fix as above).
		fs.readdir(include, { recursive: true, withFileTypes: true }).then(entries =>
			Promise.all(
				entries
					.filter(
						entry =>
							(entry.isFile() || entry.isSymbolicLink()) &&
							!entry.name.endsWith('.proto')
					)
					.map(entry => {
						const file = path.join(entry.path, entry.name)
						const newFile = path.resolve(headers, path.relative(include, file))
						return link(file, newFile, true)
					})
			)
		),
	])
}
/**
* Symlink shared libs paths for Linux
* @param {string} root
@ -87,56 +35,33 @@ export async function symlinkSharedLibsLinux(root, nativeDeps) {
/**
* Symlink shared libs paths for macOS
* @param {string} root
* @param {string} nativeDeps
*/
export async function symlinkSharedLibsMacOS(nativeDeps) {
// External deps
const lib = path.join(nativeDeps, 'lib')
const include = path.join(nativeDeps, 'include')
export async function symlinkSharedLibsMacOS(root, nativeDeps) {
// rpath=@executable_path/../Frameworks/Spacedrive.framework
const targetFrameworks = path.join(root, 'target', 'Frameworks')
// Framework
const framework = path.join(nativeDeps, 'FFMpeg.framework')
const headers = path.join(framework, 'Headers')
const libraries = path.join(framework, 'Libraries')
const framework = path.join(nativeDeps, 'Spacedrive.framework')
// Link files
await Promise.all([
// Link header files
fs.readdir(headers, { recursive: true, withFileTypes: true }).then(files =>
// Link Spacedrive.framework to target folder so sd-server can work ootb
await fs.rm(targetFrameworks, { recursive: true }).catch(() => {})
await fs.mkdir(targetFrameworks, { recursive: true })
await link(framework, path.join(targetFrameworks, 'Spacedrive.framework'))
// Sign dylibs (Required for them to work on macOS 13+)
await fs
.readdir(path.join(framework, 'Libraries'), { recursive: true, withFileTypes: true })
.then(files =>
Promise.all(
files
.filter(entry => entry.isFile() || entry.isSymbolicLink())
.map(entry => {
const file = path.join(entry.path, entry.name)
return link(file, path.resolve(include, path.relative(headers, file)))
})
)
),
// Link dylibs
fs.readdir(libraries, { recursive: true, withFileTypes: true }).then(files =>
Promise.all(
files
.filter(
entry =>
(entry.isFile() || entry.isSymbolicLink()) &&
entry.name.endsWith('.dylib')
.filter(entry => entry.isFile() && entry.name.endsWith('.dylib'))
.map(entry =>
exec(`codesign -s "${signId}" -f "${path.join(entry.path, entry.name)}"`)
)
.map(entry => {
const file = path.join(entry.path, entry.name)
/** @type {Promise<unknown>[]} */
const actions = [
link(file, path.resolve(lib, path.relative(libraries, file))),
]
// Sign dylib (Required for it to work on macOS 13+)
if (entry.isFile())
actions.push(exec(`codesign -s "${signId}" -f "${file}"`))
return actions.length > 1 ? Promise.all(actions) : actions[0]
})
)
),
])
)
}
/**
@ -168,9 +93,10 @@ export async function copyWindowsDLLs(root, nativeDeps) {
* Symlink shared libs paths for Linux
* @param {string} root
* @param {string} nativeDeps
* @param {boolean} isDev
* @returns {Promise<{files: string[], toClean: string[]}>}
*/
export async function copyLinuxLibs(root, nativeDeps) {
export async function copyLinuxLibs(root, nativeDeps, isDev) {
// rpath=${ORIGIN}/../lib/spacedrive
const tauriSrc = path.join(root, 'apps', 'desktop', 'src-tauri')
const files = await fs
@ -184,10 +110,17 @@ export async function copyLinuxLibs(root, nativeDeps) {
(entry.name.endsWith('.so') || entry.name.includes('.so.'))
)
.map(async entry => {
await fs.copyFile(
path.join(entry.path, entry.name),
path.join(tauriSrc, entry.name)
)
if (entry.isSymbolicLink()) {
await fs.symlink(
await fs.readlink(path.join(entry.path, entry.name)),
path.join(tauriSrc, entry.name)
)
} else {
const target = path.join(tauriSrc, entry.name)
await fs.copyFile(path.join(entry.path, entry.name), target)
// https://web.archive.org/web/20220731055320/https://lintian.debian.org/tags/shared-library-is-executable
await fs.chmod(target, 0o644)
}
return entry.name
})
)
@ -195,6 +128,9 @@ export async function copyLinuxLibs(root, nativeDeps) {
return {
files,
toClean: files.map(file => path.join(tauriSrc, file)),
toClean: [
...files.map(file => path.join(tauriSrc, file)),
...files.map(file => path.join(root, 'target', isDev ? 'debug' : 'release', file)),
],
}
}