mirror of
https://github.com/spacedriveapp/spacedrive
synced 2024-07-04 14:33:34 +00:00
* Move postinstall script to a preprep script
- Fix libheif crate failing to build with our libheif
- Rework CI due to postinstall to preprep changes
* Linux heif build script + Update webp
* Fix ctrl+c/ctrl+v bug
* Improve libheif linux script
- Add support for linux aarch64
- Add CI workflow to build libheif for linux
- Some other misc fixes
* Fix libheif CI requires sudo
* Fix wrong path for libheif build.rs override in Windows
* Fix wrong path manipulations in libheif build script
* 🤦
* Use ubuntu-latest in libheif action
- Specify glib version in target triple to support old distros
- Fix libheif artifact publishing
* Fix artifact upload path again
* Add musl support for libheif
- Remove unused files from libheif artifact
- Add setup logic for libheif in postinstall script
* Build libheif for linux as a shared lib
* Fix meson not building the correct arch
- Add logic to get git branch from githubs CI env vars
* libheif finally works on linux
- Make spacedrive binary rpath point to where appimage and deb expects our libs to be
- Add some logic to tauri.js to convince tauri to bundle our shared libs
- Work-around appimage bundling step breaking sometimes
- Add logic to handle sigint in tauri.js to ensure we clean up after ourselves
- Rename postinstall.mjs to setup.mjs
- Add logic to setup.mjs to point our dev build to our shared libs in linux
* Fix windows desktop dev
- Rename setup.mjs to preprep.mjs
* test cache-factory
* Fix preprep script not parsing the cross compilation target triple and always using the host info to download dependencies
- Fix action env vars not being correctly passed
- Remove possibility to pass multiple targets to rust action
* Don't compile mobile crates on desktop targets
* Remove cache-factory pull_request trigger
* remove patched tauri cli
* Use git plumbing command to get remote branch name
- Fallback to reading .git/HEAD if remote name was not retrieved
* fix type
---------
Co-authored-by: Brendan Allan <brendonovich@outlook.com>
198 lines
5.2 KiB
JavaScript
198 lines
5.2 KiB
JavaScript
import * as fs from 'node:fs/promises';
|
|
import * as os from 'node:os';
|
|
import * as path from 'node:path';
|
|
import { env } from 'node:process';
|
|
import { extractTo } from 'archive-wasm/src/fs.mjs';
|
|
|
|
import {
|
|
getGh,
|
|
getGhArtifactContent,
|
|
getGhReleasesAssets,
|
|
getGhWorkflowRunArtifacts
|
|
} from './github.mjs';
|
|
import {
|
|
FFMPEG_SUFFFIX,
|
|
FFMPEG_WORKFLOW,
|
|
getConst,
|
|
getSuffix,
|
|
LIBHEIF_SUFFIX,
|
|
LIBHEIF_WORKFLOW,
|
|
PDFIUM_SUFFIX,
|
|
PROTOC_SUFFIX
|
|
} from './suffix.mjs';
|
|
import { which } from './which.mjs';
|
|
|
|
const noop = () => {};
|
|
|
|
const __debug = env.NODE_ENV === 'debug';
|
|
const __osType = os.type();
|
|
|
|
// Github repos
|
|
const PDFIUM_REPO = 'bblanchon/pdfium-binaries';
|
|
const PROTOBUF_REPO = 'protocolbuffers/protobuf';
|
|
const SPACEDRIVE_REPO = 'spacedriveapp/spacedrive';
|
|
|
|
/**
|
|
* Download and extract protobuff compiler binary
|
|
* @param {string[]} machineId
|
|
* @param {string} framework
|
|
*/
|
|
export async function downloadProtc(machineId, framework) {
|
|
if (await which('protoc')) return;
|
|
|
|
console.log('Downloading protoc...');
|
|
|
|
const protocSuffix = getSuffix(PROTOC_SUFFIX, machineId);
|
|
if (protocSuffix == null) throw new Error('NO_PROTOC');
|
|
|
|
let found = false;
|
|
for await (const release of getGhReleasesAssets(PROTOBUF_REPO)) {
|
|
if (!protocSuffix.test(release.name)) continue;
|
|
try {
|
|
await extractTo(await getGh(release.downloadUrl), framework, {
|
|
chmod: 0o600,
|
|
overwrite: true
|
|
});
|
|
found = true;
|
|
break;
|
|
} catch (error) {
|
|
console.warn('Failed to download protoc, re-trying...');
|
|
if (__debug) console.error(error);
|
|
}
|
|
}
|
|
|
|
if (!found) throw new Error('NO_PROTOC');
|
|
|
|
// cleanup
|
|
await fs.unlink(path.join(framework, 'readme.txt')).catch(__debug ? console.error : noop);
|
|
}
|
|
|
|
/**
|
|
* Download and extract pdfium library for generating PDFs thumbnails
|
|
* @param {string[]} machineId
|
|
* @param {string} framework
|
|
*/
|
|
export async function downloadPDFium(machineId, framework) {
|
|
console.log('Downloading pdfium...');
|
|
|
|
const pdfiumSuffix = getSuffix(PDFIUM_SUFFIX, machineId);
|
|
if (pdfiumSuffix == null) throw new Error('NO_PDFIUM');
|
|
|
|
let found = false;
|
|
for await (const release of getGhReleasesAssets(PDFIUM_REPO)) {
|
|
if (!pdfiumSuffix.test(release.name)) continue;
|
|
try {
|
|
await extractTo(await getGh(release.downloadUrl), framework, {
|
|
chmod: 0o600,
|
|
overwrite: true
|
|
});
|
|
found = true;
|
|
break;
|
|
} catch (error) {
|
|
console.warn('Failed to download pdfium, re-trying...');
|
|
if (__debug) console.error(error);
|
|
}
|
|
}
|
|
|
|
if (!found) throw new Error('NO_PDFIUM');
|
|
|
|
// cleanup
|
|
const cleanup = [
|
|
fs.rename(path.join(framework, 'LICENSE'), path.join(framework, 'LICENSE.pdfium')),
|
|
...['args.gn', 'PDFiumConfig.cmake', 'VERSION'].map((file) =>
|
|
fs.unlink(path.join(framework, file)).catch(__debug ? console.error : noop)
|
|
)
|
|
];
|
|
|
|
switch (__osType) {
|
|
case 'Linux':
|
|
cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.so'), 0o750));
|
|
break;
|
|
case 'Darwin':
|
|
cleanup.push(fs.chmod(path.join(framework, 'lib', 'libpdfium.dylib'), 0o750));
|
|
break;
|
|
}
|
|
|
|
await Promise.all(cleanup);
|
|
}
|
|
|
|
/**
|
|
* Download and extract ffmpeg libs for video thumbnails
|
|
* @param {string[]} machineId
|
|
* @param {string} framework
|
|
* @param {string[]} branches
|
|
*/
|
|
export async function downloadFFMpeg(machineId, framework, branches) {
|
|
const workflow = getConst(FFMPEG_WORKFLOW, machineId);
|
|
if (workflow == null) {
|
|
console.log('Checking FFMPeg...');
|
|
if (await which('ffmpeg')) {
|
|
// TODO: check ffmpeg version match what we need
|
|
return;
|
|
} else {
|
|
throw new Error('NO_FFMPEG');
|
|
}
|
|
}
|
|
|
|
console.log('Downloading FFMPeg...');
|
|
|
|
const ffmpegSuffix = getSuffix(FFMPEG_SUFFFIX, machineId);
|
|
if (ffmpegSuffix == null) throw new Error('NO_FFMPEG');
|
|
|
|
let found = false;
|
|
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
|
|
if (!ffmpegSuffix.test(artifact.name)) continue;
|
|
try {
|
|
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id);
|
|
await extractTo(data, framework, {
|
|
chmod: 0o600,
|
|
recursive: true,
|
|
overwrite: true
|
|
});
|
|
found = true;
|
|
break;
|
|
} catch (error) {
|
|
console.warn('Failed to download FFMpeg, re-trying...');
|
|
if (__debug) console.error(error);
|
|
}
|
|
}
|
|
|
|
if (!found) throw new Error('NO_FFMPEG');
|
|
}
|
|
|
|
/**
|
|
* Download and extract libheif libs for heif thumbnails
|
|
* @param {string[]} machineId
|
|
* @param {string} framework
|
|
* @param {string[]} branches
|
|
*/
|
|
export async function downloadLibHeif(machineId, framework, branches) {
|
|
const workflow = getConst(LIBHEIF_WORKFLOW, machineId);
|
|
if (workflow == null) return;
|
|
|
|
console.log('Downloading LibHeif...');
|
|
|
|
const libHeifSuffix = getSuffix(LIBHEIF_SUFFIX, machineId);
|
|
if (libHeifSuffix == null) throw new Error('NO_LIBHEIF');
|
|
|
|
let found = false;
|
|
for await (const artifact of getGhWorkflowRunArtifacts(SPACEDRIVE_REPO, workflow, branches)) {
|
|
if (!libHeifSuffix.test(artifact.name)) continue;
|
|
try {
|
|
const data = await getGhArtifactContent(SPACEDRIVE_REPO, artifact.id);
|
|
await extractTo(data, framework, {
|
|
chmod: 0o600,
|
|
recursive: true,
|
|
overwrite: true
|
|
});
|
|
found = true;
|
|
break;
|
|
} catch (error) {
|
|
console.warn('Failed to download LibHeif, re-trying...');
|
|
if (__debug) console.error(error);
|
|
}
|
|
}
|
|
|
|
if (!found) throw new Error('NO_LIBHEIF');
|
|
}
|