Initial commit of native dependency building.

This commit is contained in:
David Baker 2020-02-13 23:52:16 +00:00
parent 3716545085
commit 0258d456e6
15 changed files with 2939 additions and 48 deletions

2
.gitignore vendored
View file

@ -6,3 +6,5 @@
/node_modules
/docker_node_modules
/pkg/control
/.hak
/.yarnrc

109
hak/matrix-seshat/build.js Normal file
View file

@ -0,0 +1,109 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const child_process = require('child_process');
module.exports = async function(hakEnv, moduleInfo) {
if (hakEnv.isWin()) await buildOpenSsl(hakEnv, moduleInfo);
await buildSqlCipher(hakEnv, moduleInfo);
await buildMatrixSeshat(hakEnv, moduleInfo);
}
// NOTE(review): stub — this computes where the OpenSSL source is extracted
// (see the matching getOpenSsl in fetchDeps.js) but does not yet configure
// or build it, so the Windows build path is incomplete. TODO: implement.
async function buildOpenSsl(hakEnv, moduleInfo) {
    const openSslDir = path.join(moduleInfo.moduleHakDir, 'openssl-1.1.1d');
}
/**
 * Configure, build and install SQLCipher 4.3.0 as a static library into
 * moduleInfo.depPrefix, for matrix-seshat to link against.
 *
 * Previously the three build steps each hand-rolled the same
 * spawn-wrapped-in-a-promise boilerplate; that is now a shared helper.
 */
async function buildSqlCipher(hakEnv, moduleInfo) {
    const sqlCipherDir = path.join(moduleInfo.moduleHakDir, 'sqlcipher-4.3.0');

    // Run one build step in the sqlcipher source tree, rejecting with the
    // exit code if the command fails.
    const run = (cmd, cmdArgs) => new Promise((resolve, reject) => {
        const proc = child_process.spawn(cmd, cmdArgs, {
            cwd: sqlCipherDir,
            stdio: 'inherit',
        });
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });

    const args = [
        '--prefix=' + moduleInfo.depPrefix,
        '--enable-tempstore=yes',
        '--enable-shared=no',
    ];
    if (hakEnv.isMac()) {
        // use the OS crypto library rather than OpenSSL
        args.push('--with-crypto-lib=commoncrypto');
    }
    args.push('CFLAGS=-DSQLITE_HAS_CODEC');
    if (hakEnv.isMac()) {
        // frameworks needed at link time for CommonCrypto
        args.push('LDFLAGS=-framework Security -framework Foundation');
    }

    await run(path.join(sqlCipherDir, 'configure'), args);
    await run('make', []);
    await run('make', ['install']);
}
/**
 * Build the matrix-seshat module itself with neon, pointing it at the
 * static sqlcipher we installed into moduleInfo.depPrefix.
 */
async function buildMatrixSeshat(hakEnv, moduleInfo) {
    // fix: 'reject' was referenced in the exit handler but the executor
    // only declared 'resolve', so a failed build threw a ReferenceError
    // instead of rejecting this promise
    await new Promise((resolve, reject) => {
        const proc = child_process.spawn(
            path.join(moduleInfo.nodeModuleBinDir, 'neon'),
            ['build', '--release'],
            {
                cwd: moduleInfo.moduleBuildDir,
                env: Object.assign({
                    SQLCIPHER_STATIC: 1,
                    SQLCIPHER_LIB_DIR: path.join(moduleInfo.depPrefix, 'lib'),
                    SQLCIPHER_INCLUDE_DIR: path.join(moduleInfo.depPrefix, 'include'),
                }, hakEnv.makeGypEnv()),
                stdio: 'inherit',
            },
        );
        proc.on('exit', (code) => {
            code ? reject(code) : resolve();
        });
    });
}

View file

@ -0,0 +1,98 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const fsProm = require('fs').promises;
const needle = require('needle');
const tar = require('tar');
module.exports = async function(hakEnv, moduleInfo) {
await getSqlCipher(hakEnv, moduleInfo);
if (hakEnv.isWin()) {
getOpenSsl(hakEnv, moduleInfo);
}
}
/**
 * Download and extract the SQLCipher 4.3.0 source tarball into
 * moduleInfo.moduleHakDir, skipping any step whose output already exists.
 */
async function getSqlCipher(hakEnv, moduleInfo) {
    // existence probe: stat throws if the path isn't there
    const exists = async (p) => {
        try {
            await fsProm.stat(p);
            return true;
        } catch (e) {
            return false;
        }
    };

    const sqlCipherDir = path.join(moduleInfo.moduleHakDir, 'sqlcipher-4.3.0');
    if (await exists(sqlCipherDir)) return;

    const sqlCipherTarball = path.join(moduleInfo.moduleHakDir, 'sqlcipher-4.3.0.tar.gz');
    if (!(await exists(sqlCipherTarball))) {
        await needle('get', 'https://github.com/sqlcipher/sqlcipher/archive/v4.3.0.tar.gz', {
            follow: 10,
            output: sqlCipherTarball,
        });
    }

    await tar.x({
        file: sqlCipherTarball,
        cwd: moduleInfo.moduleHakDir,
    });
}
/**
 * Download and extract the OpenSSL 1.1.1d source tarball (Windows only;
 * see the caller) into moduleInfo.moduleHakDir.
 */
async function getOpenSsl(hakEnv, moduleInfo) {
    const openSslDir = path.join(moduleInfo.moduleHakDir, 'openssl-1.1.1d');
    let haveOpenSsl;
    try {
        await fsProm.stat(openSslDir);
        haveOpenSsl = true;
    } catch (e) {
        haveOpenSsl = false;
    }
    if (haveOpenSsl) return;

    // fix: this previously used moduleInfo.depDir, which index.js never
    // sets when building moduleInfo, so path.join(undefined, ...) threw.
    // Use moduleHakDir, matching both getSqlCipher above and the
    // existence check on openSslDir.
    const openSslTarball = path.join(moduleInfo.moduleHakDir, 'openssl-1.1.1d.tar.gz');
    let haveOpenSslTar;
    try {
        await fsProm.stat(openSslTarball);
        haveOpenSslTar = true;
    } catch (e) {
        haveOpenSslTar = false;
    }
    if (!haveOpenSslTar) {
        await needle('get', 'https://www.openssl.org/source/openssl-1.1.1d.tar.gz', {
            follow: 10,
            output: openSslTarball,
        });
    }

    await tar.x({
        file: openSslTarball,
        cwd: moduleInfo.moduleHakDir,
    });
}

View file

@ -0,0 +1,9 @@
{
"scripts": {
"check": "check.js",
"fetchDeps": "fetchDeps.js",
"build": "build.js"
},
"prune": "native",
"copy": "native/index.node"
}

View file

@ -16,13 +16,14 @@
"fetch": "yarn run mkdirs && node scripts/fetch-package.js",
"setversion": "node scripts/set-version.js",
"start": "electron .",
"lint": "eslint src/",
"lint": "eslint src/ scripts/",
"build": "yarn run setversion && electron-builder",
"in-docker": "scripts/in-docker.sh",
"docker:build": "yarn run in-docker yarn run build",
"docker:install": "yarn run in-docker yarn install",
"debrepo": "scripts/mkrepo.sh",
"clean": "rimraf webapp.asar dist packages deploys"
"clean": "rimraf webapp.asar dist packages deploys",
"hak": "node scripts/hak/index.js"
},
"dependencies": {
"auto-launch": "^5.0.1",
@ -40,11 +41,20 @@
"eslint": "^5.8.0",
"eslint-config-google": "^0.7.1",
"eslint-plugin-babel": "^4.1.2",
"find-npm-prefix": "^1.0.2",
"follow-redirects": "^1.9.0",
"glob": "^7.1.6",
"matrix-js-sdk": "^2.4.6-rc.1",
"mkdirp": "^0.5.1",
"rimraf": "^3.0.0",
"tar": "^5.0.5"
"mkdirp": "^1.0.3",
"needle": "^2.3.2",
"node-pre-gyp": "^0.14.0",
"npm": "^6.13.7",
"rimraf": "^3.0.2",
"semver": "^7.1.3",
"tar": "^6.0.1"
},
"hakDependencies": {
"matrix-seshat": "^1.0.0"
},
"build": {
"appId": "im.riot.app",

78
scripts/hak/README.md Normal file
View file

@ -0,0 +1,78 @@
hak
===
This tool builds native dependencies for riot-desktop. Here follows some very minimal
documentation for it.
Goals:
* Must build compiled native node modules in a shippable state
(ie. only dynamically linked against libraries that will be on the
target system, all unnecessary files removed).
* Must be able to build any native module, no matter what build system
it uses (electron-rebuild is supposed to do this job but only works
for modules that use gyp).
It's also loosely designed to be a general tool and agnostic to what it's
actually building. It's used here to build modules for the electron app
but should work equally well for building modules for normal node.
Running
=======
Hak is invoked with a command and a dependency, eg. `yarn run hak fetch matrix-seshat`.
If no dependencies are given, hak runs the command on all dependencies.
Files
=====
There are a lot of files involved:
* scripts/hak/... - The tool itself
* hak/[dependency] - Files provided by the app that tell hak how to build each of its native dependencies.
Contains a hak.json file and also some script files, each of which must be referenced in hak.json.
* .hak/ - Files generated by hak in the course of doing its job. Includes the dependency module itself and
any of the native dependency's native dependencies.
* .hak/[dependency]/build - An extracted copy of the dependency's node module used to build it.
* .hak/[dependency]/out - Another extracted copy of the dependency, this one contains only what will be shipped.
Workings
========
Hak works around native node modules that try to fetch or build their native component in
the npm 'install' phase - modules that do this will typically end up with native components
targeted to the build platform and the node that npm/yarn is using, which is no good for an
electron app.
It does this by installing it with `--ignore-scripts` and then using `yarn link` to keep the
dependency module separate so yarn doesn't try to run its install / postinstall script
at other points (eg. whenever you `yarn add` a random other dependency).
This also means that the dependencies cannot be listed in `dependencies` or
`devDependencies` in the project, since this would cause npm / yarn to install them and
try to fetch their native parts. Instead, they are listed in `hakDependencies` which
hak reads to install them for you.
Hak will *not* install dependencies for the copy of the module it links into your
project, so if your native module has javascript dependencies that are actually needed at
runtime (and not just to fetch / build the native parts), it won't work.
Hak will generate a `.yarnrc` in the project directory to set the link directory to its
own in the .hak directory (unless one already exists, in which case this is your problem).
Lifecycle
=========
Hak is divided into lifecycle stages, in order:
* fetch - Download and extract the source of the dependency
* link - Link the copy of the dependency into your node_modules directory
* fetchDeps - Fetch & extract any native dependencies required to build the module.
* build - The Good Stuff. Configure and build any native dependencies, then the module itself.
* copy - Copy the built artifact from the module build directory to the module output directory.
hak.json
========
The scripts section contains scripts used for lifecycle stages that need them (fetch, fetchDeps, build).
It also contains 'prune' and 'copy' which are globs of files to delete from the output module directory
and copy over from the module build directory to the output module directory, respectively.
Shortcomings
============
Hak doesn't know about dependencies between lifecycle stages, ie. it doesn't know that you need to
'fetch' and 'fetchDeps' before you can 'build', etc. You get to run each individually, and remember
the right order.

26
scripts/hak/build.js Normal file
View file

@ -0,0 +1,26 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const url = require('url');
const fsProm = require('fs').promises;
const child_process = require('child_process');
/**
 * Run the dependency's own build script (the 'build' entry from its
 * hak.json, loaded by index.js into moduleInfo.scripts).
 *
 * fix: the script's returned promise was not awaited, so build failures
 * were silently dropped and the stage could complete before the build did.
 * Also removed a stray ';' after the function declaration.
 */
async function build(hakEnv, moduleInfo) {
    await moduleInfo.scripts.build(hakEnv, moduleInfo);
}
module.exports = build;

53
scripts/hak/clean.js Normal file
View file

@ -0,0 +1,53 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const rimraf = require('rimraf');
// Adapt rimraf's (v3) callback API to a promise. Extracted because the
// same promisification boilerplate was previously copied three times.
function rimrafPromise(target) {
    return new Promise((resolve, reject) => {
        rimraf(target, (err) => {
            if (err) {
                reject(err);
            } else {
                resolve();
            }
        });
    });
}

/**
 * Remove everything hak created for this module: its .hak working
 * directory, its entry in the .hak links folder, and the link in the
 * project's node_modules.
 */
async function clean(hakEnv, moduleInfo) {
    await rimrafPromise(moduleInfo.moduleHakDir);
    await rimrafPromise(path.join(hakEnv.dotHakDir, 'links', moduleInfo.name));
    await rimrafPromise(path.join(hakEnv.projectRoot, 'node_modules', moduleInfo.name));
}
module.exports = clean;

67
scripts/hak/copy.js Normal file
View file

@ -0,0 +1,67 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const fsProm = require('fs').promises;
const rimraf = require('rimraf');
const glob = require('glob');
const mkdirp = require('mkdirp');
/**
 * The 'copy' lifecycle stage: prune unwanted files from the module's
 * output directory, then copy the built artifacts (the 'copy' glob from
 * hak.json) from the build directory into the output directory.
 */
async function copy(hakEnv, moduleInfo) {
    if (moduleInfo.cfg.prune) {
        console.log("Removing " + moduleInfo.cfg.prune + " from " + moduleInfo.moduleOutDir);
        // rimraf doesn't have a 'cwd' option: it always uses process.cwd()
        // (and if you set glob.cwd it just breaks because it can't find the files)
        const oldCwd = process.cwd();
        try {
            process.chdir(moduleInfo.moduleOutDir);
            await new Promise((resolve, reject) => {
                rimraf(moduleInfo.cfg.prune, {}, err => {
                    err ? reject(err) : resolve();
                });
            });
        } finally {
            process.chdir(oldCwd);
        }
    }

    if (moduleInfo.cfg.copy) {
        // fix: this log previously printed cfg.prune instead of cfg.copy,
        // and named moduleOutDir as both the source and the destination
        console.log(
            "Copying " + moduleInfo.cfg.copy + " from " +
            moduleInfo.moduleBuildDir + " to " + moduleInfo.moduleOutDir,
        );
        // fix: dropped the async promise-executor anti-pattern (the
        // executor was needlessly declared async)
        const files = await new Promise((resolve, reject) => {
            glob(moduleInfo.cfg.copy, {
                nosort: true,
                silent: true,
                cwd: moduleInfo.moduleBuildDir,
            }, (err, globbed) => {
                err ? reject(err) : resolve(globbed);
            });
        });
        for (const f of files) {
            console.log("\t" + f);
            const src = path.join(moduleInfo.moduleBuildDir, f);
            const dst = path.join(moduleInfo.moduleOutDir, f);
            // ensure the destination directory exists before copying
            await mkdirp(path.dirname(dst));
            await fsProm.copyFile(src, dst);
        }
    }
}
module.exports = copy;

116
scripts/hak/fetch.js Normal file
View file

@ -0,0 +1,116 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const url = require('url');
const fsProm = require('fs').promises;
const child_process = require('child_process');
const npm = require('npm');
const semver = require('semver');
const needle = require('needle');
const mkdirp = require('mkdirp');
const tar = require('tar');
/**
 * Fetch the source of a hak dependency from the npm registry and extract
 * it twice: into moduleBuildDir (where it is built, with its js
 * dependencies installed) and into moduleOutDir (the shippable copy).
 * No-op if the build directory already exists.
 */
async function fetch(hakEnv, moduleInfo) {
    let haveModuleBuildDir;
    try {
        const stats = await fsProm.stat(moduleInfo.moduleBuildDir);
        haveModuleBuildDir = stats.isDirectory();
    } catch (e) {
        haveModuleBuildDir = false;
    }
    if (haveModuleBuildDir) return;

    // fix: npm.load reports failure through its callback's error argument,
    // which was previously ignored
    await new Promise((resolve, reject) => {
        npm.load({'loglevel': 'silent'}, (err) => {
            err ? reject(err) : resolve();
        });
    });

    console.log("Fetching " + moduleInfo.name + " at version " + moduleInfo.version);

    // Ask the registry which concrete versions match the semver range and
    // where each version's tarball lives.
    // fix: the callback was previously placed *inside* the args array, so
    // npm.view was invoked without a callback and this promise never settled
    const versions = await new Promise((resolve, reject) => {
        npm.view([
            moduleInfo.name + '@' + moduleInfo.version,
            'dist.tarball',
        ], (err, result) => {
            if (err) {
                reject(err);
            } else {
                resolve(result);
            }
        });
    });

    const orderedVersions = Object.keys(versions);
    semver.sort(orderedVersions);
    // NOTE(review): orderedVersions[0] is the *lowest* matching version
    // after an ascending sort — confirm whether the highest matching
    // version (semver.rsort, or the last element) was intended here.
    console.log("Resolved version " + orderedVersions[0] + " for " + moduleInfo.name);
    const tarballUrl = versions[orderedVersions[0]]['dist.tarball'];

    await mkdirp(moduleInfo.moduleHakDir);

    const parsedUrl = url.parse(tarballUrl);
    const tarballFile = path.join(moduleInfo.moduleHakDir, path.basename(parsedUrl.path));

    let haveTarball;
    try {
        await fsProm.stat(tarballFile);
        haveTarball = true;
    } catch (e) {
        haveTarball = false;
    }
    if (!haveTarball) {
        console.log("Downloading " + tarballUrl);
        await needle('get', tarballUrl, { output: tarballFile });
    } else {
        console.log(tarballFile + " already exists.");
    }

    await mkdirp(moduleInfo.moduleBuildDir);
    await tar.x({
        file: tarballFile,
        cwd: moduleInfo.moduleBuildDir,
        strip: 1,
    });

    // --ignore-scripts stops the module fetching/building its own native
    // parts for the wrong runtime (see the README's 'Workings' section)
    await new Promise((resolve, reject) => {
        const proc = child_process.spawn(
            'yarn',
            ['install', '--ignore-scripts'],
            {
                stdio: 'inherit',
                cwd: moduleInfo.moduleBuildDir,
            },
        );
        proc.on('exit', code => {
            code ? reject(code) : resolve();
        });
    });

    // also extract another copy to the output directory at this point
    // nb. we do not yarn install in the output copy
    await mkdirp(moduleInfo.moduleOutDir);
    await tar.x({
        file: tarballFile,
        cwd: moduleInfo.moduleOutDir,
        strip: 1,
    });
}
module.exports = fetch;

23
scripts/hak/fetchDeps.js Normal file
View file

@ -0,0 +1,23 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
/**
 * The 'fetchDeps' lifecycle stage: run the dependency's own fetchDeps
 * script, if it provides one in its hak.json.
 */
async function fetchDeps(hakEnv, moduleInfo) {
    const script = moduleInfo.scripts.fetchDeps;
    if (!script) return;
    await script(hakEnv, moduleInfo);
}
module.exports = fetchDeps;

100
scripts/hak/hakEnv.js Normal file
View file

@ -0,0 +1,100 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const os = require('os');
const nodePreGypVersioning = require('node-pre-gyp/lib/util/versioning');
// Pull the electron version out of package.json's 'build.electronVersion'
// key, or return null if it isn't set.
// should we pick the version of an installed electron
// dependency, and if so, before or after electronVersion?
function getElectronVersion(packageJson) {
    const buildSection = packageJson.build;
    return (buildSection && buildSection.electronVersion)
        ? buildSection.electronVersion
        : null;
}
// Map the configured target to a node-pre-gyp runtime name:
// 'electron' when a build.electronVersion is configured.
// NOTE(review): the fallback for a plain-node build is 'node-webkit' —
// node-pre-gyp distinguishes 'node', 'electron' and 'node-webkit'
// runtimes, so confirm 'node' wasn't intended here.
function getRuntime(packageJson) {
    const electronVersion = getElectronVersion(packageJson);
    return electronVersion ? 'electron' : 'node-webkit';
}
// The runtime version to build against: the configured electron version
// if there is one, otherwise the node we're currently running under
// (process.version with its leading 'v' stripped).
function getTarget(packageJson) {
    return getElectronVersion(packageJson) || process.version.substr(1);
}
module.exports = class HakEnv {
constructor(prefix, packageJson) {
Object.assign(this, {
// what we're targeting
runtime: getRuntime(packageJson),
target: getTarget(packageJson),
platform: process.platform,
arch: process.arch,
// paths
projectRoot: prefix,
dotHakDir: path.join(prefix, '.hak'),
});
}
getRuntimeAbi() {
return nodePreGypVersioning.get_runtime_abi(
this.runtime,
this.target,
);
}
// {node_abi}-{platform}-{arch}
getNodeTriple() {
return this.getRuntimeAbi() + '-' + this.platform + '-' + this.arch;
}
isWin() {
return this.platform === 'win32';
}
isMac() {
return this.platform === 'darwin';
}
isLinux() {
return this.platform === 'linux';
}
makeGypEnv() {
return Object.assign({}, process.env, {
npm_config_target: this.target,
npm_config_arch: this.arch,
npm_config_target_arch: this.arch,
npm_config_disturl: 'https://atom.io/download/electron',
npm_config_runtime: this.runtime,
npm_config_build_from_source: true,
npm_config_devdir: path.join(os.homedir(), ".electron-gyp"),
});
}
getNodeModuleBin(name) {
return path.join(this.projectRoot, 'node_modules', '.bin', name);
}
};

131
scripts/hak/index.js Normal file
View file

@ -0,0 +1,131 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const fsProm = require('fs').promises;
const findNpmPrefix = require('find-npm-prefix');
const HakEnv = require('./hakEnv');
// Commands that operate on the project as a whole, not on a
// specific dependency module
const GENERALCOMMANDS = [
    'target',
];
// These can only be run on specific modules
const MODULECOMMANDS = [
    'fetch',
    'link',
    'fetchDeps',
    'build',
    'copy',
    'clean',
];
// Lifecycle stages for which a dependency may supply its own script
// via the 'scripts' section of its hak.json
const HAKSCRIPTS = [
    'fetch',
    'fetchDeps',
    'build',
];
/**
 * hak entry point: locate the project, load per-dependency config from
 * hak/<dep>/hak.json, then dispatch the requested command to the matching
 * command module, running it against each named (or every) dependency.
 */
async function main() {
    const prefix = await findNpmPrefix(process.cwd());
    let packageJson;
    try {
        packageJson = require(path.join(prefix, "package.json"));
    } catch (e) {
        console.error("Can't find a package.json!");
        process.exit(1);
    }

    const hakEnv = new HakEnv(prefix, packageJson);

    // Build the per-dependency metadata that command modules operate on
    const deps = {};
    const hakDepsCfg = packageJson.hakDependencies || {};
    for (const dep of Object.keys(hakDepsCfg)) {
        const hakJsonPath = path.join(prefix, 'hak', dep, 'hak.json');
        let hakJson;
        try {
            // fix: require() is synchronous — the previous 'await' did nothing
            hakJson = require(hakJsonPath);
        } catch (e) {
            console.error("No hak.json found for " + dep + ".");
            console.log("Expecting " + hakJsonPath);
            process.exit(1);
        }
        deps[dep] = {
            name: dep,
            version: hakDepsCfg[dep],
            cfg: hakJson,
            moduleHakDir: path.join(hakEnv.dotHakDir, dep),
            moduleBuildDir: path.join(hakEnv.dotHakDir, dep, 'build'),
            moduleOutDir: path.join(hakEnv.dotHakDir, dep, 'out'),
            nodeModuleBinDir: path.join(hakEnv.dotHakDir, dep, 'build', 'node_modules', '.bin'),
            depPrefix: path.join(hakEnv.dotHakDir, dep, 'opt'),
            scripts: {},
        };
        for (const s of HAKSCRIPTS) {
            if (hakJson.scripts && hakJson.scripts[s]) {
                deps[dep].scripts[s] = require(path.join(prefix, 'hak', dep, hakJson.scripts[s]));
            }
        }
    }

    if (process.argv.length < 3) {
        console.log("Usage: hak <command> [modules...]");
        process.exit(1);
    }

    const cmd = process.argv[2];
    if (GENERALCOMMANDS.includes(cmd)) {
        if (cmd === 'target') {
            console.log(hakEnv.getNodeTriple());
        }
        return;
    }

    if (!MODULECOMMANDS.includes(cmd)) {
        console.error("Unknown command: " + cmd);
        console.log("Commands I know about:");
        // fix: the loop variable previously shadowed the outer 'cmd'
        for (const knownCmd of MODULECOMMANDS) {
            console.log("\t" + knownCmd);
        }
        process.exit(1);
    }

    const cmdFunc = require('./' + cmd);

    let modules = process.argv.slice(3);
    // no modules specified: run the command on all hak dependencies
    if (modules.length === 0) modules = Object.keys(deps);

    for (const mod of modules) {
        const depInfo = deps[mod];
        if (depInfo === undefined) {
            console.log(
                "Module " + mod + " not found - is it in hakDependencies " +
                "in your package.json?",
            );
            process.exit(1);
        }
        console.log("hak " + cmd + ": " + mod);
        await cmdFunc(hakEnv, depInfo);
    }
}

// fix: main() was previously a floating promise — a rejection (eg. a
// failed build step) became an unhandled rejection instead of an error
// message and a non-zero exit
main().catch(err => {
    console.error(err);
    process.exit(1);
});

60
scripts/hak/link.js Normal file
View file

@ -0,0 +1,60 @@
/*
Copyright 2020 The Matrix.org Foundation C.I.C.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
const path = require('path');
const os = require('os');
const fsProm = require('fs').promises;
const child_process = require('child_process');
/**
 * The 'link' lifecycle stage: register the module's output copy with
 * `yarn link` (using a link folder inside .hak, configured via the
 * project's .yarnrc) and then link it into the project's node_modules.
 */
async function link(hakEnv, moduleInfo) {
    const yarnrc = path.join(hakEnv.projectRoot, '.yarnrc');
    // this is fairly terrible but it's reasonably clunky to either parse a yarnrc
    // properly or get yarn to do it, so this will probably suffice for now.
    // We just check to see if there is a local .yarnrc at all, and assume that
    // if you've put one there yourself, you probably know what you're doing and
    // we won't meddle with it.
    // Also we do this for each module which is unnecessary, but meh.
    let haveYarnrc = true;
    try {
        await fsProm.stat(yarnrc);
    } catch (e) {
        haveYarnrc = false;
    }
    if (!haveYarnrc) {
        const linkFolderLine = '--link-folder ' + path.join(hakEnv.dotHakDir, 'links') + os.EOL;
        await fsProm.writeFile(yarnrc, linkFolderLine);
    }

    // run yarn with the given arguments in the given directory, rejecting
    // with the exit code on failure
    const runYarn = (args, cwd) => new Promise((resolve, reject) => {
        const proc = child_process.spawn('yarn', args, {
            cwd,
            stdio: 'inherit',
        });
        proc.on('exit', code => {
            code ? reject(code) : resolve();
        });
    });

    // register the output copy as a linkable package...
    await runYarn(['link'], moduleInfo.moduleOutDir);
    // ...then link it into the project
    await runYarn(['link', moduleInfo.name], hakEnv.projectRoot);
}
module.exports = link;

2095
yarn.lock

File diff suppressed because it is too large Load diff