nix-ros-build-action: update dependencies and support multiple outputs

Ben Wolsieffer 2023-08-08 15:59:50 -04:00
parent 1b242a4032
commit 7c5425b6ac
8 changed files with 12965 additions and 3935 deletions

View file

@@ -0,0 +1,452 @@
"use strict";
exports.id = 37;
exports.ids = [37];
exports.modules = {
/***/ 4037:
/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __webpack_require__) => {
__webpack_require__.r(__webpack_exports__);
/* harmony export */ __webpack_require__.d(__webpack_exports__, {
/* harmony export */ "toFormData": () => (/* binding */ toFormData)
/* harmony export */ });
/* harmony import */ var fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__ = __webpack_require__(2777);
/* harmony import */ var formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__ = __webpack_require__(8010);
let s = 0;
const S = {
START_BOUNDARY: s++,
HEADER_FIELD_START: s++,
HEADER_FIELD: s++,
HEADER_VALUE_START: s++,
HEADER_VALUE: s++,
HEADER_VALUE_ALMOST_DONE: s++,
HEADERS_ALMOST_DONE: s++,
PART_DATA_START: s++,
PART_DATA: s++,
END: s++
};
let f = 1;
const F = {
PART_BOUNDARY: f,
LAST_BOUNDARY: f *= 2
};
const LF = 10;
const CR = 13;
const SPACE = 32;
const HYPHEN = 45;
const COLON = 58;
const A = 97;
const Z = 122;
const lower = c => c | 0x20;
const noop = () => {};
class MultipartParser {
/**
* @param {string} boundary
*/
constructor(boundary) {
this.index = 0;
this.flags = 0;
this.onHeaderEnd = noop;
this.onHeaderField = noop;
this.onHeadersEnd = noop;
this.onHeaderValue = noop;
this.onPartBegin = noop;
this.onPartData = noop;
this.onPartEnd = noop;
this.boundaryChars = {};
boundary = '\r\n--' + boundary;
const ui8a = new Uint8Array(boundary.length);
for (let i = 0; i < boundary.length; i++) {
ui8a[i] = boundary.charCodeAt(i);
this.boundaryChars[ui8a[i]] = true;
}
this.boundary = ui8a;
this.lookbehind = new Uint8Array(this.boundary.length + 8);
this.state = S.START_BOUNDARY;
}
/**
* @param {Uint8Array} data
*/
write(data) {
let i = 0;
const length_ = data.length;
let previousIndex = this.index;
let {lookbehind, boundary, boundaryChars, index, state, flags} = this;
const boundaryLength = this.boundary.length;
const boundaryEnd = boundaryLength - 1;
const bufferLength = data.length;
let c;
let cl;
const mark = name => {
this[name + 'Mark'] = i;
};
const clear = name => {
delete this[name + 'Mark'];
};
const callback = (callbackSymbol, start, end, ui8a) => {
if (start === undefined || start !== end) {
this[callbackSymbol](ui8a && ui8a.subarray(start, end));
}
};
const dataCallback = (name, clear) => {
const markSymbol = name + 'Mark';
if (!(markSymbol in this)) {
return;
}
if (clear) {
callback(name, this[markSymbol], i, data);
delete this[markSymbol];
} else {
callback(name, this[markSymbol], data.length, data);
this[markSymbol] = 0;
}
};
for (i = 0; i < length_; i++) {
c = data[i];
switch (state) {
case S.START_BOUNDARY:
if (index === boundary.length - 2) {
if (c === HYPHEN) {
flags |= F.LAST_BOUNDARY;
} else if (c !== CR) {
return;
}
index++;
break;
} else if (index - 1 === boundary.length - 2) {
if (flags & F.LAST_BOUNDARY && c === HYPHEN) {
state = S.END;
flags = 0;
} else if (!(flags & F.LAST_BOUNDARY) && c === LF) {
index = 0;
callback('onPartBegin');
state = S.HEADER_FIELD_START;
} else {
return;
}
break;
}
if (c !== boundary[index + 2]) {
index = -2;
}
if (c === boundary[index + 2]) {
index++;
}
break;
case S.HEADER_FIELD_START:
state = S.HEADER_FIELD;
mark('onHeaderField');
index = 0;
// falls through
case S.HEADER_FIELD:
if (c === CR) {
clear('onHeaderField');
state = S.HEADERS_ALMOST_DONE;
break;
}
index++;
if (c === HYPHEN) {
break;
}
if (c === COLON) {
if (index === 1) {
// empty header field
return;
}
dataCallback('onHeaderField', true);
state = S.HEADER_VALUE_START;
break;
}
cl = lower(c);
if (cl < A || cl > Z) {
return;
}
break;
case S.HEADER_VALUE_START:
if (c === SPACE) {
break;
}
mark('onHeaderValue');
state = S.HEADER_VALUE;
// falls through
case S.HEADER_VALUE:
if (c === CR) {
dataCallback('onHeaderValue', true);
callback('onHeaderEnd');
state = S.HEADER_VALUE_ALMOST_DONE;
}
break;
case S.HEADER_VALUE_ALMOST_DONE:
if (c !== LF) {
return;
}
state = S.HEADER_FIELD_START;
break;
case S.HEADERS_ALMOST_DONE:
if (c !== LF) {
return;
}
callback('onHeadersEnd');
state = S.PART_DATA_START;
break;
case S.PART_DATA_START:
state = S.PART_DATA;
mark('onPartData');
// falls through
case S.PART_DATA:
previousIndex = index;
if (index === 0) {
// boyer-moore derived algorithm to safely skip non-boundary data
i += boundaryEnd;
while (i < bufferLength && !(data[i] in boundaryChars)) {
i += boundaryLength;
}
i -= boundaryEnd;
c = data[i];
}
if (index < boundary.length) {
if (boundary[index] === c) {
if (index === 0) {
dataCallback('onPartData', true);
}
index++;
} else {
index = 0;
}
} else if (index === boundary.length) {
index++;
if (c === CR) {
// CR = part boundary
flags |= F.PART_BOUNDARY;
} else if (c === HYPHEN) {
// HYPHEN = end boundary
flags |= F.LAST_BOUNDARY;
} else {
index = 0;
}
} else if (index - 1 === boundary.length) {
if (flags & F.PART_BOUNDARY) {
index = 0;
if (c === LF) {
// unset the PART_BOUNDARY flag
flags &= ~F.PART_BOUNDARY;
callback('onPartEnd');
callback('onPartBegin');
state = S.HEADER_FIELD_START;
break;
}
} else if (flags & F.LAST_BOUNDARY) {
if (c === HYPHEN) {
callback('onPartEnd');
state = S.END;
flags = 0;
} else {
index = 0;
}
} else {
index = 0;
}
}
if (index > 0) {
// when matching a possible boundary, keep a lookbehind reference
// in case it turns out to be a false lead
lookbehind[index - 1] = c;
} else if (previousIndex > 0) {
// if our boundary turned out to be rubbish, the captured lookbehind
// belongs to partData
const _lookbehind = new Uint8Array(lookbehind.buffer, lookbehind.byteOffset, lookbehind.byteLength);
callback('onPartData', 0, previousIndex, _lookbehind);
previousIndex = 0;
mark('onPartData');
// reconsider the current character even though it interrupted the sequence
// it could be the beginning of a new sequence
i--;
}
break;
case S.END:
break;
default:
throw new Error(`Unexpected state entered: ${state}`);
}
}
dataCallback('onHeaderField');
dataCallback('onHeaderValue');
dataCallback('onPartData');
// Update properties for the next call
this.index = index;
this.state = state;
this.flags = flags;
}
end() {
if ((this.state === S.HEADER_FIELD_START && this.index === 0) ||
(this.state === S.PART_DATA && this.index === this.boundary.length)) {
this.onPartEnd();
} else if (this.state !== S.END) {
throw new Error('MultipartParser.end(): stream ended unexpectedly');
}
}
}
function _fileName(headerValue) {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bfilename=("(.*?)"|([^()<>@,;:\\"/[\]?={}\s\t]+))($|;\s)/i);
if (!m) {
return;
}
const match = m[2] || m[3] || '';
let filename = match.slice(match.lastIndexOf('\\') + 1);
filename = filename.replace(/%22/g, '"');
filename = filename.replace(/&#(\d{4});/g, (m, code) => {
return String.fromCharCode(code);
});
return filename;
}
async function toFormData(Body, ct) {
if (!/multipart/i.test(ct)) {
throw new TypeError('Failed to fetch');
}
const m = ct.match(/boundary=(?:"([^"]+)"|([^;]+))/i);
if (!m) {
throw new TypeError('no or bad content-type header, no multipart boundary');
}
const parser = new MultipartParser(m[1] || m[2]);
let headerField;
let headerValue;
let entryValue;
let entryName;
let contentType;
let filename;
const entryChunks = [];
const formData = new formdata_polyfill_esm_min_js__WEBPACK_IMPORTED_MODULE_1__/* .FormData */ .Ct();
const onPartData = ui8a => {
entryValue += decoder.decode(ui8a, {stream: true});
};
const appendToFile = ui8a => {
entryChunks.push(ui8a);
};
const appendFileToFormData = () => {
const file = new fetch_blob_from_js__WEBPACK_IMPORTED_MODULE_0__/* .File */ .$B(entryChunks, filename, {type: contentType});
formData.append(entryName, file);
};
const appendEntryToFormData = () => {
formData.append(entryName, entryValue);
};
const decoder = new TextDecoder('utf-8');
decoder.decode();
parser.onPartBegin = function () {
parser.onPartData = onPartData;
parser.onPartEnd = appendEntryToFormData;
headerField = '';
headerValue = '';
entryValue = '';
entryName = '';
contentType = '';
filename = null;
entryChunks.length = 0;
};
parser.onHeaderField = function (ui8a) {
headerField += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderValue = function (ui8a) {
headerValue += decoder.decode(ui8a, {stream: true});
};
parser.onHeaderEnd = function () {
headerValue += decoder.decode();
headerField = headerField.toLowerCase();
if (headerField === 'content-disposition') {
// matches either a quoted-string or a token (RFC 2616 section 19.5.1)
const m = headerValue.match(/\bname=("([^"]*)"|([^()<>@,;:\\"/[\]?={}\s\t]+))/i);
if (m) {
entryName = m[2] || m[3] || '';
}
filename = _fileName(headerValue);
if (filename) {
parser.onPartData = appendToFile;
parser.onPartEnd = appendFileToFormData;
}
} else if (headerField === 'content-type') {
contentType = headerValue;
}
headerValue = '';
headerField = '';
};
for await (const chunk of Body) {
parser.write(chunk);
}
parser.end();
return formData;
}
/***/ })
};
;
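
The new file above is generated bundler output: it packages node-fetch 3's internal multipart parser, whose exported toFormData() backs Response#formData(). A minimal usage sketch, assuming node-fetch ^3.3.2 as pinned in package.json below; the endpoint URL is a placeholder:

import fetch from 'node-fetch'

async function readMultipart(): Promise<void> {
  // Placeholder endpoint that answers with multipart/form-data.
  const response = await fetch('https://example.com/multipart-endpoint')

  // formData() feeds the body chunks through the bundled MultipartParser,
  // splitting parts on the boundary taken from the Content-Type header.
  const form = await response.formData()
  for (const [name, value] of form) {
    console.log(name, value)
  }
}

readMultipart().catch(console.error)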

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@@ -22,20 +22,19 @@
   "author": "Ben Wolsieffer",
   "license": "MIT",
   "dependencies": {
-    "@actions/core": "^1.2.6",
-    "@actions/exec": "^1.0.4",
-    "@actions/io": "^1.0.2",
-    "@types/node-fetch": "^2.5.7",
-    "node-fetch": "^2.6.1",
-    "p-limit": "^2.3.0"
+    "@actions/core": "^1.10.0",
+    "@actions/exec": "^1.1.1",
+    "@actions/io": "^1.1.3",
+    "node-fetch": "^3.3.2",
+    "p-limit": "^4.0.0"
   },
   "devDependencies": {
-    "@types/node": "^12.19.6",
-    "@typescript-eslint/eslint-plugin": "^2.34.0",
-    "@typescript-eslint/parser": "^2.34.0",
-    "@zeit/ncc": "^0.20.5",
-    "eslint": "^5.16.0",
+    "@types/node": "^18.16.1",
+    "@typescript-eslint/eslint-plugin": "^6.3.0",
+    "@typescript-eslint/parser": "^6.3.0",
+    "@vercel/ncc": "^0.36.1",
+    "eslint": "^8.46.0",
     "js-yaml": "^3.14.0",
-    "typescript": "^3.9.7"
+    "typescript": "^5.1.6"
   }
 }
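
Two of the bumped runtime dependencies, node-fetch 3 and p-limit 4, are ESM-only releases, and @zeit/ncc has been superseded by @vercel/ncc. A minimal sketch of the p-limit API; the concurrency value, attribute names, and buildPackage stand-in are illustrative, not taken from this repository:

import pLimit from 'p-limit'

// Illustrative stand-in for the action's real build step.
async function buildPackage(attr: string): Promise<string> {
  return `built ${attr}`
}

async function main(): Promise<void> {
  // Bound how many builds run at once; the value 4 is arbitrary.
  const limit = pLimit(4)
  const attrs = ['rosPackages.humble.rclcpp', 'rosPackages.humble.rclpy'] // placeholder attributes
  const results = await Promise.all(attrs.map(attr => limit(() => buildPackage(attr))))
  console.log(results)
}

main().catch(console.error)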

View file

@ -1,5 +1,8 @@
{ pkgs ? import <nixpkgs> {} }: with pkgs; { pkgs ? import <nixpkgs> {} }: with pkgs;
mkShell { mkShell {
buildInputs = [ nodejs-12_x cachix ]; buildInputs = [
nodejs
cachix
];
} }

View file

@@ -11,6 +11,6 @@ export async function query(name: string, drvPath: string) {
     drvPath, `https://${name}.cachix.org`)
 }
 
-export async function push(name: string, paths: Array<string>) {
+export async function push(name: string, paths: string[]) {
   await execFile('cachix', ['push', name].concat(paths))
 }

View file

@@ -60,10 +60,10 @@ class PackageSet {
   private async buildPackage(attr: string): Promise<BuildResult> {
     core.debug(`Instantiating ${attr}`)
 
-    let drvPaths: Array<string>
+    let drvPaths: string[]
     try {
       drvPaths = await nix.instantiate(this.nixFile, attr, this.drvDir, this.system)
-    } catch (e) {
+    } catch (e: any) {
       core.debug(`${attr} failed to evaluate`)
       return {
         status: BuildStatus.EVALUATION_FAILURE,
@@ -113,10 +113,10 @@
     }
 
     core.debug(`Building ${attr} (${drvPath})`)
-    let resultPath: string
+    let resultPaths: string[]
     try {
-      resultPath = await nix.realize(drvPath, attr, this.resultDir)
-    } catch (e) {
+      resultPaths = await nix.realize(drvPath, attr, this.resultDir)
+    } catch (e: any) {
       this.failedPackages.set(drvPath, attr)
       core.debug(`${attr} (${drvPath}) failed to build`)
       // Get last 10 lines of stderr
@@ -128,7 +128,7 @@
     }
 
     core.debug(`Pushing ${attr} (${drvPath})`)
-    await cachix.push(this.cachixCache, [resultPath])
+    await cachix.push(this.cachixCache, resultPaths)
     return {
       status: BuildStatus.SUCCESS,
       attr, drvPath,
@@ -243,7 +243,7 @@ async function run() {
     statusResults.get(BuildStatus.CACHED)!
       .forEach(r => core.info(`${r.attr} (${r.drvPath})`));
     core.endGroup()
-  } catch (error) {
+  } catch (error: any) {
     core.setFailed(error.message)
   }
 }
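
The hunks above carry the "multiple outputs" half of the change: buildPackage now receives every output path from nix.realize and hands the whole array to cachix.push instead of wrapping a single path. A condensed sketch of that flow under the new signatures; the buildAndPush wrapper, its parameters, and the relative import paths are illustrative rather than code from main.ts:

import * as nix from './nix'        // realize() now resolves to string[]
import * as cachix from './cachix'  // push() now accepts string[]

// Illustrative wrapper over the two calls shown in the diff above.
async function buildAndPush(drvPath: string, attr: string, resultDir: string, cache: string): Promise<string[]> {
  // A multi-output derivation realises to several store paths (e.g. out, dev, lib).
  const resultPaths: string[] = await nix.realize(drvPath, attr, resultDir)

  // Every output is pushed to the Cachix cache, not just the first one.
  await cachix.push(cache, resultPaths)
  return resultPaths
}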

View file

@@ -12,7 +12,7 @@ const execFile = util.promisify(childProcess.execFile)
 export async function listAttrs(
   file: string,
   parentAttr: string
-): Promise<Array<string>> {
+): Promise<string[]> {
   const { stdout } = await execFile('nix-instantiate', [
     '--eval', '--json', '-E',
     `with (import (./. + "/${file}") {}); builtins.attrNames (${parentAttr})`
@@ -20,20 +20,20 @@ export async function listAttrs(
   return JSON.parse(stdout).map((a: string) => `${parentAttr}.${a}`)
 }
 
-function parseLines(lines: string): Array<string> {
+function parseLines(lines: string): string[] {
   return lines.split('\n')
     .map(r => r.trim())
     .filter(r => r !== '')
 }
 
-export async function getRequisites(drvPath: string): Promise<Array<string>> {
+export async function getRequisites(drvPath: string): Promise<string[]> {
   const { stdout: requisites } = await execFile(
     'nix-store', ['--query', '--requisites', drvPath]
   )
   return parseLines(requisites)
 }
 
-export async function getOutputs(drvPath: string): Promise<Array<string>> {
+export async function getOutputs(drvPath: string): Promise<string[]> {
   const { stdout: outputs } = await execFile(
     'nix-store', ['--query', '--outputs', drvPath]
   )
@@ -74,7 +74,7 @@ export async function instantiate(
   attribute: string,
   drvDir: string,
   system?: string
-): Promise<Array<string>> {
+): Promise<string[]> {
   let args = [
     file, '-A', attribute,
     '--add-root', path.join(drvDir, attribute), '--indirect'
@@ -85,7 +85,7 @@
   let drvPaths
   try {
     drvPaths = (await execFile('nix-instantiate', args)).stdout
-  } catch (e) {
+  } catch (e: any) {
     throw e.stderr
   }
   return parseLines(drvPaths)
@@ -95,10 +95,10 @@ export async function realize(
   drvPath: string,
   attribute: string,
   resultDir: string
-): Promise<string> {
-  const { stdout: resultPath } = await execFile('nix-store', [
+): Promise<string[]> {
+  const { stdout: resultPaths } = await execFile('nix-store', [
     '--realise', drvPath, '--no-build-output',
     '--add-root', path.join(resultDir, attribute), '--indirect'
   ])
-  return resultPath.trim()
+  return parseLines(resultPaths)
 }