Compare commits
14 commits: d080d1058f...v3.1.4

c0561a542f
1d7e66459a
c1b59b8e20
fe95766606
362593798c
362656d13e
d7723efb18
9a79e6a830
a1338aa9bc
5e105efd9d
95ba56491d
6ae97994c3
8e374cae37
0a2f9c3995
2
.gitignore
vendored
@@ -97,5 +97,7 @@ public
|
||||
# DynamoDB Local files
|
||||
.dynamodb/
|
||||
|
||||
distribution/*.mts
|
||||
distribution/*.mjs
|
||||
jobs/*
|
||||
!jobs/example.json5
|
22
.vscode/launch.json
vendored
@@ -5,16 +5,30 @@
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "pwa-node",
|
||||
"type": "node",
|
||||
"request": "launch",
|
||||
"name": "Launch Program",
|
||||
"skipFiles": [
|
||||
"<node_internals>/**"
|
||||
],
|
||||
"env": {
|
||||
"JOB": "jobs/example.json5"
|
||||
"GPGPASS": "${input:gpgpass}"
|
||||
},
|
||||
"program": "${workspaceFolder}/index.js"
|
||||
"program": "${workspaceFolder}/bin/artix-metro.mjs",
|
||||
"args": [
|
||||
"--job",
|
||||
"jobs/kde-01.json5"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/distribution/*"
|
||||
]
|
||||
}
|
||||
]
|
||||
],
|
||||
"inputs": [
|
||||
{
|
||||
"id": "gpgpass",
|
||||
"type": "promptString",
|
||||
"description": "Enter your GPG password"
|
||||
}
|
||||
]
|
||||
}
|
75
Checkupdates.js
@@ -1,75 +0,0 @@
|
||||
const spawn = require('child_process').spawn;
|
||||
const clc = require('cli-color');
|
||||
|
||||
const TimeOut = 600000;
|
||||
const ExtraSpace = new RegExp('\\s+', 'g');
|
||||
|
||||
class Checkupdates {
|
||||
upgradable = [];
|
||||
|
||||
/**
|
||||
* runs comparepkg -u
|
||||
* @param {number} timeout max execution time
|
||||
* @returns {Promise}
|
||||
*/
|
||||
FetchUpgradable(timeout = TimeOut) {
|
||||
return new Promise((resolve, reject) => {
|
||||
this.upgradable = [];
|
||||
let process = spawn('artix-checkupdates', ['-u']);
|
||||
let to = setTimeout(async () => {
|
||||
process.kill() && await cleanUpLockfiles();
|
||||
reject('Timed out');
|
||||
}, timeout);
|
||||
let outputstr = '';
|
||||
let errorOutput = '';
|
||||
process.stdout.on('data', data => {
|
||||
outputstr += data.toString();
|
||||
});
|
||||
process.stderr.on('data', err => {
|
||||
const errstr = err.toString();
|
||||
errorOutput += `${errstr}, `;
|
||||
console.error(errstr);
|
||||
})
|
||||
process.on('exit', async (code) => {
|
||||
clearTimeout(to);
|
||||
if (code !== 0 || errorOutput.length !== 0) {
|
||||
errorOutput.includes('unable to lock database') && cleanUpLockfiles();
|
||||
reject((code && `exited with ${code}`) || errorOutput);
|
||||
}
|
||||
else {
|
||||
this.upgradable = this.parseCheckUpdatesOutput(outputstr);
|
||||
this.upgradable.forEach(pkg => console.log(clc.blue(pkg)));
|
||||
resolve(code);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* parse output of checkupdates
|
||||
* @param {*} output output of artix-checkupdates
|
||||
* @returns an array of package names from the checkupdates output
|
||||
*/
|
||||
parseCheckUpdatesOutput(output) {
|
||||
let packages = [];
|
||||
let lines = output.split('\n');
|
||||
lines.forEach(l => {
|
||||
let p = l.trim().replace(ExtraSpace, ' ');
|
||||
if (p.length > 0 && p.indexOf('Package basename') < 0) {
|
||||
packages.push(p.split(' ', 2)[0]);
|
||||
}
|
||||
});
|
||||
return packages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Whether a package has an upgrade or rebuild pending
|
||||
* @param {string} pkg the package name
|
||||
* @returns {boolean} if it's upgradable
|
||||
*/
|
||||
IsUpgradable(pkg) {
|
||||
return this.upgradable.includes(pkg);
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Checkupdates;
|
5
bin/artix-metro.mjs
Executable file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
import { artixMetro } from '../distribution/index.mjs';
|
||||
|
||||
artixMetro();
|
99
completion/bash
Normal file
@@ -0,0 +1,99 @@
|
||||
#/usr/bin/env bash
|
||||
|
||||
LIBDIR=${LIBDIR:-'/usr/share/artools/lib'}
|
||||
|
||||
_artixpkg_pkgbase() {
|
||||
source "${LIBDIR}"/pkg/util.sh
|
||||
ls -1 "${TREE_DIR_ARTIX}" | tr '\n' ' '
|
||||
}
|
||||
|
||||
_artix_metro_completion() {
|
||||
local cur prev comps repos autorepos comp_cword_exflag
|
||||
source "${LIBDIR}"/pkg/db/db.sh 2>/dev/null
|
||||
|
||||
cur="${COMP_WORDS[COMP_CWORD]}"
|
||||
prev="${COMP_WORDS[COMP_CWORD-1]}"
|
||||
|
||||
for ((i = COMP_CWORD - 1; i >= 0; i--)); do
|
||||
if [[ ${COMP_WORDS[i]} != -* ]]; then
|
||||
last_non_flag_word="${COMP_WORDS[i]}"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
comps=""
|
||||
comp_cword_exflag=0
|
||||
comp_cword_all=0
|
||||
for ((i = 0; i < ${#COMP_WORDS[@]} - 1; i++)); do
|
||||
word="${COMP_WORDS[i]}"
|
||||
comps_all+=" $word"
|
||||
((comp_cword_all++))
|
||||
if [[ $word != -* ]]; then
|
||||
comps+=" $word"
|
||||
((comp_cword_exflag++))
|
||||
fi
|
||||
done
|
||||
comps="${comps:1}"
|
||||
|
||||
repos=""
|
||||
for word in "${ARTIX_DB[@]}"; do
|
||||
if [[ $word != -* ]]; then
|
||||
repos+=" $word"
|
||||
fi
|
||||
done
|
||||
repos="${repos:1}"
|
||||
autorepos=""
|
||||
for word in "${ARTIX_DB_MAP[@]}"; do
|
||||
if [[ $word != -* ]]; then
|
||||
autorepos+=" $word"
|
||||
fi
|
||||
done
|
||||
autorepos="${autorepos:1}"
|
||||
|
||||
case "${prev}" in
|
||||
"--token")
|
||||
# this flag expects a parameter
|
||||
COMPREPLY=()
|
||||
;;
|
||||
"-j"|"--job")
|
||||
compopt -o filenames
|
||||
COMPREPLY=( $(compgen -f -- "$cur") )
|
||||
;;
|
||||
"--workspace")
|
||||
COMPREPLY=( $(compgen -d -- "$cur") )
|
||||
;;
|
||||
"--start")
|
||||
COMPREPLY=($(compgen -W "$(_artixpkg_pkgbase)" -- ${cur}))
|
||||
;;
|
||||
*)
|
||||
local metroCommon="-h --help --start --token --workspace --increment "
|
||||
case "${comps}" in
|
||||
"artix-metro add"*)
|
||||
case "${comp_cword_exflag}" in
|
||||
2)
|
||||
COMPREPLY=($(compgen -W "$metroCommon $autorepos $repos" -- ${cur}))
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "$metroCommon $(_artixpkg_pkgbase)" -- ${cur}))
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
"artix-metro move"*)
|
||||
case "${comp_cword_exflag}" in
|
||||
2|3)
|
||||
COMPREPLY=($(compgen -W "$metroCommon $autorepos $repos" -- ${cur}))
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "$metroCommon $(_artixpkg_pkgbase)" -- ${cur}))
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
*)
|
||||
COMPREPLY=($(compgen -W "$metroCommon -j --job add move" -- ${cur}))
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
complete -F _artix_metro_completion artix-metro
|
61
completion/zsh
Normal file
@@ -0,0 +1,61 @@
|
||||
# Load necessary library files
|
||||
LIBDIR=${LIBDIR:-'/usr/share/artools/lib'}
|
||||
|
||||
_artix_metro_completion() {
|
||||
local -a metroCommon repos autorepos pkgbase
|
||||
local curcontext="$curcontext" state
|
||||
|
||||
# Load external configurations
|
||||
source "${LIBDIR}/pkg/db/db.sh" 2>/dev/null
|
||||
|
||||
# Common options
|
||||
metroCommon=("-h" "--help" "--start" "--token" "--workspace" "--increment" "-j" "--job")
|
||||
|
||||
# Populate variables
|
||||
repos=("${(s: :)ARTIX_DB}")
|
||||
autorepos=("${(s: :)ARTIX_DB_MAP}")
|
||||
pkgbase=("${(s: :)$(artix-metro --completion pkgbase)}")
|
||||
|
||||
# Handle command and argument contexts
|
||||
_arguments -C \
|
||||
'--token[Provide a token]: ' \
|
||||
'-j[Specify a job]: :_files' \
|
||||
'--job[Specify a job]: :_files' \
|
||||
'--workspace[Specify a workspace]: :_files -/' \
|
||||
'--start[Start a process]:pkgbase:(${pkgbase})' \
|
||||
'1:command:(${metroCommon} add move)' \
|
||||
'2:repo:(${metroCommon} ${autorepos} ${repos})' \
|
||||
'*:pkgbase:->pkgbase'
|
||||
|
||||
# Contextual argument handling
|
||||
case $state in
|
||||
pkgbase)
|
||||
case $words[2] in
|
||||
add)
|
||||
if (( CURRENT == 3 )); then
|
||||
# First argument after "add" is a repo
|
||||
_values "repo" "${metroCommon[@]}" "${autorepos[@]}" "${repos[@]}"
|
||||
else
|
||||
# Remaining arguments are pkgbase
|
||||
_values "pkgbase" "${pkgbase[@]}"
|
||||
fi
|
||||
;;
|
||||
move)
|
||||
if (( CURRENT == 3 )); then
|
||||
# First repo for "move"
|
||||
_values "repo" "${metroCommon[@]}" "${autorepos[@]}" "${repos[@]}"
|
||||
elif (( CURRENT == 4 )); then
|
||||
# Second repo for "move"
|
||||
_values "repo" "${metroCommon[@]}" "${autorepos[@]}" "${repos[@]}"
|
||||
else
|
||||
# Remaining arguments are pkgbase
|
||||
_values "pkgbase" "${pkgbase[@]}"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
;;
|
||||
esac
|
||||
}
|
||||
|
||||
# Register the completion function for artix-metro
|
||||
compdef _artix_metro_completion artix-metro
|
85
gitea.js
@@ -1,85 +0,0 @@
|
||||
const p = require('phin');
|
||||
|
||||
// how is there no decent library for this shit?
|
||||
class Gitea {
|
||||
constructor(options = {}) {
|
||||
this._protocol = options._protocol || 'https';
|
||||
this._domain = options.domain || 'gitea.artixlinux.org';
|
||||
this._apiPrefix = options.apiPrefix || '/api/v1';
|
||||
this._token = options.token || null;
|
||||
}
|
||||
|
||||
getHomepage() {
|
||||
return `${this._protocol}://${this._domain}/`;
|
||||
}
|
||||
|
||||
getUrlPrefix() {
|
||||
return `${this._protocol}://${this._domain}${this._apiPrefix}`;
|
||||
}
|
||||
|
||||
getRepo(...args) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
let headers = {};
|
||||
if (this._token) {
|
||||
headers.Authorization = `token ${this._token}`
|
||||
}
|
||||
let resp = await p({
|
||||
url: `${this.getUrlPrefix()}/repos/${args.join('/')}`,
|
||||
headers,
|
||||
method: 'GET',
|
||||
parse: 'json',
|
||||
});
|
||||
resolve(resp.body);
|
||||
}
|
||||
catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getCommits(...args) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
let headers = {};
|
||||
if (this._token) {
|
||||
headers.Authorization = `token ${this._token}`
|
||||
}
|
||||
let resp = await p({
|
||||
url: `${this.getUrlPrefix()}/repos/${args.join('/')}/commits?limit=10`,
|
||||
headers,
|
||||
method: 'GET',
|
||||
parse: 'json',
|
||||
});
|
||||
resolve(resp.body);
|
||||
}
|
||||
catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
getStatus(...args) {
|
||||
return new Promise(async (resolve, reject) => {
|
||||
try {
|
||||
let commits = await this.getCommits(...args);
|
||||
let headers = {};
|
||||
if (this._token) {
|
||||
headers.Authorization = `token ${this._token}`
|
||||
}
|
||||
let resp = await p({
|
||||
url: `${this.getUrlPrefix()}/repos/${args.join('/')}/commits/${commits[0].sha}/status`,
|
||||
headers,
|
||||
method: 'GET',
|
||||
parse: 'json',
|
||||
});
|
||||
resolve(resp.body);
|
||||
}
|
||||
catch (err) {
|
||||
reject(err);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
module.exports = Gitea;
|
295
index.js
@@ -1,295 +0,0 @@
|
||||
const fs = require('fs');
|
||||
const fsp = fs.promises;
|
||||
const readline = require('readline');
|
||||
const Writable = require('stream').Writable;
|
||||
const path = require('path');
|
||||
const spawn = require('child_process').spawn;
|
||||
const clc = require('cli-color');
|
||||
const JSON5 = require('json5');
|
||||
const checkupdates = require('./Checkupdates');
|
||||
const giteaapi = require('./gitea');
|
||||
|
||||
const PACKAGE_ORG = 'packages';
|
||||
const SIGNATUREEXPIRY = 30000;//in ms
|
||||
|
||||
let JOB = process.env.JOB;
|
||||
let START = null;
|
||||
let LASTSIGNTIME = new Date(0);
|
||||
|
||||
/**
|
||||
* Sleep equivalent as a promise
|
||||
* @param {number} ms Number of ms
|
||||
* @returns Promise<void>
|
||||
*/
|
||||
const snooze = ms => new Promise(resolve => setTimeout(resolve, ms));
|
||||
|
||||
/**
|
||||
* Wait for a new build to succeed
|
||||
* @param {giteaapi} tea
|
||||
* @param {string} pkg
|
||||
* @param {string} lastHash
|
||||
*/
|
||||
async function waitForBuild(tea, pkg, lastHash) {
|
||||
while (true) {
|
||||
let status;
|
||||
try {
|
||||
status = await tea.getStatus(PACKAGE_ORG, pkg);
|
||||
}
|
||||
catch {
|
||||
status = null;
|
||||
await snooze(30000);
|
||||
}
|
||||
if (status) {
|
||||
if (status.sha !== lastHash) {
|
||||
if (status.state === 'success') {
|
||||
break;
|
||||
}
|
||||
else if (status.state === 'failure') {
|
||||
throw `Build ${status.sha} failed. ${tea.getHomepage()}${PACKAGE_ORG}/${pkg}`;
|
||||
}
|
||||
}
|
||||
await snooze(5000);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Run a command (as a promise).
|
||||
* @param {string} command
|
||||
* @param {string[]} args
|
||||
* @returns Promise<number>
|
||||
*/
|
||||
function runCommand(command, args = []) {
|
||||
return new Promise((res, reject) => {
|
||||
let proc = spawn(command, args, { stdio: ['ignore', process.stdout, process.stderr] });
|
||||
proc.on('exit', code => {
|
||||
if (code === 0) {
|
||||
res();
|
||||
}
|
||||
else {
|
||||
reject(code);
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Prompts the user to input their GPG password via stdin
|
||||
* @returns a promise that resolves the password
|
||||
*/
|
||||
function getGpgPass() {
|
||||
return new Promise(resolve => {
|
||||
let mutableStdout = new Writable({
|
||||
write: function (chunk, encoding, callback) {
|
||||
if (!this.muted) {
|
||||
process.stdout.write(chunk, encoding);
|
||||
}
|
||||
callback();
|
||||
}
|
||||
});
|
||||
let rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: mutableStdout,
|
||||
terminal: true
|
||||
});
|
||||
mutableStdout.muted = false;
|
||||
rl.question(clc.yellow('Enter your GPG password: '), (password) => {
|
||||
rl.close();
|
||||
console.log();
|
||||
resolve(password);
|
||||
});
|
||||
mutableStdout.muted = true;
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Input gpg passphrase so pushing commits won't require it
|
||||
* @param {*} config the json config
|
||||
*/
|
||||
async function refreshGpg(config) {
|
||||
let currentTime = new Date();
|
||||
if (currentTime.getTime() - LASTSIGNTIME.getTime() > SIGNATUREEXPIRY) {
|
||||
console.log(clc.cyan('Refreshing signature...'));
|
||||
await runCommand('touch', ['signfile']);
|
||||
await runCommand('gpg', ['-a', '--passphrase', escapeCommandParam(config.gpgpass), '--batch', '--pinentry-mode', 'loopback', '--detach-sign', 'signfile']);
|
||||
await fsp.rm('signfile.asc');
|
||||
LASTSIGNTIME = currentTime;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Formats text to be sent as a parameter to some command
|
||||
* @param {string} param
|
||||
*/
|
||||
function escapeCommandParam(param) {
|
||||
return param.replace(/\\/g, "\\\\");
|
||||
}
|
||||
|
||||
/**
|
||||
* increment pkgrel
|
||||
* @param {string} directory location of all package git repos
|
||||
* @param {*} package package to increment
|
||||
* @returns Promise<void>
|
||||
*/
|
||||
function increment(directory, package) {
|
||||
return new Promise(async (res, reject) => {
|
||||
const pkgbuild = path.join(directory, package, 'PKGBUILD');
|
||||
let lines = [];
|
||||
|
||||
const rl = readline.createInterface({
|
||||
input: fs.createReadStream(pkgbuild),
|
||||
output: process.stdout,
|
||||
terminal: false
|
||||
});
|
||||
|
||||
rl.on('line', async line => {
|
||||
if (line.startsWith('pkgrel')) {
|
||||
let pkgrel = line.split('=')[1].trim();
|
||||
// let's not deal with floats in javascript
|
||||
let num = pkgrel.split('.');
|
||||
if (num.length == 1) {
|
||||
num.push(1);
|
||||
}
|
||||
else {
|
||||
num[1] = parseInt(num[1]) + 1;
|
||||
}
|
||||
lines.push(`pkgrel=${num.join('.')}`);
|
||||
}
|
||||
else {
|
||||
lines.push(line);
|
||||
}
|
||||
});
|
||||
rl.on('close', async () => {
|
||||
await fsp.writeFile(pkgbuild, lines.join('\n') + '\n');
|
||||
res();
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
async function isNewPackage(directory, package) {
|
||||
if (!directory) {
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
const pkgbuild = path.join(directory, package, 'PKGBUILD');
|
||||
const stat = await fsp.stat(pkgbuild);
|
||||
return !stat.size;
|
||||
}
|
||||
catch {
|
||||
console.log('PKGBUILD doesn\'t exist. Assuming package is new.');
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
process.argv.forEach((arg, i) => {
|
||||
let iPlus = i + 1;
|
||||
let args = process.argv;
|
||||
if (arg === '--job' && iPlus < args.length) {
|
||||
JOB = args[iPlus];
|
||||
}
|
||||
else if (arg === '--start' && iPlus < args.length) {
|
||||
START = args[iPlus];
|
||||
}
|
||||
});
|
||||
|
||||
if (JOB) {
|
||||
(async function () {
|
||||
let compare = null;
|
||||
let job = JSON5.parse(await fsp.readFile(JOB));
|
||||
job.source = job.source || 'trunk';
|
||||
job.gpgpass = process.env.GPGPASS || (await getGpgPass()) || '';
|
||||
let verifyJenkins = job.source === 'trunk';
|
||||
let inc = job.increment;
|
||||
let repo = job.repo;
|
||||
let directory = job.directory || job.superrepo;
|
||||
if (!repo) {
|
||||
console.error(clc.redBright('Must provide `repo` destination in config!'));
|
||||
process.exit(1);
|
||||
}
|
||||
if (inc && !directory) {
|
||||
console.error(clc.redBright('Must provide `directory` path in config if increment is enabled!'));
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log('artix-packy-pusher\nCory Sanin\n');
|
||||
|
||||
const gitea = new giteaapi(job.gitea);
|
||||
if (job.source === 'trunk') {
|
||||
console.log(clc.yellowBright('Running artix-checkupdates'));
|
||||
compare = new checkupdates();
|
||||
await compare.FetchUpgradable();
|
||||
}
|
||||
|
||||
// order is IMPORTANT. Must be BLOCKING.
|
||||
for (let i = 0; i < (job.packages || []).length; i++) {
|
||||
let lastHash = '';
|
||||
let p = job.packages[i];
|
||||
if (START === p) {
|
||||
START = null;
|
||||
}
|
||||
if (START === null) {
|
||||
if (compare === null || compare.IsUpgradable(p) || await isNewPackage(directory, p)) {
|
||||
console.log((new Date()).toLocaleTimeString() + clc.magentaBright(` Package ${i}/${job.packages.length}`));
|
||||
while (verifyJenkins) {
|
||||
try {
|
||||
lastHash = (await gitea.getStatus(PACKAGE_ORG, p)).sha
|
||||
console.log(`current sha: ${lastHash}`);
|
||||
break;
|
||||
}
|
||||
catch {
|
||||
console.log(clc.red(`Failed to get status of ${p}. Retrying...`));
|
||||
await snooze(30000);
|
||||
}
|
||||
}
|
||||
console.log(clc.yellowBright(`Pushing ${p} ...`));
|
||||
if (job.source == 'trunk') {
|
||||
if (inc) {
|
||||
await increment(directory, p);
|
||||
}
|
||||
else {
|
||||
await runCommand('artixpkg', ['repo', 'import', p]);
|
||||
}
|
||||
await refreshGpg(job);
|
||||
await runCommand('artixpkg', ['repo', 'add', '-p', repo, p]);
|
||||
}
|
||||
else {
|
||||
try {
|
||||
await refreshGpg(job);
|
||||
await runCommand('artixpkg', ['repo', 'move', '-p', job.source, repo, p]);
|
||||
}
|
||||
catch {
|
||||
console.log(clc.cyan(`Moving ${p} failed. Maybe nothing to move. Continuing.`));
|
||||
}
|
||||
}
|
||||
console.log(clc.blueBright(`${p} upgrade pushed`));
|
||||
if (verifyJenkins) {
|
||||
try {
|
||||
await waitForBuild(gitea, p, lastHash);
|
||||
console.log(clc.greenBright(`${p} built successfully.`));
|
||||
}
|
||||
catch (ex) {
|
||||
console.error(clc.redBright(`Failed on ${p}:`));
|
||||
console.error(ex);
|
||||
process.exit(1);
|
||||
}
|
||||
}
|
||||
}
|
||||
else {
|
||||
console.log(clc.magenta(`${p} isn't marked as upgradable. Skipping.`));
|
||||
}
|
||||
}
|
||||
}
|
||||
console.log(clc.greenBright('SUCCESS: All packages built'));
|
||||
try {
|
||||
await fsp.rm('signfile');
|
||||
}
|
||||
catch {
|
||||
console.error(clc.red('failed to remove temp signfile'));
|
||||
}
|
||||
process.exit(0);
|
||||
})();
|
||||
}
|
||||
else {
|
||||
console.error(clc.redBright('A job file must be provided.\n--job {path/to/job.json(5)}'));
|
||||
}
|
jobs/example.json5
@@ -1,15 +1,8 @@
|
||||
{
|
||||
// The Gitea API is used to tell when a package builds successfully.
|
||||
"gitea": {
|
||||
"token": "youReGiteAtoKeNhERe",
|
||||
"domain": "gitea.artixlinux.org"
|
||||
},
|
||||
// use "trunk" to import and build. Or supply a repo name to move from that repo.
|
||||
"source": "trunk",
|
||||
// set source to move packages, or leave it undefined to upgrade packages
|
||||
"source": null,
|
||||
// The destination repo
|
||||
"repo": "world-gremlins",
|
||||
// The location for all the package repositories on your system
|
||||
"directory": "/home/cory/Documents/pkg/artixlinux",
|
||||
// (experimental) if set to true (and source is "trunk"),
|
||||
// package won't import from upstream and will instead do a .1 pkgrel bump on all packages.
|
||||
// Useful if packages were mistakenly built out of order.
|
||||
|
@@ -2,4 +2,4 @@
|
||||
APPRISE_API="http://localhost:8000"
|
||||
APPRISE_DESTINATION="tgram://TOKEN/CHAT_ID"
|
||||
|
||||
curl -d "{\"title\":\"artix-packy-pusher\", \"body\":\"Job done.\", \"urls\":\"${APPRISE_DESTINATION}\"}" -H "Content-Type: application/json" "${APPRISE_API}/notify/"
|
||||
curl -d "{\"title\":\"artix-metro\", \"body\":\"Job done.\", \"urls\":\"${APPRISE_DESTINATION}\"}" -H "Content-Type: application/json" "${APPRISE_API}/notify/"
|
914
package-lock.json
generated
File diff suppressed because it is too large
43
package.json
@@ -1,13 +1,46 @@
|
||||
{
|
||||
"name": "packagepusher",
|
||||
"version": "2.1.2",
|
||||
"name": "artix-metro",
|
||||
"version": "3.1.4",
|
||||
"description": "Automate pushing packages to Artix",
|
||||
"main": "index.js",
|
||||
"author": "Cory Sanin",
|
||||
"keywords": [
|
||||
"artix",
|
||||
"artixlinux"
|
||||
],
|
||||
"type": "module",
|
||||
"main": "distribution/index.mjs",
|
||||
"bin": {
|
||||
"artix-metro": "./bin/artix-metro.mjs"
|
||||
},
|
||||
"files": [
|
||||
"distribution",
|
||||
"bin",
|
||||
"completion"
|
||||
],
|
||||
"author": {
|
||||
"name": "Cory Sanin",
|
||||
"email": "corysanin@artixlinux.org",
|
||||
"url": "https://sanin.dev"
|
||||
},
|
||||
"homepage": "https://gitea.artixlinux.org/corysanin/artix-metro",
|
||||
"bugs": {
|
||||
"url": "https://github.com/CorySanin/artix-metro/issues"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://gitea.artixlinux.org/corysanin/artix-metro.git"
|
||||
},
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"artix-checkupdates": "1.0.2",
|
||||
"cli-color": "2.0.4",
|
||||
"ky": "1.8.1",
|
||||
"json5": "2.2.3",
|
||||
"phin": "3.7.1"
|
||||
"glob": "11.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"typescript": "5.8.3",
|
||||
"@sindresorhus/tsconfig": "7.0.0",
|
||||
"@types/cli-color": "2.0.6",
|
||||
"@types/node": "22.15.14"
|
||||
}
|
||||
}
|
||||
|
33
readme.md
@@ -1,27 +1,42 @@
|
||||
# artix-packy-pusher
|
||||
# artix-metro
|
||||
|
||||
Given a list of packages, build one at a time. Exits if a build fails.
|
||||
Artix package pushing automation tool that waits for builds to pass before continuing in the queue.
|
||||
|
||||
## Features
|
||||
|
||||
* `artix-checkupdates` is used to skip packages without pending operations
|
||||
* Package upgrades wait for successful builds before moving on to the next one
|
||||
* Build failures stop execution
|
||||
* Perfect for scripting large, recurring rebuilds
|
||||
* Increment mode for fixing packages built completely out-of-order
|
||||
|
||||
## Setup
|
||||
|
||||
`artix-checkupdates` is required. I highly recommend configuring it to use the developer artix mirror.
|
||||
It uses `artix-checkupdates` to retrieve a list of packages that actually do have updates pending. packy-pusher will skip packages that don't need to be updated.
|
||||
|
||||
Install node dependencies with `npm install`
|
||||
|
||||
1) Install node dependencies with `npm install`
|
||||
2) Process the typescript source with `npm exec tsc`
|
||||
|
||||
## Config
|
||||
|
||||
Please see [example.json5](jobs/example.json5). Program can parse json5 or plain json.
|
||||
In addition to the robust CLI, jobs can be defined in a JSON5 or plain JSON file. For recurring tasks, either a job file or a bash script with the CLI calls is recommended. See [example.json5](jobs/example.json5) for an example job file.
|
||||
|
||||
## Use
|
||||
In order to sign commits, packy-pusher needs your GPG password. It can be provided via the `GPGPASS` environment variable.
|
||||
|
||||
In order to sign commits, artix-metro needs your GPG password. It can be provided via the `GPGPASS` environment variable.
|
||||
Otherwise the program will prompt you for it on startup.
|
||||
|
||||
Run a job:
|
||||
```
|
||||
node index.js --job jobs/example.json5
|
||||
node bin/artix-metro.mjs --job jobs/example.json5
|
||||
```
|
||||
Run a job, skipping to a particular package:
|
||||
```
|
||||
node index.js --job jobs/example.json5 --start kmail
|
||||
```
|
||||
node bin/artix-metro.mjs --job jobs/example.json5 --start kmail
|
||||
```
|
||||
Run an ad hoc job via the CLI:
|
||||
```
|
||||
node bin/artix-metro.mjs add stable libjpeg-turbo lib32-libjpeg-turbo
|
||||
```
|
||||
Notice that as long as the same shorthand works for all packages (e.g. stable, gremlins, goblins), repos can vary from package to package.
|
||||
|
70
src/artoolsconf.mts
Normal file
@@ -0,0 +1,70 @@
|
||||
import * as fsp from 'node:fs/promises';
|
||||
import os from 'node:os';
|
||||
import path from 'node:path';
|
||||
import type { PathLike } from 'node:fs';
|
||||
|
||||
interface ArtoolsConf {
|
||||
workspace: string;
|
||||
giteaToken: string | null;
|
||||
}
|
||||
|
||||
const DefaultConf: ArtoolsConf = {
|
||||
workspace: path.join(os.homedir(), 'artools-workspace'),
|
||||
giteaToken: null
|
||||
}
|
||||
|
||||
function parseProperty(line: string): string {
|
||||
return (line.split('=')[1] || '').trim();
|
||||
}
|
||||
|
||||
function removeQuotes(str: string) {
|
||||
if (
|
||||
(
|
||||
str.charAt(0) === '\'' ||
|
||||
str.charAt(0) === '"'
|
||||
) && str.charAt(0) === str.charAt(str.length - 1)) {
|
||||
return str.substring(1, str.length - 1);
|
||||
}
|
||||
return str;
|
||||
}
|
||||
|
||||
class ArtoolsConfReader {
|
||||
|
||||
async readConf(silent: boolean = false): Promise<ArtoolsConf> {
|
||||
const primaryLocation = path.join(os.homedir(), '.config', 'artools', 'artools-pkg.conf');
|
||||
const systemConf = path.join('/', 'etc', 'artools', 'artools-pkg.conf');
|
||||
try {
|
||||
return await this.readConfFile(primaryLocation);
|
||||
}
|
||||
catch (ex) {
|
||||
if (!silent) {
|
||||
console.error(`artools config at "${primaryLocation}" could not be read. ${ex}\nUsing system config "${systemConf}" instead.`);
|
||||
}
|
||||
return await this.readConfFile(systemConf);
|
||||
}
|
||||
}
|
||||
|
||||
async readConfFile(file: PathLike): Promise<ArtoolsConf> {
|
||||
const lines = (await fsp.readFile(file)).toString().split('\n');
|
||||
let workspace: string | null = null;
|
||||
let giteaToken: string | null = null;
|
||||
lines.forEach(l => {
|
||||
switch (true) {
|
||||
case l.startsWith('WORKSPACE_DIR='):
|
||||
workspace = removeQuotes(parseProperty(l));
|
||||
break;
|
||||
case l.startsWith('GIT_TOKEN='):
|
||||
giteaToken = removeQuotes(parseProperty(l));
|
||||
break;
|
||||
}
|
||||
});
|
||||
return {
|
||||
workspace: process.env['WORKSPACE'] || workspace || DefaultConf.workspace,
|
||||
giteaToken: process.env['GIT_TOKEN'] || giteaToken || null
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export default ArtoolsConfReader;
|
||||
export { ArtoolsConfReader, DefaultConf };
|
||||
export type { ArtoolsConf }
|
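For reference, a minimal TypeScript sketch (not part of the diff) of how the new ArtoolsConfReader is meant to be used; the relative import path assumes tsc has emitted to distribution/ as configured in tsconfig.dist.json.

```
// Sketch only: reads the artools config the same way artix-metro does at startup.
// The import path assumes the compiled output in distribution/ (see tsconfig.dist.json).
import ArtoolsConfReader from './distribution/artoolsconf.mjs';

const reader = new ArtoolsConfReader();
// readConf() prefers ~/.config/artools/artools-pkg.conf and falls back to
// /etc/artools/artools-pkg.conf; the WORKSPACE and GIT_TOKEN environment
// variables override whatever the file provides.
const conf = await reader.readConf();
console.log(`workspace: ${conf.workspace}`);
console.log(`gitea token configured: ${conf.giteaToken !== null}`);
```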
175
src/gitea.mts
Normal file
@@ -0,0 +1,175 @@
|
||||
import ky from 'ky';
|
||||
import { snooze } from './snooze.mjs';
|
||||
|
||||
type CiStatus = "pending" | "success" | "error" | "failure" | "";
|
||||
|
||||
interface GiteaConfig {
|
||||
protocol?: string;
|
||||
domain?: string;
|
||||
apiPrefix?: string;
|
||||
token?: string;
|
||||
}
|
||||
|
||||
interface Commit {
|
||||
sha: string;
|
||||
}
|
||||
|
||||
interface Status {
|
||||
sha: string;
|
||||
state: CiStatus;
|
||||
}
|
||||
|
||||
interface Hook {
|
||||
active: boolean;
|
||||
id: number;
|
||||
}
|
||||
|
||||
class Gitea {
|
||||
private _protocol: string;
|
||||
private _domain: string;
|
||||
private _apiPrefix: string;
|
||||
private _token: string;
|
||||
|
||||
constructor(options: GiteaConfig = {}) {
|
||||
this._protocol = options.protocol || 'https';
|
||||
this._domain = options.domain || 'gitea.artixlinux.org';
|
||||
this._apiPrefix = options.apiPrefix || '/api/v1';
|
||||
this._token = options.token || '';
|
||||
}
|
||||
|
||||
setToken(token: string | null | undefined) {
|
||||
if (token) {
|
||||
this._token = token;
|
||||
}
|
||||
}
|
||||
|
||||
getHomepage() {
|
||||
return `${this._protocol}://${this._domain}/`;
|
||||
}
|
||||
|
||||
getUrlPrefix() {
|
||||
return `${this._protocol}://${this._domain}${this._apiPrefix}`;
|
||||
}
|
||||
|
||||
async getRepo(...args: string[]) {
|
||||
try {
|
||||
let headers: HeadersInit = {};
|
||||
if (this._token) {
|
||||
headers['Authorization'] = `token ${this._token}`
|
||||
}
|
||||
const resp = await ky.get(`${this.getUrlPrefix()}/repos/${args.join('/')}`, {
|
||||
headers
|
||||
});
|
||||
return await resp.json();
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async getCommits(...args: string[]): Promise<Commit[]> {
|
||||
try {
|
||||
let headers: HeadersInit = {};
|
||||
if (this._token) {
|
||||
headers['Authorization'] = `token ${this._token}`
|
||||
}
|
||||
const resp = await ky.get(`${this.getUrlPrefix()}/repos/${args.join('/')}/commits?limit=10`, {
|
||||
headers
|
||||
});
|
||||
return await resp.json();
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async getStatus(...args: string[]): Promise<Status> {
|
||||
try {
|
||||
let commits = await this.getCommits(...args);
|
||||
let headers: HeadersInit = {};
|
||||
if (this._token) {
|
||||
headers['Authorization'] = `token ${this._token}`
|
||||
}
|
||||
const resp = await ky.get(`${this.getUrlPrefix()}/repos/${args.join('/')}/commits/${commits[0]?.sha}/status`, {
|
||||
headers
|
||||
});
|
||||
return await resp.json();
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async getHooks(...args: string[]): Promise<Hook[]> {
|
||||
try {
|
||||
let headers: HeadersInit = {};
|
||||
if (this._token) {
|
||||
headers['Authorization'] = `token ${this._token}`
|
||||
}
|
||||
const resp = await ky.get(`${this.getUrlPrefix()}/repos/${args.join('/')}/hooks`, {
|
||||
headers
|
||||
});
|
||||
return await resp.json();
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async sendTestWebhook(...args: string[]): Promise<void> {
|
||||
try {
|
||||
let headers: HeadersInit = {};
|
||||
if (this._token) {
|
||||
headers['Authorization'] = `token ${this._token}`
|
||||
}
|
||||
const hook = (await this.getHooks(...args)).find(hook => hook.active === true);
|
||||
if (!hook) {
|
||||
throw new Error('No active webhook found');
|
||||
}
|
||||
await ky.post(`${this.getUrlPrefix()}/repos/${args.join('/')}/hooks/${hook.id}/tests`, {
|
||||
headers
|
||||
});
|
||||
}
|
||||
catch (err) {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
async waitForBuild(lastHash: string, ...args: string[]): Promise<void> {
|
||||
let missingStatusCount = 0;
|
||||
while (true) {
|
||||
let status: Status | null;
|
||||
try {
|
||||
status = await this.getStatus(...args);
|
||||
}
|
||||
catch {
|
||||
status = null;
|
||||
}
|
||||
if (!status) {
|
||||
await snooze(30000);
|
||||
continue;
|
||||
}
|
||||
if (!status.sha && !status.state) {
|
||||
if (++missingStatusCount > 3) {
|
||||
console.log('No build info detected. Sending test webhook...');
|
||||
missingStatusCount = 0;
|
||||
await this.sendTestWebhook(...args);
|
||||
}
|
||||
await snooze(30000);
|
||||
}
|
||||
else if (status.sha !== lastHash) {
|
||||
if (status.state === 'success') {
|
||||
break;
|
||||
}
|
||||
else if (status.state === 'failure') {
|
||||
throw new Error(`Build ${status.sha} failed.`);
|
||||
}
|
||||
}
|
||||
await snooze(5000);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default Gitea;
|
||||
export { Gitea };
|
||||
export type { GiteaConfig, Commit, Status, CiStatus };
|
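A short usage sketch (not part of the diff) for the new Gitea wrapper; the 'packages' org matches PACKAGE_ORG in src/pusher.mts, while the pkgbase name is only illustrative.

```
// Sketch only: polls CI status for one package repo through the Gitea API wrapper.
import Gitea from './distribution/gitea.mjs';

const gitea = new Gitea({ domain: 'gitea.artixlinux.org' });
gitea.setToken(process.env['GIT_TOKEN']);

const status = await gitea.getStatus('packages', 'libjpeg-turbo');
console.log(`latest commit ${status.sha} is "${status.state}"`);

// Blocks until a commit newer than the given hash reports success;
// throws if that build ends in failure.
await gitea.waitForBuild(status.sha, 'packages', 'libjpeg-turbo');
```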
226
src/index.mts
Normal file
@@ -0,0 +1,226 @@
|
||||
import * as fsp from 'node:fs/promises';
|
||||
import * as readline from 'node:readline/promises';
|
||||
import path from 'node:path';
|
||||
import clc from 'cli-color';
|
||||
import JSON5 from 'json5';
|
||||
import { Writable } from 'stream';
|
||||
import { glob } from 'glob'
|
||||
import { Pusher } from './pusher.mjs';
|
||||
import { isPasswordRequired } from './runCommand.mjs';
|
||||
import { ArtoolsConfReader, DefaultConf } from './artoolsconf.mjs';
|
||||
import type { Job, ArtixpkgRepo } from './pusher.mts';
|
||||
import type { ArtoolsConf } from './artoolsconf.mts';
|
||||
|
||||
/**
|
||||
* Prompts the user to input their GPG password via stdin
|
||||
* @returns a promise that resolves the password
|
||||
*/
|
||||
async function getGpgPass() {
|
||||
if ((process.env['SKIPGPGPASSPROMPT'] || '').toLowerCase() === 'true') {
|
||||
return 'SKIP';
|
||||
}
|
||||
let muted = false;
|
||||
let mutableStdout = new Writable({
|
||||
write: function (chunk, encoding, callback) {
|
||||
if (!muted) {
|
||||
process.stdout.write(chunk, encoding);
|
||||
}
|
||||
callback();
|
||||
}
|
||||
});
|
||||
if (! await isPasswordRequired()) {
|
||||
console.log(clc.green('Looks like GPG agent is currently running and password is cached. '
|
||||
+ 'If there is no timeout on your cached password, you can simply press enter.\n'
|
||||
+ 'To skip this GPG password prompt next time, set $SKIPGPGPASSPROMPT to true'));
|
||||
}
|
||||
let rl = readline.createInterface({
|
||||
input: process.stdin,
|
||||
output: mutableStdout,
|
||||
terminal: true
|
||||
});
|
||||
const passwordPromise = rl.question(clc.yellow('Enter your GPG password: '));
|
||||
muted = true;
|
||||
const password = await passwordPromise;
|
||||
rl.close();
|
||||
console.log();
|
||||
muted = false;
|
||||
return password;
|
||||
}
|
||||
|
||||
async function expandGlob(workspace: string, globby: string): Promise<string[]> {
|
||||
return (await glob(path.join(globby, 'README.md'), {
|
||||
cwd: path.join(workspace, 'artixlinux'),
|
||||
maxDepth: 2
|
||||
})).map(p => path.dirname(p));
|
||||
}
|
||||
|
||||
async function artixMetro() {
|
||||
let artoolsConf: ArtoolsConf = DefaultConf;
|
||||
let completion: boolean = false;
|
||||
let job: Partial<Job> = {
|
||||
increment: false,
|
||||
packages: []
|
||||
};
|
||||
|
||||
await (async function () {
|
||||
let mode: 'add' | 'move' | null = null;
|
||||
let startPkg: string | null = null;
|
||||
let jobfile: string | null = null;
|
||||
let skipOne = false;
|
||||
let helpFlag: boolean = false;
|
||||
|
||||
process.argv.forEach((arg, i) => {
|
||||
if (skipOne) {
|
||||
skipOne = false;
|
||||
return;
|
||||
}
|
||||
const iPlus = i + 1;
|
||||
const args = process.argv;
|
||||
switch (true) {
|
||||
case (arg === '--completion') && iPlus < args.length:
|
||||
const comm = args[iPlus] as string;
|
||||
completion = skipOne = true;
|
||||
switch (comm) {
|
||||
case ('pkgbase'):
|
||||
(new ArtoolsConfReader()).readConf(true).then(async (conf) => {
|
||||
try {
|
||||
console.log(
|
||||
(await fsp.readdir(path.join(conf.workspace, 'artixlinux'), { withFileTypes: true }))
|
||||
.filter(dirent => dirent.isDirectory())
|
||||
.map(dirent => dirent.name).join(' '));
|
||||
process.exit(0);
|
||||
}
|
||||
catch {
|
||||
process.exit(1);
|
||||
}
|
||||
})
|
||||
break;
|
||||
default:
|
||||
console.error(`command "${comm}" not recognized`)
|
||||
break;
|
||||
}
|
||||
break;
|
||||
case (arg === '--job' || arg === '-j') && iPlus < args.length:
|
||||
if (jobfile) {
|
||||
console.error(`multiple jobfiles provided. aborting.`);
|
||||
process.exit(2);
|
||||
}
|
||||
jobfile = args[iPlus] as string;
|
||||
skipOne = true;
|
||||
break;
|
||||
case arg === '--start' && iPlus < args.length:
|
||||
startPkg = args[iPlus] as string;
|
||||
skipOne = true;
|
||||
break;
|
||||
case arg === '--token' && iPlus < args.length:
|
||||
job.giteaToken = args[iPlus] as string;
|
||||
skipOne = true;
|
||||
break;
|
||||
case arg === '--workspace' && iPlus < args.length:
|
||||
job.workspace = args[iPlus] as string;
|
||||
skipOne = true;
|
||||
break;
|
||||
case arg === '--increment':
|
||||
job.increment = true;
|
||||
break;
|
||||
case arg === '-p':
|
||||
console.warn('-p option is implied.');
|
||||
break;
|
||||
case arg === '-h' || arg === '--help':
|
||||
helpFlag = true;
|
||||
break;
|
||||
case arg.startsWith('-'):
|
||||
console.error(`unrecognized option '${arg}'`);
|
||||
process.exit(1);
|
||||
case !mode && (arg === 'add' || arg === 'move'):
|
||||
mode = arg;
|
||||
break;
|
||||
case mode === 'move' && !(job as Job).source:
|
||||
job.source = arg as ArtixpkgRepo;
|
||||
break;
|
||||
case mode && !job.repo:
|
||||
job.repo = arg as ArtixpkgRepo;
|
||||
break;
|
||||
case !!job.repo:
|
||||
job.packages?.push(arg);
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
if (completion) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (helpFlag || (!jobfile && !job.repo)) {
|
||||
console.log([
|
||||
`\nUsage: artix-metro [OPTIONS] [commands]...`,
|
||||
'works similarly to "artixpkg repo"... but with a few tricks!',
|
||||
'All package operations check if the package appears in the appropriate artix-checkupdate output.',
|
||||
'Build operations don\'t proceed until the previous build succeeds. Halts on failed build.\n',
|
||||
'Options',
|
||||
'-j, --job <jobfile>\tread instructions from a job file. Overrides all other options except --start',
|
||||
'--start <package>\tskips all packages before the provided package',
|
||||
'--token <token>\t\tdefines the Gitea token to use for making calls to the Gitea API',
|
||||
'--workspace <path>\tdefines the artools workspace',
|
||||
'--increment\t\tenable increment mode',
|
||||
'-h, --help\t\tshows this help message\n',
|
||||
'Commands',
|
||||
'add <destination> <pkgbase>...\t\t\tupgrade and push all packages to the specified destination',
|
||||
'move <source> <destination> <pkgbase>...\tmove all packages from the source repo to the destination repo\n',
|
||||
].join('\n'));
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
try {
|
||||
artoolsConf = await (new ArtoolsConfReader()).readConf();
|
||||
}
|
||||
catch (ex) {
|
||||
console.error(ex);
|
||||
}
|
||||
|
||||
if (jobfile) {
|
||||
try {
|
||||
job = JSON5.parse((await fsp.readFile(jobfile)).toString());
|
||||
}
|
||||
catch (ex) {
|
||||
console.error('A jobfile was provided but could not be read:');
|
||||
console.error(ex);
|
||||
process.exit(4);
|
||||
}
|
||||
}
|
||||
else if (job.packages) {
|
||||
const expanded: string[] = [];
|
||||
for (let i = 0; i < (job.packages.length || 0); i++) {
|
||||
(await expandGlob(artoolsConf.workspace, job.packages[i] as string)).forEach(p => expanded.push(p));
|
||||
}
|
||||
job.packages = expanded;
|
||||
}
|
||||
|
||||
if (startPkg && job.packages) {
|
||||
const startPos = job.packages.indexOf(startPkg);
|
||||
job.packages.splice(0, startPos < 0 ? job.packages.length : startPos)
|
||||
}
|
||||
})();
|
||||
|
||||
if (completion) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log('artix-metro - Developed by Cory Sanin\n');
|
||||
|
||||
let pusher = new Pusher({
|
||||
gpgpass: process.env['GPGPASS'] || (await getGpgPass()) || ''
|
||||
}, artoolsConf);
|
||||
|
||||
try {
|
||||
await pusher.runJob(job as Job);
|
||||
}
|
||||
catch (ex) {
|
||||
console.error(clc.red('job threw exception:'));
|
||||
console.error(ex);
|
||||
process.exit(5)
|
||||
}
|
||||
}
|
||||
|
||||
export default artixMetro;
|
||||
export { artixMetro, expandGlob };
|
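One behavior worth noting in index.mts is that CLI pkgbase arguments are expanded as globs against the artools workspace. A small sketch, with a hypothetical workspace path and pattern:

```
// Sketch only: expandGlob matches pkgbase directories (those containing a README.md)
// under <workspace>/artixlinux. The workspace path and 'qt5-*' pattern are hypothetical.
import { expandGlob } from './distribution/index.mjs';

const pkgbases = await expandGlob('/home/builder/artools-workspace', 'qt5-*');
console.log(pkgbases); // e.g. ['qt5-base', 'qt5-tools'] if those package dirs exist
```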
211
src/pusher.mts
Normal file
@@ -0,0 +1,211 @@
|
||||
import * as fs from 'node:fs';
|
||||
import * as fsp from 'node:fs/promises';
|
||||
import * as readline from 'node:readline/promises';
|
||||
import clc from 'cli-color';
|
||||
import path from 'node:path';
|
||||
import os from 'node:os';
|
||||
import { Checkupdates } from 'artix-checkupdates';
|
||||
import { Gitea } from './gitea.mjs'
|
||||
import { DefaultConf } from './artoolsconf.mjs';
|
||||
import { snooze } from './snooze.mjs';
|
||||
import { runCommand, isPasswordRequired } from './runCommand.mjs';
|
||||
import type { ArtixRepo } from 'artix-checkupdates';
|
||||
import type { ArtoolsConf } from './artoolsconf.mts';
|
||||
|
||||
interface PusherConfig {
|
||||
gpgpass?: string;
|
||||
}
|
||||
|
||||
type ArtixpkgRepo = ArtixRepo | 'stable' | 'gremlins' | 'goblins';
|
||||
|
||||
interface Job extends Partial<ArtoolsConf> {
|
||||
source?: ArtixpkgRepo;
|
||||
repo: ArtixpkgRepo;
|
||||
increment: boolean;
|
||||
packages: string[];
|
||||
}
|
||||
|
||||
const PACKAGE_ORG = 'packages';
|
||||
const SIGNFILE = path.join(os.tmpdir(), 'signfile');
|
||||
|
||||
/**
|
||||
* Formats text to be sent as a parameter to some command
|
||||
* @param param
|
||||
*/
|
||||
function escapeCommandParam(param: string) {
|
||||
return param.replace(/\\/g, "\\\\");
|
||||
}
|
||||
|
||||
|
||||
|
||||
class Pusher {
|
||||
private _gitea: Gitea | null;
|
||||
private _config: PusherConfig;
|
||||
private _artools: ArtoolsConf;
|
||||
private _createdSignfile: boolean;
|
||||
|
||||
constructor(config: PusherConfig = {}, artoolsConf: ArtoolsConf = DefaultConf) {
|
||||
this._gitea = null;
|
||||
this._artools = artoolsConf;
|
||||
this._config = config;
|
||||
this._createdSignfile = false;
|
||||
this._gitea = new Gitea({
|
||||
token: this._artools.giteaToken || ''
|
||||
});
|
||||
}
|
||||
|
||||
async refreshGpg() {
|
||||
const sshSignMode = 'SSHKEYSIGN' in process.env;
|
||||
if (sshSignMode || await isPasswordRequired()) {
|
||||
console.log(clc.cyan('Refreshing signature...'));
|
||||
this._createdSignfile ||= await runCommand('touch', [SIGNFILE]);
|
||||
if (sshSignMode) {
|
||||
await runCommand('ssh-keygen', ['-Y', 'sign', '-f', path.resolve(process.env['SSHKEYSIGN'] as string), '-n', ' git', SIGNFILE]);
|
||||
}
|
||||
else {
|
||||
await runCommand('gpg', ['-a', '--passphrase', escapeCommandParam(this._config.gpgpass || ''), '--batch', '--pinentry-mode', 'loopback', '--detach-sign', SIGNFILE]);
|
||||
}
|
||||
await fsp.rm(`${SIGNFILE}.${sshSignMode ? 'sig' : 'asc'}`)
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
increment(pkg: string): Promise<void> {
|
||||
return new Promise(async (res, _) => {
|
||||
const pkgbuild = path.join(this._artools.workspace, 'artixlinux', pkg, 'PKGBUILD');
|
||||
let lines: string[] = [];
|
||||
|
||||
const rl = readline.createInterface({
|
||||
input: fs.createReadStream(pkgbuild),
|
||||
output: process.stdout,
|
||||
terminal: false
|
||||
});
|
||||
|
||||
rl.on('line', async line => {
|
||||
if (line.trim().startsWith('pkgrel')) {
|
||||
const pkgrelLine = line.split('=');
|
||||
if (pkgrelLine.length <= 1) {
|
||||
throw new Error(`Failed to parse pkgrel line: \n${line}`);
|
||||
}
|
||||
const pkgrel = (pkgrelLine[1] as string).trim();
|
||||
// let's not deal with floats in javascript
|
||||
let num = pkgrel.split('.');
|
||||
if (num.length > 1) {
|
||||
num[1] = `${parseInt(num[1] as string) + 1}`;
|
||||
}
|
||||
else {
|
||||
num.push('1');
|
||||
}
|
||||
lines.push(`pkgrel=${num.join('.')}`);
|
||||
}
|
||||
else {
|
||||
lines.push(line);
|
||||
}
|
||||
});
|
||||
rl.on('close', async () => {
|
||||
await fsp.writeFile(pkgbuild, lines.join('\n') + '\n');
|
||||
res();
|
||||
});
|
||||
})
|
||||
}
|
||||
|
||||
async isNewPackage(pkg: string) {
|
||||
const pkgbuild = path.join(this._artools.workspace, 'artixlinux', pkg, 'PKGBUILD');
|
||||
try {
|
||||
const stat = await fsp.stat(pkgbuild);
|
||||
return !stat.size;
|
||||
}
|
||||
catch {
|
||||
console.log('PKGBUILD doesn\'t exist. Assuming package is new.');
|
||||
console.info(`checked ${pkgbuild}`);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
async runJob(job: Job) {
|
||||
const checkupdates = new Checkupdates();
|
||||
const gitea = this._gitea as Gitea;
|
||||
|
||||
this._artools.workspace = job.workspace || this._artools.workspace;
|
||||
gitea.setToken(job.giteaToken);
|
||||
|
||||
if (!job.repo) {
|
||||
throw new Error('Must provide `repo` destination in config!');
|
||||
}
|
||||
if (job.increment && !this._artools?.workspace) {
|
||||
throw new Error('Must provide `directory` path in config if increment is enabled!');
|
||||
}
|
||||
if (job.increment && job.source) {
|
||||
throw new Error('increment can\'t be set to true for a move operation. Set increment to false or remove the source repo.');
|
||||
}
|
||||
|
||||
console.log(clc.yellowBright('Running artix-checkupdates'));
|
||||
const actionable = job.increment ? job.packages : (await (!!job.source ? checkupdates.fetchLooseMovable() : checkupdates.fetchUpgradable())).map(res => res.basename);
|
||||
|
||||
// order is IMPORTANT. Must be BLOCKING.
|
||||
for (let i = 0; i < (job.packages || []).length; i++) {
|
||||
const p: string = job.packages[i] as string;
|
||||
let lastHash: string = '';
|
||||
if (!job.increment && !actionable.includes(p) && ! await this.isNewPackage(p)) {
|
||||
console.log(clc.magenta(`${p} isn't marked as upgradable. Skipping.`));
|
||||
continue;
|
||||
}
|
||||
console.log((new Date()).toLocaleTimeString() + clc.magentaBright(` Package ${i}/${job.packages.length}`));
|
||||
while (!job.source) {
|
||||
try {
|
||||
lastHash = (await gitea.getStatus(PACKAGE_ORG, p)).sha
|
||||
console.log(`current sha: ${lastHash}`);
|
||||
break;
|
||||
}
|
||||
catch {
|
||||
console.log(clc.red(`Failed to get status of ${p}. Retrying...`));
|
||||
await snooze(30000);
|
||||
}
|
||||
}
|
||||
console.log(clc.yellowBright(`Pushing ${p} ...`));
|
||||
if (job.source) {
|
||||
try {
|
||||
await this.refreshGpg();
|
||||
await runCommand('artixpkg', ['repo', 'move', '-p', job.source, job.repo, p]);
|
||||
}
|
||||
catch {
|
||||
console.log(clc.cyan(`Moving ${p} failed.`));
|
||||
}
|
||||
}
|
||||
else {
|
||||
if (job.increment) {
|
||||
await this.increment(p);
|
||||
}
|
||||
else {
|
||||
await runCommand('artixpkg', ['repo', 'import', p]);
|
||||
}
|
||||
await this.refreshGpg();
|
||||
await runCommand('artixpkg', ['repo', 'add', '-p', job.repo, p]);
|
||||
}
|
||||
console.log(clc.blueBright(`${p} commit pushed`));
|
||||
if (!job.source) {
|
||||
try {
|
||||
await gitea.waitForBuild(lastHash, PACKAGE_ORG, p)
|
||||
console.log(clc.greenBright(`${p} built successfully.`));
|
||||
}
|
||||
catch (ex) {
|
||||
console.error(clc.redBright(`Failed on ${p} : ${gitea.getHomepage()}${PACKAGE_ORG}/${p}`));
|
||||
throw ex;
|
||||
}
|
||||
}
|
||||
}
|
||||
console.log(clc.greenBright('SUCCESS: All packages built'));
|
||||
if (this._createdSignfile) {
|
||||
try {
|
||||
await fsp.rm(SIGNFILE);
|
||||
}
|
||||
catch {
|
||||
console.error(clc.red('failed to remove temp signfile'));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export default Pusher;
|
||||
export { Pusher };
|
||||
export type { PusherConfig, Job, ArtixpkgRepo };
|
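For reference, a sketch (not part of the diff) of driving the new Pusher class with an in-memory Job, equivalent to what a JSON5 job file describes; the destination repo and package names mirror the readme example and are illustrative only.

```
// Sketch only: runs a job programmatically instead of through a JSON5 job file.
import { Pusher } from './distribution/pusher.mjs';
import { ArtoolsConfReader } from './distribution/artoolsconf.mjs';
import type { Job } from './distribution/pusher.mjs';

const artoolsConf = await new ArtoolsConfReader().readConf();

const job: Job = {
    repo: 'stable',                                     // destination repo
    // omit `source` to import + build; set it to move between repos instead
    increment: false,
    packages: ['libjpeg-turbo', 'lib32-libjpeg-turbo']  // illustrative pkgbases
};

const pusher = new Pusher({ gpgpass: process.env['GPGPASS'] ?? '' }, artoolsConf);
await pusher.runJob(job);
```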
43
src/runCommand.mts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { spawn } from 'node:child_process';
|
||||
import type { SpawnOptions } from 'node:child_process';
|
||||
|
||||
/**
|
||||
* Run a command (as a promise).
|
||||
* @param command command to run
|
||||
* @param args args to pass
|
||||
* @returns promise that yields true if success
|
||||
*/
|
||||
function runCommand(command: string, args: string[] = [], stdOutToLogs: boolean = true): Promise<boolean> {
|
||||
return new Promise((res, _) => {
|
||||
const opts: SpawnOptions = {stdio: stdOutToLogs ? ['pipe', 'inherit', 'inherit'] : 'pipe'};
|
||||
const proc = spawn(command, args, opts);
|
||||
proc.on('exit', code => res(code === 0));
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if password input is necessary for signing
|
||||
* @returns promise that yieds true if password is required
|
||||
*/
|
||||
function isPasswordRequired(): Promise<boolean> {
|
||||
return new Promise(async (res, _) => {
|
||||
if (! await runCommand('gpg-agent', [], false)) {
|
||||
return res(true);
|
||||
}
|
||||
const proc = spawn('gpg-connect-agent', ['KEYINFO --list', '/bye'], { stdio: 'pipe' });
|
||||
let outputstr = '';
|
||||
proc.stdout.on('data', data => {
|
||||
outputstr += data.toString();
|
||||
});
|
||||
proc.on('exit', async () => {
|
||||
const keyinfo = outputstr.split('\n').filter(l => l.includes('KEYINFO'));
|
||||
res(!keyinfo.find(l => {
|
||||
const tokens = l.split(' ');
|
||||
return tokens[0] === 'S' && tokens[1] === 'KEYINFO' && tokens[3] === 'D' && tokens[6] === '1';
|
||||
}));
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
export default runCommand;
|
||||
export { runCommand, isPasswordRequired };
|
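A brief sketch (not part of the diff) of the two helpers; the git invocation is only an example, artix-metro itself uses runCommand to wrap artixpkg and gpg.

```
// Sketch only: demonstrates the intended use of runCommand and isPasswordRequired.
import { runCommand, isPasswordRequired } from './distribution/runCommand.mjs';

if (await isPasswordRequired()) {
    console.log('gpg-agent has no cached passphrase; expect a GPG password prompt');
}

// Resolves true on exit code 0, false otherwise; stdout/stderr are inherited by default.
const ok = await runCommand('git', ['--version']);
console.log(`command ${ok ? 'succeeded' : 'failed'}`);
```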
11
src/snooze.mts
Normal file
@@ -0,0 +1,11 @@
|
||||
/**
|
||||
* Sleep equivalent as a promise
|
||||
* @param ms Number of ms
|
||||
* @returns void
|
||||
*/
|
||||
function snooze(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
|
||||
export default snooze;
|
||||
export { snooze };
|
12
tsconfig.dist.json
Normal file
@@ -0,0 +1,12 @@
|
||||
{
|
||||
"extends": "./tsconfig",
|
||||
"compilerOptions": {
|
||||
"sourceMap": true,
|
||||
"inlineSources": true,
|
||||
"rootDir": "./src",
|
||||
"outDir": "./distribution"
|
||||
},
|
||||
"include": [
|
||||
"src"
|
||||
]
|
||||
}
|
9
tsconfig.json
Normal file
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"extends": "@sindresorhus/tsconfig",
|
||||
"compilerOptions": {
|
||||
"exactOptionalPropertyTypes": true
|
||||
},
|
||||
"include": [
|
||||
"src"
|
||||
]
|
||||
}
|