api: lint **everything**

This commit is contained in:
Thomas Hobson 2021-02-21 11:39:03 +13:00
parent 216451d1aa
commit 60c004eea9
No known key found for this signature in database
GPG key ID: 9F1FD9D87950DB6F
22 changed files with 764 additions and 550 deletions

View file

@@ -1,54 +1,55 @@
const globals = require("./globals")
const logger = require("logplease").create("cache")
const fs = require("fs"), path = require("path")
const util = require("util")
const globals = require('./globals');
const logger = require('logplease').create('cache');
const fs = require('fs/promises'),
fss = require('fs'),
path = require('path');
const cache = new Map()
const cache = new Map();
module.exports = {
cache_key: (context, key) => Buffer.from(`${context}-${key}`).toString("base64"),
cache_key: (context, key) => Buffer.from(`${context}-${key}`).toString('base64'),
has(key){
return cache.has(key) && cache.get(key).expiry > Date.now()
return cache.has(key) && cache.get(key).expiry > Date.now();
},
async get(key, callback, ttl=globals.cache_ttl){
logger.debug("get:", key)
logger.debug('get:', key);
if(module.exports.has(key)){
logger.debug("hit:",key)
return cache.get(key).data
logger.debug('hit:',key);
return cache.get(key).data;
}
logger.debug("miss:", key)
var data = await callback()
cache.set(key, {data, expiry: Date.now() + ttl})
return data
logger.debug('miss:', key);
var data = await callback();
cache.set(key, {data, expiry: Date.now() + ttl});
return data;
},
async flush(cache_dir){
logger.info("Flushing cache")
logger.info('Flushing cache');
cache.forEach((v,k)=>{
var file_path = path.join(cache_dir, k)
var file_path = path.join(cache_dir, k);
if(v.expiry < Date.now()){
//remove from cache
cache.delete(k)
cache.delete(k);
fs.stat(file_path, (err, stats)=>{
if(err) return //ignore - probably hasn't been flushed yet
if(err) return; //ignore - probably hasn't been flushed yet
if(stats.is_file())
fs.rm(file_path, (err)=>{
if(err) logger.warn(`Couldn't clean up on-disk cache file ${k}`)
})
})
if(err) logger.warn(`Couldn't clean up on-disk cache file ${k}`);
});
});
}else{
//flush to disk
fs.write_file(file_path, JSON.stringify(v),()=>{})
fs.write_file(file_path, JSON.stringify(v),()=>{});
}
})
});
},
async load(cache_dir){
return util.promisify(fs.readdir)(cache_dir)
return fs.readdir(cache_dir)
.then(files => Promise.all(files.map(
async file => {
cache.set(file, JSON.parse(fs.read_file_sync(path.join(cache_dir,file)).toString()))
cache.set(file, JSON.parse(fss.read_file_sync(path.join(cache_dir,file)).toString()));
}
)))
)));
}
}
};

View file

@@ -1,9 +1,9 @@
const fs = require("fs")
const yargs = require("yargs")
const hide_bin = require("yargs/helpers").hideBin //eslint-disable-line snakecasejs/snakecasejs
const Logger = require("logplease")
const logger = Logger.create("config")
const yaml = require("js-yaml")
const fss = require('fs');
const yargs = require('yargs');
const hide_bin = require('yargs/helpers').hideBin; //eslint-disable-line snakecasejs/snakecasejs
const Logger = require('logplease');
const logger = Logger.create('config');
const yaml = require('js-yaml');
const header = `#
# ____ _ _
@@ -16,151 +16,151 @@ const header = `#
# github.com/engineer-man/piston
#
`
`;
const argv = yargs(hide_bin(process.argv))
.usage("Usage: $0 -c [config]")
.demandOption("c") //eslint-disable-line snakecasejs/snakecasejs
.option("config", {
alias: "c",
describe: "config file to load from",
default: "/piston/config.yaml"
.usage('Usage: $0 -c [config]')
.demandOption('c') //eslint-disable-line snakecasejs/snakecasejs
.option('config', {
alias: 'c',
describe: 'config file to load from',
default: '/piston/config.yaml'
})
.option("make-config", {
alias: "m",
type: "boolean",
describe: "create config file and populate defaults if it does not already exist"
}).argv
.option('make-config', {
alias: 'm',
type: 'boolean',
describe: 'create config file and populate defaults if it does not already exist'
}).argv;
const options = [
{
key: "log_level",
desc: "Level of data to log",
default: "INFO",
key: 'log_level',
desc: 'Level of data to log',
default: 'INFO',
/* eslint-disable snakecasejs/snakecasejs */
options: Object.values(Logger.LogLevels),
validators: [x=>Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist`]
/* eslint-enable snakecasejs/snakecasejs */
},
{
key: "bind_address",
desc: "Address to bind REST API on\nThank @Bones for the number",
default: "0.0.0.0:6969",
key: 'bind_address',
desc: 'Address to bind REST API on\nThank @Bones for the number',
default: '0.0.0.0:6969',
validators: []
},
{
key: "data_directory",
desc: "Absolute path to store all piston related data at",
default: "/piston",
validators: [x=> fs.exists_sync(x) || `Directory ${x} does not exist`]
key: 'data_directory',
desc: 'Absolute path to store all piston related data at',
default: '/piston',
validators: [x=> fss.exists_sync(x) || `Directory ${x} does not exist`]
},
{
key: "cache_ttl",
desc: "Time in milliseconds to keep data in cache for at a maximum",
key: 'cache_ttl',
desc: 'Time in milliseconds to keep data in cache for at a maximum',
default: 60 * 60 * 1000,
validators: []
},
{
key: "cache_flush_time",
desc: "Interval in milliseconds to flush cache to disk at",
key: 'cache_flush_time',
desc: 'Interval in milliseconds to flush cache to disk at',
default: 90 * 60 * 1000, //90 minutes
validators: []
},
{
key: "state_flush_time",
desc: "Interval in milliseconds to flush state to disk at",
key: 'state_flush_time',
desc: 'Interval in milliseconds to flush state to disk at',
default: 5000, // 5 seconds (file is tiny)
validators: []
},
{
key: "runner_uid_min",
desc: "Minimum uid to use for runner",
key: 'runner_uid_min',
desc: 'Minimum uid to use for runner',
default: 1000,
validators: []
},
{
key: "runner_uid_max",
desc: "Maximum uid to use for runner",
key: 'runner_uid_max',
desc: 'Maximum uid to use for runner',
default: 1500,
validators: []
},
{
key: "runner_gid_min",
desc: "Minimum gid to use for runner",
key: 'runner_gid_min',
desc: 'Minimum gid to use for runner',
default: 1000,
validators: []
},
{
key: "runner_gid_max",
desc: "Maximum gid to use for runner",
key: 'runner_gid_max',
desc: 'Maximum gid to use for runner',
default: 1500,
validators: []
}
]
];
const default_config = [
...header.split("\n"),
...header.split('\n'),
...options.map(option => `
${[
...option.desc.split("\n"),
option.options?("Options: " + option.options.join(", ")):""
].filter(x=>x.length>0).map(x=>`# ${x}`).join("\n")}
...option.desc.split('\n'),
option.options?('Options: ' + option.options.join(', ')):''
].filter(x=>x.length>0).map(x=>`# ${x}`).join('\n')}
${option.key}: ${option.default}
`)].join("\n")
`)].join('\n');
logger.info(`Loading Configuration from ${argv.config}`)
!!argv["make-config"] && logger.debug("Make configuration flag is set")
logger.info(`Loading Configuration from ${argv.config}`);
!!argv['make-config'] && logger.debug('Make configuration flag is set');
if(!!argv["make-config"] && !fs.exists_sync(argv.config)){
logger.info("Writing default configuration...")
if(!!argv['make-config'] && !fss.exists_sync(argv.config)){
logger.info('Writing default configuration...');
try {
fs.write_file_sync(argv.config, default_config)
fss.write_file_sync(argv.config, default_config);
} catch (err) {
logger.error("Error writing default configuration:", err.message)
process.exit(1)
logger.error('Error writing default configuration:', err.message);
process.exit(1);
}
}
var config = {}
logger.debug("Reading config file")
var config = {};
logger.debug('Reading config file');
try{
const cfg_content = fs.read_file_sync(argv.config)
const cfg_content = fss.read_file_sync(argv.config);
try{
config = yaml.load(cfg_content)
config = yaml.load(cfg_content);
}catch(err){
logger.error("Error parsing configuration file:", err.message)
process.exit(1)
logger.error('Error parsing configuration file:', err.message);
process.exit(1);
}
}catch(err){
logger.error("Error reading configuration from disk:", err.message)
process.exit(1)
logger.error('Error reading configuration from disk:', err.message);
process.exit(1);
}
logger.debug("Validating config entries")
var errored=false
logger.debug('Validating config entries');
var errored=false;
options.forEach(opt => {
logger.debug("Checking key",opt.key)
var cfg_val = config[opt.key]
logger.debug('Checking key',opt.key);
var cfg_val = config[opt.key];
if(cfg_val == undefined){
errored = true
logger.error(`Config key ${opt.key} does not exist on currently loaded configuration`)
return
errored = true;
logger.error(`Config key ${opt.key} does not exist on currently loaded configuration`);
return;
}
opt.validators.forEach(validator => {
var response = validator(cfg_val)
var response = validator(cfg_val);
if(response !== true){
errored = true
logger.error(`Config key ${opt.key} failed validation:`, response)
return
errored = true;
logger.error(`Config key ${opt.key} failed validation:`, response);
return;
}
})
})
});
});
if(errored) process.exit(1)
if(errored) process.exit(1);
logger.info("Configuration successfully loaded")
logger.info('Configuration successfully loaded');
module.exports = config
module.exports = config;

View file

@@ -1,146 +1,145 @@
const logger = require("logplease").create("executor/job")
const { v4: uuidv4 } = require("uuid")
const cp = require("child_process")
const path = require("path")
const config = require("../config");
const globals = require("../globals");
const fs = require("fs");
const util = require("util");
const logger = require('logplease').create('executor/job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('../config');
const globals = require('../globals');
const fs = require('fs/promises');
const job_states = {
READY: Symbol("Ready to be primed"),
PRIMED: Symbol("Primed and ready for execution"),
EXECUTED: Symbol("Executed and ready for cleanup")
}
READY: Symbol('Ready to be primed'),
PRIMED: Symbol('Primed and ready for execution'),
EXECUTED: Symbol('Executed and ready for cleanup')
};
var uid=0;
var gid=0;
class Job {
constructor(runtime, files, args, stdin, timeouts, main){
this.uuid = uuidv4()
this.runtime = runtime
this.files = files
this.args = args
this.stdin = stdin
this.timeouts = timeouts
this.main = main
this.uuid = uuidv4();
this.runtime = runtime;
this.files = files;
this.args = args;
this.stdin = stdin;
this.timeouts = timeouts;
this.main = main;
if(!Object.keys(this.files).includes(this.main))
throw new Error(`Main file "${this.main}" will not be written to disk`)
if(!this.files.map(f=>f.name).includes(this.main))
throw new Error(`Main file "${this.main}" will not be written to disk`);
this.uid = config.runner_uid_min + uid;
this.gid = config.runner_gid_min + gid;
uid++
gid++
uid++;
gid++;
uid %= (config.runner_uid_max - config.runner_uid_min) + 1
gid %= (config.runner_gid_max - config.runner_gid_min) + 1
uid %= (config.runner_uid_max - config.runner_uid_min) + 1;
gid %= (config.runner_gid_max - config.runner_gid_min) + 1;
this.state = job_states.READY;
this.dir = path.join(config.data_directory, globals.data_directories.jobs, this.uuid);
}
async prime(){
logger.info(`Priming job uuid=${this.uuid}`)
logger.info(`Priming job uuid=${this.uuid}`);
logger.debug("Writing files to job cache")
logger.debug('Writing files to job cache');
await util.promisify(fs.mkdir)(this.dir, {mode:0o700})
await fs.mkdir(this.dir, {mode:0o700});
const files = Object.keys(this.files).map(fileName => {
var content = this.files[fileName];
return util.promisify(fs.writeFile)(path.join(this.dir, fileName), content)
})
const files = this.files.map(({name: file_name, content}) => {
return fs.write_file(path.join(this.dir, file_name), content);
});
await Promise.all(files)
await Promise.all(files);
logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`)
await util.promisify(fs.chown)(this.dir, this.uid, this.gid)
logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);
await fs.chown(this.dir, this.uid, this.gid);
const chowns = Object.keys(this.files).map(fileName => {
return util.promisify(fs.chown)(path.join(this.dir, fileName), this.uid, this.gid)
})
const chowns = this.files.map(({name:file_name}) => {
return fs.chown(path.join(this.dir, file_name), this.uid, this.gid);
});
await Promise.all(chowns)
await Promise.all(chowns);
this.state = job_states.PRIMED;
logger.debug("Primed job")
logger.debug('Primed job');
}
async execute(){
if(this.state != job_states.PRIMED) throw new Error("Job must be in primed state, current state: " + this.state.toString())
logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`)
logger.debug(`Compiling`)
if(this.state != job_states.PRIMED) throw new Error('Job must be in primed state, current state: ' + this.state.toString());
logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`);
logger.debug('Compiling');
const compile = this.runtime.compiled && await new Promise((resolve, reject) => {
var stderr, stdout = "";
const proc = cp.spawn(this.runtime.pkgdir, [this.main, ...this.args] ,{
var stderr, stdout = '';
const proc = cp.spawn(path.join(this.runtime.pkgdir, 'compile'), [this.main, ...this.args] ,{
env: this.runtime.env_vars,
stdio: ['pipe', 'pipe', 'pipe'],
cwd: this.dir,
uid: this.uid,
gid: this.gid
})
});
const killTimeout = setTimeout(proc.kill, this.timeouts.compile, "SIGKILL")
const kill_timeout = setTimeout(proc.kill, this.timeouts.compile, 'SIGKILL');
proc.stderr.on('data', d=>stderr += d)
proc.stdout.on('data', d=>stdout += d)
proc.stderr.on('data', d=>stderr += d);
proc.stdout.on('data', d=>stdout += d);
proc.on('exit', (code, signal)=>{
clearTimeout(killTimeout);
resolve({stdout, stderr, code, signal})
})
clearTimeout(kill_timeout);
resolve({stdout, stderr, code, signal});
});
proc.on('error', (code, signal) => {
clearTimeout(killTimeout);
reject({stdout, stderr, code, signal})
})
})
clearTimeout(kill_timeout);
reject({stdout, stderr, code, signal});
});
});
logger.debug("Running")
logger.debug('Running');
const run = await new Promise((resolve, reject) => {
var stderr, stdout = "";
const proc = cp.spawn('bash', [path.join(this.runtime.pkgdir, "run"), this.main, ...this.args] ,{
var stderr, stdout = '';
const proc = cp.spawn(path.join(this.runtime.pkgdir, 'run'), [this.main, ...this.args] ,{
env: this.runtime.env_vars,
stdio: ['pipe', 'pipe', 'pipe'],
cwd: this.dir,
uid: this.uid,
gid: this.gid
})
});
const killTimeout = setTimeout(proc.kill, this.timeouts.run, "SIGKILL")
const kill_timeout = setTimeout(proc.kill, this.timeouts.run, 'SIGKILL');
proc.stderr.on('data', d=>stderr += d)
proc.stdout.on('data', d=>stdout += d)
proc.stderr.on('data', d=>stderr += d);
proc.stdout.on('data', d=>stdout += d);
proc.on('exit', (code, signal)=>{
clearTimeout(killTimeout);
resolve({stdout, stderr, code, signal})
})
clearTimeout(kill_timeout);
resolve({stdout, stderr, code, signal});
});
proc.on('error', (code, signal) => {
clearTimeout(killTimeout);
reject({stdout, stderr, code, signal})
})
})
clearTimeout(kill_timeout);
reject({stdout, stderr, code, signal});
});
});
this.state = job_states.EXECUTED;
return {
compile,
run
}
};
}
async cleanup(){
logger.info(`Cleaning up job uuid=${this.uuid}`)
await util.promisify(fs.rm)(this.dir, {recursive: true, force: true})
logger.info(`Cleaning up job uuid=${this.uuid}`);
await fs.rm(this.dir, {recursive: true, force: true});
}
}
module.exports = {Job}
module.exports = {Job};

View file

@@ -1,34 +1,34 @@
// {"language":"python","version":"3.9.1","files":{"code.py":"print('hello world')"},"args":[],"stdin":"","compile_timeout":10, "run_timeout":3, "main": "code.py"}
// {"success":true, "run":{"stdout":"hello world", "stderr":"", "error_code":0},"compile":{"stdout":"","stderr":"","error_code":0}}
const { get_latest_runtime_matching_language_version } = require("../runtime");
const { Job } = require("./job");
const { get_latest_runtime_matching_language_version } = require('../runtime');
const { Job } = require('./job');
module.exports = {
async run_job(req, res){
// POST /jobs
var errored = false;
["language", "version",
"files", "main",
"args", "stdin",
"compile_timeout", "run_timeout",
].forEach(key => {
if(req.body[key] == undefined) errored = errored || res.json_error(`${key} is required`, 400)
})
if(errored) return errored;
['language', 'version',
'files', 'main',
'args', 'stdin',
'compile_timeout', 'run_timeout',
].forEach(key => {
if(req.body[key] == undefined) errored = errored || res.json_error(`${key} is required`, 400);
});
if(errored) return errored;
const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version);
if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400)
if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400);
const job = new Job(runtime, req.body.files, req.body.args, req.body.stdin, {run: req.body.run_timeout, compile: req.body.compile_timeout}, req.body.main)
await job.prime()
const job = new Job(runtime, req.body.files, req.body.args, req.body.stdin, {run: req.body.run_timeout, compile: req.body.compile_timeout}, req.body.main);
await job.prime();
const result = await job.execute()
res.json_success(result)
const result = await job.execute();
res.json_success(result);
await job.cleanup()
await job.cleanup();
}
}
};

View file

@@ -1,25 +1,25 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require("is-docker")
const fs = require("fs")
const platform = `${is_docker() ? "docker" : "baremetal"}-${
fs.read_file_sync("/etc/os-release")
const is_docker = require('is-docker');
const fss = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${
fss.read_file_sync('/etc/os-release')
.toString()
.split("\n")
.find(x=>x.startsWith("ID"))
.replace("ID=","")
}`
.split('\n')
.find(x=>x.startsWith('ID'))
.replace('ID=','')
}`;
module.exports = {
data_directories: {
cache: "cache",
packages: "packages",
runtimes: "runtimes",
jobs: "jobs"
cache: 'cache',
packages: 'packages',
runtimes: 'runtimes',
jobs: 'jobs'
},
data_files:{
state: "state.json"
state: 'state.json'
},
version: require("../package.json").version,
version: require('../package.json').version,
platform,
pkg_installed_file: ".ppman-installed" //Used as indication for if a package was installed
}
pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed
};

View file

@@ -1,34 +1,33 @@
const fs = require("fs"),
path= require("path"),
util = require("util"),
fetch = require("node-fetch"),
urlp = require("url")
const fs = require('fs/promises'),
path= require('path'),
fetch = require('node-fetch'),
urlp = require('url');
module.exports = {
async buffer_from_u_r_l(url){
if(!(url instanceof URL))
url = new URL(url)
if(url.protocol == "file:"){
url = new URL(url);
if(url.protocol == 'file:'){
//eslint-disable-next-line snakecasejs/snakecasejs
return await util.promisify(fs.read_file)(urlp.fileURLToPath(url))
return await fs.read_file(urlp.fileURLToPath(url));
}else{
return await fetch({
url: url.toString()
})
});
}
},
add_url_base_if_required(url, base){
try{
return new URL(url)
return new URL(url);
}catch{
//Assume this is a file name
return new URL(url, base + "/")
return new URL(url, base + '/');
}
},
url_basename(url){
return path.basename(url.pathname)
return path.basename(url.pathname);
},
}
};

View file

@@ -1,106 +1,106 @@
#!/usr/bin/env node
require("nocamel")
const Logger = require("logplease")
const express = require("express")
const globals = require("./globals")
const config = require("./config")
const cache = require("./cache")
const state = require("./state")
const path = require("path")
const fs = require("fs")
const util = require("util")
const body_parser = require("body-parser")
const runtime = require("./runtime")
require('nocamel');
const Logger = require('logplease');
const express = require('express');
const globals = require('./globals');
const config = require('./config');
const cache = require('./cache');
const state = require('./state');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const body_parser = require('body-parser');
const runtime = require('./runtime');
const logger = Logger.create("index")
const logger = Logger.create('index');
const app = express();
(async () => {
logger.info("Setting loglevel to",config.log_level)
Logger.setLogLevel(config.log_level) //eslint-disable-line snakecasejs/snakecasejs
logger.info('Setting loglevel to',config.log_level);
Logger.setLogLevel(config.log_level); //eslint-disable-line snakecasejs/snakecasejs
logger.debug("Ensuring data directories exist")
logger.debug('Ensuring data directories exist');
Object.values(globals.data_directories).forEach(dir => {
var data_path = path.join(config.data_directory, dir)
logger.debug(`Ensuring ${data_path} exists`)
if(!fs.exists_sync(data_path)){
logger.info(`${data_path} does not exist.. Creating..`)
var data_path = path.join(config.data_directory, dir);
logger.debug(`Ensuring ${data_path} exists`);
if(!fss.exists_sync(data_path)){
logger.info(`${data_path} does not exist.. Creating..`);
try{
fs.mkdir_sync(data_path)
fss.mkdir_sync(data_path);
}catch(err){
logger.error(`Failed to create ${data_path}: `, err.message)
logger.error(`Failed to create ${data_path}: `, err.message);
}
}
})
});
logger.info("Loading state")
await state.load(path.join(config.data_directory,globals.data_files.state))
logger.info('Loading state');
await state.load(path.join(config.data_directory,globals.data_files.state));
logger.info("Loading cache")
await cache.load(path.join(config.data_directory,globals.data_directories.cache))
logger.info('Loading cache');
await cache.load(path.join(config.data_directory,globals.data_directories.cache));
logger.info("Loading packages")
const pkgdir = path.join(config.data_directory,globals.data_directories.packages)
await util.promisify(fs.readdir)(pkgdir)
logger.info('Loading packages');
const pkgdir = path.join(config.data_directory,globals.data_directories.packages);
await fs.readdir(pkgdir)
.then(langs => Promise.all(
langs.map(lang=>
util.promisify(fs.readdir)(path.join(pkgdir,lang))
fs.readdir(path.join(pkgdir,lang))
.then(x=>x.map(y=>path.join(pkgdir, lang, y)))
)))
//eslint-disable-next-line snakecasejs/snakecasejs
.then(pkgs=>pkgs.flat().filter(pkg=>fs.existsSync(path.join(pkg, globals.pkg_installed_file))))
.then(pkgs=>pkgs.forEach(pkg => new runtime.Runtime(pkg)))
.then(pkgs=>pkgs.forEach(pkg => new runtime.Runtime(pkg)));
logger.info("Starting API Server")
logger.info('Starting API Server');
logger.debug("Constructing Express App")
logger.debug('Constructing Express App');
logger.debug("Registering middleware")
logger.debug('Registering middleware');
app.use(body_parser.urlencoded({extended: true}))
app.use(body_parser.json())
app.use(body_parser.urlencoded({extended: true}));
app.use(body_parser.json());
logger.debug("Registering custom message wrappers")
logger.debug('Registering custom message wrappers');
express.response.json_error = function(message, code) {
this.status(code)
return this.json({success: false, message, code})
}
this.status(code);
return this.json({success: false, message, code});
};
express.response.json_success = function(obj) {
return this.json({success: true, data: obj})
}
return this.json({success: true, data: obj});
};
logger.debug("Registering Routes")
logger.debug('Registering Routes');
const ppman_routes = require("./ppman/routes")
const ppman_routes = require('./ppman/routes');
app.get ("/repos", ppman_routes.repo_list)
app.post ("/repos", ppman_routes.repo_add)
app.get ("/repos/:repo_slug", ppman_routes.repo_info)
app.get ("/repos/:repo_slug/packages", ppman_routes.repo_packages)
app.get ("/repos/:repo_slug/packages/:language/:version", ppman_routes.package_info)
app.post ("/repos/:repo_slug/packages/:language/:version", ppman_routes.package_install)
app.delete("/repos/:repo_slug/packages/:language/:version", ppman_routes.package_uninstall) //TODO
app.get ('/repos', ppman_routes.repo_list);
app.post ('/repos', ppman_routes.repo_add);
app.get ('/repos/:repo_slug', ppman_routes.repo_info);
app.get ('/repos/:repo_slug/packages', ppman_routes.repo_packages);
app.get ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info);
app.post ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_install);
app.delete('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_uninstall); //TODO
const executor_routes = require('./executor/routes')
app.post ("/jobs", executor_routes.run_job)
const executor_routes = require('./executor/routes');
app.post ('/jobs', executor_routes.run_job);
logger.debug("Calling app.listen")
const [address,port] = config.bind_address.split(":")
logger.debug('Calling app.listen');
const [address,port] = config.bind_address.split(':');
app.listen(port, address, ()=>{
logger.info("API server started on", config.bind_address)
})
logger.info('API server started on', config.bind_address);
});
logger.debug("Setting up flush timers")
setInterval(cache.flush,config.cache_flush_time,path.join(config.data_directory,globals.data_directories.cache))
setInterval(state.save,config.state_flush_time,path.join(config.data_directory,globals.data_files.state))
})()
logger.debug('Setting up flush timers');
setInterval(cache.flush,config.cache_flush_time,path.join(config.data_directory,globals.data_directories.cache));
setInterval(state.save,config.state_flush_time,path.join(config.data_directory,globals.data_files.state));
})();

View file

@@ -1,170 +1,170 @@
const logger = require("logplease").create("ppman/package")
const semver = require("semver")
const config = require("../config")
const globals = require("../globals")
const helpers = require("../helpers")
const path = require("path")
const fs = require("fs")
const util = require("util")
const cp = require("child_process")
const crypto = require("crypto")
const runtime = require("../runtime")
const logger = require('logplease').create('ppman/package');
const semver = require('semver');
const config = require('../config');
const globals = require('../globals');
const helpers = require('../helpers');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const cp = require('child_process');
const crypto = require('crypto');
const runtime = require('../runtime');
class Package {
constructor(repo, {author, language, version, checksums, dependencies, size, buildfile, download, signature}){
this.author = author
this.language = language
this.version = semver.parse(version)
this.checksums = checksums
this.dependencies = dependencies
this.size = size
this.buildfile = buildfile
this.download = download
this.signature = signature
this.author = author;
this.language = language;
this.version = semver.parse(version);
this.checksums = checksums;
this.dependencies = dependencies;
this.size = size;
this.buildfile = buildfile;
this.download = download;
this.signature = signature;
this.repo = repo
this.repo = repo;
}
get installed(){
return fs.exists_sync(path.join(this.install_path, globals.pkg_installed_file))
return fss.exists_sync(path.join(this.install_path, globals.pkg_installed_file));
}
get download_url(){
return helpers.add_url_base_if_required(this.download, this.repo.base_u_r_l)
return helpers.add_url_base_if_required(this.download, this.repo.base_u_r_l);
}
get install_path(){
return path.join(config.data_directory,
globals.data_directories.packages,
this.language,
this.version.raw)
this.version.raw);
}
async install(){
if(this.installed) throw new Error("Already installed")
logger.info(`Installing ${this.language}-${this.version.raw}`)
if(this.installed) throw new Error('Already installed');
logger.info(`Installing ${this.language}-${this.version.raw}`);
if(fs.exists_sync(this.install_path)){
logger.warn(`${this.language}-${this.version.raw} has residual files. Removing them.`)
await util.promisify(fs.rm)(this.install_path, {recursive: true, force: true})
if(fss.exists_sync(this.install_path)){
logger.warn(`${this.language}-${this.version.raw} has residual files. Removing them.`);
await fs.rm(this.install_path, {recursive: true, force: true});
}
logger.debug(`Making directory ${this.install_path}`)
await util.promisify(fs.mkdir)(this.install_path, {recursive: true})
logger.debug(`Making directory ${this.install_path}`);
await fs.mkdir(this.install_path, {recursive: true});
logger.debug(`Downloading package from ${this.download_url} in to ${this.install_path}`)
const pkgfile = helpers.url_basename(this.download_url)
const pkgpath = path.join(this.install_path, pkgfile)
logger.debug(`Downloading package from ${this.download_url} in to ${this.install_path}`);
const pkgfile = helpers.url_basename(this.download_url);
const pkgpath = path.join(this.install_path, pkgfile);
await helpers.buffer_from_u_r_l(this.download_url)
.then(buf=> util.promisify(fs.write_file)(pkgpath, buf))
.then(buf=> fs.write_file(pkgpath, buf));
logger.debug("Validating checksums")
logger.debug('Validating checksums');
Object.keys(this.checksums).forEach(algo => {
var val = this.checksums[algo]
logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`)
var val = this.checksums[algo];
logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`);
var cs = crypto.create_hash(algo)
.update(fs.read_file_sync(pkgpath))
.digest("hex")
if(cs != val) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`)
})
.update(fss.read_file_sync(pkgpath))
.digest('hex');
if(cs != val) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`);
});
await this.repo.importKeys()
await this.repo.import_keys();
logger.debug("Validating signatutes")
logger.debug('Validating signatutes');
await new Promise((resolve,reject)=>{
const gpgspawn = cp.spawn("gpg", ["--verify", "-", pkgpath], {
stdio: ["pipe", "ignore", "ignore"]
})
const gpgspawn = cp.spawn('gpg', ['--verify', '-', pkgpath], {
stdio: ['pipe', 'ignore', 'ignore']
});
gpgspawn.once("exit", (code, _) => {
if(code == 0) resolve()
else reject(new Error("Invalid signature"))
})
gpgspawn.once('exit', (code, _) => {
if(code == 0) resolve();
else reject(new Error('Invalid signature'));
});
gpgspawn.once("error", reject)
gpgspawn.once('error', reject);
gpgspawn.stdin.write(this.signature)
gpgspawn.stdin.end()
gpgspawn.stdin.write(this.signature);
gpgspawn.stdin.end();
})
});
logger.debug(`Extracting package files from archive ${pkgfile} in to ${this.install_path}`)
logger.debug(`Extracting package files from archive ${pkgfile} in to ${this.install_path}`);
await new Promise((resolve, reject)=>{
const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`)
proc.once("exit", (code,_)=>{
if(code == 0) resolve()
else reject(new Error("Failed to extract package"))
})
proc.stdout.pipe(process.stdout)
proc.stderr.pipe(process.stderr)
const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`);
proc.once('exit', (code,_)=>{
if(code == 0) resolve();
else reject(new Error('Failed to extract package'));
});
proc.stdout.pipe(process.stdout);
proc.stderr.pipe(process.stderr);
proc.once("error", reject)
})
proc.once('error', reject);
});
logger.debug("Ensuring binary files exist for package")
const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`)
logger.debug('Ensuring binary files exist for package');
const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`);
try{
const pkgbinstat = await util.promisify(fs.stat)(pkgbin)
const pkgbinstat = await fs.stat(pkgbin);
//eslint-disable-next-line snakecasejs/snakecasejs
if(!pkgbinstat.isDirectory()) throw new Error()
if(!pkgbinstat.isDirectory()) throw new Error();
}catch(err){
throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`)
throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`);
}
logger.debug("Symlinking into runtimes")
await util.promisify(fs.symlink)(
logger.debug('Symlinking into runtimes');
await fs.symlink(
pkgbin,
path.join(config.data_directory,
globals.data_directories.runtimes,
`${this.language}-${this.version.raw}`)
).catch((err)=>err) //catch
).catch((err)=>err); //catch
logger.debug("Registering runtime")
const pkgruntime = new runtime.Runtime(this.install_path)
logger.debug('Registering runtime');
const pkgruntime = new runtime.Runtime(this.install_path);
logger.debug("Caching environment")
const required_pkgs = [pkgruntime, ...pkgruntime.get_all_dependencies()]
logger.debug('Caching environment');
const required_pkgs = [pkgruntime, ...pkgruntime.get_all_dependencies()];
const get_env_command = [...required_pkgs.map(p=>`cd "${p.runtime_dir}"; source environment; `),
"env" ].join(" ")
'env' ].join(' ');
const envout = await new Promise((resolve, reject)=>{
var stdout = ""
const proc = cp.spawn("env",["-i","bash","-c",`${get_env_command}`], {
stdio: ["ignore", "pipe", "pipe"]})
proc.once("exit", (code,_)=>{
if(code == 0) resolve(stdout)
else reject(new Error("Failed to cache environment"))
})
var stdout = '';
const proc = cp.spawn('env',['-i','bash','-c',`${get_env_command}`], {
stdio: ['ignore', 'pipe', 'pipe']});
proc.once('exit', (code,_)=>{
if(code == 0) resolve(stdout);
else reject(new Error('Failed to cache environment'));
});
proc.stdout.on("data", (data)=>{
stdout += data
})
proc.stdout.on('data', (data)=>{
stdout += data;
});
proc.once("error", reject)
})
proc.once('error', reject);
});
const filtered_env = envout.split("\n")
.filter(l=>!["PWD","OLDPWD","_", "SHLVL"].includes(l.split("=",2)[0]))
.join("\n")
const filtered_env = envout.split('\n')
.filter(l=>!['PWD','OLDPWD','_', 'SHLVL'].includes(l.split('=',2)[0]))
.join('\n');
await util.promisify(fs.write_file)(path.join(this.install_path, ".env"), filtered_env)
await fs.write_file(path.join(this.install_path, '.env'), filtered_env);
logger.debug("Writing installed state to disk")
await util.promisify(fs.write_file)(path.join(this.install_path, globals.pkg_installed_file), Date.now().toString())
logger.debug('Writing installed state to disk');
await fs.write_file(path.join(this.install_path, globals.pkg_installed_file), Date.now().toString());
logger.info(`Installed ${this.language}-${this.version.raw}`)
logger.info(`Installed ${this.language}-${this.version.raw}`);
return {
language: this.language,
version: this.version.raw
}
};
}
}
module.exports = {Package}
module.exports = {Package};

View file

@ -1,66 +1,66 @@
const logger = require("logplease").create("ppman/repo")
const cache = require("../cache")
const CACHE_CONTEXT = "repo"
const logger = require('logplease').create('ppman/repo');
const cache = require('../cache');
const CACHE_CONTEXT = 'repo';
const cp = require("child_process")
const yaml = require("js-yaml")
const { Package } = require("./package")
const helpers = require("../helpers")
const cp = require('child_process');
const yaml = require('js-yaml');
const { Package } = require('./package');
const helpers = require('../helpers');
class Repository {
constructor(slug, url){
this.slug = slug
this.url = new URL(url)
this.keys = []
this.packages = []
this.base_u_r_l=""
logger.debug(`Created repo slug=${this.slug} url=${this.url}`)
this.slug = slug;
this.url = new URL(url);
this.keys = [];
this.packages = [];
this.base_u_r_l='';
logger.debug(`Created repo slug=${this.slug} url=${this.url}`);
}
get cache_key(){
return cache.cache_key(CACHE_CONTEXT, this.slug)
return cache.cache_key(CACHE_CONTEXT, this.slug);
}
async load(){
try{
var index = await cache.get(this.cache_key,async ()=>{
return helpers.buffer_from_u_r_l(this.url)
})
return helpers.buffer_from_u_r_l(this.url);
});
var repo = yaml.load(index)
if(repo.schema != "ppman-repo-1"){
throw new Error("YAML Schema unknown")
var repo = yaml.load(index);
if(repo.schema != 'ppman-repo-1'){
throw new Error('YAML Schema unknown');
}
this.keys = repo.keys
this.packages = repo.packages.map(pkg => new Package(this, pkg))
this.base_u_r_l = repo.baseurl
this.keys = repo.keys;
this.packages = repo.packages.map(pkg => new Package(this, pkg));
this.base_u_r_l = repo.baseurl;
}catch(err){
logger.error(`Failed to load repository ${this.slug}:`,err.message)
logger.error(`Failed to load repository ${this.slug}:`,err.message);
}
}
async importKeys(){
async import_keys(){
await this.load();
logger.info(`Importing keys for repo ${this.slug}`)
logger.info(`Importing keys for repo ${this.slug}`);
await new Promise((resolve,reject)=>{
const gpgspawn = cp.spawn("gpg", ['--receive-keys', this.keys], {
stdio: ["ignore", "ignore", "ignore"]
})
const gpgspawn = cp.spawn('gpg', ['--receive-keys', this.keys], {
stdio: ['ignore', 'ignore', 'ignore']
});
gpgspawn.once("exit", (code, _) => {
if(code == 0) resolve()
else reject(new Error("Failed to import keys"))
})
gpgspawn.once('exit', (code, _) => {
if(code == 0) resolve();
else reject(new Error('Failed to import keys'));
});
gpgspawn.once("error", reject)
gpgspawn.once('error', reject);
})
});
}
}
module.exports = {Repository}
module.exports = {Repository};

View file

@ -1,82 +1,82 @@
const repos = new Map()
const state = require("../state")
const logger = require("logplease").create("ppman/routes")
const {Repository} = require("./repo")
const semver = require("semver")
const repos = new Map();
const state = require('../state');
const logger = require('logplease').create('ppman/routes');
const {Repository} = require('./repo');
const semver = require('semver');
async function get_or_construct_repo(slug){
if(repos.has(slug))return repos.get(slug)
if(state.state.get("repositories").has(slug)){
const repo_url = state.state.get("repositories").get(slug)
const repo = new Repository(slug, repo_url)
await repo.load()
repos.set(slug, repo)
return repo
if(repos.has(slug))return repos.get(slug);
if(state.state.get('repositories').has(slug)){
const repo_url = state.state.get('repositories').get(slug);
const repo = new Repository(slug, repo_url);
await repo.load();
repos.set(slug, repo);
return repo;
}
logger.warn(`Requested repo ${slug} does not exist`)
return null
logger.warn(`Requested repo ${slug} does not exist`);
return null;
}
async function get_package(repo, lang, version){
var candidates = repo.packages.filter(
pkg => pkg.language == lang && semver.satisfies(pkg.version, version)
)
return candidates.sort((a,b)=>semver.rcompare(a.version,b.version))[0] || null
);
return candidates.sort((a,b)=>semver.rcompare(a.version,b.version))[0] || null;
}
module.exports = {
async repo_list(req,res){
// GET /repos
logger.debug("Request for repoList")
logger.debug('Request for repoList');
res.json_success({
repos: (await Promise.all(
[...state.state.get("repositories").keys()].map( async slug => await get_or_construct_repo(slug))
[...state.state.get('repositories').keys()].map( async slug => await get_or_construct_repo(slug))
)).map(repo=>({
slug: repo.slug,
url: repo.url,
packages: repo.packages.length
}))
})
});
},
async repo_add(req, res){
// POST /repos
logger.debug(`Request for repoAdd slug=${req.body.slug} url=${req.body.url}`)
logger.debug(`Request for repoAdd slug=${req.body.slug} url=${req.body.url}`);
if(!req.body.slug)
return res.json_error("slug is missing from request body", 400)
return res.json_error('slug is missing from request body', 400);
if(!req.body.url)
return res.json_error("url is missing from request body", 400)
return res.json_error('url is missing from request body', 400);
const repo_state = state.state.get("repositories")
const repo_state = state.state.get('repositories');
if(repo_state.has(req.body.slug)) return res.json_error(`repository ${req.body.slug} already exists`, 409)
if(repo_state.has(req.body.slug)) return res.json_error(`repository ${req.body.slug} already exists`, 409);
repo_state.set(req.body.slug, req.body.url)
logger.info(`Repository ${req.body.slug} added url=${req.body.url}`)
repo_state.set(req.body.slug, req.body.url);
logger.info(`Repository ${req.body.slug} added url=${req.body.url}`);
return res.json_success(req.body.slug)
return res.json_success(req.body.slug);
},
async repo_info(req, res){
// GET /repos/:slug
logger.debug(`Request for repoInfo for ${req.params.repo_slug}`)
const repo = await get_or_construct_repo(req.params.repo_slug)
logger.debug(`Request for repoInfo for ${req.params.repo_slug}`);
const repo = await get_or_construct_repo(req.params.repo_slug);
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404)
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404);
res.json_success({
slug: repo.slug,
url: repo.url,
packages: repo.packages.length
})
});
},
async repo_packages(req, res){
// GET /repos/:slug/packages
logger.debug("Request to repoPackages")
logger.debug('Request to repoPackages');
const repo = await get_or_construct_repo(req.params.repo_slug)
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404)
const repo = await get_or_construct_repo(req.params.repo_slug);
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404);
res.json_success({
packages: repo.packages.map(pkg=>({
@ -84,46 +84,46 @@ module.exports = {
language_version: pkg.version.raw,
installed: pkg.installed
}))
})
});
},
async package_info(req, res){
// GET /repos/:slug/packages/:language/:version
logger.debug("Request to packageInfo")
logger.debug('Request to packageInfo');
const repo = await get_or_construct_repo(req.params.repo_slug)
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404)
const repo = await get_or_construct_repo(req.params.repo_slug);
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404);
const package = await get_package(repo, req.params.language, req.params.version)
if(package == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404)
const pkg = await get_package(repo, req.params.language, req.params.version);
if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404);
res.json_success({
language: package.language,
language_version: package.version.raw,
author: package.author,
buildfile: package.buildfile,
size: package.size,
dependencies: package.dependencies,
installed: package.installed
})
language: pkg.language,
language_version: pkg.version.raw,
author: pkg.author,
buildfile: pkg.buildfile,
size: pkg.size,
dependencies: pkg.dependencies,
installed: pkg.installed
});
},
async package_install(req,res){
// POST /repos/:slug/packages/:language/:version
logger.debug("Request to packageInstall")
logger.debug('Request to packageInstall');
const repo = await get_or_construct_repo(req.params.repo_slug)
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404)
const repo = await get_or_construct_repo(req.params.repo_slug);
if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404);
const package = await get_package(repo, req.params.language, req.params.version)
if(package == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404)
const pkg = await get_package(repo, req.params.language, req.params.version);
if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404);
try{
const response = await package.install()
return res.json_success(response)
const response = await pkg.install();
return res.json_success(response);
}catch(err){
logger.error(`Error while installing package ${package.language}-${package.version}:`, err.message)
res.json_error(err.message,500)
logger.error(`Error while installing package ${pkg.language}-${pkg.version}:`, err.message);
res.json_error(err.message,500);
}
@ -131,6 +131,6 @@ module.exports = {
async package_uninstall(req,res){
// DELETE /repos/:slug/packages/:language/:version
res.json(req.body) //TODO
res.json(req.body); //TODO
}
}
};

View file

@ -1,85 +1,85 @@
const logger = require("logplease").create("runtime")
const semver = require("semver")
const config = require("./config")
const globals = require("./globals")
const fs = require("fs")
const path = require("path")
const logger = require('logplease').create('runtime');
const semver = require('semver');
const config = require('./config');
const globals = require('./globals');
const fss = require('fs');
const path = require('path');
const runtimes = []
const runtimes = [];
class Runtime {
#env_vars
#compiled
constructor(package_dir){
const {language, version, author, dependencies, build_platform} = JSON.parse(
fs.read_file_sync(path.join(package_dir, "pkg-info.json"))
)
fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
);
this.pkgdir = package_dir
this.language = language
this.version = semver.parse(version)
this.author = author
this.dependencies = dependencies
this.pkgdir = package_dir;
this.language = language;
this.version = semver.parse(version);
this.author = author;
this.dependencies = dependencies;
if(build_platform != globals.platform){
logger.warn(`Package ${language}-${version} was built for platform ${build_platform}, but our platform is ${globals.platform}`)
logger.warn(`Package ${language}-${version} was built for platform ${build_platform}, but our platform is ${globals.platform}`);
}
logger.debug(`Package ${language}-${version} was loaded`)
runtimes.push(this)
logger.debug(`Package ${language}-${version} was loaded`);
runtimes.push(this);
}
get env_file_path(){
return path.join(this.runtime_dir, "environment")
return path.join(this.runtime_dir, 'environment');
}
get runtime_dir(){
return path.join(config.data_directory,globals.data_directories.runtimes, this.toString())
return path.join(config.data_directory,globals.data_directories.runtimes, this.toString());
}
get_all_dependencies(){
const res = []
const res = [];
Object.keys(this.dependencies).forEach(dep => {
const selector = this.dependencies[dep]
const lang = module.exports.get_latest_runtime_matching_language_version(dep, selector)
res.push(lang)
res.concat(lang.get_all_dependencies(lang))
})
return res
const selector = this.dependencies[dep];
const lang = module.exports.get_latest_runtime_matching_language_version(dep, selector);
res.push(lang);
res.concat(lang.get_all_dependencies(lang));
});
return res;
}
get compile(){
if(this.#compiled === undefined) this.#compiled = fs.existsSync(path.join(this.pkgdir, "compile"))
return this.#compiled
if(this.#compiled === undefined) this.#compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
return this.#compiled;
}
get env_vars(){
if(!this.#env_vars){
const env_file = path.join(this.pkgdir, ".env")
const env_content = fs.read_file_sync(env_file).toString()
this.#env_vars = {}
const env_file = path.join(this.pkgdir, '.env');
const env_content = fss.read_file_sync(env_file).toString();
this.#env_vars = {};
env_content
.split("\n")
.map(line => line.split("=",2))
.split('\n')
.map(line => line.split('=',2))
.forEach(([key,val]) => {
this.#env_vars[key] = val
})
this.#env_vars[key] = val;
});
}
return this.#env_vars
return this.#env_vars;
}
toString(){
return `${this.language}-${this.version.raw}`
return `${this.language}-${this.version.raw}`;
}
}
module.exports = runtimes
module.exports.Runtime = Runtime
module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.get_runtimes_matching_language_version = function(lang, ver){
return runtimes.filter(rt => rt.language == lang && semver.satisfies(rt.version, ver))
}
return runtimes.filter(rt => rt.language == lang && semver.satisfies(rt.version, ver));
};
module.exports.get_latest_runtime_matching_language_version = function(lang, ver){
return module.exports.get_runtimes_matching_language_version(lang, ver)
.sort((a,b) => semver.rcompare(a.version, b.version))[0]
}
.sort((a,b) => semver.rcompare(a.version, b.version))[0];
};

View file

@ -1,45 +1,45 @@
const fs = require("fs")
const util = require("util")
const fs = require('fs/promises');
const fss = require('fs');
const logger = require("logplease").create("state")
const state = new Map()
const logger = require('logplease').create('state');
const state = new Map();
function replacer(key, value) {
if(value instanceof Map) {
return {
data_type: "Map",
data_type: 'Map',
value: Array.from(value.entries()),
}
};
} else {
return value
return value;
}
}
function reviver(key, value) {
if(typeof value === "object" && value !== null) {
if (value.data_type === "Map") {
return new Map(value.value)
if(typeof value === 'object' && value !== null) {
if (value.data_type === 'Map') {
return new Map(value.value);
}
}
return value
return value;
}
module.exports = {
state,
async load(data_file){
if(fs.exists_sync(data_file)){
logger.info("Loading state from file")
var content = await util.promisify(fs.read_file)(data_file)
if(fss.exists_sync(data_file)){
logger.info('Loading state from file');
var content = await fs.read_file(data_file);
var obj = JSON.parse(content.toString(), reviver);
[...obj.keys()].forEach(k => state.set(k, obj.get(k)))
[...obj.keys()].forEach(k => state.set(k, obj.get(k)));
}else{
logger.info("Creating new statefile")
state.set("repositories", new Map().set("offical", "https://repo.pistonee.org/index.yaml"))
logger.info('Creating new statefile');
state.set('repositories', new Map().set('offical', 'https://repo.pistonee.org/index.yaml'));
}
},
async save(data_file){
logger.info("Saving state to disk")
await util.promisify(fs.write_file)(data_file, JSON.stringify(state, replacer))
logger.info('Saving state to disk');
await fs.write_file(data_file, JSON.stringify(state, replacer));
}
}
};