api: code cleanup

This commit is contained in:
Thomas Hobson 2021-03-05 19:29:09 +13:00
parent a91f3af1fb
commit af1741becc
No known key found for this signature in database
GPG Key ID: 9F1FD9D87950DB6F
7 changed files with 195 additions and 95 deletions

View File

@ -1,7 +1,6 @@
const globals = require('./globals');
const logger = require('logplease').create('cache');
const fs = require('fs/promises'),
fss = require('fs'),
path = require('path');
const cache = new Map();
@ -13,43 +12,54 @@ module.exports = {
},
async get(key, callback, ttl=globals.cache_ttl){
logger.debug('get:', key);
if(module.exports.has(key)){
logger.debug('hit:',key);
return cache.get(key).data;
}
logger.debug('miss:', key);
var data = await callback();
cache.set(key, {data, expiry: Date.now() + ttl});
return data;
},
async flush(cache_dir){
logger.info('Flushing cache');
cache.forEach((v,k)=>{
var file_path = path.join(cache_dir, k);
if(v.expiry < Date.now()){
//remove from cache
cache.delete(k);
fs.stat(file_path, (err, stats)=>{
if(err) return; //ignore - probably hasn't been flushed yet
async function flush_single(value, key){
const file_path = path.join(cache_dir, key);
if(value.expiry < Date.now()){
cache.delete(key);
try {
const stats = await fs.stat(file_path);
if(stats.is_file())
fs.rm(file_path, (err)=>{
if(err) logger.warn(`Couldn't clean up on-disk cache file ${k}`);
});
});
}else{
//flush to disk
fs.write_file(file_path, JSON.stringify(v),()=>{});
await fs.rm(file_path);
}catch{
// Ignore, file hasn't been flushed yet
}
});
}else{
await fs.write_file(file_path, JSON.stringify(value));
}
}
return Promise.all(
Array.from(cache).map(flush_single)
);
},
async load(cache_dir){
return fs.readdir(cache_dir)
.then(files => Promise.all(files.map(
async file => {
cache.set(file, JSON.parse(fss.read_file_sync(path.join(cache_dir,file)).toString()));
const files = await fs.readdir(cache_dir);
// Read one cache file from disk and insert its parsed contents into the
// in-memory cache map, keyed by file name.
async function load_single(file_name){
    const file_path = path.join(cache_dir, file_name);
    // Await the read BEFORE calling .toString(): calling it on the pending
    // promise would stringify the Promise object ("[object Promise]"),
    // not the file contents.
    const file_content = (await fs.read_file(file_path)).toString();
    cache.set(file_name, JSON.parse(file_content));
}
)));
return Promise.all(files.map(load_single));
}
};

View File

@ -122,61 +122,70 @@ const options = [
}
];
const default_config = [
...header.split('\n'),
...options.map(option => `
${[
...option.desc.split('\n'),
option.options?('Options: ' + option.options.join(', ')):''
].filter(x=>x.length>0).map(x=>`# ${x}`).join('\n')}
${option.key}: ${option.default}
`)].join('\n');
// Build the default YAML configuration file contents from the module-level
// `header` text and `options` definitions. Returns a single string with one
// commented description block, the key/default line, and a blank separator
// line per option.
function make_default_config(){
    let content = header.split('\n');
    options.forEach(option => {
        // Array.prototype.concat returns a NEW array (it does not mutate the
        // receiver), and Array has no append() method — both original calls
        // were no-ops/TypeErrors. push() accumulates the lines in place.
        content.push(...option.desc.split('\n').map(x=>`# ${x}`));
        if(option.options)
            content.push('# Options: ' + option.options.join(', '));
        content.push(`${option.key}: ${option.default}`);
        content.push(''); // New line between
    });
    return content.join('\n');
}
logger.info(`Loading Configuration from ${argv.config}`);
!!argv['make-config'] && logger.debug('Make configuration flag is set');
if(argv['make-config'])
logger.debug('Make configuration flag is set');
if(!!argv['make-config'] && !fss.exists_sync(argv.config)){
logger.info('Writing default configuration...');
try {
fss.write_file_sync(argv.config, default_config);
fss.write_file_sync(argv.config, make_default_config());
} catch (err) {
logger.error('Error writing default configuration:', err.message);
process.exit(1);
}
}
var config = {};
logger.debug('Reading config file');
try{
const cfg_content = fss.read_file_sync(argv.config);
try{
config = yaml.load(cfg_content);
}catch(err){
logger.error('Error parsing configuration file:', err.message);
process.exit(1);
}
}catch(err){
logger.error('Error reading configuration from disk:', err.message);
logger.error('Error reading configuration file:', err.message);
process.exit(1);
}
logger.debug('Validating config entries');
var errored=false;
options.forEach(opt => {
logger.debug('Checking key',opt.key);
var cfg_val = config[opt.key];
options.forEach(option => {
logger.debug('Checking option', option.key);
var cfg_val = config[option.key];
if(cfg_val == undefined){
errored = true;
logger.error(`Config key ${opt.key} does not exist on currently loaded configuration`);
logger.error(`Config key ${option.key} does not exist on currently loaded configuration`);
return;
}
opt.validators.forEach(validator => {
option.validators.forEach(validator => {
var response = validator(cfg_val);
if(response !== true){
errored = true;
logger.error(`Config key ${opt.key} failed validation:`, response);
logger.error(`Config option ${option.key} failed validation:`, response);
return;
}
});

View File

@ -6,7 +6,6 @@ const config = require('../config');
const globals = require('../globals');
const fs = require('fs/promises');
const job_states = {
READY: Symbol('Ready to be primed'),
PRIMED: Symbol('Primed and ready for execution'),
@ -17,7 +16,7 @@ var uid=0;
var gid=0;
class Job {
constructor(runtime, files, args, stdin, timeouts, main){
constructor({runtime, files, args, stdin, timeouts, main}){
this.uuid = uuidv4();
this.runtime = runtime;
this.files = files;
@ -71,6 +70,7 @@ class Job {
async safe_call(file, args, timeout){
return await new Promise((resolve, reject) => {
const unshare = config.enable_unshare ? ['unshare','-n','-r'] : [];
const prlimit = [
'prlimit',
'--nproc=' + config.max_process_count,
@ -82,15 +82,17 @@ class Job {
...unshare,
'bash',file, ...args
];
var stdout = '';
var stderr = '';
const proc = cp.spawn(proc_call[0], proc_call.splice(1) ,{
env: this.runtime.env_vars,
stdio: 'pipe',
cwd: this.dir,
uid: this.uid,
gid: this.gid,
detached: true //dont kill the main process when we kill the group
detached: true //give this process its own process group
});
proc.stdin.write(this.stdin);
@ -109,7 +111,7 @@ class Job {
try{
process.kill(-proc.pid, 'SIGKILL');
}catch{
// Process will be dead alread, so nothing to kill.
// Process will be dead already, so nothing to kill.
}
}
@ -128,9 +130,13 @@ class Job {
}
async execute(){
if(this.state != job_states.PRIMED) throw new Error('Job must be in primed state, current state: ' + this.state.toString());
if(this.state != job_states.PRIMED)
throw new Error('Job must be in primed state, current state: ' + this.state.toString());
logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`);
logger.debug('Compiling');
var compile = undefined;
if(this.runtime.compiled)
compile = await this.safe_call(
@ -138,6 +144,7 @@ class Job {
this.files.map(x=>x.name),
this.timeouts.compile);
logger.debug('Running');
const run = await this.safe_call(

View File

@ -39,15 +39,23 @@ module.exports = {
const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version);
if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400);
const job = new Job(runtime, req.body.files, req.body.args, req.body.stdin, {run: req.body.run_timeout, compile: req.body.compile_timeout}, req.body.main);
const job = new Job({
runtime,
files: req.body.files,
args: req.body.args,
stdin: req.body.stdin,
timeouts: {
run: req.body.run_timeout,
compile: req.body.compile_timeout
},
main: req.body.main
});
await job.prime();
const result = await job.execute();
res.json_success(result);
await job.cleanup();
}
};

View File

@ -44,16 +44,17 @@ const app = express();
logger.info('Loading packages');
const pkgdir = path.join(config.data_directory,globals.data_directories.packages);
await fs.readdir(pkgdir)
.then(langs => Promise.all(
langs.map(lang=>
const pkglist = await fs.readdir(pkgdir);
const languages = await Promise.all(
pkglist.map(lang=>
fs.readdir(path.join(pkgdir,lang))
.then(x=>x.map(y=>path.join(pkgdir, lang, y)))
)))
.then(pkgs=>pkgs.flat().filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file))))
.then(pkgs=>pkgs.forEach(pkg => new runtime.Runtime(pkg)));
));
const installed_languages = languages.flat()
.filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file)));
installed_languages.forEach(pkg => new runtime.Runtime(pkg));
logger.info('Starting API Server');
@ -86,23 +87,69 @@ const app = express();
logger.debug('Registering Routes');
const ppman_routes = require('./ppman/routes');
app.get ('/repos', validate, ppman_routes.repo_list);
app.post ('/repos', ppman_routes.repo_add_validators, validate, ppman_routes.repo_add);
app.get ('/repos/:repo_slug', ppman_routes.repo_info_validators, validate, ppman_routes.repo_info);
app.get ('/repos/:repo_slug/packages', ppman_routes.repo_packages_validators, validate, ppman_routes.repo_packages);
app.get ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_info);
app.post ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_install);
app.delete('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_uninstall);
const executor_routes = require('./executor/routes');
app.post ('/jobs', executor_routes.run_job_validators, validate, executor_routes.run_job);
app.get ('/runtimes', (_, res) => res.json_success({runtimes: runtime.map(rt=>({
app.get('/repos',
validate,
ppman_routes.repo_list
);
app.post('/repos',
ppman_routes.repo_add_validators,
validate,
ppman_routes.repo_add
);
app.get('/repos/:repo_slug',
ppman_routes.repo_info_validators,
validate,
ppman_routes.repo_info
);
app.get('/repos/:repo_slug/packages',
ppman_routes.repo_packages_validators,
validate,
ppman_routes.repo_packages
);
app.get('/repos/:repo_slug/packages/:language/:version',
ppman_routes.package_info_validators,
validate,
ppman_routes.package_info
);
app.post('/repos/:repo_slug/packages/:language/:version',
ppman_routes.package_info_validators,
validate,
ppman_routes.package_install
);
app.delete('/repos/:repo_slug/packages/:language/:version',
ppman_routes.package_info_validators,
validate,
ppman_routes.package_uninstall
);
app.post('/jobs',
executor_routes.run_job_validators,
validate,
executor_routes.run_job);
// Express handler for GET /runtimes: respond with the installed runtimes as
// plain {language, version, author} objects.
function list_runtimes(_, res){
    // Parenthesize the object literal so the arrow body is an expression,
    // not a block.
    const runtimes = runtime.map(rt => ({
        language: rt.language,
        version: rt.version.raw,
        author: rt.author
    }));
    return res.json_success({
        runtimes
    });
}
app.get('/runtimes', list_runtimes);
logger.debug('Calling app.listen');
const [address,port] = config.bind_address.split(':');
@ -112,6 +159,16 @@ const app = express();
});
logger.debug('Setting up flush timers');
setInterval(cache.flush,config.cache_flush_time,path.join(config.data_directory,globals.data_directories.cache));
setInterval(state.save,config.state_flush_time,path.join(config.data_directory,globals.data_files.state));
setInterval(
cache.flush,
config.cache_flush_time,
path.join(config.data_directory,globals.data_directories.cache)
);
setInterval(
state.save,
config.state_flush_time,
path.join(config.data_directory,globals.data_files.state)
);
})();

View File

@ -63,7 +63,9 @@ class Package {
logger.debug('Validating checksums');
Object.keys(this.checksums).forEach(algo => {
var val = this.checksums[algo];
logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`);
var cs = crypto.create_hash(algo)
.update(fss.read_file_sync(pkgpath))
.digest('hex');
@ -72,7 +74,8 @@ class Package {
await this.repo.import_keys();
logger.debug('Validating signatutes');
logger.debug('Validating signatures');
if(this.signature != '')
await new Promise((resolve,reject)=>{
const gpgspawn = cp.spawn('gpg', ['--verify', '-', pkgpath], {
@ -94,6 +97,7 @@ class Package {
logger.warn('Package does not contain a signature - allowing install, but proceed with caution');
logger.debug(`Extracting package files from archive ${pkgfile} in to ${this.install_path}`);
await new Promise((resolve, reject)=>{
const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`);
proc.once('exit', (code,_)=>{
@ -109,30 +113,35 @@ class Package {
logger.debug('Ensuring binary files exist for package');
const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`);
try{
const pkgbinstat = await fs.stat(pkgbin);
const pkgbin_stat = await fs.stat(pkgbin);
//eslint-disable-next-line snakecasejs/snakecasejs
if(!pkgbinstat.isDirectory()) throw new Error();
if(!pkgbin_stat.isDirectory()) throw new Error();
// Throw a blank error here, so it will be caught by the following catch, and output the correct error message
// The catch is used to catch fs.stat
}catch(err){
throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`);
}
logger.debug('Symlinking into runtimes');
await fs.symlink(
pkgbin,
path.join(config.data_directory,
globals.data_directories.runtimes,
`${this.language}-${this.version.raw}`)
).catch((err)=>err); //catch
).catch((err)=>err); //Ignore if we fail - probably means its already been installed and not cleaned up right
logger.debug('Registering runtime');
const pkgruntime = new runtime.Runtime(this.install_path);
const pkg_runtime = new runtime.Runtime(this.install_path);
logger.debug('Caching environment');
const required_pkgs = [pkgruntime, ...pkgruntime.get_all_dependencies()];
const get_env_command = [...required_pkgs.map(p=>`cd "${p.runtime_dir}"; source environment; `),
'env' ].join(' ');
const required_pkgs = [pkg_runtime, ...pkg_runtime.get_all_dependencies()];
const get_env_command = [
...required_pkgs.map(pkg=>`cd "${pkg.runtime_dir}"; source environment; `),
'env'
].join(' ');
const envout = await new Promise((resolve, reject)=>{
var stdout = '';

View File

@ -30,12 +30,12 @@ module.exports = {
async load(data_file){
if(fss.exists_sync(data_file)){
logger.info('Loading state from file');
var content = await fs.read_file(data_file);
var obj = JSON.parse(content.toString(), reviver);
const content = await fs.read_file(data_file);
const obj = JSON.parse(content.toString(), reviver);
[...obj.keys()].forEach(k => state.set(k, obj.get(k)));
}else{
logger.info('Creating new state file');
state.set('repositories', new Map().set('offical', 'https://repo.pistonee.org/index.yaml'));
}
},
async save(data_file){