diff --git a/api-client/index.cjs b/api-client/index.cjs index c58f9b3..846b237 100644 --- a/api-client/index.cjs +++ b/api-client/index.cjs @@ -106,7 +106,7 @@ class PistonEngine extends APIWrapper { return this.get_child_object(`/repos/${slug}`, PistonEngineRepository) } - run_job({language, version, files, main, args, stdin, compile_timeout, run_timeout}){ + run_job(language, version, files, main, args, stdin, compile_timeout, run_timeout){ return this.post(`/jobs`, {language, version, files, main, args, stdin, compile_timeout, run_timeout}) } } diff --git a/api/src/cache.js b/api/src/cache.js index 43727ab..469623a 100644 --- a/api/src/cache.js +++ b/api/src/cache.js @@ -1,6 +1,7 @@ const globals = require('./globals'); const logger = require('logplease').create('cache'); const fs = require('fs/promises'), + fss = require('fs'), path = require('path'); const cache = new Map(); @@ -12,54 +13,43 @@ module.exports = { }, async get(key, callback, ttl=globals.cache_ttl){ logger.debug('get:', key); - if(module.exports.has(key)){ logger.debug('hit:',key); return cache.get(key).data; } - logger.debug('miss:', key); var data = await callback(); cache.set(key, {data, expiry: Date.now() + ttl}); - return data; }, async flush(cache_dir){ logger.info('Flushing cache'); - - async function flush_single(value, key){ - const file_path = path.join(cache_dir, key); - - if(value.expiry < Date.now()){ - cache.delete(key); - try { - const stats = await fs.stat(file_path); + cache.forEach((v,k)=>{ + var file_path = path.join(cache_dir, k); + if(v.expiry < Date.now()){ + //remove from cache + cache.delete(k); + fs.stat(file_path, (err, stats)=>{ + if(err) return; //ignore - probably hasn't been flushed yet if(stats.is_file()) - await fs.rm(file_path); - }catch{ - // Ignore, file hasn't been flushed yet - } + fs.rm(file_path, (err)=>{ + if(err) logger.warn(`Couldn't clean up on-disk cache file ${k}`); + }); + }); }else{ - await fs.write_file(file_path, JSON.stringify(value)); + //flush 
to disk + fs.write_file(file_path, JSON.stringify(v),()=>{}); } - - } - - return Promise.all( - Array.from(cache).map(flush_single) - ); + }); }, async load(cache_dir){ - const files = await fs.readdir(cache_dir); - - async function load_single(file_name){ - const file_path = path.join(cache_dir,file_name); - const file_content = await fs.read_file(file_path).toString(); - cache.set(file_name, JSON.parse(file_content)); - } - - return Promise.all(files.map(load_single)); + return fs.readdir(cache_dir) + .then(files => Promise.all(files.map( + async file => { + cache.set(file, JSON.parse(fss.read_file_sync(path.join(cache_dir,file)).toString())); + } + ))); } }; \ No newline at end of file diff --git a/api/src/config.js b/api/src/config.js index 27cb1e9..ffe6391 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -122,70 +122,61 @@ const options = [ } ]; -function make_default_config(){ - let content = header.split('\n'); - - options.forEach(option => { - content.concat(option.desc.split('\n').map(x=>`# ${x}`)); - - if(option.options) - content.append('# Options: ' + option.options.join(', ')); - - content.append(`${option.key}: ${option.default}`); - - content.append(''); // New line between - }); - - return content.join('\n'); -} +const default_config = [ + ...header.split('\n'), + ...options.map(option => ` +${[ + ...option.desc.split('\n'), + option.options?('Options: ' + option.options.join(', ')):'' + ].filter(x=>x.length>0).map(x=>`# ${x}`).join('\n')} +${option.key}: ${option.default} + `)].join('\n'); logger.info(`Loading Configuration from ${argv.config}`); - -if(argv['make-config']) - logger.debug('Make configuration flag is set'); +!!argv['make-config'] && logger.debug('Make configuration flag is set'); if(!!argv['make-config'] && !fss.exists_sync(argv.config)){ logger.info('Writing default configuration...'); try { - fss.write_file_sync(argv.config, make_default_config()); + fss.write_file_sync(argv.config, default_config); } catch (err) { 
logger.error('Error writing default configuration:', err.message); process.exit(1); } + + } - var config = {}; - logger.debug('Reading config file'); - try{ const cfg_content = fss.read_file_sync(argv.config); - config = yaml.load(cfg_content); + try{ + config = yaml.load(cfg_content); + }catch(err){ + logger.error('Error parsing configuration file:', err.message); + process.exit(1); + } + }catch(err){ - logger.error('Error reading configuration file:', err.message); + logger.error('Error reading configuration from disk:', err.message); process.exit(1); } logger.debug('Validating config entries'); - var errored=false; - -options.forEach(option => { - logger.debug('Checking option', option.key); - - var cfg_val = config[option.key]; - +options.forEach(opt => { + logger.debug('Checking key',opt.key); + var cfg_val = config[opt.key]; if(cfg_val == undefined){ errored = true; - logger.error(`Config key ${option.key} does not exist on currently loaded configuration`); + logger.error(`Config key ${opt.key} does not exist on currently loaded configuration`); return; } - - option.validators.forEach(validator => { + opt.validators.forEach(validator => { var response = validator(cfg_val); if(response !== true){ errored = true; - logger.error(`Config option ${option.key} failed validation:`, response); + logger.error(`Config key ${opt.key} failed validation:`, response); return; } }); diff --git a/api/src/executor/job.js b/api/src/executor/job.js index 046cd53..1b2aaaa 100644 --- a/api/src/executor/job.js +++ b/api/src/executor/job.js @@ -6,6 +6,7 @@ const config = require('../config'); const globals = require('../globals'); const fs = require('fs/promises'); + const job_states = { READY: Symbol('Ready to be primed'), PRIMED: Symbol('Primed and ready for execution'), @@ -16,7 +17,7 @@ var uid=0; var gid=0; class Job { - constructor({runtime, files, args, stdin, timeouts, main}){ + constructor(runtime, files, args, stdin, timeouts, main){ this.uuid = uuidv4(); this.runtime = 
runtime; this.files = files; @@ -70,7 +71,6 @@ class Job { async safe_call(file, args, timeout){ return await new Promise((resolve, reject) => { const unshare = config.enable_unshare ? ['unshare','-n','-r'] : []; - const prlimit = [ 'prlimit', '--nproc=' + config.max_process_count, @@ -82,17 +82,15 @@ class Job { ...unshare, 'bash',file, ...args ]; - var stdout = ''; var stderr = ''; - const proc = cp.spawn(proc_call[0], proc_call.splice(1) ,{ env: this.runtime.env_vars, stdio: 'pipe', cwd: this.dir, uid: this.uid, gid: this.gid, - detached: true //give this process its own process group + detached: true //dont kill the main process when we kill the group }); proc.stdin.write(this.stdin); @@ -111,7 +109,7 @@ class Job { try{ process.kill(-proc.pid, 'SIGKILL'); }catch{ - // Process will be dead already, so nothing to kill. + // Process will be dead alread, so nothing to kill. } } @@ -130,20 +128,15 @@ class Job { } async execute(){ - if(this.state != job_states.PRIMED) - throw new Error('Job must be in primed state, current state: ' + this.state.toString()); - + if(this.state != job_states.PRIMED) throw new Error('Job must be in primed state, current state: ' + this.state.toString()); logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`); - logger.debug('Compiling'); - var compile = undefined; if(this.runtime.compiled) compile = await this.safe_call( path.join(this.runtime.pkgdir, 'compile'), this.files.map(x=>x.name), this.timeouts.compile); - logger.debug('Running'); diff --git a/api/src/executor/routes.js b/api/src/executor/routes.js index fe09d63..7d65b6b 100644 --- a/api/src/executor/routes.js +++ b/api/src/executor/routes.js @@ -11,7 +11,7 @@ module.exports = { .isString(), // eslint-disable-line snakecasejs/snakecasejs body('version') .isString(), // eslint-disable-line snakecasejs/snakecasejs - // isSemVer requires it to be a version, not a selector + // isSemVer requires it to be a version, not a 
selector body('files') .isArray(), // eslint-disable-line snakecasejs/snakecasejs body('files.*.name') @@ -39,23 +39,15 @@ module.exports = { const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version); if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400); - const job = new Job({ - runtime, - files: req.body.files, - args: req.body.args, - stdin: req.body.stdin, - timeouts: { - run: req.body.run_timeout, - compile: req.body.compile_timeout - }, - main: req.body.main - }); - + const job = new Job(runtime, req.body.files, req.body.args, req.body.stdin, {run: req.body.run_timeout, compile: req.body.compile_timeout}, req.body.main); await job.prime(); const result = await job.execute(); res.json_success(result); await job.cleanup(); + + + } }; \ No newline at end of file diff --git a/api/src/index.js b/api/src/index.js index 4d5e76c..94381c2 100644 --- a/api/src/index.js +++ b/api/src/index.js @@ -44,17 +44,16 @@ const app = express(); logger.info('Loading packages'); const pkgdir = path.join(config.data_directory,globals.data_directories.packages); + await fs.readdir(pkgdir) + .then(langs => Promise.all( + langs.map(lang=> + fs.readdir(path.join(pkgdir,lang)) + .then(x=>x.map(y=>path.join(pkgdir, lang, y))) + ))) + .then(pkgs=>pkgs.flat().filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file)))) + .then(pkgs=>pkgs.forEach(pkg => new runtime.Runtime(pkg))); - const pkglist = await fs.readdir(pkgdir); - const languages = await Promise.all( - pkglist.map(lang=> - fs.readdir(path.join(pkgdir,lang)) - .then(x=>x.map(y=>path.join(pkgdir, lang, y))) - )); - const installed_languages = languages.flat() - .filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file))); - installed_languages.forEach(pkg => new runtime.Runtime(pkg)); logger.info('Starting API Server'); @@ -87,69 +86,23 @@ const app = express(); logger.debug('Registering Routes'); const 
ppman_routes = require('./ppman/routes'); + + app.get ('/repos', validate, ppman_routes.repo_list); + app.post ('/repos', ppman_routes.repo_add_validators, validate, ppman_routes.repo_add); + app.get ('/repos/:repo_slug', ppman_routes.repo_info_validators, validate, ppman_routes.repo_info); + app.get ('/repos/:repo_slug/packages', ppman_routes.repo_packages_validators, validate, ppman_routes.repo_packages); + app.get ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_info); + app.post ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_install); + app.delete('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_uninstall); + const executor_routes = require('./executor/routes'); + app.post ('/jobs', executor_routes.run_job_validators, validate, executor_routes.run_job); - app.get('/repos', - validate, - ppman_routes.repo_list - ); - - app.post('/repos', - ppman_routes.repo_add_validators, - validate, - ppman_routes.repo_add - ); - - app.get('/repos/:repo_slug', - ppman_routes.repo_info_validators, - validate, - ppman_routes.repo_info - ); - - app.get('/repos/:repo_slug/packages', - ppman_routes.repo_packages_validators, - validate, - ppman_routes.repo_packages - ); - - app.get('/repos/:repo_slug/packages/:language/:version', - ppman_routes.package_info_validators, - validate, - ppman_routes.package_info - ); - - app.post('/repos/:repo_slug/packages/:language/:version', - ppman_routes.package_info_validators, - validate, - ppman_routes.package_install - ); - - app.delete('/repos/:repo_slug/packages/:language/:version', - ppman_routes.package_info_validators, - validate, - ppman_routes.package_uninstall - ); - - app.post('/jobs', - executor_routes.run_job_validators, - validate, - executor_routes.run_job); - - function list_runtimes(_, res){ - const runtimes = 
runtime.map(rt => ( - { - language: rt.language, - version: rt.version.raw, - author: rt.author - } - )); - - return res.json_success({ - runtimes - }); - } - - app.get('/runtimes', list_runtimes); + app.get ('/runtimes', (_, res) => res.json_success({runtimes: runtime.map(rt=>({ + language: rt.language, + version: rt.version.raw, + author: rt.author + }))})) logger.debug('Calling app.listen'); const [address,port] = config.bind_address.split(':'); @@ -159,16 +112,6 @@ const app = express(); }); logger.debug('Setting up flush timers'); - - setInterval( - cache.flush, - config.cache_flush_time, - path.join(config.data_directory,globals.data_directories.cache) - ); - - setInterval( - state.save, - config.state_flush_time, - path.join(config.data_directory,globals.data_files.state) - ); + setInterval(cache.flush,config.cache_flush_time,path.join(config.data_directory,globals.data_directories.cache)); + setInterval(state.save,config.state_flush_time,path.join(config.data_directory,globals.data_files.state)); })(); \ No newline at end of file diff --git a/api/src/ppman/package.js b/api/src/ppman/package.js index 894376b..ebccd7f 100644 --- a/api/src/ppman/package.js +++ b/api/src/ppman/package.js @@ -63,9 +63,7 @@ class Package { logger.debug('Validating checksums'); Object.keys(this.checksums).forEach(algo => { var val = this.checksums[algo]; - logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`); - var cs = crypto.create_hash(algo) .update(fss.read_file_sync(pkgpath)) .digest('hex'); @@ -74,8 +72,7 @@ class Package { await this.repo.import_keys(); - logger.debug('Validating signatures'); - + logger.debug('Validating signatutes'); if(this.signature != '') await new Promise((resolve,reject)=>{ const gpgspawn = cp.spawn('gpg', ['--verify', '-', pkgpath], { @@ -97,7 +94,6 @@ class Package { logger.warn('Package does not contain a signature - allowing install, but proceed with caution'); logger.debug(`Extracting package files from archive ${pkgfile} in to 
${this.install_path}`); - await new Promise((resolve, reject)=>{ const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`); proc.once('exit', (code,_)=>{ @@ -113,35 +109,30 @@ class Package { logger.debug('Ensuring binary files exist for package'); const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`); try{ - const pkgbin_stat = await fs.stat(pkgbin); + const pkgbinstat = await fs.stat(pkgbin); //eslint-disable-next-line snakecasejs/snakecasejs - if(!pkgbin_stat.isDirectory()) throw new Error(); - // Throw a blank error here, so it will be caught by the following catch, and output the correct error message - // The catch is used to catch fs.stat + if(!pkgbinstat.isDirectory()) throw new Error(); }catch(err){ throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`); } logger.debug('Symlinking into runtimes'); - await fs.symlink( pkgbin, path.join(config.data_directory, globals.data_directories.runtimes, `${this.language}-${this.version.raw}`) - ).catch((err)=>err); //Ignore if we fail - probably means its already been installed and not cleaned up right + ).catch((err)=>err); //catch logger.debug('Registering runtime'); - const pkg_runtime = new runtime.Runtime(this.install_path); + const pkgruntime = new runtime.Runtime(this.install_path); logger.debug('Caching environment'); - const required_pkgs = [pkg_runtime, ...pkg_runtime.get_all_dependencies()]; - const get_env_command = [ - ...required_pkgs.map(pkg=>`cd "${pkg.runtime_dir}"; source environment; `), - 'env' - ].join(' '); + const required_pkgs = [pkgruntime, ...pkgruntime.get_all_dependencies()]; + const get_env_command = [...required_pkgs.map(p=>`cd "${p.runtime_dir}"; source environment; `), + 'env' ].join(' '); const envout = await new Promise((resolve, reject)=>{ var stdout = ''; diff --git a/api/src/state.js b/api/src/state.js index b69d626..6dc555f 100644 --- a/api/src/state.js +++ 
b/api/src/state.js @@ -30,12 +30,12 @@ module.exports = { async load(data_file){ if(fss.exists_sync(data_file)){ logger.info('Loading state from file'); - - const content = await fs.read_file(data_file); - const obj = JSON.parse(content.toString(), reviver); + var content = await fs.read_file(data_file); + var obj = JSON.parse(content.toString(), reviver); [...obj.keys()].forEach(k => state.set(k, obj.get(k))); }else{ - logger.info('Creating new state file'); + logger.info('Creating new statefile'); + state.set('repositories', new Map().set('offical', 'https://repo.pistonee.org/index.yaml')); } }, async save(data_file){ diff --git a/cli/commands/execute.js b/cli/commands/execute.js index 11afaa2..4fbcbc5 100644 --- a/cli/commands/execute.js +++ b/cli/commands/execute.js @@ -50,16 +50,16 @@ exports.handler = async function(argv){ })) || ""; - const response = await api.run_job({ - language: argv.language, - version: argv['language-version'], - files: files, - main: argv.file, - arsg: argv.args, + const response = await api.run_job( + argv.language, + argv['language-version'], + files, + argv.file, + argv.args, stdin, - compile_timeout: argv.ct, - run_timeout: argv.rt - }) + argv.ct, + argv.rt + ) function step(name, ctx){ console.log(chalk.bold(`== ${name} ==`)) diff --git a/cli/commands/ppman_commands/install.js b/cli/commands/ppman_commands/install.js index 1cab867..4c4b10a 100644 --- a/cli/commands/ppman_commands/install.js +++ b/cli/commands/ppman_commands/install.js @@ -17,13 +17,11 @@ exports.handler = async function(argv){ const api = new PistonEngine(argv['piston-url']); const repos = await api.list_repos(); - const repos_obj = await Promise.all(repos.repos.map(({slug}) => api.get_repo(slug))); - const repo_pkgs = await Promise.all(repos_obj.map( async repo => ({ repo: repo, - packages: await repo.list_packages().catch(_=>[]) + packages: await repo.list_packages().catch(x=>[]) }) )) diff --git a/cli/commands/ppman_commands/list.js 
b/cli/commands/ppman_commands/list.js index 6dd4bed..162d286 100644 --- a/cli/commands/ppman_commands/list.js +++ b/cli/commands/ppman_commands/list.js @@ -17,14 +17,11 @@ exports.handler = async function(argv){ const api = new PistonEngine(argv['piston-url']); const repos = await api.list_repos(); - const repos_obj = await Promise.all(repos.repos.map(({slug}) => api.get_repo(slug))); - - const packages = await repos_obj.reduce(async (accumulator, repo) => [ - ...await accumulator, - ...await repo.list_packages() - .catch(x=>{console.log(x); return []}) - ], []); // Loops over repos, listing packages and flattening them into a single array + const repos_obj = await Promise.all(repos.repos.map(({slug}) => api.get_repo(slug))); const packages = await repos_obj.reduce(async (a, c) => [ + ...await a, + ...await c.list_packages().catch(x=>{console.log(x); return []}) + ], []); const pkg_msg = packages .map(msg_format.color) diff --git a/repo/entrypoint.sh b/repo/entrypoint.sh index 0b4d78c..6f917d7 100755 --- a/repo/entrypoint.sh +++ b/repo/entrypoint.sh @@ -1,6 +1,5 @@ cd /packages - -for pkg in "$@" +for pkg in "$@" do make -j16 build-$pkg done