From b3d18600cd3c0d5f03530a1f768df49e862ba53e Mon Sep 17 00:00:00 2001 From: Brian Seymour Date: Fri, 12 Mar 2021 23:01:04 -0600 Subject: [PATCH] api refactoring --- api/.eslintrc.json | 40 ------------ api/Dockerfile | 7 ++- api/package.json | 41 ++++++------- api/src/config.js | 68 +++++++++++---------- api/src/executor/job.js | 114 ++++++++++++++++++++-------------- api/src/executor/routes.js | 41 ++++++++----- api/src/globals.js | 4 +- api/src/index.js | 122 ++++++++++++++++--------------------- api/src/ppman/package.js | 92 ++++++++++++++++------------ api/src/ppman/routes.js | 110 +++++++++++++++++++++------------ api/src/runtime.js | 46 ++++++++------ 11 files changed, 358 insertions(+), 327 deletions(-) delete mode 100644 api/.eslintrc.json diff --git a/api/.eslintrc.json b/api/.eslintrc.json deleted file mode 100644 index 579bcfc..0000000 --- a/api/.eslintrc.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "env": { - "commonjs": true, - "es2021": true, - "node": true - }, - "plugins": [ - "snakecasejs" - ], - "extends": "eslint:recommended", - "parser": "babel-eslint", - "parserOptions": { - "ecmaVersion": 12 - }, - "settings": - { - "snakecasejs/filter": ["ClassDeclaration", "NewExpression"], - "snakecasejs/whitelist": [] - }, - "rules": { - "indent": [ - "error", - 4 - ], - "linebreak-style": [ - "error", - "unix" - ], - "quotes": [ - "error", - "single" - ], - "semi": [ - "error", - "always" - ], - "no-unused-vars": ["error", { "argsIgnorePattern": "^_"}], - "snakecasejs/snakecasejs": "warn" - } -} diff --git a/api/Dockerfile b/api/Dockerfile index b3eb8a8..8f49ca1 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -1,7 +1,8 @@ FROM node:15.8.0-buster-slim RUN dpkg-reconfigure -p critical dash -RUN apt-get update && apt-get install -y libxml2 gnupg tar coreutils util-linux \ - && rm -rf /var/lib/apt/lists/* +RUN apt-get update && \ + apt-get install -y libxml2 gnupg tar coreutils util-linux && \ + rm -rf /var/lib/apt/lists/* RUN for i in $(seq 1001 1500); do \ 
groupadd -g $i runner$i && \ @@ -15,4 +16,4 @@ RUN yarn COPY ./src ./src CMD [ "node", "src", "-m", "-c", "/piston/config.yaml"] -EXPOSE 6969/tcp \ No newline at end of file +EXPOSE 6969/tcp diff --git a/api/package.json b/api/package.json index ba86daa..75cb08c 100644 --- a/api/package.json +++ b/api/package.json @@ -1,25 +1,20 @@ { - "name": "piston-api", - "version": "3.0.0", - "description": "API for piston - a high performance code execution engine", - "main": "src/index.js", - "dependencies": { - "body-parser": "^1.19.0", - "express": "^4.17.1", - "express-validator": "^6.10.0", - "is-docker": "^2.1.1", - "js-yaml": "^4.0.0", - "logplease": "^1.2.15", - "nocamel": "HexF/nocamel#patch-1", - "node-fetch": "^2.6.1", - "semver": "^7.3.4", - "uuid": "^8.3.2", - "yargs": "^16.2.0" - }, - "devDependencies": { - "babel-eslint": "^10.1.0", - "eslint": "^7.20.0", - "eslint-plugin-snakecasejs": "^2.2.0" - }, - "license": "MIT" + "name": "piston-api", + "version": "3.0.0", + "description": "API for piston - a high performance code execution engine", + "main": "src/index.js", + "dependencies": { + "body-parser": "^1.19.0", + "express": "^4.17.1", + "express-validator": "^6.10.0", + "is-docker": "^2.1.1", + "js-yaml": "^4.0.0", + "logplease": "^1.2.15", + "nocamel": "HexF/nocamel#patch-1", + "node-fetch": "^2.6.1", + "semver": "^7.3.4", + "uuid": "^8.3.2", + "yargs": "^16.2.0" + }, + "license": "MIT" } diff --git a/api/src/config.js b/api/src/config.js index 2d841b0..2ad3ffc 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -1,25 +1,25 @@ const fss = require('fs'); const yargs = require('yargs'); -const hide_bin = require('yargs/helpers').hideBin; //eslint-disable-line snakecasejs/snakecasejs +const hide_bin = require('yargs/helpers').hideBin; const Logger = require('logplease'); const logger = Logger.create('config'); const yaml = require('js-yaml'); const header = `# -# ____ _ _ -# | _ \\(_)___| |_ ___ _ __ -# | |_) | / __| __/ _ \\| '_ \\ +# ____ _ _ +# | _ 
\\(_)___| |_ ___ _ __ +# | |_) | / __| __/ _ \\| '_ \\ # | __/| \\__ \\ || (_) | | | | # |_| |_|___/\\__\\___/|_| |_| # -# A High performance code execution engine +# A High performance code execution engine # github.com/engineer-man/piston # `; const argv = yargs(hide_bin(process.argv)) .usage('Usage: $0 -c [config]') - .demandOption('c') //eslint-disable-line snakecasejs/snakecasejs + .demandOption('c') .option('config', { alias: 'c', describe: 'config file to load from', @@ -29,18 +29,18 @@ const argv = yargs(hide_bin(process.argv)) alias: 'm', type: 'boolean', describe: 'create config file and populate defaults if it does not already exist' - }).argv; - + }) + .argv; const options = [ { key: 'log_level', desc: 'Level of data to log', default: 'INFO', - /* eslint-disable snakecasejs/snakecasejs */ options: Object.values(Logger.LogLevels), - validators: [x=>Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist`] - /* eslint-enable snakecasejs/snakecasejs */ + validators: [ + x => Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist` + ] }, { key: 'bind_address', @@ -110,68 +110,71 @@ const options = [ } ]; -function make_default_config(){ +const make_default_config = () => { let content = header.split('\n'); options.forEach(option => { content = content.concat(option.desc.split('\n').map(x=>`# ${x}`)); - if(option.options) + if (option.options) { content.push('# Options: ' + option.options.join(', ')); + } content.push(`${option.key}: ${option.default}`); - + content.push(''); // New line between }); return content.join('\n'); -} +}; logger.info(`Loading Configuration from ${argv.config}`); -if(argv['make-config']) +if (argv['make-config']) { logger.debug('Make configuration flag is set'); +} -if(!!argv['make-config'] && !fss.exists_sync(argv.config)){ +if (!!argv['make-config'] && !fss.exists_sync(argv.config)) { logger.info('Writing default configuration...'); try { - fss.write_file_sync(argv.config, 
make_default_config());
-    } catch (err) {
-        logger.error('Error writing default configuration:', err.message);
+        fss.write_file_sync(argv.config, make_default_config());
+    } catch (e) {
+        logger.error('Error writing default configuration:', e.message);
         process.exit(1);
     }
 }
 
-var config = {};
+let config = {};
 
 logger.debug('Reading config file');
 
-try{
+try {
     const cfg_content = fss.read_file_sync(argv.config);
     config = yaml.load(cfg_content);
-}catch(err){
+} catch(err) {
     logger.error('Error reading configuration file:', err.message);
     process.exit(1);
 }
 
 logger.debug('Validating config entries');
 
-var errored=false;
+let errored = false;
 
-options.forEach(option => {
+options.for_each(option => {
     logger.debug('Checking option', option.key);
 
-    var cfg_val = config[option.key];
+    let cfg_val = config[option.key];
 
-    if(cfg_val == undefined){
+    if (cfg_val === undefined) {
         errored = true;
         logger.error(`Config key ${option.key} does not exist on currently loaded configuration`);
         return;
     }
 
-    option.validators.forEach(validator => {
-        var response = validator(cfg_val);
-        if(response !== true){
+    option.validators.for_each(validator => {
+        let response = validator(cfg_val);
+
+        if (response !== true) {
             errored = true;
             logger.error(`Config option ${option.key} failed validation:`, response);
             return;
@@ -179,9 +182,10 @@ options.forEach(option => {
     });
 });
 
-if(errored) process.exit(1);
+if (errored) {
+    process.exit(1);
+}
 
 logger.info('Configuration successfully loaded');
 
 module.exports = config;
-
diff --git a/api/src/executor/job.js b/api/src/executor/job.js
index 046cd53..2a64a59 100644
--- a/api/src/executor/job.js
+++ b/api/src/executor/job.js
@@ -1,5 +1,5 @@
 const logger = require('logplease').create('executor/job');
-const { v4: uuidv4 } = require('uuid');
+const { v4: uuidv4 } = require('uuid');
 const cp = require('child_process');
 const path = require('path');
 const config = require('../config');
@@ -12,11 +12,12 @@ const job_states = {
     EXECUTED: Symbol('Executed and ready for 
cleanup') }; -var uid=0; -var gid=0; +let uid = 0; +let gid = 0; class Job { - constructor({runtime, files, args, stdin, timeouts, main}){ + + constructor({ runtime, files, args, stdin, timeouts, main }) { this.uuid = uuidv4(); this.runtime = runtime; this.files = files; @@ -25,8 +26,11 @@ class Job { this.timeouts = timeouts; this.main = main; - if(!this.files.map(f=>f.name).includes(this.main)) + let file_list = this.files.map(f => f.name); + + if (!file_list.includes(this.main)) { throw new Error(`Main file "${this.main}" will not be written to disk`); + } this.uid = config.runner_uid_min + uid; this.gid = config.runner_gid_min + gid; @@ -41,34 +45,30 @@ class Job { this.dir = path.join(config.data_directory, globals.data_directories.jobs, this.uuid); } - async prime(){ + async prime() { logger.info(`Priming job uuid=${this.uuid}`); logger.debug('Writing files to job cache'); - await fs.mkdir(this.dir, {mode:0o700}); - - const files = this.files.map(({name: file_name, content}) => { - return fs.write_file(path.join(this.dir, file_name), content); - }); - - await Promise.all(files); - logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`); - await fs.chown(this.dir, this.uid, this.gid); - - const chowns = this.files.map(({name:file_name}) => { - return fs.chown(path.join(this.dir, file_name), this.uid, this.gid); - }); - await Promise.all(chowns); + await fs.mkdir(this.dir, { mode:0o700 }); + await fs.chown(this.dir, this.uid, this.gid); + + for (const file of this.files) { + let file_path = path.join(this.dir, file.name); + + await fs.write_file(file_path, file.content); + await fs.chown(file_path, this.uid, this.gid); + } this.state = job_states.PRIMED; + logger.debug('Primed job'); } - async safe_call(file, args, timeout){ - return await new Promise((resolve, reject) => { + async safe_call(file, args, timeout) { + return new Promise((resolve, reject) => { const unshare = config.enable_unshare ? 
['unshare','-n','-r'] : [];
 
         const prlimit = [
@@ -94,63 +94,81 @@ class Job {
                 gid: this.gid,
                 detached: true //give this process its own process group
             });
-
+
             proc.stdin.write(this.stdin);
             proc.stdin.end();
-
-            const kill_timeout = setTimeout(_ => proc.kill('SIGKILL'), timeout);
+            const kill_timeout = set_timeout(_ => proc.kill('SIGKILL'), timeout);
 
-            proc.stderr.on('data', d=>{if(stderr.length>config.output_max_size) proc.kill('SIGKILL'); else stderr += d;});
-            proc.stdout.on('data', d=>{if(stdout.length>config.output_max_size) proc.kill('SIGKILL'); else stdout += d;});
+            proc.stderr.on('data', data => {
+                if (stderr.length > config.output_max_size) {
+                    proc.kill('SIGKILL');
+                } else {
+                    stderr += data;
+                }
+            });
+
+            proc.stdout.on('data', data => {
+                if (stdout.length > config.output_max_size) {
+                    proc.kill('SIGKILL');
+                } else {
+                    stdout += data;
+                }
+            });
+
+            const exit_cleanup = () => {
+                clear_timeout(kill_timeout);
 
-            function exit_cleanup(){
-                clearTimeout(kill_timeout);
                 proc.stderr.destroy();
                 proc.stdout.destroy();
-                try{
+
+                try {
                     process.kill(-proc.pid, 'SIGKILL');
-                }catch{
+                } catch {
                     // Process will be dead already, so nothing to kill. 
} - } + }; proc.on('exit', (code, signal)=>{ exit_cleanup(); - - resolve({stdout, stderr, code, signal}); + + resolve({ stdout, stderr, code, signal }); }); proc.on('error', (err) => { exit_cleanup(); - reject({error: err, stdout, stderr}); + reject({ error: err, stdout, stderr }); }); }); } - async execute(){ - if(this.state != job_states.PRIMED) + async execute() { + if (this.state !== job_states.PRIMED) { throw new Error('Job must be in primed state, current state: ' + this.state.toString()); + } logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`); logger.debug('Compiling'); - var compile = undefined; - if(this.runtime.compiled) + let compile; + + if (this.runtime.compiled) { compile = await this.safe_call( path.join(this.runtime.pkgdir, 'compile'), - this.files.map(x=>x.name), - this.timeouts.compile); - + this.files.map(x => x.name), + this.timeouts.compile + ); + } logger.debug('Running'); const run = await this.safe_call( path.join(this.runtime.pkgdir, 'run'), [this.main, ...this.args], - this.timeouts.run); + this.timeouts.run + ); this.state = job_states.EXECUTED; @@ -158,13 +176,15 @@ class Job { compile, run }; - } - async cleanup(){ + async cleanup() { logger.info(`Cleaning up job uuid=${this.uuid}`); - await fs.rm(this.dir, {recursive: true, force: true}); + await fs.rm(this.dir, { recursive: true, force: true }); } + } -module.exports = {Job}; \ No newline at end of file +module.exports = { + Job +}; diff --git a/api/src/executor/routes.js b/api/src/executor/routes.js index fe09d63..edf959b 100644 --- a/api/src/executor/routes.js +++ b/api/src/executor/routes.js @@ -6,38 +6,45 @@ const { Job } = require('./job'); const { body } = require('express-validator'); module.exports = { + run_job_validators: [ body('language') - .isString(), // eslint-disable-line snakecasejs/snakecasejs + .isString(), body('version') - .isString(), // eslint-disable-line snakecasejs/snakecasejs + .isString(), // 
isSemVer requires it to be a version, not a selector body('files') - .isArray(), // eslint-disable-line snakecasejs/snakecasejs + .isArray(), body('files.*.name') - .isString() // eslint-disable-line snakecasejs/snakecasejs + .isString() .bail() .not() .contains('/'), body('files.*.content') - .isString(), // eslint-disable-line snakecasejs/snakecasejs + .isString(), body('compile_timeout') - .isNumeric(), // eslint-disable-line snakecasejs/snakecasejs + .isNumeric(), body('run_timeout') - .isNumeric(), // eslint-disable-line snakecasejs/snakecasejs + .isNumeric(), body('stdin') - .isString(), // eslint-disable-line snakecasejs/snakecasejs + .isString(), body('args') .isArray(), body('args.*') - .isString() // eslint-disable-line snakecasejs/snakecasejs + .isString() ], - async run_job(req, res){ - // POST /jobs - + // POST /jobs + async run_job(req, res) { const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version); - if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400); + + if (runtime === undefined) { + return res + .status(400) + .send({ + message: `${req.body.language}-${req.body.version} runtime is unknown` + }); + } const job = new Job({ runtime, @@ -54,8 +61,12 @@ module.exports = { await job.prime(); const result = await job.execute(); - res.json_success(result); await job.cleanup(); + + return res + .status(200) + .send(result); } -}; \ No newline at end of file + +}; diff --git a/api/src/globals.js b/api/src/globals.js index c9bd427..300558e 100644 --- a/api/src/globals.js +++ b/api/src/globals.js @@ -5,7 +5,7 @@ const platform = `${is_docker() ? 
'docker' : 'baremetal'}-${ fss.read_file_sync('/etc/os-release') .toString() .split('\n') - .find(x=>x.startsWith('ID')) + .find(x => x.startsWith('ID')) .replace('ID=','') }`; @@ -17,4 +17,4 @@ module.exports = { version: require('../package.json').version, platform, pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed -}; \ No newline at end of file +}; diff --git a/api/src/index.js b/api/src/index.js index 1325243..63d1393 100644 --- a/api/src/index.js +++ b/api/src/index.js @@ -9,117 +9,103 @@ const fs = require('fs/promises'); const fss = require('fs'); const body_parser = require('body-parser'); const runtime = require('./runtime'); -const {validationResult} = require('express-validator'); //eslint-disable-line snakecasejs/snakecasejs +const { validationResult } = require('express-validator'); const logger = Logger.create('index'); const app = express(); (async () => { logger.info('Setting loglevel to',config.log_level); - Logger.setLogLevel(config.log_level); //eslint-disable-line snakecasejs/snakecasejs - + Logger.setLogLevel(config.log_level); logger.debug('Ensuring data directories exist'); - Object.values(globals.data_directories).forEach(dir => { - var data_path = path.join(config.data_directory, dir); + + Object.values(globals.data_directories).for_each(dir => { + let data_path = path.join(config.data_directory, dir); + logger.debug(`Ensuring ${data_path} exists`); - if(!fss.exists_sync(data_path)){ + + if (!fss.exists_sync(data_path)) { logger.info(`${data_path} does not exist.. 
Creating..`); - try{ + + try { fss.mkdir_sync(data_path); - }catch(err){ - logger.error(`Failed to create ${data_path}: `, err.message); + } catch(e) { + logger.error(`Failed to create ${data_path}: `, e.message); } } - }); logger.info('Loading packages'); const pkgdir = path.join(config.data_directory,globals.data_directories.packages); const pkglist = await fs.readdir(pkgdir); + const languages = await Promise.all( pkglist.map(lang=> fs.readdir(path.join(pkgdir,lang)) .then(x=>x.map(y=>path.join(pkgdir, lang, y))) )); - const installed_languages = languages.flat() - .filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file))); + + const installed_languages = languages + .flat() + .filter(pkg => fss.exists_sync(path.join(pkg, globals.pkg_installed_file))); installed_languages.forEach(pkg => new runtime.Runtime(pkg)); logger.info('Starting API Server'); - logger.debug('Constructing Express App'); - - logger.debug('Registering custom message wrappers'); - - express.response.json_error = function(message, code) { - this.status(code); - return this.json({success: false, message, code}); - }; - - express.response.json_success = function(obj) { - return this.json({success: true, data: obj}); - }; - logger.debug('Registering middleware'); - app.use(body_parser.urlencoded({extended: true})); + app.use(body_parser.urlencoded({ extended: true })); app.use(body_parser.json()); + const validate = (req, res, next) => { + const errors = validationResult(req); + + if (!errors.isEmpty()) { + return res + .status(422) + .send({ + message: errors.array() + }); + } - function validate(req, res, next) { - const errors = validationResult(req); //eslint-disable-line snakecasejs/snakecasejs - if (!errors.isEmpty()) //eslint-disable-line snakecasejs/snakecasejs - return res.json_error(errors.array(), 422); next(); - } + }; logger.debug('Registering Routes'); const ppman_routes = require('./ppman/routes'); const executor_routes = require('./executor/routes'); - - 
app.get('/packages', - ppman_routes.package_list - ); - - app.post('/packages/:language/:version', - ppman_routes.package_install - ); - - app.delete('/packages/:language/:version', - ppman_routes.package_uninstall - ); - + app.get('/packages', ppman_routes.package_list); + app.post('/packages/:language/:version', ppman_routes.package_install); + app.delete('/packages/:language/:version', ppman_routes.package_uninstall); app.post('/jobs', executor_routes.run_job_validators, validate, - executor_routes.run_job); + executor_routes.run_job + ); + app.get('/runtimes', (req, res) => { + const runtimes = runtime + .map(rt => { + return { + language: rt.language, + version: rt.version.raw, + author: rt.author, + aliases: rt.aliases + }; + }); - function list_runtimes(_, res){ - const runtimes = runtime.map(rt => ( - { - language: rt.language, - version: rt.version.raw, - author: rt.author, - aliases: rt.aliases - } - )); - - return res.json_success({ - runtimes - }); - } - - app.get('/runtimes', list_runtimes); - - logger.debug('Calling app.listen'); - const [address,port] = config.bind_address.split(':'); - - app.listen(port, address, ()=>{ - logger.info('API server started on', config.bind_address); + return res + .status(200) + .send(runtimes); }); -})(); \ No newline at end of file + logger.debug('Calling app.listen'); + const [ address, port ] = config.bind_address.split(':'); + + app.listen(port, address, () => { + logger.info('API server started on', config.bind_address); + }); +})(); diff --git a/api/src/ppman/package.js b/api/src/ppman/package.js index 66a29b7..2b31137 100644 --- a/api/src/ppman/package.js +++ b/api/src/ppman/package.js @@ -11,66 +11,73 @@ const crypto = require('crypto'); const runtime = require('../runtime'); class Package { - constructor({language, version, download, checksum}){ + + constructor({ language, version, download, checksum }){ this.language = language; this.version = semver.parse(version); this.checksum = checksum; this.download = 
download; } - get installed(){ + get installed() { return fss.exists_sync(path.join(this.install_path, globals.pkg_installed_file)); } - get download_url(){ - return this.download; - } - - get install_path(){ - return path.join(config.data_directory, + get install_path() { + return path.join( + config.data_directory, globals.data_directories.packages, this.language, - this.version.raw); + this.version.raw + ); } - async install(){ - if(this.installed) throw new Error('Already installed'); + async install() { + if (this.installed) { + throw new Error('Already installed'); + } + logger.info(`Installing ${this.language}-${this.version.raw}`); - if(fss.exists_sync(this.install_path)){ + if (fss.exists_sync(this.install_path)) { logger.warn(`${this.language}-${this.version.raw} has residual files. Removing them.`); - await fs.rm(this.install_path, {recursive: true, force: true}); + await fs.rm(this.install_path, { recursive: true, force: true }); } logger.debug(`Making directory ${this.install_path}`); await fs.mkdir(this.install_path, {recursive: true}); + logger.debug(`Downloading package from ${this.download} in to ${this.install_path}`); + const pkgpath = path.join(this.install_path, 'pkg.tar.gz'); + const download = await fetch(this.download); - logger.debug(`Downloading package from ${this.download_url} in to ${this.install_path}`); - const pkgpath = path.join(this.install_path, "pkg.tar.gz"); - const download = await fetch(this.download_url); const file_stream = fss.create_write_stream(pkgpath); await new Promise((resolve, reject) => { - download.body.pipe(file_stream) - download.body.on("error", reject) - file_stream.on("finish", resolve) + download.body.pipe(file_stream); + download.body.on('error', reject); + + file_stream.on('finish', resolve); }); logger.debug('Validating checksums'); - logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`) + logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`); const cs = crypto.create_hash("sha256") 
.update(fss.readFileSync(pkgpath))
             .digest('hex');
-        if(cs != this.checksum) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`);
+
+        if (cs !== this.checksum) {
+            throw new Error(`Checksum miss-match want: ${this.checksum} got: ${cs}`);
+        }
 
         logger.debug(`Extracting package files from archive ${pkgpath} in to ${this.install_path}`);
 
-        await new Promise((resolve, reject)=>{
+        await new Promise((resolve, reject) => {
             const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`);
-            proc.once('exit', (code,_)=>{
-                if(code == 0) resolve();
-                else reject(new Error('Failed to extract package'));
+
+            proc.once('exit', (code, _) => {
+                code === 0 ? resolve() : reject(new Error('Failed to extract package'));
             });
+
             proc.stdout.pipe(process.stdout);
             proc.stderr.pipe(process.stderr);
 
@@ -80,28 +87,35 @@ class Package {
         logger.debug('Registering runtime');
         new runtime.Runtime(this.install_path);
-
         logger.debug('Caching environment');
         const get_env_command = `cd ${this.install_path}; source environment; env`;
 
-        const envout = await new Promise((resolve, reject)=>{
-            var stdout = '';
-            const proc = cp.spawn('env',['-i','bash','-c',`${get_env_command}`], {
-                stdio: ['ignore', 'pipe', 'pipe']});
-            proc.once('exit', (code,_)=>{
-                if(code == 0) resolve(stdout);
-                else reject(new Error('Failed to cache environment'));
+        const envout = await new Promise((resolve, reject) => {
+            let stdout = '';
+
+            const proc = cp
+                .spawn(
+                    'env',
+                    ['-i','bash','-c',`${get_env_command}`],
+                    {
+                        stdio: ['ignore', 'pipe', 'pipe']
+                    }
+                );
+
+            proc.once('exit', (code, _) => {
+                code === 0 ? 
resolve(stdout) : reject(new Error('Failed to cache environment'));
             });
 
-            proc.stdout.on('data', (data)=>{
+            proc.stdout.on('data', data => {
                 stdout += data;
             });
 
             proc.once('error', reject);
         });
 
-        const filtered_env = envout.split('\n')
-            .filter(l=>!['PWD','OLDPWD','_', 'SHLVL'].includes(l.split('=',2)[0]))
+        const filtered_env = envout
+            .split('\n')
+            .filter(l => !['PWD','OLDPWD','_', 'SHLVL'].includes(l.split('=',2)[0]))
             .join('\n');
 
         await fs.write_file(path.join(this.install_path, '.env'), filtered_env);
@@ -116,7 +130,9 @@ class Package {
             version: this.version.raw
         };
     }
+
 }
-
-module.exports = {Package};
\ No newline at end of file
+module.exports = {
+    Package
+};
diff --git a/api/src/ppman/routes.js b/api/src/ppman/routes.js
index c4f0b8a..a957449 100644
--- a/api/src/ppman/routes.js
+++ b/api/src/ppman/routes.js
@@ -4,66 +4,98 @@ const fetch = require('node-fetch');
 const config = require('../config');
 const { Package } = require('./package');
 
+const get_package_list = async () => {
+    const repo_content = await fetch(config.repo_url).then(x => x.text());
 
-async function get_package_list(){
-    const repo_content = await fetch(config.repo_url).then(x=>x.text());
-
-    const entries = repo_content.split('\n').filter(x=>x.length > 0);
+    const entries = repo_content
+        .split('\n')
+        .filter(x => x.length > 0);
 
     return entries.map(line => {
-        const [language, version, checksum, download] = line.split(',',4);
-        return new Package({language, version, checksum, download});
-    })
-}
+        const [ language, version, checksum, download ] = line.split(',', 4);
+        return new Package({
+            language,
+            version,
+            checksum,
+            download
+        });
+    });
+};
 
-async function get_package(lang, version){
+const get_package = async (lang, version) => {
     const packages = await get_package_list();
-    const candidates = packages.filter(
-        pkg => pkg.language == lang && semver.satisfies(pkg.version, version)
-    );
-    return candidates.sort((a,b)=>semver.rcompare(a.version,b.version))[0] || null;
-}
+
+    const candidates = packages
+        .filter(pkg => {
+            
return pkg.language == lang && semver.satisfies(pkg.version, version)
+        });
+
+    candidates.sort((a, b) => semver.rcompare(a.version, b.version));
+
+    return candidates[0] || null;
+};
 
 module.exports = {
-
-    async package_list(req, res){
-        // GET /packages
+
+    // GET /packages
+    async package_list(req, res) {
         logger.debug('Request to list packages');
 
         const packages = await get_package_list();
 
-        res.json_success({
-            packages: packages.map(pkg=>({
-                language: pkg.language,
-                language_version: pkg.version.raw,
-                installed: pkg.installed
-            }))
-        });
+        const package_info = packages
+            .map(pkg => {
+                return {
+                    language: pkg.language,
+                    language_version: pkg.version.raw,
+                    installed: pkg.installed
+                };
+            });
 
+        return res
+            .status(200)
+            .send(package_info);
     },
 
-    async package_install(req,res){
-        // POST /packages/:language/:version
+    // POST /packages/:language/:version
+    async package_install(req, res) {
         logger.debug('Request to install package');
 
         const pkg = await get_package(req.params.language, req.params.version);
 
-        if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404);
-        try{
-            const response = await pkg.install();
-            return res.json_success(response);
-        }catch(err){
-            logger.error(`Error while installing package ${pkg.language}-${pkg.version}:`, err.message);
-            res.json_error(err.message,500);
+        if (pkg == null) {
+            return res
+                .status(404)
+                .send({
+                    message: `Requested package ${req.params.language}-${req.params.version} does not exist`
+                });
         }
-
+        try {
+            const response = await pkg.install();
+
+            return res
+                .status(200)
+                .send(response);
+        } catch(e) {
+            logger.error(`Error while installing package ${pkg.language}-${pkg.version}:`, e.message);
+
+            return res
+                .status(500)
+                .send({
+                    message: e.message
+                });
+        }
     },
 
-    async package_uninstall(req,res){
-        // DELETE /packages/:language/:version
-        //res.json(req.body); //TODO
-        res.json_error('not implemented', 500);
+    // DELETE 
/packages/:language/:version
+    async package_uninstall(req, res) {
+        return res
+            .status(500)
+            .send({
+                message: 'Not implemented'
+            });
     }
-};
\ No newline at end of file
+
+};
diff --git a/api/src/runtime.js b/api/src/runtime.js
index 5a01891..e220aec 100644
--- a/api/src/runtime.js
+++ b/api/src/runtime.js
@@ -8,53 +8,60 @@ const path = require('path');
 const runtimes = [];
 
 class Runtime {
-    #env_vars
-    #compiled
+
     constructor(package_dir){
-        const {language, version, author, build_platform, aliases} = JSON.parse(
+        let info = JSON.parse(
             fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
         );
 
+        const { language, version, author, build_platform, aliases } = info;
+
         this.pkgdir = package_dir;
         this.language = language;
         this.version = semver.parse(version);
         this.author = author;
         this.aliases = aliases;
 
-        if(build_platform != globals.platform){
-            logger.warn(`Package ${language}-${version} was built for platform ${build_platform}, but our platform is ${globals.platform}`);
+        if (build_platform !== globals.platform) {
+            logger.warn(
+                `Package ${language}-${version} was built for platform ${build_platform}, ` +
+                `but our platform is ${globals.platform}`
+            );
         }
-
+
         logger.debug(`Package ${language}-${version} was loaded`);
+
        runtimes.push(this);
     }
 
-    get env_file_path(){
-        return path.join(this.pkgdir, 'environment');
+    get compiled() {
+        if (this._compiled === undefined) {
+            this._compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
+        }
+
+        return this._compiled;
     }
 
-    get compiled(){
-        if(this.#compiled === undefined) this.#compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
-        return this.#compiled;
-    }
-
-    get env_vars(){
-        if(!this.#env_vars){
+    get env_vars() {
+        if (!this._env_vars) {
             const env_file = path.join(this.pkgdir, '.env');
             const env_content = fss.read_file_sync(env_file).toString();
-            this.#env_vars = {};
+
+            this._env_vars = {};
+
             env_content
                 .trim()
                 .split('\n')
                 .map(line => line.split('=',2))
                 .forEach(([key,val]) => {
-                    this.#env_vars[key.trim()] = val.trim();
+                    this._env_vars[key.trim()] = val.trim();
                 });
         }
-        return this.#env_vars;
+
+        return this._env_vars;
     }
 
-    toString(){
+    toString() {
         return `${this.language}-${this.version.raw}`;
     }
 }
@@ -68,4 +75,3 @@ module.exports.get_latest_runtime_matching_language_version = function(lang, ver
     return module.exports.get_runtimes_matching_language_version(lang, ver)
         .sort((a,b) => semver.rcompare(a.version, b.version))[0];
 };
-