diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml index 5c935c5..4e82395 100644 --- a/.github/workflows/package-pr.yaml +++ b/.github/workflows/package-pr.yaml @@ -71,7 +71,7 @@ jobs: docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api docker.pkg.github.com/engineer-man/piston/api echo Waiting for API to start.. - docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v1/runtimes + docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes echo Waiting for Index to start.. docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index @@ -80,7 +80,7 @@ jobs: sed -i 's/repo/localhost/g' repo/index echo Listing Packages - PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v1/packages) + PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages) echo $PACKAGES_JSON echo Getting CLI ready @@ -94,7 +94,7 @@ jobs: PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package) echo "Installing..." - docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v1/packages/$PKG_PATH + docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages/$PKG_PATH TEST_SCRIPTS=packages/$PKG_PATH/test.* echo "Tests: $TEST_SCRIPTS" diff --git a/api/.prettierignore b/api/.prettierignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/api/.prettierignore @@ -0,0 +1 @@ +node_modules diff --git a/api/.prettierrc.yaml b/api/.prettierrc.yaml new file mode 100644 index 0000000..59b6ad3 --- /dev/null +++ b/api/.prettierrc.yaml @@ -0,0 +1,3 @@ +singleQuote: true +tabWidth: 4 +arrowParens: avoid diff --git a/api/Dockerfile b/api/Dockerfile index cc2edf8..668c54a 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -20,11 +20,10 @@ RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen WORKDIR /piston_api COPY ["package.json", "package-lock.json", "./"] -RUN npm install +RUN npm install COPY ./src ./src RUN make -C ./src/nosocket/ all && make -C ./src/nosocket/ install CMD [ "node", "src"] EXPOSE 2000/tcp - diff --git a/api/package-lock.json b/api/package-lock.json index c066efc..c46ae87 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -19,6 +19,9 @@ "semver": "^7.3.4", "uuid": "^8.3.2", "waitpid": "git+https://github.com/HexF/node-waitpid.git" + }, + "devDependencies": { + "prettier": "2.2.1" } }, "node_modules/accepts": { @@ -391,6 +394,18 @@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "node_modules/prettier": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz", + "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==", + "dev": true, + "bin": { + "prettier": "bin-prettier.js" + }, + "engines": { + "node": ">=10.13.0" + } + }, "node_modules/proxy-addr": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", @@ -855,6 +870,12 
@@ "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz", "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w=" }, + "prettier": { + "version": "2.2.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz", + "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==", + "dev": true + }, "proxy-addr": { "version": "2.0.6", "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz", diff --git a/api/package.json b/api/package.json index cefd900..ab34063 100644 --- a/api/package.json +++ b/api/package.json @@ -15,5 +15,11 @@ "uuid": "^8.3.2", "waitpid": "git+https://github.com/HexF/node-waitpid.git" }, - "license": "MIT" + "license": "MIT", + "scripts": { + "lint": "prettier . --write" + }, + "devDependencies": { + "prettier": "2.2.1" + } } diff --git a/api/src/api/v2.js b/api/src/api/v2.js index ae6e54a..948dccf 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -1,230 +1,224 @@ const express = require('express'); -const router = express.Router(); +const router = express.Router(); const config = require('../config'); const runtime = require('../runtime'); -const {Job} = require("../job"); -const package = require('../package') -const logger = require('logplease').create('api/v1'); +const { Job } = require('../job'); +const package = require('../package'); +const logger = require('logplease').create('api/v2'); -router.use(function(req, res, next){ - if(req.method == "POST" && !req.headers['content-type'].startsWith("application/json")) - return res - .status(415) - .send({ - message: "requests must be of type application/json" - }) - next(); -}) +router.use((req, res, next) => { + if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) { + return next(); + } -router.post('/execute', async function(req, res){ - const {language, version, files, stdin, args, run_timeout, compile_timeout, compile_memory_limit, run_memory_limit} = req.body; - - if(!language || typeof language !== "string") - { - return res - .status(400) - .send({ - message: "language is required as a string" - }); - } - - if(!version || typeof version !== "string") - { - return res - .status(400) - .send({ - message: "version is required as a string" - }); - } - - if(!files || !Array.isArray(files)) - { - return res - .status(400) - .send({ - message: "files is required as an array" - }); - } - - for (const [i,file] of files.entries()) { - if(typeof file.content !== "string"){ - return res - .status(400) - .send({ - message: `files[${i}].content is required as a string` - }); - } - } - - if (compile_memory_limit) { - if (typeof compile_memory_limit !== "number") { - return res - .status(400) - .send({ - message: "if specified, compile_memory_limit must be a number" - }) - } else if (config.compile_memory_limit >= 0 && (compile_memory_limit > config.compile_memory_limit || compile_memory_limit < 0)) { - return res - .status(400) - .send({ - message: "compile_memory_limit cannot exceed the configured limit of " + config.compile_memory_limit - }) - } - } - - if (run_memory_limit) { - if (typeof run_memory_limit !== "number") { - return res - .status(400) - .send({ - message: "if specified, run_memory_limit must be a number" - }) - } else if (config.run_memory_limit >= 0 && (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)) { - return res - .status(400) - .send({ - message: "run_memory_limit cannot exceed the configured limit of " + config.run_memory_limit - }) - } - } - - - - const rt = 
runtime.get_latest_runtime_matching_language_version(language, version); - - if (rt === undefined) { - return res - .status(400) - .send({ - message: `${language}-${version} runtime is unknown` - }); - } - - const job = new Job({ - runtime: rt, - alias: language, - files: files, - args: args || [], - stdin: stdin || "", - timeouts: { - run: run_timeout || 3000, - compile: compile_timeout || 10000 - }, - memory_limits: { - run: run_memory_limit || config.run_memory_limit, - compile: compile_memory_limit || config.compile_memory_limit - } + if (!req.headers['content-type'].startsWith('application/json')) { + return res.status(415).send({ + message: 'requests must be of type application/json', }); + } - await job.prime(); - - const result = await job.execute(); - - await job.cleanup(); - - return res - .status(200) - .send(result); + next(); }); -router.get('/runtimes', function(req, res){ - const runtimes = runtime.map(rt => ({ - language: rt.language, - version: rt.version.raw, - aliases: rt.aliases, - runtime: rt.runtime - })); +router.post('/execute', async (req, res) => { + const { + language, + version, + files, + stdin, + args, + run_timeout, + compile_timeout, + compile_memory_limit, + run_memory_limit, + } = req.body; - return res - .status(200) - .send(runtimes); + if (!language || typeof language !== 'string') { + return res.status(400).send({ + message: 'language is required as a string', + }); + } + + if (!version || typeof version !== 'string') { + return res.status(400).send({ + message: 'version is required as a string', + }); + } + + if (!files || !Array.isArray(files)) { + return res.status(400).send({ + message: 'files is required as an array', + }); + } + + for (const [i, file] of files.entries()) { + if (typeof file.content !== 'string') { + return res.status(400).send({ + message: `files[${i}].content is required as a string`, + }); + } + } + + if (compile_memory_limit) { + if (typeof compile_memory_limit !== 'number') { + return res.status(400).send({ + message: 'if specified, compile_memory_limit must be a number', + }); + } + + if ( + config.compile_memory_limit >= 0 && + (compile_memory_limit > config.compile_memory_limit || + compile_memory_limit < 0) + ) { + return res.status(400).send({ + message: + 'compile_memory_limit cannot exceed the configured limit of ' + + config.compile_memory_limit, + }); + } + } + + if (run_memory_limit) { + if (typeof run_memory_limit !== 'number') { + return res.status(400).send({ + message: 'if specified, run_memory_limit must be a number', + }); + } + + if ( + config.run_memory_limit >= 0 && + (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) + ) { + return res.status(400).send({ + message: + 'run_memory_limit cannot exceed the configured limit of ' + + config.run_memory_limit, + }); + } + } + + const rt = runtime.get_latest_runtime_matching_language_version( + language, + version + ); + + if (rt === undefined) { + return res.status(400).send({ + message: `${language}-${version} runtime is unknown`, + }); + } + + const job = new Job({ + runtime: rt, + alias: language, + files: files, + args: args || [], + stdin: stdin || '', + timeouts: { + run: run_timeout || 3000, + compile: compile_timeout || 10000, + }, + memory_limits: { + run: run_memory_limit || config.run_memory_limit, + compile: compile_memory_limit || config.compile_memory_limit, + }, + }); + + await job.prime(); + + const result = await job.execute(); + + await job.cleanup(); + + return res.status(200).send(result); }); -router.get('/packages', async 
function(req, res){ +router.get('/runtimes', (req, res) => { + const runtimes = runtime.map(rt => { + return { + language: rt.language, + version: rt.version.raw, + aliases: rt.aliases, + runtime: rt.runtime, + }; + }); + + return res.status(200).send(runtimes); +}); + +router.get('/packages', async (req, res) => { logger.debug('Request to list packages'); let packages = await package.get_package_list(); - packages = packages - .map(pkg => { - return { - language: pkg.language, - language_version: pkg.version.raw, - installed: pkg.installed - }; - }); + packages = packages.map(pkg => { + return { + language: pkg.language, + language_version: pkg.version.raw, + installed: pkg.installed, + }; + }); - return res - .status(200) - .send(packages); + return res.status(200).send(packages); }); -router.post('/packages/:language/:version', async function(req, res){ +router.post('/packages', async (req, res) => { logger.debug('Request to install package'); - const {language, version} = req.params; + const { language, version } = req.body; const pkg = await package.get_package(language, version); if (pkg == null) { - return res - .status(404) - .send({ - message: `Requested package ${language}-${version} does not exist` - }); + return res.status(404).send({ + message: `Requested package ${language}-${version} does not exist`, + }); } try { const response = await pkg.install(); - return res - .status(200) - .send(response); - } catch(e) { - logger.error(`Error while installing package ${pkg.language}-${pkg.version}:`, e.message); + return res.status(200).send(response); + } catch (e) { + logger.error( + `Error while installing package ${pkg.language}-${pkg.version}:`, + e.message + ); - return res - .status(500) - .send({ - message: e.message - }); + return res.status(500).send({ + message: e.message, + }); } }); -router.delete('/packages/:language/:version', async function(req, res){ +router.delete('/packages', async (req, res) => { logger.debug('Request to uninstall package'); - const {language, version} = req.params; + const { language, version } = req.body; const pkg = await package.get_package(language, version); if (pkg == null) { - return res - .status(404) - .send({ - message: `Requested package ${language}-${version} does not exist` - }); + return res.status(404).send({ + message: `Requested package ${language}-${version} does not exist`, + }); } try { const response = await pkg.uninstall(); - return res - .status(200) - .send(response); - } catch(e) { - logger.error(`Error while uninstalling package ${pkg.language}-${pkg.version}:`, e.message); + return res.status(200).send(response); + } catch (e) { + logger.error( + `Error while uninstalling package ${pkg.language}-${pkg.version}:`, + e.message + ); - return res - .status(500) - .send({ - message: e.message - }); + return res.status(500).send({ + message: e.message, + }); } }); - - - - module.exports = router; diff --git a/api/src/config.js b/api/src/config.js index 8cc1122..84270aa 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -2,7 +2,6 @@ const fss = require('fs'); const Logger = require('logplease'); const logger = Logger.create('config'); - const options = [ { key: 'log_level', @@ -10,126 +9,111 @@ const options = [ default: 'INFO', options: Object.values(Logger.LogLevels), validators: [ - x => Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist` - ] + x => + Object.values(Logger.LogLevels).includes(x) || + `Log level ${x} does not exist`, + ], }, { key: 'bind_address', - desc: 'Address to bind 
REST API on\nThank @Bones for the number', + desc: 'Address to bind REST API on', default: '0.0.0.0:2000', - validators: [] + validators: [], }, { key: 'data_directory', desc: 'Absolute path to store all piston related data at', default: '/piston', - validators: [x=> fss.exists_sync(x) || `Directory ${x} does not exist`] + validators: [ + x => fss.exists_sync(x) || `Directory ${x} does not exist`, + ], }, { key: 'runner_uid_min', desc: 'Minimum uid to use for runner', default: 1001, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'runner_uid_max', desc: 'Maximum uid to use for runner', default: 1500, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'runner_gid_min', desc: 'Minimum gid to use for runner', default: 1001, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'runner_gid_max', desc: 'Maximum gid to use for runner', default: 1500, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'disable_networking', desc: 'Set to true to disable networking', default: true, - parser: x => x === "true", - validators: [ - x => typeof x === "boolean" || `${x} is not a boolean` - ] + parser: x => x === 'true', + validators: [x => typeof x === 'boolean' || `${x} is not a boolean`], }, { key: 'output_max_size', desc: 'Max size of each stdio buffer', default: 1024, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'max_process_count', desc: 'Max number of processes per job', default: 64, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'max_open_files', desc: 'Max number of open files per job', default: 2048, parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'max_file_size', desc: 'Max file size in bytes for a file', - default: 1000000, //1MB + default: 10000000, //10MB parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'compile_memory_limit', - desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)', + desc: + 'Max memory usage for compile stage in bytes (set to -1 for no limit)', default: -1, // no limit parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'run_memory_limit', - desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)', + desc: + 'Max memory usage for run stage in bytes (set to -1 for no limit)', default: -1, // no limit parser: parse_int, - validators: [ - (x,raw) => !isNaN(x) || `${raw} is not a number`, - ] + validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, { key: 'repo_url', desc: 'URL of repo index', - default: 
'https://github.com/engineer-man/piston/releases/download/pkgs/index', - validators: [] - } + default: + 'https://github.com/engineer-man/piston/releases/download/pkgs/index', + validators: [], + }, ]; logger.info(`Loading Configuration from environment`); @@ -139,28 +123,27 @@ let errored = false; let config = {}; options.forEach(option => { - const env_key = "PISTON_" + option.key.to_upper_case(); + const env_key = 'PISTON_' + option.key.to_upper_case(); - const parser = option.parser || (x=>x); + const parser = option.parser || (x => x); const env_val = process.env[env_key]; const parsed_val = parser(env_val); - const value = env_val || option.default; - option.validators.for_each(validator => { let response = null; - if(env_val) - response = validator(parsed_val, env_val); - else - response = validator(value, value); + if (env_val) response = validator(parsed_val, env_val); + else response = validator(value, value); if (response !== true) { errored = true; - logger.error(`Config option ${option.key} failed validation:`, response); + logger.error( + `Config option ${option.key} failed validation:`, + response + ); return; } }); @@ -174,5 +157,4 @@ if (errored) { logger.info('Configuration successfully loaded'); - module.exports = config; diff --git a/api/src/globals.js b/api/src/globals.js index e632a88..933d2ca 100644 --- a/api/src/globals.js +++ b/api/src/globals.js @@ -1,26 +1,20 @@ // Globals are things the user shouldn't change in config, but is good to not use inline constants for const is_docker = require('is-docker'); -const fss = require('fs'); -const platform = `${is_docker() ? 'docker' : 'baremetal'}-${ - fss.read_file_sync('/etc/os-release') - .toString() - .split('\n') - .find(x => x.startsWith('ID')) - .replace('ID=','') -}`; +const fs = require('fs'); +const platform = `${is_docker() ? 
'docker' : 'baremetal'}-${fs + .read_file_sync('/etc/os-release') + .toString() + .split('\n') + .find(x => x.startsWith('ID')) + .replace('ID=', '')}`; module.exports = { data_directories: { packages: 'packages', - jobs: 'jobs' + jobs: 'jobs', }, version: require('../package.json').version, platform, pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed - clean_directories: [ - "/dev/shm", - "/run/lock", - "/tmp", - "/var/tmp" - ] + clean_directories: ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'], }; diff --git a/api/src/index.js b/api/src/index.js index ac4d6f7..ef16916 100644 --- a/api/src/index.js +++ b/api/src/index.js @@ -14,7 +14,7 @@ const logger = Logger.create('index'); const app = express(); (async () => { - logger.info('Setting loglevel to',config.log_level); + logger.info('Setting loglevel to', config.log_level); Logger.setLogLevel(config.log_level); logger.debug('Ensuring data directories exist'); @@ -28,28 +28,35 @@ const app = express(); try { fss.mkdir_sync(data_path); - } catch(e) { + } catch (e) { logger.error(`Failed to create ${data_path}: `, e.message); } } }); logger.info('Loading packages'); - const pkgdir = path.join(config.data_directory,globals.data_directories.packages); + const pkgdir = path.join( + config.data_directory, + globals.data_directories.packages + ); const pkglist = await fs.readdir(pkgdir); const languages = await Promise.all( - pkglist.map(lang=> - fs.readdir(path.join(pkgdir,lang)) - .then(x=>x.map(y=>path.join(pkgdir, lang, y))) - )); + pkglist.map(lang => { + return fs.readdir(path.join(pkgdir, lang)).then(x => { + return x.map(y => path.join(pkgdir, lang, y)); + }); + }) + ); const installed_languages = languages .flat() - .filter(pkg => fss.exists_sync(path.join(pkg, globals.pkg_installed_file))); + .filter(pkg => + fss.exists_sync(path.join(pkg, globals.pkg_installed_file)) + ); - installed_languages.forEach(pkg => runtime.load_package(pkg)); + installed_languages.for_each(pkg => runtime.load_package(pkg)); logger.info('Starting API Server'); logger.debug('Constructing Express App'); @@ -58,26 +65,24 @@ const app = express(); app.use(body_parser.urlencoded({ extended: true })); app.use(body_parser.json()); - app.use(function (err, req, res, next) { - return res - .status(400) - .send({ - stack: err.stack - }) - }) + app.use((err, req, res, next) => { + return res.status(400).send({ + stack: err.stack, + }); + }); logger.debug('Registering Routes'); - const api_v2 = require('./api/v2') - app.use('/api/v1', api_v2); - app.use('/api/v2', api_v2); + const api_v2 = require('./api/v2'); + app.use('/api/v2', api_v2); + app.use('/api/v2', api_v2); - app.use(function (req,res,next){ - return res.status(404).send({message: 'Not Found'}); + app.use((req, res, next) => { + return res.status(404).send({ message: 'Not Found' }); }); logger.debug('Calling app.listen'); - const [ address, port ] = config.bind_address.split(':'); + const [address, port] = config.bind_address.split(':'); app.listen(port, address, () => { logger.info('API server started on', config.bind_address); diff --git a/api/src/job.js b/api/src/job.js index 9484974..d4b90ea 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -1,5 +1,5 @@ const logger = require('logplease').create('job'); -const {v4: uuidv4} = require('uuid'); +const { v4: uuidv4 } = require('uuid'); const cp = require('child_process'); const path = require('path'); const config = require('./config'); @@ -10,22 +10,21 @@ const wait_pid = require('waitpid'); const job_states = { READY: 
Symbol('Ready to be primed'), PRIMED: Symbol('Primed and ready for execution'), - EXECUTED: Symbol('Executed and ready for cleanup') + EXECUTED: Symbol('Executed and ready for cleanup'), }; let uid = 0; let gid = 0; class Job { - constructor({ runtime, files, args, stdin, timeouts, memory_limits }) { - this.uuid = uuidv4(); + this.uuid = uuidv4(); this.runtime = runtime; - this.files = files.map((file,i) => ({ + this.files = files.map((file, i) => ({ name: file.name || `file${i}.code`, - content: file.content + content: file.content, })); - + this.args = args; this.stdin = stdin; this.timeouts = timeouts; @@ -37,12 +36,15 @@ class Job { uid++; gid++; - uid %= (config.runner_uid_max - config.runner_uid_min) + 1; - gid %= (config.runner_gid_max - config.runner_gid_min) + 1; - + uid %= config.runner_uid_max - config.runner_uid_min + 1; + gid %= config.runner_gid_max - config.runner_gid_min + 1; this.state = job_states.READY; - this.dir = path.join(config.data_directory, globals.data_directories.jobs, this.uuid); + this.dir = path.join( + config.data_directory, + globals.data_directories.jobs, + this.uuid + ); } async prime() { @@ -52,7 +54,7 @@ class Job { logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`); - await fs.mkdir(this.dir, { mode:0o700 }); + await fs.mkdir(this.dir, { mode: 0o700 }); await fs.chown(this.dir, this.uid, this.gid); for (const file of this.files) { @@ -75,41 +77,39 @@ class Job { 'prlimit', '--nproc=' + config.max_process_count, '--nofile=' + config.max_open_files, - '--fsize=' + config.max_file_size + '--fsize=' + config.max_file_size, ]; if (memory_limit >= 0) { prlimit.push('--as=' + memory_limit); } - const proc_call = [ - ...prlimit, - ...nonetwork, - 'bash',file, - ...args - ]; + const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args]; var stdout = ''; var stderr = ''; var output = ''; - const proc = cp.spawn(proc_call[0], proc_call.splice(1) ,{ - env: { + const proc = cp.spawn(proc_call[0], proc_call.splice(1), { + env: { ...this.runtime.env_vars, - PISTON_LANGUAGE: this.runtime.language + PISTON_LANGUAGE: this.runtime.language, }, stdio: 'pipe', cwd: this.dir, uid: this.uid, gid: this.gid, - detached: true //give this process its own process group + detached: true, //give this process its own process group }); proc.stdin.write(this.stdin); proc.stdin.end(); proc.stdin.destroy(); - const kill_timeout = set_timeout(_ => proc.kill('SIGKILL'), timeout); + const kill_timeout = set_timeout( + _ => proc.kill('SIGKILL'), + timeout + ); proc.stderr.on('data', data => { if (stderr.length > config.output_max_size) { @@ -136,13 +136,13 @@ class Job { proc.stdout.destroy(); }; - proc.on('exit', (code, signal)=>{ + proc.on('exit', (code, signal) => { exit_cleanup(); resolve({ stdout, stderr, code, signal, output }); }); - proc.on('error', (err) => { + proc.on('error', err => { exit_cleanup(); reject({ error: err, stdout, stderr, output }); @@ -152,10 +152,17 @@ class Job { async execute() { if (this.state !== job_states.PRIMED) { - throw new Error('Job must be in primed state, current state: ' + this.state.toString()); + throw new Error( + 'Job must be in primed state, current state: ' + + this.state.toString() + ); } - logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`); + logger.info( + `Executing job uuid=${this.uuid} uid=${this.uid} gid=${ + this.gid + } runtime=${this.runtime.toString()}` + ); logger.debug('Compiling'); @@ -185,73 +192,79 @@ class Job { compile, run, language: 
this.runtime.language, - version: this.runtime.version.raw + version: this.runtime.version.raw, }; } - - async cleanup_processes(){ + async cleanup_processes() { let processes = [1]; - while(processes.length > 0){ - processes = await new Promise((resolve, reject) => cp.execFile('ps', ['awwxo', 'pid,ruid'], function(err, stdout) { - if(err === null){ - const lines = stdout.split('\n').slice(1); //Remove header with slice - const procs = lines.map(line => { - const [pid, ruid] = line - .trim() - .split(/\s+/) - .map(n => parseInt(n)); - return { pid, ruid } - }) - resolve(procs) - } - else{ - reject(error) - } - })); + while (processes.length > 0) { + processes = await new Promise((resolve, reject) => + cp.execFile('ps', ['awwxo', 'pid,ruid'], (err, stdout) => { + if (err === null) { + const lines = stdout.split('\n').slice(1); //Remove header with slice + const procs = lines.map(line => { + const [pid, ruid] = line + .trim() + .split(/\s+/) + .map(n => parseInt(n)); - processes = processes.filter(proc => proc.ruid == this.uid); + return { pid, ruid }; + }); - for(const proc of processes){ + resolve(procs); + } else { + reject(error); + } + }) + ); + + processes = processes.filter(proc => proc.ruid === this.uid); + + for (const proc of processes) { // First stop the processes, but keep their resources allocated so they cant re-fork - try{ + try { process.kill(proc.pid, 'SIGSTOP'); - }catch{ + } catch { // Could already be dead } } - - for(const proc of processes){ + for (const proc of processes) { // Then clear them out of the process tree - try{ + try { process.kill(proc.pid, 'SIGKILL'); - }catch{ + } catch { // Could already be dead and just needs to be waited on } + wait_pid(proc.pid); } } } - async cleanup_filesystem(){ - + async cleanup_filesystem() { for (const clean_path of globals.clean_directories) { const contents = await fs.readdir(clean_path); for (const file of contents) { const file_path = path.join(clean_path, file); - try{ + + try { const stat = await fs.stat(file_path); - if(stat.uid == this.uid) - await fs.rm(file_path, { recursive: true, force: true }); - }catch(e){ + + if (stat.uid === this.uid) { + await fs.rm(file_path, { + recursive: true, + force: true, + }); + } + } catch (e) { // File was somehow deleted in the time that we read the dir to when we checked the file - logger.warn(`Error removing file ${file_path}: ${e}`) + logger.warn(`Error removing file ${file_path}: ${e}`); } } - } await fs.rm(this.dir, { recursive: true, force: true }); @@ -259,15 +272,14 @@ class Job { async cleanup() { logger.info(`Cleaning up job uuid=${this.uuid}`); - + await Promise.all([ this.cleanup_processes(), - this.cleanup_filesystem() + this.cleanup_filesystem(), ]); } - } module.exports = { - Job + Job, }; diff --git a/api/src/package.js b/api/src/package.js index 991c72c..1baa8af 100644 --- a/api/src/package.js +++ b/api/src/package.js @@ -13,8 +13,7 @@ const chownr = require('chownr'); const util = require('util'); class Package { - - constructor({ language, version, download, checksum }){ + constructor({ language, version, download, checksum }) { this.language = language; this.version = semver.parse(version); this.checksum = checksum; @@ -22,7 +21,9 @@ class Package { } get installed() { - return fss.exists_sync(path.join(this.install_path, globals.pkg_installed_file)); + return fss.exists_sync( + path.join(this.install_path, globals.pkg_installed_file) + ); } get install_path() { @@ -42,14 +43,18 @@ class Package { logger.info(`Installing ${this.language}-${this.version.raw}`); if 
(fss.exists_sync(this.install_path)) { - logger.warn(`${this.language}-${this.version.raw} has residual files. Removing them.`); + logger.warn( + `${this.language}-${this.version.raw} has residual files. Removing them.` + ); await fs.rm(this.install_path, { recursive: true, force: true }); } logger.debug(`Making directory ${this.install_path}`); - await fs.mkdir(this.install_path, {recursive: true}); + await fs.mkdir(this.install_path, { recursive: true }); - logger.debug(`Downloading package from ${this.download} in to ${this.install_path}`); + logger.debug( + `Downloading package from ${this.download} in to ${this.install_path}` + ); const pkgpath = path.join(this.install_path, 'pkg.tar.gz'); const download = await fetch(this.download); @@ -63,7 +68,8 @@ class Package { logger.debug('Validating checksums'); logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`); - const cs = crypto.create_hash("sha256") + const cs = crypto + .create_hash('sha256') .update(fss.readFileSync(pkgpath)) .digest('hex'); @@ -71,10 +77,14 @@ class Package { throw new Error(`Checksum miss-match want: ${val} got: ${cs}`); } - logger.debug(`Extracting package files from archive ${pkgpath} in to ${this.install_path}`); + logger.debug( + `Extracting package files from archive ${pkgpath} in to ${this.install_path}` + ); await new Promise((resolve, reject) => { - const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`); + const proc = cp.exec( + `bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'` + ); proc.once('exit', (code, _) => { code === 0 ? resolve() : reject(); @@ -95,14 +105,13 @@ class Package { const envout = await new Promise((resolve, reject) => { let stdout = ''; - const proc = cp - .spawn( - 'env', - ['-i','bash','-c',`${get_env_command}`], - { - stdio: ['ignore', 'pipe', 'pipe'] - } - ); + const proc = cp.spawn( + 'env', + ['-i', 'bash', '-c', `${get_env_command}`], + { + stdio: ['ignore', 'pipe', 'pipe'], + } + ); proc.once('exit', (code, _) => { code === 0 ? 
resolve(stdout) : reject(); @@ -117,84 +126,95 @@ class Package { const filtered_env = envout .split('\n') - .filter(l => !['PWD','OLDPWD','_', 'SHLVL'].includes(l.split('=',2)[0])) + .filter( + l => + !['PWD', 'OLDPWD', '_', 'SHLVL'].includes( + l.split('=', 2)[0] + ) + ) .join('\n'); await fs.write_file(path.join(this.install_path, '.env'), filtered_env); logger.debug('Changing Ownership of package directory'); - await util.promisify(chownr)(this.install_path,0,0); + await util.promisify(chownr)(this.install_path, 0, 0); logger.debug('Writing installed state to disk'); - await fs.write_file(path.join(this.install_path, globals.pkg_installed_file), Date.now().toString()); + await fs.write_file( + path.join(this.install_path, globals.pkg_installed_file), + Date.now().toString() + ); logger.info(`Installed ${this.language}-${this.version.raw}`); return { language: this.language, - version: this.version.raw + version: this.version.raw, }; } - - async uninstall(){ + async uninstall() { logger.info(`Uninstalling ${this.language}-${this.version.raw}`); - logger.debug("Finding runtime") - const found_runtime = runtime.get_runtime_by_name_and_version(this.language, this.version.raw); + logger.debug('Finding runtime'); + const found_runtime = runtime.get_runtime_by_name_and_version( + this.language, + this.version.raw + ); - if(!found_runtime){ - logger.error(`Uninstalling ${this.language}-${this.version.raw} failed: Not installed`) - throw new Error(`${this.language}-${this.version.raw} is not installed`) + if (!found_runtime) { + logger.error( + `Uninstalling ${this.language}-${this.version.raw} failed: Not installed` + ); + throw new Error( + `${this.language}-${this.version.raw} is not installed` + ); } - logger.debug("Unregistering runtime") + logger.debug('Unregistering runtime'); found_runtime.unregister(); - logger.debug("Cleaning files from disk") - await fs.rmdir(this.install_path, {recursive: true}) + logger.debug('Cleaning files from disk'); + await fs.rmdir(this.install_path, { recursive: true }); - logger.info(`Uninstalled ${this.language}-${this.version.raw}`) + logger.info(`Uninstalled ${this.language}-${this.version.raw}`); return { language: this.language, - version: this.version.raw + version: this.version.raw, }; - } static async get_package_list() { const repo_content = await fetch(config.repo_url).then(x => x.text()); - - const entries = repo_content - .split('\n') - .filter(x => x.length > 0); - + + const entries = repo_content.split('\n').filter(x => x.length > 0); + return entries.map(line => { - const [ language, version, checksum, download ] = line.split(',', 4); - + const [language, version, checksum, download] = line.split(',', 4); + return new Package({ language, version, checksum, - download + download, }); }); } - - static async get_package (lang, version) { + + static async get_package(lang, version) { const packages = await Package.get_package_list(); - - const candidates = packages - .filter(pkg => { - return pkg.language == lang && semver.satisfies(pkg.version, version) - }); - + + const candidates = packages.filter(pkg => { + return ( + pkg.language == lang && semver.satisfies(pkg.version, version) + ); + }); + candidates.sort((a, b) => semver.rcompare(a.version, b.version)); - + return candidates[0] || null; } - } module.exports = Package; diff --git a/api/src/runtime.js b/api/src/runtime.js index 43713ee..191fc5d 100644 --- a/api/src/runtime.js +++ b/api/src/runtime.js @@ -8,8 +8,7 @@ const path = require('path'); const runtimes = []; class Runtime { - - 
constructor({language, version, aliases, pkgdir, runtime}){ + constructor({ language, version, aliases, pkgdir, runtime }) { this.language = language; this.version = version; this.aliases = aliases || []; @@ -17,7 +16,7 @@ class Runtime { this.runtime = runtime; } - static load_package(package_dir){ + static load_package(package_dir) { let info = JSON.parse( fss.read_file_sync(path.join(package_dir, 'pkg-info.json')) ); @@ -28,33 +27,35 @@ class Runtime { if (build_platform !== globals.platform) { logger.warn( `Package ${language}-${version} was built for platform ${build_platform}, ` + - `but our platform is ${globals.platform}` + `but our platform is ${globals.platform}` ); } - if(provides){ + if (provides) { // Multiple languages in 1 package provides.forEach(lang => { - runtimes.push(new Runtime({ - language: lang.language, - aliases: lang.aliases, - version, - pkgdir: package_dir, - runtime: language - })); + runtimes.push( + new Runtime({ + language: lang.language, + aliases: lang.aliases, + version, + pkgdir: package_dir, + runtime: language, + }) + ); }); - }else{ - runtimes.push(new Runtime({ - language, - version, - aliases, - pkgdir: package_dir - })) + } else { + runtimes.push( + new Runtime({ + language, + version, + aliases, + pkgdir: package_dir, + }) + ); } logger.debug(`Package ${language}-${version} was loaded`); - - } get compiled() { @@ -75,8 +76,8 @@ class Runtime { env_content .trim() .split('\n') - .map(line => line.split('=',2)) - .forEach(([key,val]) => { + .map(line => line.split('=', 2)) + .forEach(([key, val]) => { this._env_vars[key.trim()] = val.trim(); }); } @@ -96,16 +97,29 @@ class Runtime { module.exports = runtimes; module.exports.Runtime = Runtime; -module.exports.get_runtimes_matching_language_version = function(lang, ver){ - return runtimes.filter(rt => (rt.language == lang || rt.aliases.includes(lang)) && semver.satisfies(rt.version, ver)); +module.exports.get_runtimes_matching_language_version = function (lang, ver) { + return runtimes.filter( + rt => + (rt.language == lang || rt.aliases.includes(lang)) && + semver.satisfies(rt.version, ver) + ); }; -module.exports.get_latest_runtime_matching_language_version = function(lang, ver){ - return module.exports.get_runtimes_matching_language_version(lang, ver) - .sort((a,b) => semver.rcompare(a.version, b.version))[0]; +module.exports.get_latest_runtime_matching_language_version = function ( + lang, + ver +) { + return module.exports + .get_runtimes_matching_language_version(lang, ver) + .sort((a, b) => semver.rcompare(a.version, b.version))[0]; }; -module.exports.get_runtime_by_name_and_version = function(runtime, ver){ - return runtimes.find(rt => (rt.runtime == runtime || (rt.runtime === undefined && rt.language == runtime)) && semver.satisfies(rt.version, ver)); -} +module.exports.get_runtime_by_name_and_version = function (runtime, ver) { + return runtimes.find( + rt => + (rt.runtime == runtime || + (rt.runtime === undefined && rt.language == runtime)) && + semver.satisfies(rt.version, ver) + ); +}; -module.exports.load_package = Runtime.load_package; \ No newline at end of file +module.exports.load_package = Runtime.load_package; diff --git a/cli/commands/execute.js b/cli/commands/execute.js index 4ddb221..e273548 100644 --- a/cli/commands/execute.js +++ b/cli/commands/execute.js @@ -8,7 +8,7 @@ exports.aliases = ['run']; exports.describe = 'Executes file with the specified runner'; exports.builder = { - languageVersion: { + language_version: { string: true, desc: 'Set the version of the language to 
use', alias: ['l'], @@ -38,7 +38,7 @@ exports.builder = { } }; -exports.handler = async function(argv) { +exports.handler = async (argv) => { const files = [...(argv.files || []),argv.file] .map(file_path => { return { @@ -55,7 +55,7 @@ exports.handler = async function(argv) { const request = { language: argv.language, - version: argv['language-version'], + version: argv['language_version'], files: files, args: argv.args, stdin, @@ -63,7 +63,7 @@ exports.handler = async function(argv) { run_timeout: argv.rt }; - let { data: response } = await argv.axios.post('/api/v1/execute', request); + let { data: response } = await argv.axios.post('/api/v2/execute', request); const step = (name, ctx) => { console.log(chalk.bold(`== ${name} ==`)); diff --git a/cli/commands/ppman.js b/cli/commands/ppman.js index f41c1dd..8d1cb34 100644 --- a/cli/commands/ppman.js +++ b/cli/commands/ppman.js @@ -1,7 +1,7 @@ -exports.command = 'ppman' -exports.aliases = ['pkg'] -exports.describe = 'Package Manager' +exports.command = 'ppman'; +exports.aliases = ['pkg']; +exports.describe = 'Package Manager'; exports.builder = yargs => yargs .commandDir('ppman_commands') - .demandCommand() \ No newline at end of file + .demandCommand(); diff --git a/cli/commands/ppman_commands/install.js b/cli/commands/ppman_commands/install.js index fedf5b8..5d9e92a 100644 --- a/cli/commands/ppman_commands/install.js +++ b/cli/commands/ppman_commands/install.js @@ -1,23 +1,26 @@ const chalk = require('chalk'); -exports.command = ['install [language-version]'] -exports.aliases = ['i'] -exports.describe = 'Installs the named package' - +exports.command = ['install [language_version]']; +exports.aliases = ['i']; +exports.describe = 'Installs the named package'; const msg_format = { - 'color': p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Installation ${p.language ? "succeeded" : "failed: " + p.message}`, - 'monochrome': p => `Installation ${p.language ? "succeeded" : "failed: " + p.message}`, - 'json': JSON.stringify + color: p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`, + monochrome: p => `Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`, + json: JSON.stringify +}; -} +exports.handler = async ({ axios, language, language_version }) => { + try { + const request = { + language, + version: language_version || '*' + }; -exports.handler = async function({axios, language, languageVersion}){ - try{ - const install = await axios.post(`/api/v1/packages/${language}/${languageVersion || '*'}`) + const install = await axios.post(`/api/v2/packages`, request); console.log(msg_format.color(install.data)); - }catch({response}){ + } catch ({ response }) { console.error(response.data.message) } } diff --git a/cli/commands/ppman_commands/list.js b/cli/commands/ppman_commands/list.js index fa3cb27..a45030c 100644 --- a/cli/commands/ppman_commands/list.js +++ b/cli/commands/ppman_commands/list.js @@ -1,22 +1,17 @@ -//const fetch = require('node-fetch'); const chalk = require('chalk'); -exports.command = ['list'] -exports.aliases = ['l'] -exports.describe = 'Lists all available packages' - +exports.command = ['list']; +exports.aliases = ['l']; +exports.describe = 'Lists all available packages'; const msg_format = { - 'color': p => `${chalk[p.installed ? "green":"red"]("•")} ${p.language} ${p.language_version}`, - 'monochrome': p => `${p.language} ${p.language_version} ${p.installed ? 
"(INSTALLED)": ""}`, - 'json': JSON.stringify - -} - -exports.handler = async function({axios}){ - - const packages = await axios.get('/api/v1/packages'); + color: p => `${chalk[p.installed ? 'green':'red']('•')} ${p.language} ${p.language_version}`, + monochrome: p => `${p.language} ${p.language_version} ${p.installed ? '(INSTALLED)': ''}`, + json: JSON.stringify +}; +exports.handler = async ({ axios }) => { + const packages = await axios.get('/api/v2/packages'); const pkg_msg = packages.data .map(msg_format.color) diff --git a/cli/commands/ppman_commands/uninstall.js b/cli/commands/ppman_commands/uninstall.js index 0e4ebf2..c7cbc83 100644 --- a/cli/commands/ppman_commands/uninstall.js +++ b/cli/commands/ppman_commands/uninstall.js @@ -1,23 +1,25 @@ const chalk = require('chalk'); -exports.command = ['uninstall [language-version]'] -exports.aliases = ['u'] -exports.describe = 'Uninstalls the named package' - +exports.command = ['uninstall [language_version]']; +exports.aliases = ['u']; +exports.describe = 'Uninstalls the named package'; const msg_format = { - 'color': p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Uninstallation ${p.language ? "succeeded" : "failed: " + p.message}`, - 'monochrome': p => `Uninstallation ${p.language ? "succeeded" : "failed: " + p.message}`, - 'json': JSON.stringify + color: p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`, + monochrome: p => `Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`, + json: JSON.stringify +}; -} - -exports.handler = async function({axios, language, languageVersion}){ - try{ - const uninstall = await axios.delete(`/api/v1/packages/${language}/${languageVersion || '*'}`) +exports.handler = async ({ axios, language, language_version }) => { + try { + const request = { + language, + version: language_version || '*' + }; + const uninstall = await axios.delete(`/api/v2/packages`, {data: request}); console.log(msg_format.color(uninstall.data)); - }catch({response}){ + } catch ({ response }) { console.error(response.data.message) } } diff --git a/cli/index.js b/cli/index.js index 415dbf3..d25ec7d 100755 --- a/cli/index.js +++ b/cli/index.js @@ -2,9 +2,12 @@ const axios = require('axios').default; -const axios_instance = function(argv){ +const axios_instance = argv => { argv.axios = axios.create({ - baseURL: argv['piston-url'] + baseURL: argv['piston-url'], + headers: { + 'Content-Type': 'application/json' + } }); return argv; @@ -18,7 +21,7 @@ require('yargs')(process.argv.slice(2)) string: true }) .middleware(axios_instance) - .scriptName("piston") + .scriptName('piston') .commandDir('commands') .demandCommand() .help()