Compare commits

..

11 Commits

Author SHA1 Message Date
Thomas Hobson 93188099b7 fix cli 2021-05-08 12:57:37 +12:00
Thomas Hobson 543cb11e69 Change package manager request signature 2021-05-08 12:41:41 +12:00
Thomas Hobson b3be57e0b4 lint like rest of codebase 2021-05-08 12:30:40 +12:00
Thomas Hobson 1b7504a191 switch back to startsWith (This is because application/json;charset=utf8 is perfectly valid!) 2021-05-08 12:25:04 +12:00
Thomas Hobson 4259e89bb2 lint api 2021-05-08 12:20:21 +12:00
Thomas Hobson 2beb0abff7 Merge branch 'master' of github.com:engineer-man/piston 2021-05-08 12:08:30 +12:00
Thomas Hobson b51350489f Up the default max file size limit 2021-05-08 12:08:13 +12:00
Brian Seymour 106d5c72c9 fix json validation 2021-05-07 14:50:50 -05:00
Brian Seymour 3ea6ca1180 fix json validation 2021-05-07 14:42:55 -05:00
Brian Seymour 5830d7fb43 fix json validation 2021-05-07 14:35:27 -05:00
Brian Seymour de449c6d56 validate json instead of a json header, and lots of cleanup 2021-05-07 14:22:25 -05:00
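The headline change in this set is the package-manager request signature: the v1 routes addressed a package through URL path parameters, while the v2 routes in the diffs below use a single collection endpoint and name the package in the JSON body. A quick before/after sketch of the HTTP contract (python/3.9.4 are placeholder values, not part of this change):

    // Old (v1): the package is identified by path parameters
    //   POST   /api/v1/packages/python/3.9.4
    //   DELETE /api/v1/packages/python/3.9.4
    //
    // New (v2): one collection endpoint; the package is identified in the JSON body
    //   POST   /api/v2/packages   {"language": "python", "version": "3.9.4"}
    //   DELETE /api/v2/packages   {"language": "python", "version": "3.9.4"}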
19 changed files with 535 additions and 481 deletions

View File

@@ -71,7 +71,7 @@ jobs:
     docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
     docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api docker.pkg.github.com/engineer-man/piston/api
     echo Waiting for API to start..
-    docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v1/runtimes
+    docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
     echo Waiting for Index to start..
     docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
@@ -80,7 +80,7 @@ jobs:
     sed -i 's/repo/localhost/g' repo/index
     echo Listing Packages
-    PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v1/packages)
+    PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
     echo $PACKAGES_JSON
     echo Getting CLI ready
@@ -94,7 +94,7 @@ jobs:
     PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
     echo "Installing..."
-    docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v1/packages/$PKG_PATH
+    docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages/$PKG_PATH
     TEST_SCRIPTS=packages/$PKG_PATH/test.*
     echo "Tests: $TEST_SCRIPTS"

api/.prettierignore Normal file (+1)
View File

@@ -0,0 +1 @@
+node_modules

api/.prettierrc.yaml Normal file (+3)
View File

@@ -0,0 +1,3 @@
+singleQuote: true
+tabWidth: 4
+arrowParens: avoid

View File

@@ -27,4 +27,3 @@ RUN make -C ./src/nosocket/ all && make -C ./src/nosocket/ install
 CMD [ "node", "src"]
 EXPOSE 2000/tcp

api/package-lock.json generated (+21)
View File

@@ -19,6 +19,9 @@
             "semver": "^7.3.4",
             "uuid": "^8.3.2",
             "waitpid": "git+https://github.com/HexF/node-waitpid.git"
+        },
+        "devDependencies": {
+            "prettier": "2.2.1"
         }
     },
     "node_modules/accepts": {
@@ -391,6 +394,18 @@
             "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
             "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
         },
+        "node_modules/prettier": {
+            "version": "2.2.1",
+            "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
+            "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
+            "dev": true,
+            "bin": {
+                "prettier": "bin-prettier.js"
+            },
+            "engines": {
+                "node": ">=10.13.0"
+            }
+        },
         "node_modules/proxy-addr": {
             "version": "2.0.6",
             "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz",
@@ -855,6 +870,12 @@
             "resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
             "integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
         },
+        "prettier": {
+            "version": "2.2.1",
+            "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
+            "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
+            "dev": true
+        },
         "proxy-addr": {
             "version": "2.0.6",
             "resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz",

View File

@@ -15,5 +15,11 @@
         "uuid": "^8.3.2",
         "waitpid": "git+https://github.com/HexF/node-waitpid.git"
     },
-    "license": "MIT"
+    "license": "MIT",
+    "scripts": {
+        "lint": "prettier . --write"
+    },
+    "devDependencies": {
+        "prettier": "2.2.1"
+    }
 }

View File

@@ -3,101 +3,110 @@ const router = express.Router();
 const config = require('../config');
 const runtime = require('../runtime');
-const {Job} = require("../job");
-const package = require('../package')
-const logger = require('logplease').create('api/v1');
+const { Job } = require('../job');
+const package = require('../package');
+const logger = require('logplease').create('api/v2');

-router.use(function(req, res, next){
-    if(req.method == "POST" && !req.headers['content-type'].startsWith("application/json"))
-        return res
-            .status(415)
-            .send({
-                message: "requests must be of type application/json"
-            })
+router.use((req, res, next) => {
+    if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) {
+        return next();
+    }
+
+    if (!req.headers['content-type'].startsWith('application/json')) {
+        return res.status(415).send({
+            message: 'requests must be of type application/json',
+        });
+    }
+
     next();
-})
+});
router.post('/execute', async function(req, res){ router.post('/execute', async (req, res) => {
const {language, version, files, stdin, args, run_timeout, compile_timeout, compile_memory_limit, run_memory_limit} = req.body; const {
language,
version,
files,
stdin,
args,
run_timeout,
compile_timeout,
compile_memory_limit,
run_memory_limit,
} = req.body;
if(!language || typeof language !== "string") if (!language || typeof language !== 'string') {
{ return res.status(400).send({
return res message: 'language is required as a string',
.status(400)
.send({
message: "language is required as a string"
}); });
} }
if(!version || typeof version !== "string") if (!version || typeof version !== 'string') {
{ return res.status(400).send({
return res message: 'version is required as a string',
.status(400)
.send({
message: "version is required as a string"
}); });
} }
if(!files || !Array.isArray(files)) if (!files || !Array.isArray(files)) {
{ return res.status(400).send({
return res message: 'files is required as an array',
.status(400)
.send({
message: "files is required as an array"
}); });
} }
for (const [i, file] of files.entries()) { for (const [i, file] of files.entries()) {
if(typeof file.content !== "string"){ if (typeof file.content !== 'string') {
return res return res.status(400).send({
.status(400) message: `files[${i}].content is required as a string`,
.send({
message: `files[${i}].content is required as a string`
}); });
} }
} }
if (compile_memory_limit) { if (compile_memory_limit) {
if (typeof compile_memory_limit !== "number") { if (typeof compile_memory_limit !== 'number') {
return res return res.status(400).send({
.status(400) message: 'if specified, compile_memory_limit must be a number',
.send({ });
message: "if specified, compile_memory_limit must be a number" }
})
} else if (config.compile_memory_limit >= 0 && (compile_memory_limit > config.compile_memory_limit || compile_memory_limit < 0)) { if (
return res config.compile_memory_limit >= 0 &&
.status(400) (compile_memory_limit > config.compile_memory_limit ||
.send({ compile_memory_limit < 0)
message: "compile_memory_limit cannot exceed the configured limit of " + config.compile_memory_limit ) {
}) return res.status(400).send({
message:
'compile_memory_limit cannot exceed the configured limit of ' +
config.compile_memory_limit,
});
} }
} }
if (run_memory_limit) { if (run_memory_limit) {
if (typeof run_memory_limit !== "number") { if (typeof run_memory_limit !== 'number') {
return res return res.status(400).send({
.status(400) message: 'if specified, run_memory_limit must be a number',
.send({ });
message: "if specified, run_memory_limit must be a number" }
})
} else if (config.run_memory_limit >= 0 && (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)) { if (
return res config.run_memory_limit >= 0 &&
.status(400) (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
.send({ ) {
message: "run_memory_limit cannot exceed the configured limit of " + config.run_memory_limit return res.status(400).send({
}) message:
'run_memory_limit cannot exceed the configured limit of ' +
config.run_memory_limit,
});
} }
} }
const rt = runtime.get_latest_runtime_matching_language_version(
language,
const rt = runtime.get_latest_runtime_matching_language_version(language, version); version
);
if (rt === undefined) { if (rt === undefined) {
return res return res.status(400).send({
.status(400) message: `${language}-${version} runtime is unknown`,
.send({
message: `${language}-${version} runtime is unknown`
}); });
} }
@ -106,15 +115,15 @@ router.post('/execute', async function(req, res){
alias: language, alias: language,
files: files, files: files,
args: args || [], args: args || [],
stdin: stdin || "", stdin: stdin || '',
timeouts: { timeouts: {
run: run_timeout || 3000, run: run_timeout || 3000,
compile: compile_timeout || 10000 compile: compile_timeout || 10000,
}, },
memory_limits: { memory_limits: {
run: run_memory_limit || config.run_memory_limit, run: run_memory_limit || config.run_memory_limit,
compile: compile_memory_limit || config.compile_memory_limit compile: compile_memory_limit || config.compile_memory_limit,
} },
}); });
await job.prime(); await job.prime();
@ -123,108 +132,93 @@ router.post('/execute', async function(req, res){
await job.cleanup(); await job.cleanup();
return res return res.status(200).send(result);
.status(200)
.send(result);
}); });
router.get('/runtimes', function(req, res){ router.get('/runtimes', (req, res) => {
const runtimes = runtime.map(rt => ({ const runtimes = runtime.map(rt => {
return {
language: rt.language, language: rt.language,
version: rt.version.raw, version: rt.version.raw,
aliases: rt.aliases, aliases: rt.aliases,
runtime: rt.runtime runtime: rt.runtime,
}));
return res
.status(200)
.send(runtimes);
});
router.get('/packages', async function(req, res){
logger.debug('Request to list packages');
let packages = await package.get_package_list();
packages = packages
.map(pkg => {
return {
language: pkg.language,
language_version: pkg.version.raw,
installed: pkg.installed
}; };
}); });
return res return res.status(200).send(runtimes);
.status(200)
.send(packages);
}); });
router.post('/packages/:language/:version', async function(req, res){ router.get('/packages', async (req, res) => {
logger.debug('Request to list packages');
let packages = await package.get_package_list();
packages = packages.map(pkg => {
return {
language: pkg.language,
language_version: pkg.version.raw,
installed: pkg.installed,
};
});
return res.status(200).send(packages);
});
router.post('/packages', async (req, res) => {
logger.debug('Request to install package'); logger.debug('Request to install package');
const {language, version} = req.params; const { language, version } = req.body;
const pkg = await package.get_package(language, version); const pkg = await package.get_package(language, version);
if (pkg == null) { if (pkg == null) {
return res return res.status(404).send({
.status(404) message: `Requested package ${language}-${version} does not exist`,
.send({
message: `Requested package ${language}-${version} does not exist`
}); });
} }
try { try {
const response = await pkg.install(); const response = await pkg.install();
return res return res.status(200).send(response);
.status(200)
.send(response);
} catch (e) { } catch (e) {
logger.error(`Error while installing package ${pkg.language}-${pkg.version}:`, e.message); logger.error(
`Error while installing package ${pkg.language}-${pkg.version}:`,
e.message
);
return res return res.status(500).send({
.status(500) message: e.message,
.send({
message: e.message
}); });
} }
}); });
router.delete('/packages/:language/:version', async function(req, res){ router.delete('/packages', async (req, res) => {
logger.debug('Request to uninstall package'); logger.debug('Request to uninstall package');
const {language, version} = req.params; const { language, version } = req.body;
const pkg = await package.get_package(language, version); const pkg = await package.get_package(language, version);
if (pkg == null) { if (pkg == null) {
return res return res.status(404).send({
.status(404) message: `Requested package ${language}-${version} does not exist`,
.send({
message: `Requested package ${language}-${version} does not exist`
}); });
} }
try { try {
const response = await pkg.uninstall(); const response = await pkg.uninstall();
return res return res.status(200).send(response);
.status(200)
.send(response);
} catch (e) { } catch (e) {
logger.error(`Error while uninstalling package ${pkg.language}-${pkg.version}:`, e.message); logger.error(
`Error while uninstalling package ${pkg.language}-${pkg.version}:`,
e.message
);
return res return res.status(500).send({
.status(500) message: e.message,
.send({
message: e.message
}); });
} }
}); });
module.exports = router; module.exports = router;
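The middleware at the top of this file is where the "switch back to startsWith" commit lands: a Content-Type header may legitimately carry parameters after the media type, so an exact equality check would wrongly reject JSON requests that declare a charset. A tiny standalone illustration of the check (not taken from the diff):

    // startsWith accepts any application/json variant, including ones with parameters.
    const is_json = header => header.startsWith('application/json');

    console.log(is_json('application/json'));               // true
    console.log(is_json('application/json;charset=utf8'));  // true (the case from the commit message)
    console.log(is_json('text/plain'));                     // false -> the middleware responds 415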

View File

@ -2,7 +2,6 @@ const fss = require('fs');
const Logger = require('logplease'); const Logger = require('logplease');
const logger = Logger.create('config'); const logger = Logger.create('config');
const options = [ const options = [
{ {
key: 'log_level', key: 'log_level',
@ -10,126 +9,111 @@ const options = [
default: 'INFO', default: 'INFO',
options: Object.values(Logger.LogLevels), options: Object.values(Logger.LogLevels),
validators: [ validators: [
x => Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist` x =>
] Object.values(Logger.LogLevels).includes(x) ||
`Log level ${x} does not exist`,
],
}, },
{ {
key: 'bind_address', key: 'bind_address',
desc: 'Address to bind REST API on\nThank @Bones for the number', desc: 'Address to bind REST API on',
default: '0.0.0.0:2000', default: '0.0.0.0:2000',
validators: [] validators: [],
}, },
{ {
key: 'data_directory', key: 'data_directory',
desc: 'Absolute path to store all piston related data at', desc: 'Absolute path to store all piston related data at',
default: '/piston', default: '/piston',
validators: [x=> fss.exists_sync(x) || `Directory ${x} does not exist`] validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
}, },
{ {
key: 'runner_uid_min', key: 'runner_uid_min',
desc: 'Minimum uid to use for runner', desc: 'Minimum uid to use for runner',
default: 1001, default: 1001,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'runner_uid_max', key: 'runner_uid_max',
desc: 'Maximum uid to use for runner', desc: 'Maximum uid to use for runner',
default: 1500, default: 1500,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'runner_gid_min', key: 'runner_gid_min',
desc: 'Minimum gid to use for runner', desc: 'Minimum gid to use for runner',
default: 1001, default: 1001,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'runner_gid_max', key: 'runner_gid_max',
desc: 'Maximum gid to use for runner', desc: 'Maximum gid to use for runner',
default: 1500, default: 1500,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'disable_networking', key: 'disable_networking',
desc: 'Set to true to disable networking', desc: 'Set to true to disable networking',
default: true, default: true,
parser: x => x === "true", parser: x => x === 'true',
validators: [ validators: [x => typeof x === 'boolean' || `${x} is not a boolean`],
x => typeof x === "boolean" || `${x} is not a boolean`
]
}, },
{ {
key: 'output_max_size', key: 'output_max_size',
desc: 'Max size of each stdio buffer', desc: 'Max size of each stdio buffer',
default: 1024, default: 1024,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'max_process_count', key: 'max_process_count',
desc: 'Max number of processes per job', desc: 'Max number of processes per job',
default: 64, default: 64,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'max_open_files', key: 'max_open_files',
desc: 'Max number of open files per job', desc: 'Max number of open files per job',
default: 2048, default: 2048,
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'max_file_size', key: 'max_file_size',
desc: 'Max file size in bytes for a file', desc: 'Max file size in bytes for a file',
default: 1000000, //1MB default: 10000000, //10MB
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'compile_memory_limit', key: 'compile_memory_limit',
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)', desc:
'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit default: -1, // no limit
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'run_memory_limit', key: 'run_memory_limit',
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)', desc:
'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit default: -1, // no limit
parser: parse_int, parser: parse_int,
validators: [ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
(x,raw) => !isNaN(x) || `${raw} is not a number`,
]
}, },
{ {
key: 'repo_url', key: 'repo_url',
desc: 'URL of repo index', desc: 'URL of repo index',
default: 'https://github.com/engineer-man/piston/releases/download/pkgs/index', default:
validators: [] 'https://github.com/engineer-man/piston/releases/download/pkgs/index',
} validators: [],
},
]; ];
logger.info(`Loading Configuration from environment`); logger.info(`Loading Configuration from environment`);
@ -139,7 +123,7 @@ let errored = false;
let config = {}; let config = {};
options.forEach(option => { options.forEach(option => {
const env_key = "PISTON_" + option.key.to_upper_case(); const env_key = 'PISTON_' + option.key.to_upper_case();
const parser = option.parser || (x => x); const parser = option.parser || (x => x);
@ -147,20 +131,19 @@ options.forEach(option => {
const parsed_val = parser(env_val); const parsed_val = parser(env_val);
const value = env_val || option.default; const value = env_val || option.default;
option.validators.for_each(validator => { option.validators.for_each(validator => {
let response = null; let response = null;
if(env_val) if (env_val) response = validator(parsed_val, env_val);
response = validator(parsed_val, env_val); else response = validator(value, value);
else
response = validator(value, value);
if (response !== true) { if (response !== true) {
errored = true; errored = true;
logger.error(`Config option ${option.key} failed validation:`, response); logger.error(
`Config option ${option.key} failed validation:`,
response
);
return; return;
} }
}); });
@ -174,5 +157,4 @@ if (errored) {
logger.info('Configuration successfully loaded'); logger.info('Configuration successfully loaded');
module.exports = config; module.exports = config;
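Every option above can be overridden through a PISTON_<KEY> environment variable, which is how the raised max_file_size default (1 MB to 10 MB) interacts with existing deployments. A standalone sketch of that loading pattern, trimmed to a single option and written with plain camelCase Node built-ins rather than the snake_case helpers this codebase uses:

    // Hypothetical, simplified version of the option-loading loop shown above.
    const options = [
        {
            key: 'max_file_size',
            default: 10000000, // 10 MB, the new default from this change
            parser: parseInt,
            validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
        },
    ];

    const config = {};
    for (const option of options) {
        const env_key = 'PISTON_' + option.key.toUpperCase(); // e.g. PISTON_MAX_FILE_SIZE
        const env_val = process.env[env_key];
        const parsed = env_val !== undefined ? option.parser(env_val) : option.default;

        for (const validate of option.validators) {
            const result = validate(parsed, env_val !== undefined ? env_val : String(option.default));
            if (result !== true) throw new Error(`Config option ${option.key} failed validation: ${result}`);
        }

        config[option.key] = parsed;
    }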

View File

@ -1,26 +1,20 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for // Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require('is-docker'); const is_docker = require('is-docker');
const fss = require('fs'); const fs = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${ const platform = `${is_docker() ? 'docker' : 'baremetal'}-${fs
fss.read_file_sync('/etc/os-release') .read_file_sync('/etc/os-release')
.toString() .toString()
.split('\n') .split('\n')
.find(x => x.startsWith('ID')) .find(x => x.startsWith('ID'))
.replace('ID=','') .replace('ID=', '')}`;
}`;
module.exports = { module.exports = {
data_directories: { data_directories: {
packages: 'packages', packages: 'packages',
jobs: 'jobs' jobs: 'jobs',
}, },
version: require('../package.json').version, version: require('../package.json').version,
platform, platform,
pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
clean_directories: [ clean_directories: ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'],
"/dev/shm",
"/run/lock",
"/tmp",
"/var/tmp"
]
}; };

View File

@ -35,21 +35,28 @@ const app = express();
}); });
logger.info('Loading packages'); logger.info('Loading packages');
const pkgdir = path.join(config.data_directory,globals.data_directories.packages); const pkgdir = path.join(
config.data_directory,
globals.data_directories.packages
);
const pkglist = await fs.readdir(pkgdir); const pkglist = await fs.readdir(pkgdir);
const languages = await Promise.all( const languages = await Promise.all(
pkglist.map(lang=> pkglist.map(lang => {
fs.readdir(path.join(pkgdir,lang)) return fs.readdir(path.join(pkgdir, lang)).then(x => {
.then(x=>x.map(y=>path.join(pkgdir, lang, y))) return x.map(y => path.join(pkgdir, lang, y));
)); });
})
);
const installed_languages = languages const installed_languages = languages
.flat() .flat()
.filter(pkg => fss.exists_sync(path.join(pkg, globals.pkg_installed_file))); .filter(pkg =>
fss.exists_sync(path.join(pkg, globals.pkg_installed_file))
);
installed_languages.forEach(pkg => runtime.load_package(pkg)); installed_languages.for_each(pkg => runtime.load_package(pkg));
logger.info('Starting API Server'); logger.info('Starting API Server');
logger.debug('Constructing Express App'); logger.debug('Constructing Express App');
@@ -58,21 +65,19 @@ const app = express();
     app.use(body_parser.urlencoded({ extended: true }));
     app.use(body_parser.json());

-    app.use(function (err, req, res, next) {
-        return res
-            .status(400)
-            .send({
-                stack: err.stack
-            })
-    })
+    app.use((err, req, res, next) => {
+        return res.status(400).send({
+            stack: err.stack,
+        });
+    });

     logger.debug('Registering Routes');

-    const api_v2 = require('./api/v2')
-    app.use('/api/v1', api_v2);
+    const api_v2 = require('./api/v2');
+    app.use('/api/v2', api_v2);
     app.use('/api/v2', api_v2);

-    app.use(function (req,res,next){
+    app.use((req, res, next) => {
         return res.status(404).send({ message: 'Not Found' });
     });
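The error handler kept here is what backs the "validate json instead of a json header" commits: body_parser.json() parses, and therefore validates, every request body, and any SyntaxError it raises falls through to the four-argument middleware, which turns it into a 400 response. A minimal standalone Express sketch of that pattern, with a placeholder route and port:

    const express = require('express');
    const body_parser = require('body-parser');

    const app = express();
    app.use(body_parser.urlencoded({ extended: true }));
    app.use(body_parser.json()); // a malformed JSON body makes this throw

    // A middleware with four arguments is Express's error handler; registering it
    // after json() means bad JSON becomes a clean 400 instead of an unhandled error.
    app.use((err, req, res, next) => {
        return res.status(400).send({
            stack: err.stack,
        });
    });

    app.get('/ping', (req, res) => res.status(200).send({ message: 'pong' })); // placeholder route

    app.listen(2000); // placeholder port matching the default bind address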

View File

@ -10,20 +10,19 @@ const wait_pid = require('waitpid');
const job_states = { const job_states = {
READY: Symbol('Ready to be primed'), READY: Symbol('Ready to be primed'),
PRIMED: Symbol('Primed and ready for execution'), PRIMED: Symbol('Primed and ready for execution'),
EXECUTED: Symbol('Executed and ready for cleanup') EXECUTED: Symbol('Executed and ready for cleanup'),
}; };
let uid = 0; let uid = 0;
let gid = 0; let gid = 0;
class Job { class Job {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) { constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
this.uuid = uuidv4(); this.uuid = uuidv4();
this.runtime = runtime; this.runtime = runtime;
this.files = files.map((file, i) => ({ this.files = files.map((file, i) => ({
name: file.name || `file${i}.code`, name: file.name || `file${i}.code`,
content: file.content content: file.content,
})); }));
this.args = args; this.args = args;
@ -37,12 +36,15 @@ class Job {
uid++; uid++;
gid++; gid++;
uid %= (config.runner_uid_max - config.runner_uid_min) + 1; uid %= config.runner_uid_max - config.runner_uid_min + 1;
gid %= (config.runner_gid_max - config.runner_gid_min) + 1; gid %= config.runner_gid_max - config.runner_gid_min + 1;
this.state = job_states.READY; this.state = job_states.READY;
this.dir = path.join(config.data_directory, globals.data_directories.jobs, this.uuid); this.dir = path.join(
config.data_directory,
globals.data_directories.jobs,
this.uuid
);
} }
async prime() { async prime() {
@ -75,19 +77,14 @@ class Job {
'prlimit', 'prlimit',
'--nproc=' + config.max_process_count, '--nproc=' + config.max_process_count,
'--nofile=' + config.max_open_files, '--nofile=' + config.max_open_files,
'--fsize=' + config.max_file_size '--fsize=' + config.max_file_size,
]; ];
if (memory_limit >= 0) { if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit); prlimit.push('--as=' + memory_limit);
} }
const proc_call = [ const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];
...prlimit,
...nonetwork,
'bash',file,
...args
];
var stdout = ''; var stdout = '';
var stderr = ''; var stderr = '';
@ -96,20 +93,23 @@ class Job {
const proc = cp.spawn(proc_call[0], proc_call.splice(1), { const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
env: { env: {
...this.runtime.env_vars, ...this.runtime.env_vars,
PISTON_LANGUAGE: this.runtime.language PISTON_LANGUAGE: this.runtime.language,
}, },
stdio: 'pipe', stdio: 'pipe',
cwd: this.dir, cwd: this.dir,
uid: this.uid, uid: this.uid,
gid: this.gid, gid: this.gid,
detached: true //give this process its own process group detached: true, //give this process its own process group
}); });
proc.stdin.write(this.stdin); proc.stdin.write(this.stdin);
proc.stdin.end(); proc.stdin.end();
proc.stdin.destroy(); proc.stdin.destroy();
const kill_timeout = set_timeout(_ => proc.kill('SIGKILL'), timeout); const kill_timeout = set_timeout(
_ => proc.kill('SIGKILL'),
timeout
);
proc.stderr.on('data', data => { proc.stderr.on('data', data => {
if (stderr.length > config.output_max_size) { if (stderr.length > config.output_max_size) {
@ -142,7 +142,7 @@ class Job {
resolve({ stdout, stderr, code, signal, output }); resolve({ stdout, stderr, code, signal, output });
}); });
proc.on('error', (err) => { proc.on('error', err => {
exit_cleanup(); exit_cleanup();
reject({ error: err, stdout, stderr, output }); reject({ error: err, stdout, stderr, output });
@ -152,10 +152,17 @@ class Job {
async execute() { async execute() {
if (this.state !== job_states.PRIMED) { if (this.state !== job_states.PRIMED) {
throw new Error('Job must be in primed state, current state: ' + this.state.toString()); throw new Error(
'Job must be in primed state, current state: ' +
this.state.toString()
);
} }
logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`); logger.info(
`Executing job uuid=${this.uuid} uid=${this.uid} gid=${
this.gid
} runtime=${this.runtime.toString()}`
);
logger.debug('Compiling'); logger.debug('Compiling');
@ -185,15 +192,16 @@ class Job {
compile, compile,
run, run,
language: this.runtime.language, language: this.runtime.language,
version: this.runtime.version.raw version: this.runtime.version.raw,
}; };
} }
async cleanup_processes() { async cleanup_processes() {
let processes = [1]; let processes = [1];
while (processes.length > 0) { while (processes.length > 0) {
processes = await new Promise((resolve, reject) => cp.execFile('ps', ['awwxo', 'pid,ruid'], function(err, stdout) { processes = await new Promise((resolve, reject) =>
cp.execFile('ps', ['awwxo', 'pid,ruid'], (err, stdout) => {
if (err === null) { if (err === null) {
const lines = stdout.split('\n').slice(1); //Remove header with slice const lines = stdout.split('\n').slice(1); //Remove header with slice
const procs = lines.map(line => { const procs = lines.map(line => {
@ -202,16 +210,17 @@ class Job {
.split(/\s+/) .split(/\s+/)
.map(n => parseInt(n)); .map(n => parseInt(n));
return { pid, ruid } return { pid, ruid };
}) });
resolve(procs)
}
else{
reject(error)
}
}));
processes = processes.filter(proc => proc.ruid == this.uid); resolve(procs);
} else {
reject(error);
}
})
);
processes = processes.filter(proc => proc.ruid === this.uid);
for (const proc of processes) { for (const proc of processes) {
// First stop the processes, but keep their resources allocated so they cant re-fork // First stop the processes, but keep their resources allocated so they cant re-fork
@ -222,7 +231,6 @@ class Job {
} }
} }
for (const proc of processes) { for (const proc of processes) {
// Then clear them out of the process tree // Then clear them out of the process tree
try { try {
@ -230,28 +238,33 @@ class Job {
} catch { } catch {
// Could already be dead and just needs to be waited on // Could already be dead and just needs to be waited on
} }
wait_pid(proc.pid); wait_pid(proc.pid);
} }
} }
} }
async cleanup_filesystem() { async cleanup_filesystem() {
for (const clean_path of globals.clean_directories) { for (const clean_path of globals.clean_directories) {
const contents = await fs.readdir(clean_path); const contents = await fs.readdir(clean_path);
for (const file of contents) { for (const file of contents) {
const file_path = path.join(clean_path, file); const file_path = path.join(clean_path, file);
try { try {
const stat = await fs.stat(file_path); const stat = await fs.stat(file_path);
if(stat.uid == this.uid)
await fs.rm(file_path, { recursive: true, force: true }); if (stat.uid === this.uid) {
await fs.rm(file_path, {
recursive: true,
force: true,
});
}
} catch (e) { } catch (e) {
// File was somehow deleted in the time that we read the dir to when we checked the file // File was somehow deleted in the time that we read the dir to when we checked the file
logger.warn(`Error removing file ${file_path}: ${e}`) logger.warn(`Error removing file ${file_path}: ${e}`);
} }
} }
} }
await fs.rm(this.dir, { recursive: true, force: true }); await fs.rm(this.dir, { recursive: true, force: true });
@ -262,12 +275,11 @@ class Job {
await Promise.all([ await Promise.all([
this.cleanup_processes(), this.cleanup_processes(),
this.cleanup_filesystem() this.cleanup_filesystem(),
]); ]);
} }
} }
module.exports = { module.exports = {
Job Job,
}; };
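For context on the limits touched by the max_file_size change: the job runner wraps every spawned process in prlimit, and the flags above come straight from the config options. A standalone sketch of how that argv is assembled; the numbers, file name, and the empty nonetwork placeholder are illustrative, not taken from the repository.

    // Illustrative values; the real ones come from the config options shown earlier.
    const config = { max_process_count: 64, max_open_files: 2048, max_file_size: 10000000 };
    const memory_limit = 128000000; // bytes; a negative value means "do not pass --as"
    const nonetwork = [];           // placeholder for the network-disabling wrapper, if any

    const prlimit = [
        'prlimit',
        '--nproc=' + config.max_process_count, // cap processes per job
        '--nofile=' + config.max_open_files,   // cap open file descriptors
        '--fsize=' + config.max_file_size,     // cap the size of files the job may create
    ];

    if (memory_limit >= 0) {
        prlimit.push('--as=' + memory_limit);  // cap the address space
    }

    const file = 'run';            // placeholder runner script name
    const args = ['file0.code'];   // placeholder arguments

    // The final argv handed to cp.spawn, as in the diff above:
    const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];
    console.log(proc_call.join(' '));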

View File

@ -13,7 +13,6 @@ const chownr = require('chownr');
const util = require('util'); const util = require('util');
class Package { class Package {
constructor({ language, version, download, checksum }) { constructor({ language, version, download, checksum }) {
this.language = language; this.language = language;
this.version = semver.parse(version); this.version = semver.parse(version);
@ -22,7 +21,9 @@ class Package {
} }
get installed() { get installed() {
return fss.exists_sync(path.join(this.install_path, globals.pkg_installed_file)); return fss.exists_sync(
path.join(this.install_path, globals.pkg_installed_file)
);
} }
get install_path() { get install_path() {
@ -42,14 +43,18 @@ class Package {
logger.info(`Installing ${this.language}-${this.version.raw}`); logger.info(`Installing ${this.language}-${this.version.raw}`);
if (fss.exists_sync(this.install_path)) { if (fss.exists_sync(this.install_path)) {
logger.warn(`${this.language}-${this.version.raw} has residual files. Removing them.`); logger.warn(
`${this.language}-${this.version.raw} has residual files. Removing them.`
);
await fs.rm(this.install_path, { recursive: true, force: true }); await fs.rm(this.install_path, { recursive: true, force: true });
} }
logger.debug(`Making directory ${this.install_path}`); logger.debug(`Making directory ${this.install_path}`);
await fs.mkdir(this.install_path, { recursive: true }); await fs.mkdir(this.install_path, { recursive: true });
logger.debug(`Downloading package from ${this.download} in to ${this.install_path}`); logger.debug(
`Downloading package from ${this.download} in to ${this.install_path}`
);
const pkgpath = path.join(this.install_path, 'pkg.tar.gz'); const pkgpath = path.join(this.install_path, 'pkg.tar.gz');
const download = await fetch(this.download); const download = await fetch(this.download);
@ -63,7 +68,8 @@ class Package {
logger.debug('Validating checksums'); logger.debug('Validating checksums');
logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`); logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`);
const cs = crypto.create_hash("sha256") const cs = crypto
.create_hash('sha256')
.update(fss.readFileSync(pkgpath)) .update(fss.readFileSync(pkgpath))
.digest('hex'); .digest('hex');
@ -71,10 +77,14 @@ class Package {
throw new Error(`Checksum miss-match want: ${val} got: ${cs}`); throw new Error(`Checksum miss-match want: ${val} got: ${cs}`);
} }
logger.debug(`Extracting package files from archive ${pkgpath} in to ${this.install_path}`); logger.debug(
`Extracting package files from archive ${pkgpath} in to ${this.install_path}`
);
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`); const proc = cp.exec(
`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`
);
proc.once('exit', (code, _) => { proc.once('exit', (code, _) => {
code === 0 ? resolve() : reject(); code === 0 ? resolve() : reject();
@ -95,12 +105,11 @@ class Package {
const envout = await new Promise((resolve, reject) => { const envout = await new Promise((resolve, reject) => {
let stdout = ''; let stdout = '';
const proc = cp const proc = cp.spawn(
.spawn(
'env', 'env',
['-i', 'bash', '-c', `${get_env_command}`], ['-i', 'bash', '-c', `${get_env_command}`],
{ {
stdio: ['ignore', 'pipe', 'pipe'] stdio: ['ignore', 'pipe', 'pipe'],
} }
); );
@ -117,7 +126,12 @@ class Package {
const filtered_env = envout const filtered_env = envout
.split('\n') .split('\n')
.filter(l => !['PWD','OLDPWD','_', 'SHLVL'].includes(l.split('=',2)[0])) .filter(
l =>
!['PWD', 'OLDPWD', '_', 'SHLVL'].includes(
l.split('=', 2)[0]
)
)
.join('\n'); .join('\n');
await fs.write_file(path.join(this.install_path, '.env'), filtered_env); await fs.write_file(path.join(this.install_path, '.env'), filtered_env);
@ -126,49 +140,55 @@ class Package {
await util.promisify(chownr)(this.install_path, 0, 0); await util.promisify(chownr)(this.install_path, 0, 0);
logger.debug('Writing installed state to disk'); logger.debug('Writing installed state to disk');
await fs.write_file(path.join(this.install_path, globals.pkg_installed_file), Date.now().toString()); await fs.write_file(
path.join(this.install_path, globals.pkg_installed_file),
Date.now().toString()
);
logger.info(`Installed ${this.language}-${this.version.raw}`); logger.info(`Installed ${this.language}-${this.version.raw}`);
return { return {
language: this.language, language: this.language,
version: this.version.raw version: this.version.raw,
}; };
} }
async uninstall() { async uninstall() {
logger.info(`Uninstalling ${this.language}-${this.version.raw}`); logger.info(`Uninstalling ${this.language}-${this.version.raw}`);
logger.debug("Finding runtime") logger.debug('Finding runtime');
const found_runtime = runtime.get_runtime_by_name_and_version(this.language, this.version.raw); const found_runtime = runtime.get_runtime_by_name_and_version(
this.language,
this.version.raw
);
if (!found_runtime) { if (!found_runtime) {
logger.error(`Uninstalling ${this.language}-${this.version.raw} failed: Not installed`) logger.error(
throw new Error(`${this.language}-${this.version.raw} is not installed`) `Uninstalling ${this.language}-${this.version.raw} failed: Not installed`
);
throw new Error(
`${this.language}-${this.version.raw} is not installed`
);
} }
logger.debug("Unregistering runtime") logger.debug('Unregistering runtime');
found_runtime.unregister(); found_runtime.unregister();
logger.debug("Cleaning files from disk") logger.debug('Cleaning files from disk');
await fs.rmdir(this.install_path, {recursive: true}) await fs.rmdir(this.install_path, { recursive: true });
logger.info(`Uninstalled ${this.language}-${this.version.raw}`) logger.info(`Uninstalled ${this.language}-${this.version.raw}`);
return { return {
language: this.language, language: this.language,
version: this.version.raw version: this.version.raw,
}; };
} }
static async get_package_list() { static async get_package_list() {
const repo_content = await fetch(config.repo_url).then(x => x.text()); const repo_content = await fetch(config.repo_url).then(x => x.text());
const entries = repo_content const entries = repo_content.split('\n').filter(x => x.length > 0);
.split('\n')
.filter(x => x.length > 0);
return entries.map(line => { return entries.map(line => {
const [language, version, checksum, download] = line.split(',', 4); const [language, version, checksum, download] = line.split(',', 4);
@ -177,7 +197,7 @@ class Package {
language, language,
version, version,
checksum, checksum,
download download,
}); });
}); });
} }
@ -185,16 +205,16 @@ class Package {
static async get_package(lang, version) { static async get_package(lang, version) {
const packages = await Package.get_package_list(); const packages = await Package.get_package_list();
const candidates = packages const candidates = packages.filter(pkg => {
.filter(pkg => { return (
return pkg.language == lang && semver.satisfies(pkg.version, version) pkg.language == lang && semver.satisfies(pkg.version, version)
);
}); });
candidates.sort((a, b) => semver.rcompare(a.version, b.version)); candidates.sort((a, b) => semver.rcompare(a.version, b.version));
return candidates[0] || null; return candidates[0] || null;
} }
} }
module.exports = Package; module.exports = Package;
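The install path above verifies the downloaded archive against the checksum published in the package index before extracting it. The same check written as a standalone helper with Node's standard camelCase crypto and fs APIs (the repository itself goes through snake_case shims); the path and expected hash are whatever the caller supplies:

    const crypto = require('crypto');
    const fss = require('fs');

    // Throws if the file at pkgpath does not hash to the expected sha256 digest.
    function verify_checksum(pkgpath, expected) {
        const cs = crypto
            .createHash('sha256')
            .update(fss.readFileSync(pkgpath))
            .digest('hex');

        if (cs !== expected) {
            throw new Error(`Checksum miss-match want: ${expected} got: ${cs}`);
        }
    }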

View File

@ -8,7 +8,6 @@ const path = require('path');
const runtimes = []; const runtimes = [];
class Runtime { class Runtime {
constructor({ language, version, aliases, pkgdir, runtime }) { constructor({ language, version, aliases, pkgdir, runtime }) {
this.language = language; this.language = language;
this.version = version; this.version = version;
@ -35,26 +34,28 @@ class Runtime {
if (provides) { if (provides) {
// Multiple languages in 1 package // Multiple languages in 1 package
provides.forEach(lang => { provides.forEach(lang => {
runtimes.push(new Runtime({ runtimes.push(
new Runtime({
language: lang.language, language: lang.language,
aliases: lang.aliases, aliases: lang.aliases,
version, version,
pkgdir: package_dir, pkgdir: package_dir,
runtime: language runtime: language,
})); })
);
}); });
} else { } else {
runtimes.push(new Runtime({ runtimes.push(
new Runtime({
language, language,
version, version,
aliases, aliases,
pkgdir: package_dir pkgdir: package_dir,
})) })
);
} }
logger.debug(`Package ${language}-${version} was loaded`); logger.debug(`Package ${language}-${version} was loaded`);
} }
get compiled() { get compiled() {
@ -97,15 +98,28 @@ class Runtime {
module.exports = runtimes; module.exports = runtimes;
module.exports.Runtime = Runtime; module.exports.Runtime = Runtime;
module.exports.get_runtimes_matching_language_version = function (lang, ver) { module.exports.get_runtimes_matching_language_version = function (lang, ver) {
return runtimes.filter(rt => (rt.language == lang || rt.aliases.includes(lang)) && semver.satisfies(rt.version, ver)); return runtimes.filter(
rt =>
(rt.language == lang || rt.aliases.includes(lang)) &&
semver.satisfies(rt.version, ver)
);
}; };
module.exports.get_latest_runtime_matching_language_version = function(lang, ver){ module.exports.get_latest_runtime_matching_language_version = function (
return module.exports.get_runtimes_matching_language_version(lang, ver) lang,
ver
) {
return module.exports
.get_runtimes_matching_language_version(lang, ver)
.sort((a, b) => semver.rcompare(a.version, b.version))[0]; .sort((a, b) => semver.rcompare(a.version, b.version))[0];
}; };
module.exports.get_runtime_by_name_and_version = function (runtime, ver) { module.exports.get_runtime_by_name_and_version = function (runtime, ver) {
return runtimes.find(rt => (rt.runtime == runtime || (rt.runtime === undefined && rt.language == runtime)) && semver.satisfies(rt.version, ver)); return runtimes.find(
} rt =>
(rt.runtime == runtime ||
(rt.runtime === undefined && rt.language == runtime)) &&
semver.satisfies(rt.version, ver)
);
};
module.exports.load_package = Runtime.load_package; module.exports.load_package = Runtime.load_package;
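The selection logic exported here is what lets a request name a language (or alias) plus a semver range and get the newest installed match. A self-contained sketch with made-up runtime entries, not real package data:

    const semver = require('semver');

    const runtimes = [
        { language: 'python', aliases: ['py'], version: semver.parse('3.9.4') },
        { language: 'python', aliases: ['py'], version: semver.parse('3.5.1') },
    ];

    // Filter on language or alias plus a semver range, then take the highest version.
    function get_latest_runtime_matching_language_version(lang, ver) {
        return runtimes
            .filter(
                rt =>
                    (rt.language == lang || rt.aliases.includes(lang)) &&
                    semver.satisfies(rt.version, ver)
            )
            .sort((a, b) => semver.rcompare(a.version, b.version))[0];
    }

    // get_latest_runtime_matching_language_version('py', '3.x') -> the 3.9.4 entry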

View File

@@ -8,7 +8,7 @@ exports.aliases = ['run'];
 exports.describe = 'Executes file with the specified runner';

 exports.builder = {
-    languageVersion: {
+    language_version: {
         string: true,
         desc: 'Set the version of the language to use',
         alias: ['l'],
@@ -38,7 +38,7 @@ exports.builder = {
     }
 };

-exports.handler = async function(argv) {
+exports.handler = async (argv) => {
     const files = [...(argv.files || []),argv.file]
         .map(file_path => {
             return {
@@ -55,7 +55,7 @@ exports.handler = async function(argv) {
     const request = {
         language: argv.language,
-        version: argv['language-version'],
+        version: argv['language_version'],
         files: files,
         args: argv.args,
         stdin,
@@ -63,7 +63,7 @@ exports.handler = async function(argv) {
         run_timeout: argv.rt
     };

-    let { data: response } = await argv.axios.post('/api/v1/execute', request);
+    let { data: response } = await argv.axios.post('/api/v2/execute', request);

     const step = (name, ctx) => {
         console.log(chalk.bold(`== ${name} ==`));

View File

@@ -1,7 +1,7 @@
-exports.command = 'ppman'
-exports.aliases = ['pkg']
-exports.describe = 'Package Manager'
+exports.command = 'ppman';
+exports.aliases = ['pkg'];
+exports.describe = 'Package Manager';

 exports.builder = yargs => yargs
     .commandDir('ppman_commands')
-    .demandCommand()
+    .demandCommand();

View File

@@ -1,20 +1,23 @@
 const chalk = require('chalk');

-exports.command = ['install <language> [language-version]']
-exports.aliases = ['i']
-exports.describe = 'Installs the named package'
+exports.command = ['install <language> [language_version]'];
+exports.aliases = ['i'];
+exports.describe = 'Installs the named package';

 const msg_format = {
-    'color': p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Installation ${p.language ? "succeeded" : "failed: " + p.message}`,
-    'monochrome': p => `Installation ${p.language ? "succeeded" : "failed: " + p.message}`,
-    'json': JSON.stringify
-}
+    color: p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+    monochrome: p => `Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+    json: JSON.stringify
+};

-exports.handler = async function({axios, language, languageVersion}){
+exports.handler = async ({ axios, language, language_version }) => {
     try {
-        const install = await axios.post(`/api/v1/packages/${language}/${languageVersion || '*'}`)
+        const request = {
+            language,
+            version: language_version || '*'
+        };
+        const install = await axios.post(`/api/v2/packages`, request);
         console.log(msg_format.color(install.data));
     } catch ({ response }) {

View File

@@ -1,22 +1,17 @@
-//const fetch = require('node-fetch');
 const chalk = require('chalk');

-exports.command = ['list']
-exports.aliases = ['l']
-exports.describe = 'Lists all available packages'
+exports.command = ['list'];
+exports.aliases = ['l'];
+exports.describe = 'Lists all available packages';

 const msg_format = {
-    'color': p => `${chalk[p.installed ? "green":"red"]("•")} ${p.language} ${p.language_version}`,
-    'monochrome': p => `${p.language} ${p.language_version} ${p.installed ? "(INSTALLED)": ""}`,
-    'json': JSON.stringify
-}
+    color: p => `${chalk[p.installed ? 'green':'red']('•')} ${p.language} ${p.language_version}`,
+    monochrome: p => `${p.language} ${p.language_version} ${p.installed ? '(INSTALLED)': ''}`,
+    json: JSON.stringify
+};

-exports.handler = async function({axios}){
-    const packages = await axios.get('/api/v1/packages');
+exports.handler = async ({ axios }) => {
+    const packages = await axios.get('/api/v2/packages');

     const pkg_msg = packages.data
         .map(msg_format.color)

View File

@@ -1,20 +1,22 @@
 const chalk = require('chalk');

-exports.command = ['uninstall <language> [language-version]']
-exports.aliases = ['u']
-exports.describe = 'Uninstalls the named package'
+exports.command = ['uninstall <language> [language_version]'];
+exports.aliases = ['u'];
+exports.describe = 'Uninstalls the named package';

 const msg_format = {
-    'color': p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Uninstallation ${p.language ? "succeeded" : "failed: " + p.message}`,
-    'monochrome': p => `Uninstallation ${p.language ? "succeeded" : "failed: " + p.message}`,
-    'json': JSON.stringify
-}
+    color: p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+    monochrome: p => `Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+    json: JSON.stringify
+};

-exports.handler = async function({axios, language, languageVersion}){
+exports.handler = async ({ axios, language, language_version }) => {
     try {
-        const uninstall = await axios.delete(`/api/v1/packages/${language}/${languageVersion || '*'}`)
+        const request = {
+            language,
+            version: language_version || '*'
+        };
+        const uninstall = await axios.delete(`/api/v2/packages`, {data: request});
         console.log(msg_format.color(uninstall.data));
     } catch ({ response }) {
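One subtlety in the new uninstall call: axios.delete() takes no body argument, so the package descriptor has to ride in the data field of the request config, unlike the install call where it is simply the second argument to post(). A standalone sketch of both calls; the base URL and package values are placeholders:

    const axios = require('axios').default;

    const client = axios.create({
        baseURL: 'http://localhost:2000', // placeholder Piston API address
        headers: { 'Content-Type': 'application/json' },
    });

    async function example() {
        // Install: the body is the second argument to post()
        await client.post('/api/v2/packages', { language: 'python', version: '3.9.4' });

        // Uninstall: DELETE bodies must go through the config object's `data` field
        await client.delete('/api/v2/packages', {
            data: { language: 'python', version: '3.9.4' },
        });
    }

    example().catch(err => console.error(err.response ? err.response.data : err));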

View File

@@ -2,9 +2,12 @@
 const axios = require('axios').default;

-const axios_instance = function(argv){
+const axios_instance = argv => {
     argv.axios = axios.create({
-        baseURL: argv['piston-url']
+        baseURL: argv['piston-url'],
+        headers: {
+            'Content-Type': 'application/json'
+        }
     });

     return argv;
@@ -18,7 +21,7 @@ require('yargs')(process.argv.slice(2))
         string: true
     })
     .middleware(axios_instance)
-    .scriptName("piston")
+    .scriptName('piston')
     .commandDir('commands')
     .demandCommand()
     .help()