lint api
This commit is contained in: parent 2beb0abff7 · commit 4259e89bb2
@@ -0,0 +1 @@
+node_modules

@@ -0,0 +1 @@
+singleQuote: true
File diff suppressed because it is too large
@@ -1,19 +1,25 @@
 {
     "name": "piston-api",
     "version": "3.0.0",
     "description": "API for piston - a high performance code execution engine",
     "main": "src/index.js",
     "dependencies": {
         "body-parser": "^1.19.0",
         "chownr": "^2.0.0",
         "express": "^4.17.1",
         "is-docker": "^2.1.1",
         "logplease": "^1.2.15",
         "nocamel": "HexF/nocamel#patch-1",
         "node-fetch": "^2.6.1",
         "semver": "^7.3.4",
         "uuid": "^8.3.2",
         "waitpid": "git+https://github.com/HexF/node-waitpid.git"
     },
-    "license": "MIT"
+    "license": "MIT",
+    "scripts": {
+        "lint": "prettier . --write"
+    },
+    "devDependencies": {
+        "prettier": "2.2.1"
+    }
 }
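With the added script, the whole tree can now be reformatted with "npm run lint", which invokes "prettier . --write" using the pinned prettier 2.2.1 and the singleQuote: true rule added above.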
@@ -1,240 +1,224 @@
The api/v2 router after this commit's Prettier pass (single quotes, trailing commas, collapsed res.status(...).send(...) chains; the pass also picks up a missing semicolon on the package require):

const express = require('express');
const router = express.Router();

const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const package = require('../package');
const logger = require('logplease').create('api/v2');

router.use((req, res, next) => {
    if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) {
        return next();
    }

    if (req.headers['content-type'] !== 'application/json') {
        return res.status(415).send({
            message: 'requests must be of type application/json',
        });
    }

    next();
});

router.post('/execute', async (req, res) => {
    const {
        language,
        version,
        files,
        stdin,
        args,
        run_timeout,
        compile_timeout,
        compile_memory_limit,
        run_memory_limit,
    } = req.body;

    if (!language || typeof language !== 'string') {
        return res.status(400).send({
            message: 'language is required as a string',
        });
    }

    if (!version || typeof version !== 'string') {
        return res.status(400).send({
            message: 'version is required as a string',
        });
    }

    if (!files || !Array.isArray(files)) {
        return res.status(400).send({
            message: 'files is required as an array',
        });
    }

    for (const [i, file] of files.entries()) {
        if (typeof file.content !== 'string') {
            return res.status(400).send({
                message: `files[${i}].content is required as a string`,
            });
        }
    }

    if (compile_memory_limit) {
        if (typeof compile_memory_limit !== 'number') {
            return res.status(400).send({
                message: 'if specified, compile_memory_limit must be a number',
            });
        }

        if (
            config.compile_memory_limit >= 0 &&
            (compile_memory_limit > config.compile_memory_limit ||
                compile_memory_limit < 0)
        ) {
            return res.status(400).send({
                message:
                    'compile_memory_limit cannot exceed the configured limit of ' +
                    config.compile_memory_limit,
            });
        }
    }

    if (run_memory_limit) {
        if (typeof run_memory_limit !== 'number') {
            return res.status(400).send({
                message: 'if specified, run_memory_limit must be a number',
            });
        }

        if (
            config.run_memory_limit >= 0 &&
            (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
        ) {
            return res.status(400).send({
                message:
                    'run_memory_limit cannot exceed the configured limit of ' +
                    config.run_memory_limit,
            });
        }
    }

    const rt = runtime.get_latest_runtime_matching_language_version(
        language,
        version
    );

    if (rt === undefined) {
        return res.status(400).send({
            message: `${language}-${version} runtime is unknown`,
        });
    }

    const job = new Job({
        runtime: rt,
        alias: language,
        files: files,
        args: args || [],
        stdin: stdin || '',
        timeouts: {
            run: run_timeout || 3000,
            compile: compile_timeout || 10000,
        },
        memory_limits: {
            run: run_memory_limit || config.run_memory_limit,
            compile: compile_memory_limit || config.compile_memory_limit,
        },
    });

    await job.prime();

    const result = await job.execute();

    await job.cleanup();

    return res.status(200).send(result);
});

router.get('/runtimes', (req, res) => {
    const runtimes = runtime.map((rt) => {
        return {
            language: rt.language,
            version: rt.version.raw,
            aliases: rt.aliases,
            runtime: rt.runtime,
        };
    });

    return res.status(200).send(runtimes);
});

router.get('/packages', async (req, res) => {
    logger.debug('Request to list packages');
    let packages = await package.get_package_list();

    packages = packages.map((pkg) => {
        return {
            language: pkg.language,
            language_version: pkg.version.raw,
            installed: pkg.installed,
        };
    });

    return res.status(200).send(packages);
});

router.post('/packages/:language/:version', async (req, res) => {
    logger.debug('Request to install package');

    const { language, version } = req.params;

    const pkg = await package.get_package(language, version);

    if (pkg == null) {
        return res.status(404).send({
            message: `Requested package ${language}-${version} does not exist`,
        });
    }

    try {
        const response = await pkg.install();

        return res.status(200).send(response);
    } catch (e) {
        logger.error(
            `Error while installing package ${pkg.language}-${pkg.version}:`,
            e.message
        );

        return res.status(500).send({
            message: e.message,
        });
    }
});

router.delete('/packages/:language/:version', async (req, res) => {
    logger.debug('Request to uninstall package');

    const { language, version } = req.params;

    const pkg = await package.get_package(language, version);

    if (pkg == null) {
        return res.status(404).send({
            message: `Requested package ${language}-${version} does not exist`,
        });
    }

    try {
        const response = await pkg.uninstall();

        return res.status(200).send(response);
    } catch (e) {
        logger.error(
            `Error while uninstalling package ${pkg.language}-${pkg.version}:`,
            e.message
        );

        return res.status(500).send({
            message: e.message,
        });
    }
});

module.exports = router;
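As a quick illustration of the /execute contract enforced above, here is a hypothetical smoke test, not part of this commit, built on node-fetch (already a dependency). It assumes the API listens on the default bind_address (0.0.0.0:2000) and that some python runtime package is installed; both are assumptions to adjust.

// Hypothetical smoke test for POST /api/v2/execute (not part of this commit).
// Assumes the default bind_address 0.0.0.0:2000 and an installed python runtime.
const fetch = require('node-fetch');

(async () => {
    const res = await fetch('http://localhost:2000/api/v2/execute', {
        method: 'POST',
        // the middleware above rejects anything that isn't application/json with a 415
        headers: { 'content-type': 'application/json' },
        body: JSON.stringify({
            language: 'python',
            version: '3.x', // resolved via get_latest_runtime_matching_language_version
            files: [{ name: 'main.py', content: "print('hello')" }],
        }),
    });

    console.log(res.status, await res.json());
})();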
@@ -3,132 +3,114 @@ const Logger = require('logplease');
The config module's option table after the pass; each validator array collapses onto a single line:

const logger = Logger.create('config');

const options = [
    {
        key: 'log_level',
        desc: 'Level of data to log',
        default: 'INFO',
        options: Object.values(Logger.LogLevels),
        validators: [
            (x) =>
                Object.values(Logger.LogLevels).includes(x) ||
                `Log level ${x} does not exist`,
        ],
    },
    {
        key: 'bind_address',
        desc: 'Address to bind REST API on\nThank @Bones for the number',
        default: '0.0.0.0:2000',
        validators: [],
    },
    {
        key: 'data_directory',
        desc: 'Absolute path to store all piston related data at',
        default: '/piston',
        validators: [(x) => fss.exists_sync(x) || `Directory ${x} does not exist`],
    },
    {
        key: 'runner_uid_min',
        desc: 'Minimum uid to use for runner',
        default: 1001,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'runner_uid_max',
        desc: 'Maximum uid to use for runner',
        default: 1500,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'runner_gid_min',
        desc: 'Minimum gid to use for runner',
        default: 1001,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'runner_gid_max',
        desc: 'Maximum gid to use for runner',
        default: 1500,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'disable_networking',
        desc: 'Set to true to disable networking',
        default: true,
        parser: (x) => x === 'true',
        validators: [(x) => typeof x === 'boolean' || `${x} is not a boolean`],
    },
    {
        key: 'output_max_size',
        desc: 'Max size of each stdio buffer',
        default: 1024,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'max_process_count',
        desc: 'Max number of processes per job',
        default: 64,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'max_open_files',
        desc: 'Max number of open files per job',
        default: 2048,
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'max_file_size',
        desc: 'Max file size in bytes for a file',
        default: 10000000, //10MB
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'compile_memory_limit',
        desc:
            'Max memory usage for compile stage in bytes (set to -1 for no limit)',
        default: -1, // no limit
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'run_memory_limit',
        desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
        default: -1, // no limit
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'repo_url',
        desc: 'URL of repo index',
        default:
            'https://github.com/engineer-man/piston/releases/download/pkgs/index',
        validators: [],
    },
];

logger.info(`Loading Configuration from environment`);

@@ -137,36 +119,34 @@ let errored = false;
The loader loop after the pass:

let config = {};

options.forEach((option) => {
    const env_key = 'PISTON_' + option.key.to_upper_case();

    const parser = option.parser || ((x) => x);

    const env_val = process.env[env_key];

    const parsed_val = parser(env_val);

    const value = env_val || option.default;

    option.validators.for_each((validator) => {
        let response = null;
        if (env_val) response = validator(parsed_val, env_val);
        else response = validator(value, value);

        if (response !== true) {
            errored = true;
            logger.error(`Config option ${option.key} failed validation:`, response);
            return;
        }
    });

    config[option.key] = value;
});

if (errored) {
    process.exit(1);
}

logger.info('Configuration successfully loaded');
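A minimal standalone sketch of how one option flows through the loader above, assuming PISTON_MAX_PROCESS_COUNT=128 is set in the environment. Note the project reaches to_upper_case, is_nan and for_each through its nocamel dependency; this sketch uses the plain camelCase equivalents so it runs on its own.

// Standalone sketch of the option/env-var mapping above (illustrative values;
// plain camelCase here instead of nocamel's snake_case aliases).
const option = {
    key: 'max_process_count',
    default: 64,
    parser: (x) => parseInt(x),
    validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
};

const env_key = 'PISTON_' + option.key.toUpperCase(); // 'PISTON_MAX_PROCESS_COUNT'
const env_val = process.env[env_key]; // e.g. '128'
const parsed_val = (option.parser || ((x) => x))(env_val);

option.validators.forEach((validator) => {
    const response = env_val
        ? validator(parsed_val, env_val)
        : validator(option.default, option.default);
    if (response !== true) console.error(response);
});

console.log(env_val || option.default); // '128' from the environment, else 64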
@@ -1,26 +1,20 @@
The globals module after the pass; the platform template and the clean_directories list each collapse:

// Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require('is-docker');
const fs = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${fs
    .read_file_sync('/etc/os-release')
    .toString()
    .split('\n')
    .find((x) => x.startsWith('ID'))
    .replace('ID=', '')}`;

module.exports = {
    data_directories: {
        packages: 'packages',
        jobs: 'jobs',
    },
    version: require('../package.json').version,
    platform,
    pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
    clean_directories: ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'],
};
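For reference, the platform template above joins the runtime flavour with the distro ID line of /etc/os-release; on a hypothetical Debian-based Docker image it would evaluate to something like 'docker-debian'.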
api/src/index.js (111 lines)
@@ -14,78 +14,77 @@ const logger = Logger.create('index');
The bootstrap after the pass:

const app = express();

(async () => {
    logger.info('Setting loglevel to', config.log_level);
    Logger.setLogLevel(config.log_level);
    logger.debug('Ensuring data directories exist');

    Object.values(globals.data_directories).for_each((dir) => {
        let data_path = path.join(config.data_directory, dir);

        logger.debug(`Ensuring ${data_path} exists`);

        if (!fss.exists_sync(data_path)) {
            logger.info(`${data_path} does not exist.. Creating..`);

            try {
                fss.mkdir_sync(data_path);
            } catch (e) {
                logger.error(`Failed to create ${data_path}: `, e.message);
            }
        }
    });

    logger.info('Loading packages');
    const pkgdir = path.join(
        config.data_directory,
        globals.data_directories.packages
    );

    const pkglist = await fs.readdir(pkgdir);

    const languages = await Promise.all(
        pkglist.map((lang) => {
            return fs.readdir(path.join(pkgdir, lang)).then((x) => {
                return x.map((y) => path.join(pkgdir, lang, y));
            });
        })
    );

    const installed_languages = languages
        .flat()
        .filter((pkg) =>
            fss.exists_sync(path.join(pkg, globals.pkg_installed_file))
        );

    installed_languages.for_each((pkg) => runtime.load_package(pkg));

    logger.info('Starting API Server');
    logger.debug('Constructing Express App');
    logger.debug('Registering middleware');

    app.use(body_parser.urlencoded({ extended: true }));
    app.use(body_parser.json());

    app.use((err, req, res, next) => {
        return res.status(400).send({
            stack: err.stack,
        });
    });

    logger.debug('Registering Routes');

    const api_v2 = require('./api/v2');
    app.use('/api/v2', api_v2);
    app.use('/api/v2', api_v2);

    app.use((req, res, next) => {
        return res.status(404).send({ message: 'Not Found' });
    });

    logger.debug('Calling app.listen');
    const [address, port] = config.bind_address.split(':');

    app.listen(port, address, () => {
        logger.info('API server started on', config.bind_address);
    });
})();
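One detail in the listen block above: config.bind_address is a single 'host:port' string, so with the default '0.0.0.0:2000' the destructuring yields address '0.0.0.0' and port '2000'.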
api/src/job.js (433 lines)
@@ -8,267 +8,268 @@ const fs = require('fs/promises');
The Job class after the pass (the undefined 'error' reference in cleanup_processes is corrected to the callback's 'err'):

const wait_pid = require('waitpid');

const job_states = {
    READY: Symbol('Ready to be primed'),
    PRIMED: Symbol('Primed and ready for execution'),
    EXECUTED: Symbol('Executed and ready for cleanup'),
};

let uid = 0;
let gid = 0;

class Job {
    constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
        this.uuid = uuidv4();
        this.runtime = runtime;
        this.files = files.map((file, i) => ({
            name: file.name || `file${i}.code`,
            content: file.content,
        }));

        this.args = args;
        this.stdin = stdin;
        this.timeouts = timeouts;
        this.memory_limits = memory_limits;

        this.uid = config.runner_uid_min + uid;
        this.gid = config.runner_gid_min + gid;

        uid++;
        gid++;

        uid %= config.runner_uid_max - config.runner_uid_min + 1;
        gid %= config.runner_gid_max - config.runner_gid_min + 1;

        this.state = job_states.READY;
        this.dir = path.join(
            config.data_directory,
            globals.data_directories.jobs,
            this.uuid
        );
    }

    async prime() {
        logger.info(`Priming job uuid=${this.uuid}`);

        logger.debug('Writing files to job cache');

        logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);

        await fs.mkdir(this.dir, { mode: 0o700 });
        await fs.chown(this.dir, this.uid, this.gid);

        for (const file of this.files) {
            let file_path = path.join(this.dir, file.name);

            await fs.write_file(file_path, file.content);
            await fs.chown(file_path, this.uid, this.gid);
        }

        this.state = job_states.PRIMED;

        logger.debug('Primed job');
    }

    async safe_call(file, args, timeout, memory_limit) {
        return new Promise((resolve, reject) => {
            const nonetwork = config.disable_networking ? ['nosocket'] : [];

            const prlimit = [
                'prlimit',
                '--nproc=' + config.max_process_count,
                '--nofile=' + config.max_open_files,
                '--fsize=' + config.max_file_size,
            ];

            if (memory_limit >= 0) {
                prlimit.push('--as=' + memory_limit);
            }

            const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];

            var stdout = '';
            var stderr = '';
            var output = '';

            const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
                env: {
                    ...this.runtime.env_vars,
                    PISTON_LANGUAGE: this.runtime.language,
                },
                stdio: 'pipe',
                cwd: this.dir,
                uid: this.uid,
                gid: this.gid,
                detached: true, //give this process its own process group
            });

            proc.stdin.write(this.stdin);
            proc.stdin.end();
            proc.stdin.destroy();

            const kill_timeout = set_timeout((_) => proc.kill('SIGKILL'), timeout);

            proc.stderr.on('data', (data) => {
                if (stderr.length > config.output_max_size) {
                    proc.kill('SIGKILL');
                } else {
                    stderr += data;
                    output += data;
                }
            });

            proc.stdout.on('data', (data) => {
                if (stdout.length > config.output_max_size) {
                    proc.kill('SIGKILL');
                } else {
                    stdout += data;
                    output += data;
                }
            });

            const exit_cleanup = () => {
                clear_timeout(kill_timeout);

                proc.stderr.destroy();
                proc.stdout.destroy();
            };

            proc.on('exit', (code, signal) => {
                exit_cleanup();

                resolve({ stdout, stderr, code, signal, output });
            });

            proc.on('error', (err) => {
                exit_cleanup();

                reject({ error: err, stdout, stderr, output });
            });
        });
    }

    async execute() {
        if (this.state !== job_states.PRIMED) {
            throw new Error(
                'Job must be in primed state, current state: ' + this.state.toString()
            );
        }

        logger.info(
            `Executing job uuid=${this.uuid} uid=${this.uid} gid=${
                this.gid
            } runtime=${this.runtime.toString()}`
        );

        logger.debug('Compiling');

        let compile;

        if (this.runtime.compiled) {
            compile = await this.safe_call(
                path.join(this.runtime.pkgdir, 'compile'),
                this.files.map((x) => x.name),
                this.timeouts.compile,
                this.memory_limits.compile
            );
        }

        logger.debug('Running');

        const run = await this.safe_call(
            path.join(this.runtime.pkgdir, 'run'),
            [this.files[0].name, ...this.args],
            this.timeouts.run,
            this.memory_limits.run
        );

        this.state = job_states.EXECUTED;

        return {
            compile,
            run,
            language: this.runtime.language,
            version: this.runtime.version.raw,
        };
    }

    async cleanup_processes() {
        let processes = [1];

        while (processes.length > 0) {
            processes = await new Promise((resolve, reject) =>
                cp.execFile('ps', ['awwxo', 'pid,ruid'], (err, stdout) => {
                    if (err === null) {
                        const lines = stdout.split('\n').slice(1); //Remove header with slice
                        const procs = lines.map((line) => {
                            const [pid, ruid] = line
                                .trim()
                                .split(/\s+/)
                                .map((n) => parseInt(n));

                            return { pid, ruid };
                        });

                        resolve(procs);
                    } else {
                        reject(err);
                    }
                })
            );

            processes = processes.filter((proc) => proc.ruid === this.uid);

            for (const proc of processes) {
                // First stop the processes, but keep their resources allocated so they cant re-fork
                try {
                    process.kill(proc.pid, 'SIGSTOP');
                } catch {
                    // Could already be dead
                }
            }

            for (const proc of processes) {
                // Then clear them out of the process tree
                try {
                    process.kill(proc.pid, 'SIGKILL');
                } catch {
                    // Could already be dead and just needs to be waited on
                }

                wait_pid(proc.pid);
            }
        }
    }

    async cleanup_filesystem() {
        for (const clean_path of globals.clean_directories) {
            const contents = await fs.readdir(clean_path);

            for (const file of contents) {
                const file_path = path.join(clean_path, file);

                try {
                    const stat = await fs.stat(file_path);

                    if (stat.uid === this.uid) {
                        await fs.rm(file_path, { recursive: true, force: true });
                    }
                } catch (e) {
                    // File was somehow deleted in the time that we read the dir to when we checked the file
                    logger.warn(`Error removing file ${file_path}: ${e}`);
                }
            }
        }

        await fs.rm(this.dir, { recursive: true, force: true });
    }

    async cleanup() {
        logger.info(`Cleaning up job uuid=${this.uuid}`);

        await Promise.all([this.cleanup_processes(), this.cleanup_filesystem()]);
    }
}

module.exports = {
    Job,
};
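The constructor above hands each job the next uid/gid from a rotating window. A small standalone sketch of that arithmetic, using the default bounds runner_uid_min=1001 and runner_uid_max=1500:

// Sketch of the uid rotation in the Job constructor (default bounds assumed).
let counter = 0;
const min = 1001;
const max = 1500;

for (let i = 0; i < 502; i++) {
    const assigned = min + counter;
    counter++;
    counter %= max - min + 1; // 500 slots, so the 501st job wraps back to 1001
    if (i < 2 || i > 499) console.log(i, assigned); // 0→1001, 1→1002, ..., 500→1001
}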
@@ -13,188 +13,197 @@ const chownr = require('chownr');
The Package class after the pass (the undefined 'val' in the checksum error message is corrected to this.checksum):

const util = require('util');

class Package {
    constructor({ language, version, download, checksum }) {
        this.language = language;
        this.version = semver.parse(version);
        this.checksum = checksum;
        this.download = download;
    }

    get installed() {
        return fss.exists_sync(
            path.join(this.install_path, globals.pkg_installed_file)
        );
    }

    get install_path() {
        return path.join(
            config.data_directory,
            globals.data_directories.packages,
            this.language,
            this.version.raw
        );
    }

    async install() {
        if (this.installed) {
            throw new Error('Already installed');
        }

        logger.info(`Installing ${this.language}-${this.version.raw}`);

        if (fss.exists_sync(this.install_path)) {
            logger.warn(
                `${this.language}-${this.version.raw} has residual files. Removing them.`
            );
            await fs.rm(this.install_path, { recursive: true, force: true });
        }

        logger.debug(`Making directory ${this.install_path}`);
        await fs.mkdir(this.install_path, { recursive: true });

        logger.debug(
            `Downloading package from ${this.download} in to ${this.install_path}`
        );
        const pkgpath = path.join(this.install_path, 'pkg.tar.gz');
        const download = await fetch(this.download);

        const file_stream = fss.create_write_stream(pkgpath);
        await new Promise((resolve, reject) => {
            download.body.pipe(file_stream);
            download.body.on('error', reject);

            file_stream.on('finish', resolve);
        });

        logger.debug('Validating checksums');
        logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`);
        const cs = crypto
            .create_hash('sha256')
            .update(fss.readFileSync(pkgpath))
            .digest('hex');

        if (cs !== this.checksum) {
            throw new Error(`Checksum miss-match want: ${this.checksum} got: ${cs}`);
        }

        logger.debug(
            `Extracting package files from archive ${pkgpath} in to ${this.install_path}`
        );

        await new Promise((resolve, reject) => {
            const proc = cp.exec(
                `bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`
            );

            proc.once('exit', (code, _) => {
                code === 0 ? resolve() : reject();
            });

            proc.stdout.pipe(process.stdout);
            proc.stderr.pipe(process.stderr);

            proc.once('error', reject);
        });

        logger.debug('Registering runtime');
        runtime.load_package(this.install_path);

        logger.debug('Caching environment');
        const get_env_command = `cd ${this.install_path}; source environment; env`;

        const envout = await new Promise((resolve, reject) => {
            let stdout = '';

            const proc = cp.spawn('env', ['-i', 'bash', '-c', `${get_env_command}`], {
                stdio: ['ignore', 'pipe', 'pipe'],
            });

            proc.once('exit', (code, _) => {
                code === 0 ? resolve(stdout) : reject();
            });

            proc.stdout.on('data', (data) => {
                stdout += data;
            });

            proc.once('error', reject);
        });

        const filtered_env = envout
            .split('\n')
            .filter(
                (l) => !['PWD', 'OLDPWD', '_', 'SHLVL'].includes(l.split('=', 2)[0])
            )
            .join('\n');

        await fs.write_file(path.join(this.install_path, '.env'), filtered_env);

        logger.debug('Changing Ownership of package directory');
        await util.promisify(chownr)(this.install_path, 0, 0);

        logger.debug('Writing installed state to disk');
        await fs.write_file(
            path.join(this.install_path, globals.pkg_installed_file),
            Date.now().toString()
        );

        logger.info(`Installed ${this.language}-${this.version.raw}`);

        return {
            language: this.language,
            version: this.version.raw,
        };
    }

    async uninstall() {
        logger.info(`Uninstalling ${this.language}-${this.version.raw}`);

        logger.debug('Finding runtime');
        const found_runtime = runtime.get_runtime_by_name_and_version(
            this.language,
            this.version.raw
        );

        if (!found_runtime) {
            logger.error(
                `Uninstalling ${this.language}-${this.version.raw} failed: Not installed`
            );
            throw new Error(`${this.language}-${this.version.raw} is not installed`);
        }

        logger.debug('Unregistering runtime');
        found_runtime.unregister();

        logger.debug('Cleaning files from disk');
        await fs.rmdir(this.install_path, { recursive: true });

        logger.info(`Uninstalled ${this.language}-${this.version.raw}`);

        return {
            language: this.language,
            version: this.version.raw,
        };
    }

    static async get_package_list() {
        const repo_content = await fetch(config.repo_url).then((x) => x.text());

        const entries = repo_content.split('\n').filter((x) => x.length > 0);

        return entries.map((line) => {
            const [language, version, checksum, download] = line.split(',', 4);

            return new Package({
                language,
                version,
                checksum,
                download,
            });
        });
    }

    static async get_package(lang, version) {
        const packages = await Package.get_package_list();

        const candidates = packages.filter((pkg) => {
            return pkg.language == lang && semver.satisfies(pkg.version, version);
        });

        candidates.sort((a, b) => semver.rcompare(a.version, b.version));

        return candidates[0] || null;
    }
}

module.exports = Package;
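The checksum assertion in install() rests on node's built-in crypto; a standalone equivalent follows, written in plain camelCase (the project reaches the same calls through nocamel's snake_case aliases, e.g. create_hash). The file path is illustrative only.

// Standalone equivalent of the sha256 check in install() above.
const crypto = require('crypto');
const fs = require('fs');

const cs = crypto
    .createHash('sha256')
    .update(fs.readFileSync('/tmp/pkg.tar.gz')) // illustrative path
    .digest('hex');

console.log(cs); // compared against the checksum column of the repo index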
@@ -8,102 +8,118 @@ const path = require('path');
The Runtime class and its matcher exports after the pass:

const runtimes = [];

class Runtime {
    constructor({ language, version, aliases, pkgdir, runtime }) {
        this.language = language;
        this.version = version;
        this.aliases = aliases || [];
        this.pkgdir = pkgdir;
        this.runtime = runtime;
    }

    static load_package(package_dir) {
        let info = JSON.parse(
            fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
        );

        let { language, version, build_platform, aliases, provides } = info;
        version = semver.parse(version);

        if (build_platform !== globals.platform) {
            logger.warn(
                `Package ${language}-${version} was built for platform ${build_platform}, ` +
                    `but our platform is ${globals.platform}`
            );
        }

        if (provides) {
            // Multiple languages in 1 package
            provides.forEach((lang) => {
                runtimes.push(
                    new Runtime({
                        language: lang.language,
                        aliases: lang.aliases,
                        version,
                        pkgdir: package_dir,
                        runtime: language,
                    })
                );
            });
        } else {
            runtimes.push(
                new Runtime({
                    language,
                    version,
                    aliases,
                    pkgdir: package_dir,
                })
            );
        }

        logger.debug(`Package ${language}-${version} was loaded`);
    }

    get compiled() {
        if (this._compiled === undefined) {
            this._compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
        }

        return this._compiled;
    }

    get env_vars() {
        if (!this._env_vars) {
            const env_file = path.join(this.pkgdir, '.env');
            const env_content = fss.read_file_sync(env_file).toString();

            this._env_vars = {};

            env_content
                .trim()
                .split('\n')
                .map((line) => line.split('=', 2))
                .forEach(([key, val]) => {
                    this._env_vars[key.trim()] = val.trim();
                });
        }

        return this._env_vars;
    }

    toString() {
        return `${this.language}-${this.version.raw}`;
    }

    unregister() {
        const index = runtimes.indexOf(this);
        runtimes.splice(index, 1); //Remove from runtimes list
    }
}

module.exports = runtimes;
module.exports.Runtime = Runtime;

module.exports.get_runtimes_matching_language_version = function (lang, ver) {
    return runtimes.filter(
        (rt) =>
            (rt.language == lang || rt.aliases.includes(lang)) &&
            semver.satisfies(rt.version, ver)
    );
};

module.exports.get_latest_runtime_matching_language_version = function (
    lang,
    ver
) {
    return module.exports
        .get_runtimes_matching_language_version(lang, ver)
        .sort((a, b) => semver.rcompare(a.version, b.version))[0];
};

module.exports.get_runtime_by_name_and_version = function (runtime, ver) {
    return runtimes.find(
        (rt) =>
            (rt.runtime == runtime ||
                (rt.runtime === undefined && rt.language == runtime)) &&
            semver.satisfies(rt.version, ver)
    );
};

module.exports.load_package = Runtime.load_package;
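The matcher exports above reduce to a semver filter plus an rcompare sort. A tiny sketch of the selection rule, with hypothetical installed versions:

// Sketch of the "latest matching runtime" selection, with made-up versions.
const semver = require('semver');

const installed = ['12.18.4', '15.8.0']; // hypothetical runtime versions
const requested = '15.x';

const best = installed
    .filter((v) => semver.satisfies(v, requested)) // same predicate as the filter above
    .sort((a, b) => semver.rcompare(a, b))[0]; // newest-first, take the head

console.log(best); // '15.8.0'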