From 021ec1aa9418b2bf860b36fe77efa05f4ea17f0b Mon Sep 17 00:00:00 2001
From: Hydrazer
Date: Thu, 30 Dec 2021 11:12:20 -0700
Subject: [PATCH 1/6] pkg(MATL-22.5.0): added MATL 22.5.0

---
 packages/MATL/22.5.0/build.sh      |  9 +++++++++
 packages/MATL/22.5.0/environment   |  5 +++++
 packages/MATL/22.5.0/metadata.json |  5 +++++
 packages/MATL/22.5.0/run           | 13 +++++++++++++
 packages/MATL/22.5.0/test.matl     |  1 +
 readme.md                          |  1 +
 6 files changed, 34 insertions(+)
 create mode 100644 packages/MATL/22.5.0/build.sh
 create mode 100644 packages/MATL/22.5.0/environment
 create mode 100644 packages/MATL/22.5.0/metadata.json
 create mode 100644 packages/MATL/22.5.0/run
 create mode 100644 packages/MATL/22.5.0/test.matl

diff --git a/packages/MATL/22.5.0/build.sh b/packages/MATL/22.5.0/build.sh
new file mode 100644
index 0000000..ea2a376
--- /dev/null
+++ b/packages/MATL/22.5.0/build.sh
@@ -0,0 +1,9 @@
+#!/usr/bin/env bash
+
+# build octave as dependency
+source ../../octave/6.2.0/build.sh
+
+# curl MATL 22.5.0
+curl -L "https://github.com/lmendo/MATL/archive/refs/tags/22.5.0.tar.gz" -o MATL.tar.xz
+tar xf MATL.tar.xz --strip-components=1
+rm MATL.tar.xz
\ No newline at end of file
diff --git a/packages/MATL/22.5.0/environment b/packages/MATL/22.5.0/environment
new file mode 100644
index 0000000..015acc2
--- /dev/null
+++ b/packages/MATL/22.5.0/environment
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+# Path to MATL binary
+export PATH=$PWD/bin:$PATH
+export MATL_PATH=$PWD
\ No newline at end of file
diff --git a/packages/MATL/22.5.0/metadata.json b/packages/MATL/22.5.0/metadata.json
new file mode 100644
index 0000000..07cdc09
--- /dev/null
+++ b/packages/MATL/22.5.0/metadata.json
@@ -0,0 +1,5 @@
+{
+    "language": "matl",
+    "version": "22.5.0",
+    "aliases": []
+}
diff --git a/packages/MATL/22.5.0/run b/packages/MATL/22.5.0/run
new file mode 100644
index 0000000..8576158
--- /dev/null
+++ b/packages/MATL/22.5.0/run
@@ -0,0 +1,13 @@
+#!/usr/bin/env bash
+
+# get file as first argument
+file="$1"
+
+# remove the file from $@
+shift
+
+# use the rest of the arguments as stdin
+stdin=`printf "%s\n" "$@"`
+
+# pass stdin into octave which will run MATL
+echo "$stdin" | octave -W -p "$MATL_PATH" --eval "matl -of '$file'"
diff --git a/packages/MATL/22.5.0/test.matl b/packages/MATL/22.5.0/test.matl
new file mode 100644
index 0000000..97791fa
--- /dev/null
+++ b/packages/MATL/22.5.0/test.matl
@@ -0,0 +1 @@
+'OK'
\ No newline at end of file
diff --git a/readme.md b/readme.md
index 0cc0fa4..cebde53 100644
--- a/readme.md
+++ b/readme.md
@@ -351,6 +351,7 @@ Content-Type: application/json
 `llvm_ir`,
 `lolcode`,
 `lua`,
+`matl`,
 `nasm`,
 `nasm64`,
 `nim`,

From f6fa9cb968b0b455942e1154130812862cffdd4d Mon Sep 17 00:00:00 2001
From: Shane
Date: Sun, 2 Jan 2022 09:39:24 +0000
Subject: [PATCH 2/6] Make builder script run relative to directory it was
 called from

---
 builder/build.sh | 11 +++++++----
 1 file changed, 7 insertions(+), 4 deletions(-)

diff --git a/builder/build.sh b/builder/build.sh
index 8559eaf..eaae21d 100755
--- a/builder/build.sh
+++ b/builder/build.sh
@@ -2,6 +2,9 @@

 # Build a container using the spec file provided

+START_DIR=$PWD
+cd "$(dirname "${BASH_SOURCE[0]}")"
+
 help_msg(){
     echo "Usage: $0 [specfile] [tag]"
     echo
@@ -37,14 +40,14 @@ fetch_packages(){
 }

 build_container(){
-    docker build -t $1 -f "$(dirname $0)/Dockerfile" "$PWD/build"
+    docker build -t $1 -f "Dockerfile" "$START_DIR/build"
 }

-SPEC_FILE=$1
+SPEC_FILE=$START_DIR/$1
 TAG=$2

-[ -z "$SPEC_FILE" ] && help_msg "specfile is required"
+[ -z "$1" ] && help_msg "specfile is 
required" [ -z "$TAG" ] && help_msg "tag is required" [ -f "$SPEC_FILE" ] || help_msg "specfile does not exist" @@ -58,4 +61,4 @@ fetch_packages $SPEC_FILE build_container $TAG echo "Start your custom piston container with" -echo "$ docker run --tmpfs /piston/jobs -dit -p 2000:2000 $TAG" \ No newline at end of file +echo "$ docker run --tmpfs /piston/jobs -dit -p 2000:2000 $TAG" From fe7f66a7542ba488e18dfb4de78ab814c47b1963 Mon Sep 17 00:00:00 2001 From: Omar Brikaa Date: Fri, 28 Jan 2022 11:43:56 +0200 Subject: [PATCH 3/6] Add .vscode to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index eb53d81..e82d86f 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ data/ .piston_env node_modules +.vscode/ From 416ade1b76e8d6e641ed54fba9e8a7b69bccf873 Mon Sep 17 00:00:00 2001 From: Omar Brikaa Date: Fri, 28 Jan 2022 17:58:00 +0200 Subject: [PATCH 4/6] Refactor config.js --- api/src/config.js | 170 ++++++++++++++++++++++++---------------------- 1 file changed, 88 insertions(+), 82 deletions(-) diff --git a/api/src/config.js b/api/src/config.js index 5324097..d207431 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -2,57 +2,6 @@ const fss = require('fs'); const Logger = require('logplease'); const logger = Logger.create('config'); -function parse_overrides(overrides) { - try { - return JSON.parse(overrides); - } catch (e) { - return null; - } -} - -function validate_overrides(overrides, options) { - for (const language in overrides) { - for (const key in overrides[language]) { - if ( - ![ - 'max_process_count', - 'max_open_files', - 'max_file_size', - 'compile_memory_limit', - 'run_memory_limit', - 'compile_timeout', - 'run_timeout', - 'output_max_size', - ].includes(key) - ) { - logger.error(`Invalid overridden option: ${key}`); - return false; - } - const option = options.find(o => o.key === key); - const parser = option.parser; - const raw = overrides[language][key]; - const value = parser(raw); - const validators = option.validators; - for (const validator of validators) { - const response = validator(value, raw); - if (response !== true) { - logger.error( - `Failed to validate overridden option: ${key}`, - response - ); - return false; - } - } - overrides[language][key] = value; - } - // Modifies the reference - options[ - options.index_of(options.find(o => o.key === 'limit_overrides')) - ] = overrides; - } - return true; -} - const options = [ { key: 'log_level', @@ -68,7 +17,7 @@ const options = [ { key: 'bind_address', desc: 'Address to bind REST API on', - default: `0.0.0.0:${process.env["PORT"] || 2000}`, + default: `0.0.0.0:${process.env['PORT'] || 2000}`, validators: [], }, { @@ -192,53 +141,110 @@ const options = [ default: {}, parser: parse_overrides, validators: [ - x => !!x || `Invalid JSON format for the overrides\n${x}`, - (overrides, _, options) => - validate_overrides(overrides, options) || - `Failed to validate the overrides`, + x => !!x || `Failed to parse the overrides\n${x}`, + validate_overrides, ], }, ]; -logger.info(`Loading Configuration from environment`); +Object.freeze(options); -let errored = false; +function apply_validators(validators, validator_parameters) { + for (const validator of validators) { + const validation_response = validator(...validator_parameters); + if (validation_response !== true) { + return validation_response; + } + } + return true; +} + +function parse_overrides(overrides_string) { + function get_parsed_json_or_null(overrides) { + try { + return JSON.parse(overrides); + } catch 
(e) { + return null; + } + } + + const overrides = get_parsed_json_or_null(overrides_string); + if (typeof overrides === null) { + return null; + } + const parsed_overrides = {}; + for (const language in overrides) { + parsed_overrides[language] = {}; + for (const key in overrides[language]) { + if ( + ![ + 'max_process_count', + 'max_open_files', + 'max_file_size', + 'compile_memory_limit', + 'run_memory_limit', + 'compile_timeout', + 'run_timeout', + 'output_max_size', + ].includes(key) + ) { + return null; + } + // Find the option for the override + const option = options.find(o => o.key === key); + const parser = option.parser; + const raw = overrides[language][key]; + const value = parser(raw); + parsed_overrides[language][key] = value; + } + } + return parsed_overrides; +} + +function validate_overrides(overrides) { + for (const language in overrides) { + for (const key in overrides[language]) { + const value = overrides[language][key]; + const option = options.find(o => o.key === key); + const validators = option.validators; + const validation_response = apply_validators(validators, [ + value, + value, + ]); + if (validation_response !== true) { + return `In overridden option ${key} for ${language}, ${validation_response}`; + } + } + } + return true; +} + +logger.info(`Loading Configuration from environment`); let config = {}; options.forEach(option => { const env_key = 'PISTON_' + option.key.to_upper_case(); - const parser = option.parser || (x => x); - const env_val = process.env[env_key]; - const parsed_val = parser(env_val); - const value = env_val === undefined ? option.default : parsed_val; - - option.validators.for_each(validator => { - let response = null; - if (env_val) response = validator(parsed_val, env_val, options); - else response = validator(value, value, options); - - if (response !== true) { - errored = true; - logger.error( - `Config option ${option.key} failed validation:`, - response - ); - return; - } - }); - + const validator_parameters = + env_val === undefined ? 
[value, value] : [parsed_val, env_val]; + const validation_response = apply_validators( + option.validators, + validator_parameters + ); + if (validation_response !== true) { + logger.error( + `Config option ${option.key} failed validation:`, + validation_response + ); + process.exit(1); + } config[option.key] = value; }); -if (errored) { - process.exit(1); -} - logger.info('Configuration successfully loaded'); module.exports = config; From 9760f8fcf973877b1d01370db61eb14ab28eb8a2 Mon Sep 17 00:00:00 2001 From: Omar Brikaa Date: Sun, 30 Jan 2022 13:35:16 +0200 Subject: [PATCH 5/6] config.js: index by key, bug fix and more refactoring --- api/src/config.js | 85 ++++++++++++++++++----------------------------- 1 file changed, 33 insertions(+), 52 deletions(-) diff --git a/api/src/config.js b/api/src/config.js index d207431..b8fa97d 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -2,139 +2,119 @@ const fss = require('fs'); const Logger = require('logplease'); const logger = Logger.create('config'); -const options = [ - { - key: 'log_level', +const options = { + log_level: { desc: 'Level of data to log', default: 'INFO', - options: Object.values(Logger.LogLevels), validators: [ x => Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist`, ], }, - { - key: 'bind_address', + bind_address: { desc: 'Address to bind REST API on', default: `0.0.0.0:${process.env['PORT'] || 2000}`, validators: [], }, - { - key: 'data_directory', + data_directory: { desc: 'Absolute path to store all piston related data at', default: '/piston', validators: [ x => fss.exists_sync(x) || `Directory ${x} does not exist`, ], }, - { - key: 'runner_uid_min', + runner_uid_min: { desc: 'Minimum uid to use for runner', default: 1001, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'runner_uid_max', + runner_uid_max: { desc: 'Maximum uid to use for runner', default: 1500, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'runner_gid_min', + runner_gid_min: { desc: 'Minimum gid to use for runner', default: 1001, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'runner_gid_max', + runner_gid_max: { desc: 'Maximum gid to use for runner', default: 1500, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'disable_networking', + disable_networking: { desc: 'Set to true to disable networking', default: true, parser: x => x === 'true', validators: [x => typeof x === 'boolean' || `${x} is not a boolean`], }, - { - key: 'output_max_size', + output_max_size: { desc: 'Max size of each stdio buffer', default: 1024, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'max_process_count', + max_process_count: { desc: 'Max number of processes per job', default: 64, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'max_open_files', + max_open_files: { desc: 'Max number of open files per job', default: 2048, parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'max_file_size', + max_file_size: { desc: 'Max file size in bytes for a file', default: 10000000, //10MB parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'compile_timeout', + compile_timeout: { desc: 'Max time allowed for compile stage in milliseconds', default: 10000, // 10 
seconds parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'run_timeout', + run_timeout: { desc: 'Max time allowed for run stage in milliseconds', default: 3000, // 3 seconds parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'compile_memory_limit', + compile_memory_limit: { desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)', default: -1, // no limit parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'run_memory_limit', + run_memory_limit: { desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)', default: -1, // no limit parser: parse_int, validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], }, - { - key: 'repo_url', + repo_url: { desc: 'URL of repo index', default: 'https://github.com/engineer-man/piston/releases/download/pkgs/index', validators: [], }, - { - key: 'max_concurrent_jobs', + max_concurrent_jobs: { desc: 'Maximum number of concurrent jobs to run at one time', default: 64, parser: parse_int, validators: [x => x > 0 || `${x} cannot be negative`], }, - { - key: 'limit_overrides', + limit_overrides: { desc: 'Per-language exceptions in JSON format for each of:\ max_process_count, max_open_files, max_file_size, compile_memory_limit,\ run_memory_limit, compile_timeout, run_timeout, output_max_size', @@ -145,7 +125,7 @@ const options = [ validate_overrides, ], }, -]; +}; Object.freeze(options); @@ -169,7 +149,7 @@ function parse_overrides(overrides_string) { } const overrides = get_parsed_json_or_null(overrides_string); - if (typeof overrides === null) { + if (overrides === null) { return null; } const parsed_overrides = {}; @@ -191,11 +171,11 @@ function parse_overrides(overrides_string) { return null; } // Find the option for the override - const option = options.find(o => o.key === key); + const option = options[key]; const parser = option.parser; - const raw = overrides[language][key]; - const value = parser(raw); - parsed_overrides[language][key] = value; + const raw_value = overrides[language][key]; + const parsed_value = parser(raw_value); + parsed_overrides[language][key] = parsed_value; } } return parsed_overrides; @@ -205,7 +185,7 @@ function validate_overrides(overrides) { for (const language in overrides) { for (const key in overrides[language]) { const value = overrides[language][key]; - const option = options.find(o => o.key === key); + const option = options[key]; const validators = option.validators; const validation_response = apply_validators(validators, [ value, @@ -223,8 +203,9 @@ logger.info(`Loading Configuration from environment`); let config = {}; -options.forEach(option => { - const env_key = 'PISTON_' + option.key.to_upper_case(); +for (const option_name in options) { + const env_key = 'PISTON_' + option_name.to_upper_case(); + const option = options[option_name]; const parser = option.parser || (x => x); const env_val = process.env[env_key]; const parsed_val = parser(env_val); @@ -237,13 +218,13 @@ options.forEach(option => { ); if (validation_response !== true) { logger.error( - `Config option ${option.key} failed validation:`, + `Config option ${option_name} failed validation:`, validation_response ); process.exit(1); } - config[option.key] = value; -}); + config[option_name] = value; +} logger.info('Configuration successfully loaded'); From 7de631383fb948844bbdbedcfdb28bc0c46e89cd Mon Sep 17 00:00:00 2001 From: Omar Brikaa Date: Sat, 5 Feb 2022 15:30:44 
+0200 Subject: [PATCH 6/6] Add master features in V3 API, refactor --- api/src/api/v2.js | 47 +------ api/src/api/v3.js | 311 +++++++++++++++++++++++++++------------------ api/src/runtime.js | 3 - 3 files changed, 187 insertions(+), 174 deletions(-) diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 86294b4..0ccbbf0 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -5,7 +5,6 @@ const events = require('events'); const runtime = require('../runtime'); const { Job } = require('../job'); -const logger = require('logplease').create('api/v3'); const SIGNALS = [ 'SIGABRT', @@ -81,49 +80,9 @@ function get_job(body) { } } - if (compile_memory_limit) { - if (typeof compile_memory_limit !== 'number') { - return reject({ - message: 'if specified, compile_memory_limit must be a number', - }); - } - - if ( - config.compile_memory_limit >= 0 && - (compile_memory_limit > config.compile_memory_limit || - compile_memory_limit < 0) - ) { - return reject({ - message: - 'compile_memory_limit cannot exceed the configured limit of ' + - config.compile_memory_limit, - }); - } - } - - if (run_memory_limit) { - if (typeof run_memory_limit !== 'number') { - return reject({ - message: 'if specified, run_memory_limit must be a number', - }); - } - - if ( - config.run_memory_limit >= 0 && - (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) - ) { - return reject({ - message: - 'run_memory_limit cannot exceed the configured limit of ' + - config.run_memory_limit, - }); - } - } - - const rt = runtime.find(rt => [ - ...rt.aliases, - rt.language - ].includes(rt.language)) + const rt = runtime.find(rt => + [...rt.aliases, rt.language].includes(rt.language) + ); if (rt === undefined) { return reject({ diff --git a/api/src/api/v3.js b/api/src/api/v3.js index 3e8b530..aee5772 100644 --- a/api/src/api/v3.js +++ b/api/src/api/v3.js @@ -3,15 +3,52 @@ const router = express.Router(); const events = require('events'); -const config = require('../config'); const runtime = require('../runtime'); const { Job } = require('../job'); -const logger = require('logplease').create('api/v3'); -const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"] +const SIGNALS = [ + 'SIGABRT', + 'SIGALRM', + 'SIGBUS', + 'SIGCHLD', + 'SIGCLD', + 'SIGCONT', + 'SIGEMT', + 'SIGFPE', + 'SIGHUP', + 'SIGILL', + 'SIGINFO', + 'SIGINT', + 'SIGIO', + 'SIGIOT', + 'SIGKILL', + 'SIGLOST', + 'SIGPIPE', + 'SIGPOLL', + 'SIGPROF', + 'SIGPWR', + 'SIGQUIT', + 'SIGSEGV', + 'SIGSTKFLT', + 'SIGSTOP', + 'SIGTSTP', + 'SIGSYS', + 'SIGTERM', + 'SIGTRAP', + 'SIGTTIN', + 'SIGTTOU', + 'SIGUNUSED', + 'SIGURG', + 'SIGUSR1', + 'SIGUSR2', + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGWINCH', +]; // ref: https://man7.org/linux/man-pages/man7/signal.7.html -function get_job(body){ +function get_job(body) { const { runtime_id, args, @@ -20,93 +57,96 @@ function get_job(body){ compile_memory_limit, run_memory_limit, run_timeout, - compile_timeout + compile_timeout, } = body; return new Promise((resolve, reject) => { if (typeof runtime_id !== 'number') { return reject({ - message: 'runtime_id is required as a number' + message: 'runtime_id is required as a number', }); } - if (!Array.isArray(files)) { + if (!files || 
!Array.isArray(files)) { return reject({ message: 'files is required as an array', }); } - for (const [i, file] of files.entries()) { - if (typeof file.content !== 'string') { - return reject({ - message: `files[${i}].content is required as a string`, - }); - } - } - - if (compile_memory_limit) { - if (typeof compile_memory_limit !== 'number') { - return reject({ - message: 'if specified, compile_memory_limit must be a number', - }); - } - - if ( - config.compile_memory_limit >= 0 && - (compile_memory_limit > config.compile_memory_limit || - compile_memory_limit < 0) - ) { - return reject({ - message: - 'compile_memory_limit cannot exceed the configured limit of ' + - config.compile_memory_limit, - }); - } - } - - if (run_memory_limit) { - if (typeof run_memory_limit !== 'number') { - return reject({ - message: 'if specified, run_memory_limit must be a number', - }); - } - - if ( - config.run_memory_limit >= 0 && - (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) - ) { - return reject({ - message: - 'run_memory_limit cannot exceed the configured limit of ' + - config.run_memory_limit, - }); - } - } const rt = runtime[runtime_id]; - if (rt === undefined) { return reject({ message: `Runtime #${runtime_id} is unknown`, }); } - resolve(new Job({ - runtime: rt, - args: args || [], - stdin: stdin || "", - files, - timeouts: { - run: run_timeout || 3000, - compile: compile_timeout || 10000, - }, - memory_limits: { - run: run_memory_limit || config.run_memory_limit, - compile: compile_memory_limit || config.compile_memory_limit, - } - })); - }) + if ( + rt.language !== 'file' && + !files.some(file => !file.encoding || file.encoding === 'utf8') + ) { + return reject({ + message: 'files must include at least one utf8 encoded file', + }); + } + if (files.some(file => typeof file.content !== 'string')) { + return reject({ + message: 'file.content is required as a string', + }); + } + + for (const constraint of ['memory_limit', 'timeout']) { + for (const type of ['compile', 'run']) { + const constraint_name = `${type}_${constraint}`; + const constraint_value = body[constraint_name]; + const configured_limit = rt[`${constraint}s`][type]; + if (!constraint_value) { + continue; + } + if (typeof constraint_value !== 'number') { + return reject({ + message: `If specified, ${constraint_name} must be a number`, + }); + } + if (configured_limit <= 0) { + continue; + } + if (constraint_value > configured_limit) { + return reject({ + message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`, + }); + } + if (constraint_value < 0) { + return reject({ + message: `${constraint_name} must be non-negative`, + }); + } + } + } + + const job_compile_timeout = compile_timeout || rt.timeouts.compile; + const job_run_timeout = run_timeout || rt.timeouts.run; + const job_compile_memory_limit = + compile_memory_limit || rt.memory_limits.compile; + const job_run_memory_limit = run_memory_limit || rt.memory_limits.run; + resolve( + new Job({ + runtime: rt, + args: args || [], + stdin: stdin || '', + files, + timeouts: { + run: job_run_timeout, + compile: job_compile_timeout, + }, + memory_limits: { + run: job_run_memory_limit, + compile: job_compile_memory_limit, + }, + }) + ); + }); } router.use((req, res, next) => { @@ -124,89 +164,106 @@ router.use((req, res, next) => { }); router.ws('/connect', async (ws, req) => { - let job = null; let eventBus = new events.EventEmitter(); - eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: 
data.toString()}))) - eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()}))) - eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage}))) - eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status}))) + eventBus.on('stdout', data => + ws.send( + JSON.stringify({ + type: 'data', + stream: 'stdout', + data: data.toString(), + }) + ) + ); + eventBus.on('stderr', data => + ws.send( + JSON.stringify({ + type: 'data', + stream: 'stderr', + data: data.toString(), + }) + ) + ); + eventBus.on('stage', stage => + ws.send(JSON.stringify({ type: 'stage', stage })) + ); + eventBus.on('exit', (stage, status) => + ws.send(JSON.stringify({ type: 'exit', stage, ...status })) + ); - ws.on("message", async (data) => { - - try{ + ws.on('message', async data => { + try { const msg = JSON.parse(data); - switch(msg.type){ - case "init": - if(job === null){ + switch (msg.type) { + case 'init': + if (job === null) { job = await get_job(msg); await job.prime(); - ws.send(JSON.stringify({ - type: "runtime", - language: job.runtime.language, - version: job.runtime.version.raw - })) + ws.send( + JSON.stringify({ + type: 'runtime', + language: job.runtime.language, + version: job.runtime.version.raw, + }) + ); await job.execute_interactive(eventBus); - ws.close(4999, "Job Completed"); - - }else{ - ws.close(4000, "Already Initialized"); + ws.close(4999, 'Job Completed'); + } else { + ws.close(4000, 'Already Initialized'); } break; - case "data": - if(job !== null){ - if(msg.stream === "stdin"){ - eventBus.emit("stdin", msg.data) - }else{ - ws.close(4004, "Can only write to stdin") + case 'data': + if (job !== null) { + if (msg.stream === 'stdin') { + eventBus.emit('stdin', msg.data); + } else { + ws.close(4004, 'Can only write to stdin'); + } + } else { + ws.close(4003, 'Not yet initialized'); } - }else{ - ws.close(4003, "Not yet initialized") - } - break; - case "signal": - if(job !== null){ - if(SIGNALS.includes(msg.signal)){ - eventBus.emit("signal", msg.signal) - }else{ - ws.close(4005, "Invalid signal") + break; + case 'signal': + if (job !== null) { + if (SIGNALS.includes(msg.signal)) { + eventBus.emit('signal', msg.signal); + } else { + ws.close(4005, 'Invalid signal'); + } + } else { + ws.close(4003, 'Not yet initialized'); } - }else{ - ws.close(4003, "Not yet initialized") - } - break; + break; } - - }catch(error){ - ws.send(JSON.stringify({type: "error", message: error.message})) - ws.close(4002, "Notified Error") + } catch (error) { + ws.send(JSON.stringify({ type: 'error', message: error.message })); + ws.close(4002, 'Notified Error'); // ws.close message is limited to 123 characters, so we notify over WS then close. } - }) + }); - ws.on("close", async ()=>{ - if(job !== null){ - await job.cleanup() + ws.on('close', async () => { + if (job !== null) { + await job.cleanup(); } - }) + }); - setTimeout(()=>{ + setTimeout(() => { //Terminate the socket after 1 second, if not initialized. 
- if(job === null) - ws.close(4001, "Initialization Timeout"); - }, 1000) -}) + if (job === null) ws.close(4001, 'Initialization Timeout'); + }, 1000); +}); router.post('/execute', async (req, res) => { - - try{ + try { const job = await get_job(req.body); + await job.prime(); const result = await job.execute(); @@ -214,7 +271,7 @@ router.post('/execute', async (req, res) => { await job.cleanup(); return res.status(200).send(result); - }catch(error){ + } catch (error) { return res.status(400).json(error); } }); @@ -226,7 +283,7 @@ router.get('/runtimes', (req, res) => { version: rt.version.raw, aliases: rt.aliases, runtime: rt.runtime, - id: rt.id + id: rt.id, }; }); diff --git a/api/src/runtime.js b/api/src/runtime.js index b239426..554feed 100644 --- a/api/src/runtime.js +++ b/api/src/runtime.js @@ -1,9 +1,6 @@ const logger = require('logplease').create('runtime'); const cp = require('child_process'); const config = require('./config'); -const globals = require('./globals'); -const fss = require('fs'); -const path = require('path'); const runtimes = [];
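
---

Series notes:

PATCH 4/6 and 5/6 make per-language limits configurable through the
PISTON_LIMIT_OVERRIDES environment variable: a JSON object mapping a
language name to any of max_process_count, max_open_files, max_file_size,
compile_memory_limit, run_memory_limit, compile_timeout, run_timeout and
output_max_size. Each override is run through the matching option's parser
and validators at startup. A minimal example (the language name and values
here are illustrative, not taken from these diffs):

    PISTON_LIMIT_OVERRIDES='{"matl":{"run_timeout":10000,"max_file_size":1000000}}'

PATCH 6/6 has the v3 API select runtimes by numeric id and fall back to
each runtime's own timeouts/memory_limits when a request omits them. The
sketch below exercises the new /runtimes and /execute routes; it assumes
the v3 router is mounted at /api/v3, the default localhost:2000 bind
address, and a Node version with a global fetch - none of which is
established by these diffs.

    // Sketch only: resolve a runtime id, then execute a job against it.
    const BASE = 'http://localhost:2000/api/v3'; // assumed mount point

    async function run() {
        // v3 addresses runtimes by numeric id, so look the id up first.
        const runtimes = await (await fetch(`${BASE}/runtimes`)).json();
        const rt = runtimes.find(r => r.language === 'python'); // illustrative pick

        const res = await fetch(`${BASE}/execute`, {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({
                runtime_id: rt.id, // get_job() requires a number
                // at least one utf8 (the default encoding) file with string content
                files: [{ content: 'print(input())' }],
                stdin: 'hello',
                run_timeout: 1000, // optional; rejected if above the configured limit
            }),
        });
        console.log(res.status, await res.json());
    }

    run();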