diff --git a/api/src/api/v2.js b/api/src/api/v2.js
index e3e0522..a3571e1 100644
--- a/api/src/api/v2.js
+++ b/api/src/api/v2.js
@@ -3,7 +3,6 @@ const router = express.Router();
 
 const events = require('events');
 
-const config = require('../config');
 const runtime = require('../runtime');
 const { Job } = require('../job');
 const package = require('../package');
@@ -13,7 +12,7 @@ const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGE
 // ref: https://man7.org/linux/man-pages/man7/signal.7.html
 
 function get_job(body){
-    const {
+    let {
         language,
         version,
         args,
@@ -31,19 +30,16 @@ function get_job(body){
             message: 'language is required as a string',
         });
     }
-
     if (!version || typeof version !== 'string') {
         return reject({
             message: 'version is required as a string',
         });
     }
-
     if (!files || !Array.isArray(files)) {
         return reject({
             message: 'files is required as an array',
         });
     }
-
     for (const [i, file] of files.entries()) {
         if (typeof file.content !== 'string') {
             return reject({
@@ -52,73 +48,64 @@ function get_job(body){
         }
     }
-
-    if (compile_memory_limit) {
-        if (typeof compile_memory_limit !== 'number') {
-            return reject({
-                message: 'if specified, compile_memory_limit must be a number',
-            });
-        }
-
-        if (
-            config.compile_memory_limit >= 0 &&
-            (compile_memory_limit > config.compile_memory_limit ||
-                compile_memory_limit < 0)
-        ) {
-            return reject({
-                message:
-                    'compile_memory_limit cannot exceed the configured limit of ' +
-                    config.compile_memory_limit,
-            });
-        }
-    }
-
-    if (run_memory_limit) {
-        if (typeof run_memory_limit !== 'number') {
-            return reject({
-                message: 'if specified, run_memory_limit must be a number',
-            });
-        }
-
-        if (
-            config.run_memory_limit >= 0 &&
-            (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
-        ) {
-            return reject({
-                message:
-                    'run_memory_limit cannot exceed the configured limit of ' +
-                    config.run_memory_limit,
-            });
-        }
-    }
-
     const rt = runtime.get_latest_runtime_matching_language_version(
         language,
         version
     );
-
     if (rt === undefined) {
         return reject({
             message: `${language}-${version} runtime is unknown`,
         });
     }
 
+    for (let constraint of ['memory_limit', 'timeout']) {
+        for (let type of ['compile', 'run']) {
+            let constraint_name = `${type}_${constraint}`;
+            let constraint_value = body[constraint_name];
+            let configured_limit = rt[`${constraint}s`][type];
+            if (!constraint_value) {
+                continue;
+            }
+            if (typeof constraint_value !== 'number') {
+                return reject({
+                    message: `If specified, ${constraint_name} must be a number`
+                });
+            }
+            if (configured_limit <= 0) {
+                continue;
+            }
+            if (constraint_value > configured_limit) {
+                return reject({
+                    message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`
+                });
+            }
+            if (constraint_value < 0) {
+                return reject({
+                    message: `${constraint_name} must be non-negative`
+                });
+            }
+        }
+    }
+
+    compile_timeout = compile_timeout || rt.timeouts.compile;
+    run_timeout = run_timeout || rt.timeouts.run;
+    compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
+    run_memory_limit = run_memory_limit || rt.memory_limits.run;
 
     resolve(new Job({
         runtime: rt,
-        alias: language,
         args: args || [],
         stdin: stdin || "",
         files,
         timeouts: {
-            run: run_timeout || 3000,
-            compile: compile_timeout || 10000,
+            run: run_timeout,
+            compile: compile_timeout,
         },
         memory_limits: {
-            run: run_memory_limit || config.run_memory_limit,
-            compile: compile_memory_limit || config.compile_memory_limit,
+            run: run_memory_limit,
+            compile: compile_memory_limit,
         }
     }));
-    })
-
+    });
 }
 
 router.use((req, res, next) => {
diff --git a/api/src/config.js b/api/src/config.js
index bbd7ae9..c191644 100644
--- a/api/src/config.js
+++ b/api/src/config.js
@@ -2,6 +2,48 @@ const fss = require('fs');
 const Logger = require('logplease');
 const logger = Logger.create('config');
 
+function parse_overrides(overrides) {
+    try {
+        return JSON.parse(overrides);
+    }
+    catch (e) {
+        return null;
+    }
+}
+
+function validate_overrides(overrides, options) {
+    for (let language in overrides) {
+        for (let key in overrides[language]) {
+            if (
+                ![
+                    'max_process_count', 'max_open_files', 'max_file_size',
+                    'compile_memory_limit', 'run_memory_limit', 'compile_timeout',
+                    'run_timeout', 'output_max_size'
+                ].includes(key)
+            ) {
+                logger.error(`Invalid overridden option: ${key}`);
+                return false;
+            }
+            let option = options.find((o) => o.key === key);
+            let parser = option.parser;
+            let raw = overrides[language][key];
+            let value = parser(raw);
+            let validators = option.validators;
+            for (let validator of validators) {
+                let response = validator(value, raw);
+                if (response !== true) {
+                    logger.error(`Failed to validate overridden option: ${key}`, response);
+                    return false;
+                }
+            }
+            overrides[language][key] = value;
+        }
+        // Modifies the reference
+        options[options.index_of(options.find((o) => o.key === 'limit_overrides'))] = overrides;
+    }
+    return true;
+}
+
 const options = [
     {
         key: 'log_level',
@@ -91,6 +133,22 @@ const options = [
         parser: parse_int,
         validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
     },
+    {
+        key: 'compile_timeout',
+        desc:
+            'Max time allowed for compile stage in milliseconds',
+        default: 10000, // 10 seconds
+        parser: parse_int,
+        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
+    },
+    {
+        key: 'run_timeout',
+        desc:
+            'Max time allowed for run stage in milliseconds',
+        default: 3000, // 3 seconds
+        parser: parse_int,
+        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
+    },
     {
         key: 'compile_memory_limit',
         desc:
@@ -120,6 +178,18 @@ const options = [
         default: 64,
         parser: parse_int,
         validators: [(x) => x > 0 || `${x} cannot be negative`]
+    },
+    {
+        key: 'limit_overrides',
+        desc: 'Per-language exceptions in JSON format for each of:\
+            max_process_count, max_open_files, max_file_size, compile_memory_limit,\
+            run_memory_limit, compile_timeout, run_timeout, output_max_size',
+        default: {},
+        parser: parse_overrides,
+        validators: [
+            (x) => !!x || `Invalid JSON format for the overrides\n${x}`,
+            (overrides, _, options) => validate_overrides(overrides, options) || `Failed to validate the overrides`
+        ]
     }
 ];
 
@@ -138,12 +208,12 @@ options.forEach(option => {
 
     const parsed_val = parser(env_val);
 
-    const value = env_val || option.default;
+    const value = parsed_val || option.default;
 
     option.validators.for_each(validator => {
         let response = null;
-        if (env_val) response = validator(parsed_val, env_val);
-        else response = validator(value, value);
+        if (env_val) response = validator(parsed_val, env_val, options);
+        else response = validator(value, value, options);
 
         if (response !== true) {
             errored = true;
diff --git a/api/src/job.js b/api/src/job.js
index ecc4ab3..552463a 100644
--- a/api/src/job.js
+++ b/api/src/job.js
@@ -30,7 +30,7 @@ setInterval(()=>{
 
 class Job {
-    constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
+    constructor({ runtime, files, args, stdin }) {
         this.uuid = uuidv4();
         this.runtime = runtime;
         this.files = files.map((file, i) => ({
@@ -40,8 +40,6 @@ class Job {
 
         this.args = args;
         this.stdin = stdin;
-        this.timeouts = timeouts;
-        this.memory_limits = memory_limits;
 
         this.uid = config.runner_uid_min + uid;
         this.gid = config.runner_gid_min + gid;
@@ -102,9 +100,9 @@ class Job {
 
         const prlimit = [
             'prlimit',
-            '--nproc=' + config.max_process_count,
-            '--nofile=' + config.max_open_files,
-            '--fsize=' + config.max_file_size,
+            '--nproc=' + this.runtime.max_process_count,
+            '--nofile=' + this.runtime.max_open_files,
+            '--fsize=' + this.runtime.max_file_size,
         ];
 
         if (memory_limit >= 0) {
@@ -142,8 +140,6 @@ class Job {
                 proc.kill(signal)
             })
         }
-
-
         const kill_timeout = set_timeout(
             async _ => {
@@ -156,7 +152,7 @@ class Job {
         proc.stderr.on('data', async data => {
             if(eventBus !== null) {
                 eventBus.emit("stderr", data);
-            } else if (stderr.length > config.output_max_size) {
+            } else if (stderr.length > this.runtime.output_max_size) {
                 logger.info(`stderr length exceeded uuid=${this.uuid}`)
                 process.kill(proc.pid, 'SIGKILL')
             } else {
@@ -168,7 +164,7 @@ class Job {
         proc.stdout.on('data', async data => {
             if(eventBus !== null){
                 eventBus.emit("stdout", data);
-            } else if (stdout.length > config.output_max_size) {
+            } else if (stdout.length > this.runtime.output_max_size) {
                 logger.info(`stdout length exceeded uuid=${this.uuid}`)
                 process.kill(proc.pid, 'SIGKILL')
             } else {
@@ -223,8 +219,8 @@ class Job {
             compile = await this.safe_call(
                 path.join(this.runtime.pkgdir, 'compile'),
                 this.files.map(x => x.name),
-                this.timeouts.compile,
-                this.memory_limits.compile
+                this.runtime.timeouts.compile,
+                this.runtime.memory_limits.compile
             );
         }
@@ -233,8 +229,8 @@ class Job {
         const run = await this.safe_call(
             path.join(this.runtime.pkgdir, 'run'),
             [this.files[0].name, ...this.args],
-            this.timeouts.run,
-            this.memory_limits.run
+            this.runtime.timeouts.run,
+            this.runtime.memory_limits.run
         );
 
         this.state = job_states.EXECUTED;
@@ -266,8 +262,8 @@ class Job {
         const {error, code, signal} = await this.safe_call(
             path.join(this.runtime.pkgdir, 'compile'),
             this.files.map(x => x.name),
-            this.timeouts.compile,
-            this.memory_limits.compile,
+            this.runtime.timeouts.compile,
+            this.runtime.memory_limits.compile,
             eventBus
         )
@@ -279,14 +275,14 @@ class Job {
         const {error, code, signal} = await this.safe_call(
             path.join(this.runtime.pkgdir, 'run'),
             [this.files[0].name, ...this.args],
-            this.timeouts.run,
-            this.memory_limits.run,
+            this.runtime.timeouts.run,
+            this.runtime.memory_limits.run,
             eventBus
         );
 
         eventBus.emit("exit", "run", {error, code, signal})
-        
+
         this.state = job_states.EXECUTED;
     }
@@ -308,8 +304,7 @@ class Job {
         const proc_lines = proc_status.to_string().split("\n")
         const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
         const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
-
-        
+
         if(ruid == this.uid || euid == this.uid)
             return parse_int(proc_id)
diff --git a/api/src/runtime.js b/api/src/runtime.js
index 191fc5d..60d3c23 100644
--- a/api/src/runtime.js
+++ b/api/src/runtime.js
@@ -8,12 +8,54 @@ const path = require('path');
 const runtimes = [];
 
 class Runtime {
-    constructor({ language, version, aliases, pkgdir, runtime }) {
+    constructor({
+        language, version, aliases, pkgdir, runtime, timeouts, memory_limits, max_process_count,
+        max_open_files, max_file_size, output_max_size
+    }) {
         this.language = language;
         this.version = version;
         this.aliases = aliases || [];
         this.pkgdir = pkgdir;
         this.runtime = runtime;
+        this.timeouts = timeouts;
+        this.memory_limits = memory_limits;
+        this.max_process_count = max_process_count;
+        this.max_open_files = max_open_files;
+        this.max_file_size = max_file_size;
+        this.output_max_size = output_max_size;
     }
+
+    static compute_single_limit(language_name, limit_name, language_limit_overrides) {
+        return (
+            config.limit_overrides[language_name] && config.limit_overrides[language_name][limit_name]
+            || language_limit_overrides && language_limit_overrides[limit_name]
+            || config[limit_name]
+        );
+    }
+
+    static compute_all_limits(language_name, language_limit_overrides) {
+        return {
+            timeouts: {
+                compile:
+                    this.compute_single_limit(language_name, 'compile_timeout', language_limit_overrides),
+                run:
+                    this.compute_single_limit(language_name, 'run_timeout', language_limit_overrides)
+            },
+            memory_limits: {
+                compile:
+                    this.compute_single_limit(language_name, 'compile_memory_limit', language_limit_overrides),
+                run:
+                    this.compute_single_limit(language_name, 'run_memory_limit', language_limit_overrides)
+            },
+            max_process_count:
+                this.compute_single_limit(language_name, 'max_process_count', language_limit_overrides),
+            max_open_files:
+                this.compute_single_limit(language_name, 'max_open_files', language_limit_overrides),
+            max_file_size:
+                this.compute_single_limit(language_name, 'max_file_size', language_limit_overrides),
+            output_max_size:
+                this.compute_single_limit(language_name, 'output_max_size', language_limit_overrides),
+        }
+    }
 
     static load_package(package_dir) {
@@ -21,7 +63,7 @@ class Runtime {
             fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
         );
 
-        let { language, version, build_platform, aliases, provides } = info;
+        let { language, version, build_platform, aliases, provides, limit_overrides } = info;
         version = semver.parse(version);
 
         if (build_platform !== globals.platform) {
@@ -41,6 +83,7 @@ class Runtime {
                     version,
                     pkgdir: package_dir,
                     runtime: language,
+                    ...Runtime.compute_all_limits(lang.language, lang.limit_overrides)
                 })
             );
         });
@@ -51,6 +94,7 @@ class Runtime {
                 version,
                 aliases,
                 pkgdir: package_dir,
+                ...Runtime.compute_all_limits(language, limit_overrides)
             })
         );
     }
diff --git a/packages/dotnet/5.0.201/build.sh b/packages/dotnet/5.0.201/build.sh
old mode 100644
new mode 100755
index c685668..6318b07
--- a/packages/dotnet/5.0.201/build.sh
+++ b/packages/dotnet/5.0.201/build.sh
@@ -7,8 +7,10 @@ rm dotnet.tar.gz
 
 # Cache nuget packages
 export DOTNET_CLI_HOME=$PWD
 ./dotnet new console -o cache_application
+./dotnet new console -lang F# -o fs_cache_application
+./dotnet new console -lang VB -o vb_cache_application
 # This calls a restore on the global-packages index ($DOTNET_CLI_HOME/.nuget/packages)
 # If we want to allow more packages, we could add them to this cache_application
-rm -rf cache_application
-# Get rid of it, we don't actually need the application - just the restore
\ No newline at end of file
+rm -rf cache_application fs_cache_application vb_cache_application
+# Get rid of it, we don't actually need the application - just the restore
diff --git a/packages/dotnet/5.0.201/compile b/packages/dotnet/5.0.201/compile
index 8bfcc27..1c34213 100644
--- a/packages/dotnet/5.0.201/compile
+++ b/packages/dotnet/5.0.201/compile
@@ -1,15 +1,36 @@
 #!/usr/bin/env bash
 
+[ "${PISTON_LANGUAGE}" == "fsi" ] && exit 0
+
 export DOTNET_CLI_HOME=$PWD
 export HOME=$PWD
 
-rename 's/$/\.cs/' "$@" # Add .cs extension
-
 dotnet build --help > /dev/null # Shut the thing up
 
-dotnet new console -o . --no-restore
-rm Program.cs
+case "${PISTON_LANGUAGE}" in
+    basic.net)
+        rename 's/$/\.vb/' "$@" # Add .vb extension
+        dotnet new console -lang VB -o . --no-restore
+        rm Program.vb
+        ;;
+    fsharp.net)
+        first_file=$1
+        shift
+        rename 's/$/\.fs/' "$@" # Add .fs extension
+        dotnet new console -lang F# -o . --no-restore
+        mv $first_file Program.fs # For some reason F#.net doesn't work unless the file name is Program.fs
+        ;;
+    csharp.net)
+        rename 's/$/\.cs/' "$@" # Add .cs extension
+        dotnet new console -o . --no-restore
+        rm Program.cs
+        ;;
+    *)
+        echo "How did you get here? (${PISTON_LANGUAGE})"
+        exit 1
+        ;;
+esac
 
 dotnet restore --source $DOTNET_ROOT/.nuget/packages
 
-dotnet build --no-restore
\ No newline at end of file
+dotnet build --no-restore
diff --git a/packages/dotnet/5.0.201/environment b/packages/dotnet/5.0.201/environment
index 596d56e..468463d 100644
--- a/packages/dotnet/5.0.201/environment
+++ b/packages/dotnet/5.0.201/environment
@@ -2,4 +2,5 @@
 # Put 'export' statements here for environment variables
 
 export DOTNET_ROOT=$PWD
-export PATH=$DOTNET_ROOT:$PATH
\ No newline at end of file
+export PATH=$DOTNET_ROOT:$PATH
+export FSI_PATH=$(find $(pwd) -name fsi.dll)
diff --git a/packages/dotnet/5.0.201/metadata.json b/packages/dotnet/5.0.201/metadata.json
index 619265d..7c73c58 100644
--- a/packages/dotnet/5.0.201/metadata.json
+++ b/packages/dotnet/5.0.201/metadata.json
@@ -1,5 +1,66 @@
 {
     "language": "dotnet",
     "version": "5.0.201",
-    "aliases": ["cs", "csharp"]
+    "provides": [
+        {
+            "language": "basic.net",
+            "aliases": [
+                "basic",
+                "visual-basic",
+                "visual-basic.net",
+                "vb",
+                "vb.net",
+                "vb-dotnet",
+                "dotnet-vb",
+                "basic-dotnet",
+                "dotnet-basic"
+            ],
+            "limit_overrides": { "max_process_count": 128 }
+        },
+        {
+            "language": "fsharp.net",
+            "aliases": [
+                "fsharp",
+                "fs",
+                "f#",
+                "fs.net",
+                "f#.net",
+                "fsharp-dotnet",
+                "fs-dotnet",
+                "f#-dotnet",
+                "dotnet-fsharp",
+                "dotnet-fs",
+                "dotnet-f#"
+            ],
+            "limit_overrides": { "max_process_count": 128 }
+        },
+        {
+            "language": "csharp.net",
+            "aliases": [
+                "csharp",
+                "c#",
+                "cs",
+                "c#.net",
+                "cs.net",
+                "c#-dotnet",
+                "cs-dotnet",
+                "csharp-dotnet",
+                "dotnet-c#",
+                "dotnet-cs",
+                "dotnet-csharp"
+            ],
+            "limit_overrides": { "max_process_count": 128 }
+        },
+        {
+            "language": "fsi",
+            "aliases": [
+                "fsx",
+                "fsharp-interactive",
+                "f#-interactive",
+                "dotnet-fsi",
+                "fsi-dotnet",
+                "fsi.net"
+            ]
+        }
+    ]
 }
diff --git a/packages/dotnet/5.0.201/run b/packages/dotnet/5.0.201/run
index 774a08a..6b5c995 100644
--- a/packages/dotnet/5.0.201/run
+++ b/packages/dotnet/5.0.201/run
@@ -3,5 +3,23 @@
 # Put instructions to run the runtime
 
 export DOTNET_CLI_HOME=$PWD
-shift
-dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
\ No newline at end of file
+case "${PISTON_LANGUAGE}" in
+    basic.net)
+        ;&
+    fsharp.net)
+        ;&
+    csharp.net)
+        shift
+        dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
+        ;;
+    fsi)
+        FILENAME=$1
+        rename 's/$/\.fsx/' $FILENAME # Add .fsx extension
+        shift
+        dotnet $FSI_PATH $FILENAME.fsx "$@"
+        ;;
+    *)
+        echo "How did you get here? (${PISTON_LANGUAGE})"
+        exit 1
+        ;;
+esac
diff --git a/packages/dotnet/5.0.201/test.fs b/packages/dotnet/5.0.201/test.fs
new file mode 100644
index 0000000..006ac10
--- /dev/null
+++ b/packages/dotnet/5.0.201/test.fs
@@ -0,0 +1,6 @@
+open System
+
+[<EntryPoint>]
+let main argv =
+    printfn "OK"
+    0
diff --git a/packages/dotnet/5.0.201/test.fsx b/packages/dotnet/5.0.201/test.fsx
new file mode 100644
index 0000000..33d166f
--- /dev/null
+++ b/packages/dotnet/5.0.201/test.fsx
@@ -0,0 +1 @@
+printfn "OK"
diff --git a/packages/dotnet/5.0.201/test.vb b/packages/dotnet/5.0.201/test.vb
new file mode 100644
index 0000000..291042e
--- /dev/null
+++ b/packages/dotnet/5.0.201/test.vb
@@ -0,0 +1,9 @@
+Imports System
+
+Module Module1
+
+    Sub Main()
+        Console.WriteLine("OK")
+    End Sub
+
+End Module
diff --git a/readme.md b/readme.md
index 5b41e6a..6a55116
--- a/readme.md
+++ b/readme.md
@@ -330,10 +330,10 @@ Content-Type: application/json
 `cow`,
 `crystal`,
 `csharp`,
+`csharp.net`,
 `d`,
 `dart`,
 `dash`,
-`dotnet`,
 `dragon`,
 `elixir`,
 `emacs`,
@@ -341,6 +341,8 @@ Content-Type: application/json
 `forte`,
 `fortran`,
 `freebasic`,
+`fsharp.net`,
+`fsi`,
 `go`,
 `golfscript`,
 `groovy`,
@@ -382,6 +384,7 @@ Content-Type: application/json
 `swift`,
 `typescript`,
 `basic`,
+`basic.net`,
 `vlang`,
 `yeethon`,
 `zig`,