Improved memory limits (#248)
* Added optional compile_memory_limit and run_memory_limit parameters
* Combined memory limit parameters into one
commit fb102ebe83 (parent eaf0ba34bd)
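The hunks below touch the `/execute` route handler, the configuration options, the `Job` runner, and the API documentation. For orientation, a request using the new optional fields might look like the sketch below; the endpoint URL, language/version pair, and 64 MB limit values are illustrative assumptions, not taken from this patch.

```javascript
// Illustrative client call showing the new optional fields. The URL and all
// values here are assumptions for the example only.
const body = {
    language: 'js',
    version: '15.10.0',
    files: [{ name: 'main.js', content: 'console.log("hello");' }],
    compile_memory_limit: 64000000, // bytes; omit to fall back to the configured default
    run_memory_limit: 64000000      // bytes; omit to fall back to the configured default
};

fetch('http://localhost:2000/api/v2/execute', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify(body)
})
    .then(res => res.json())
    .then(result => console.log(result));
```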
@@ -1,6 +1,7 @@
 const express = require('express');
 const router = express.Router();
 
+const config = require('../config');
 const runtime = require('../runtime');
 const {Job} = require("../job");
 const package = require('../package')
@@ -17,7 +18,7 @@ router.use(function(req, res, next){
 })
 
 router.post('/execute', async function(req, res){
-    const {language, version, files, stdin, args, run_timeout, compile_timeout} = req.body;
+    const {language, version, files, stdin, args, run_timeout, compile_timeout, compile_memory_limit, run_memory_limit} = req.body;
 
     if(!language || typeof language !== "string")
     {
@@ -56,6 +57,37 @@ router.post('/execute', async function(req, res){
         }
     }
 
+    if (compile_memory_limit) {
+        if (typeof compile_memory_limit !== "number") {
+            return res
+                .status(400)
+                .send({
+                    message: "if specified, compile_memory_limit must be a number"
+                })
+        } else if (config.compile_memory_limit >= 0 && (compile_memory_limit > config.compile_memory_limit || compile_memory_limit < 0)) {
+            return res
+                .status(400)
+                .send({
+                    message: "compile_memory_limit cannot exceed the configured limit of " + config.compile_memory_limit
+                })
+        }
+    }
+
+    if (run_memory_limit) {
+        if (typeof run_memory_limit !== "number") {
+            return res
+                .status(400)
+                .send({
+                    message: "if specified, run_memory_limit must be a number"
+                })
+        } else if (config.run_memory_limit >= 0 && (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)) {
+            return res
+                .status(400)
+                .send({
+                    message: "run_memory_limit cannot exceed the configured limit of " + config.run_memory_limit
+                })
+        }
+    }
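The two blocks above perform the same bounds check for each field. Purely as an illustration, that check could be expressed as a single standalone helper; `validate_memory_limit` is a hypothetical name and is not part of this patch.

```javascript
// Hypothetical refactor of the repeated check above, shown only for clarity.
// Returns an error message when the value is invalid, or null when it is acceptable.
function validate_memory_limit(name, value, configured_max) {
    if (value === undefined || value === null) {
        return null; // the field is optional
    }
    if (typeof value !== 'number') {
        return 'if specified, ' + name + ' must be a number';
    }
    // A configured limit below zero means "no cap", so only enforce when it is >= 0.
    if (configured_max >= 0 && (value > configured_max || value < 0)) {
        return name + ' cannot exceed the configured limit of ' + configured_max;
    }
    return null;
}

// Example with an assumed configured cap of 128000000 bytes:
console.log(validate_memory_limit('run_memory_limit', 256000000, 128000000));
// -> "run_memory_limit cannot exceed the configured limit of 128000000"
```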
@@ -78,6 +110,10 @@ router.post('/execute', async function(req, res){
         timeouts: {
             run: run_timeout || 3000,
             compile: compile_timeout || 10000
-        }
+        },
+        memory_limits: {
+            run: run_memory_limit || config.run_memory_limit,
+            compile: compile_memory_limit || config.compile_memory_limit
+        }
     });
@@ -108,6 +108,18 @@ const options = [
         default: 1000000, //1MB
         validators: []
     },
+    {
+        key: 'compile_memory_limit',
+        desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
+        default: -1, // no limit
+        validators: []
+    },
+    {
+        key: 'run_memory_limit',
+        desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
+        default: -1, // no limit
+        validators: []
+    },
     {
         key: 'repo_url',
         desc: 'URL of repo index',
@@ -19,7 +19,7 @@ let gid = 0;
 
 class Job {
 
-    constructor({ runtime, files, args, stdin, timeouts }) {
+    constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
         this.uuid = uuidv4();
         this.runtime = runtime;
         this.files = files.map((file,i) => ({
@@ -30,6 +30,7 @@ class Job {
         this.args = args;
         this.stdin = stdin;
         this.timeouts = timeouts;
+        this.memory_limits = memory_limits;
 
         this.uid = config.runner_uid_min + uid;
         this.gid = config.runner_gid_min + gid;
@@ -67,7 +68,7 @@ class Job {
         logger.debug('Primed job');
     }
 
-    async safe_call(file, args, timeout) {
+    async safe_call(file, args, timeout, memory_limit) {
         return new Promise((resolve, reject) => {
             const nonetwork = config.disable_networking ? ['nosocket'] : [];
 
@@ -78,6 +79,10 @@ class Job {
                 '--fsize=' + config.max_file_size
             ];
 
+            if (memory_limit >= 0) {
+                prlimit.push('--as=' + memory_limit);
+            }
+
             const proc_call = [
                 ...prlimit,
                 ...nonetwork,
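prlimit's `--as` option caps the process address space (RLIMIT_AS) in bytes, and the guard above only adds it when a non-negative limit was resolved. A minimal standalone sketch of that behaviour, with an illustrative limit value:

```javascript
// Standalone sketch of the guard above; the 64 MB value is illustrative.
// A limit below zero (the default of -1) means "no cap", so no --as flag is added.
const memory_limit = 64000000; // bytes

const prlimit = ['prlimit', '--fsize=' + 1000000]; // other flags elided for brevity

if (memory_limit >= 0) {
    prlimit.push('--as=' + memory_limit); // RLIMIT_AS: total address space in bytes
}

console.log(prlimit.join(' ')); // "prlimit --fsize=1000000 --as=64000000"
```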
@@ -161,7 +166,8 @@ class Job {
             compile = await this.safe_call(
                 path.join(this.runtime.pkgdir, 'compile'),
                 this.files.map(x => x.name),
-                this.timeouts.compile
+                this.timeouts.compile,
+                this.memory_limits.compile
             );
         }
 
@@ -170,7 +176,8 @@ class Job {
         const run = await this.safe_call(
             path.join(this.runtime.pkgdir, 'run'),
             [this.files[0].name, ...this.args],
-            this.timeouts.run
+            this.timeouts.run,
+            this.memory_limits.run
         );
 
         this.state = job_states.EXECUTED;
@@ -210,6 +210,8 @@ This endpoint requests execution of some arbitrary code.
 - `args` (*optional*) The arguments to pass to the program. Must be an array or left out. Defaults to `[]`.
 - `compile_timeout` (*optional*) The maximum time allowed for the compile stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `10000` (10 seconds).
 - `run_timeout` (*optional*) The maximum time allowed for the run stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `3000` (3 seconds).
+- `compile_memory_limit` (*optional*) The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit).
+- `run_memory_limit` (*optional*) The maximum amount of memory the run stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit).
 
 ```json
 {
@@ -228,7 +230,9 @@ This endpoint requests execution of some arbitrary code.
         "3"
     ],
     "compile_timeout": 10000,
-    "run_timeout": 3000
+    "run_timeout": 3000,
+    "compile_memory_limit": -1,
+    "run_memory_limit": -1
 }
 ```
 A typical response upon successful execution will contain 1 or 2 keys `run` and `compile`.
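Taken together, the route and runner changes resolve the effective limit as: the per-request value if supplied, otherwise the configured default, with any negative result meaning no `--as` cap is applied. A small sketch of that precedence (the helper name and values are illustrative, not part of the patch):

```javascript
// Illustrative summary of the precedence above; effective_run_limit is hypothetical.
const config = { run_memory_limit: -1 }; // assumed configured default (no limit)

function effective_run_limit(run_memory_limit) {
    // Request value wins when supplied, otherwise the configured default applies.
    return run_memory_limit || config.run_memory_limit;
}

console.log(effective_run_limit(64000000)); // 64000000 -> prlimit is given --as=64000000
console.log(effective_run_limit(undefined)); // -1 -> no --as flag, i.e. unlimited
```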