Compare commits


4 Commits

| Author | SHA1 | Message | Date |
| --- | --- | --- | --- |
| Felix | fb102ebe83 | Improved memory limits (#248): added optional compile_memory_limit and run_memory_limit parameters; combined memory limit parameters into one | 2021-05-07 20:21:25 +12:00 |
| Thomas Hobson | eaf0ba34bd | Allow additional content-type parameters | 2021-05-07 20:12:27 +12:00 |
| Thomas Hobson | d95d67071c | reject on non-json content-types (#233) | 2021-05-07 19:37:22 +12:00 |
| Thomas Hobson | 9590d5afbc | Patch for race condition with filesystem and process cleanup | 2021-05-07 19:23:34 +12:00 |
5 changed files with 132 additions and 9 deletions


@@ -1,13 +1,24 @@
const express = require('express');
const router = express.Router();
const config = require('../config');
const runtime = require('../runtime');
const {Job} = require("../job");
const package = require('../package')
const logger = require('logplease').create('api/v1');
router.use(function(req, res, next){
if(req.method == "POST" && !req.headers['content-type'].startsWith("application/json"))
return res
.status(415)
.send({
message: "requests must be of type application/json"
})
next();
})
router.post('/execute', async function(req, res){
const {language, version, files, stdin, args, run_timeout, compile_timeout} = req.body;
const {language, version, files, stdin, args, run_timeout, compile_timeout, compile_memory_limit, run_memory_limit} = req.body;
if(!language || typeof language !== "string")
{
@@ -46,6 +57,37 @@ router.post('/execute', async function(req, res){
}
}
if (compile_memory_limit) {
if (typeof compile_memory_limit !== "number") {
return res
.status(400)
.send({
message: "if specified, compile_memory_limit must be a number"
})
} else if (config.compile_memory_limit >= 0 && (compile_memory_limit > config.compile_memory_limit || compile_memory_limit < 0)) {
return res
.status(400)
.send({
message: "compile_memory_limit cannot exceed the configured limit of " + config.compile_memory_limit
})
}
}
if (run_memory_limit) {
if (typeof run_memory_limit !== "number") {
return res
.status(400)
.send({
message: "if specified, run_memory_limit must be a number"
})
} else if (config.run_memory_limit >= 0 && (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)) {
return res
.status(400)
.send({
message: "run_memory_limit cannot exceed the configured limit of " + config.run_memory_limit
})
}
}
@@ -68,6 +110,10 @@ router.post('/execute', async function(req, res){
timeouts: {
run: run_timeout || 3000,
compile: compile_timeout || 10000
},
memory_limits: {
run: run_memory_limit || config.run_memory_limit,
compile: compile_memory_limit || config.compile_memory_limit
}
});
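
The middleware above (added in d95d67071c and relaxed in eaf0ba34bd) rejects any POST whose `Content-Type` does not start with `application/json`, which still admits extra parameters such as a charset. The following is a minimal Python sketch of both cases, assuming a local Piston instance on `127.0.0.1:2000` exposing the `/api/v2/execute` route used by the new test script at the end of this compare; the payload, language, and version are illustrative and not part of the diff.

```python
# Sketch only (not part of this diff): exercising the content-type middleware above.
# Assumes Piston is listening on 127.0.0.1:2000 and that a python 3.9.4 runtime is
# installed; substitute any language/version your instance actually provides.
import requests

body = {
    "language": "python",
    "version": "3.9.4",
    "files": [{"name": "main.py", "content": "print('hi')"}],
}

# "application/json; charset=utf-8" passes the startsWith("application/json") check,
# so parameters after the media type are accepted.
accepted = requests.post(
    "http://127.0.0.1:2000/api/v2/execute",
    json=body,
    headers={"Content-Type": "application/json; charset=utf-8"},
)
print(accepted.status_code)  # 200 if the requested runtime exists

# Anything that is not application/json is rejected before the body is inspected.
rejected = requests.post(
    "http://127.0.0.1:2000/api/v2/execute",
    data="not json",
    headers={"Content-Type": "text/plain"},
)
print(rejected.status_code, rejected.json())  # 415, "requests must be of type application/json"
```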


@@ -108,6 +108,18 @@ const options = [
default: 1000000, //1MB
validators: []
},
{
key: 'compile_memory_limit',
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
validators: []
},
{
key: 'run_memory_limit',
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
validators: []
},
{
key: 'repo_url',
desc: 'URL of repo index',
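
Both new options default to `-1`, which the route-level validation above treats as "no cap configured": the `config.*_memory_limit >= 0` guard means a request-supplied limit is only range-checked when the operator has set a non-negative cap. Below is a Python restatement of that check, purely for illustration; the function name is mine, not Piston's.

```python
# Illustrative restatement of the validation added in the API hunk above; not Piston code.
# configured_cap == -1 means "no limit configured", so any requested value is accepted.
def validate_requested_limit(requested, configured_cap, name):
    if not isinstance(requested, (int, float)):
        return f"if specified, {name} must be a number"
    if configured_cap >= 0 and (requested > configured_cap or requested < 0):
        return f"{name} cannot exceed the configured limit of {configured_cap}"
    return None  # valid

assert validate_requested_limit(64_000_000, -1, "run_memory_limit") is None              # no cap set
assert validate_requested_limit(64_000_000, 32_000_000, "run_memory_limit") is not None  # over the cap
assert validate_requested_limit(-5, 32_000_000, "run_memory_limit") is not None          # negative value
```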


@@ -19,7 +19,7 @@ let gid = 0;
class Job {
constructor({ runtime, files, args, stdin, timeouts }) {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
this.uuid = uuidv4();
this.runtime = runtime;
this.files = files.map((file,i) => ({
@@ -30,6 +30,7 @@ class Job {
this.args = args;
this.stdin = stdin;
this.timeouts = timeouts;
this.memory_limits = memory_limits;
this.uid = config.runner_uid_min + uid;
this.gid = config.runner_gid_min + gid;
@@ -67,7 +68,7 @@ class Job {
logger.debug('Primed job');
}
async safe_call(file, args, timeout) {
async safe_call(file, args, timeout, memory_limit) {
return new Promise((resolve, reject) => {
const nonetwork = config.disable_networking ? ['nosocket'] : [];
@@ -78,6 +79,10 @@ class Job {
'--fsize=' + config.max_file_size
];
if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit);
}
const proc_call = [
...prlimit,
...nonetwork,
@@ -161,7 +166,8 @@ class Job {
compile = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'),
this.files.map(x => x.name),
this.timeouts.compile
this.timeouts.compile,
this.memory_limits.compile
);
}
@@ -170,7 +176,8 @@ class Job {
const run = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'),
[this.files[0].name, ...this.args],
this.timeouts.run
this.timeouts.run,
this.memory_limits.run
);
this.state = job_states.EXECUTED;
@@ -219,9 +226,14 @@ class Job {
for (const file of contents) {
const file_path = path.join(clean_path, file);
try{
const stat = await fs.stat(file_path);
if(stat.uid == this.uid)
await fs.rm(file_path, { recursive: true, force: true });
}catch(e){
// File was deleted between reading the directory listing and checking this entry
logger.warn(`Error removing file ${file_path}: ${e}`)
}
}
}
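
A request-level limit that reaches `safe_call` as a non-negative number is forwarded to `prlimit` as `--as=<bytes>`, i.e. a cap on the child process's virtual address space. The standalone Python sketch below demonstrates the same mechanism with the standard `resource` module; it is not Piston code, the 256 MB figure is arbitrary, and it only works on Unix-like systems.

```python
# Standalone illustration of an address-space cap, analogous to prlimit --as=<bytes>.
# Not Piston code; Unix-only (the resource module is unavailable on Windows).
import subprocess
import sys

LIMIT_BYTES = 256 * 1024 * 1024  # arbitrary 256 MB cap for the child process

child_code = f"""
import resource
resource.setrlimit(resource.RLIMIT_AS, ({LIMIT_BYTES}, {LIMIT_BYTES}))
try:
    buf = bytearray(1024 * 1024 * 1024)  # attempt to allocate ~1 GB
    print("allocated")
except MemoryError:
    print("allocation refused by RLIMIT_AS")
"""

result = subprocess.run([sys.executable, "-c", child_code], capture_output=True, text=True)
print(result.stdout.strip())  # expected: "allocation refused by RLIMIT_AS"
```

Like the `--as` flag, `RLIMIT_AS` bounds address space rather than resident memory, so allocations that would exceed the cap fail immediately instead of being swapped or OOM-killed later.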


@@ -210,6 +210,8 @@ This endpoint requests execution of some arbitrary code.
- `args` (*optional*) The arguments to pass to the program. Must be an array or left out. Defaults to `[]`.
- `compile_timeout` (*optional*) The maximum time allowed for the compile stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `10000` (10 seconds).
- `run_timeout` (*optional*) The maximum time allowed for the run stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `3000` (3 seconds).
- `compile_memory_limit` (*optional*) The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit).
- `run_memory_limit` (*optional*) The maximum amount of memory the run stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit).
```json
{
@@ -228,7 +230,9 @@ This endpoint requests execution of some arbitrary code.
"3"
],
"compile_timeout": 10000,
"run_timeout": 3000
"run_timeout": 3000,
"compile_memory_limit": -1,
"run_memory_limit": -1
}
```
A typical response upon successful execution will contain 1 or 2 keys `run` and `compile`.
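
Putting the documented fields together, a request might look like the Python sketch below. It assumes a local instance on `127.0.0.1:2000` with a `python` 3.9.4 runtime installed, so the language, version, and limit values are illustrative rather than part of the documentation diff.

```python
# Example request using the new memory-limit fields; a sketch, not part of the docs diff.
import requests

payload = {
    "language": "python",        # assumed installed runtime; adjust to your instance
    "version": "3.9.4",
    "files": [{"name": "main.py", "content": "print(sum(range(10)))"}],
    "stdin": "",
    "args": [],
    "compile_timeout": 10000,
    "run_timeout": 3000,
    "compile_memory_limit": -1,        # -1: no limit for the (absent) compile stage
    "run_memory_limit": 128_000_000,   # cap the run stage's address space at ~128 MB
}

result = requests.post("http://127.0.0.1:2000/api/v2/execute", json=payload).json()
print(result["run"]["stdout"])  # "45" when the run stage stays within its limits
```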

tests/multi_request.py (new file, 49 lines)

@@ -0,0 +1,49 @@
"""
Description
Running multiple requests in parallel can cause the whole container to crash.
This happens due to a race condition in the cleanup: killing the process
removes its files, but by that point we have already marked those files
for deletion.
Resolution
Catching any errors resulting from individual file deletes in the
filesystem cleanup.
"""
import aiohttp
import asyncio
def get_request_data(message):
return {
'language': 'java',
'version': '15.0.2',
'files': [
{
'name': 'Test.java',
'content': 'public class HelloWorld { public static void main(String[] args) { System.out.print("' + message + '"); }}'
}
],
'stdin': '',
'args': [],
'compile_timeout': 10000,
'run_timeout': 3000
}
async def post_request(session, data):
async with session.post('http://127.0.0.1:2000/api/v2/execute', json=data) as resp:
response = await resp.json()
return response
async def run_many_requests(number):
async with aiohttp.ClientSession() as session:
tasks = []
for i in range(number):
request_data = get_request_data(f"Request #{i}")
tasks.append(asyncio.ensure_future(post_request(session, request_data)))
results = await asyncio.gather(*tasks)
for result in results:
print(result)
asyncio.run(run_many_requests(5))
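
The script above is the regression test for commit 9590d5afbc: it fires five executions in parallel against a local instance at `127.0.0.1:2000` and prints each response. Presumably it is run directly (for example `python tests/multi_request.py`) with `aiohttp` installed and a `java` 15.0.2 runtime available; before the try/catch added to the filesystem cleanup, this parallel load could crash the whole container.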