Patch for race condition with filesystem and process cleanup

This commit is contained in:
Thomas Hobson 2021-05-07 19:23:34 +12:00
parent 95421c9e15
commit 9590d5afbc
No known key found for this signature in database
GPG Key ID: 9F1FD9D87950DB6F
2 changed files with 57 additions and 3 deletions

View File

@ -219,9 +219,14 @@ class Job {
// Remove every entry in clean_path that is owned by this job's uid.
// Entries may vanish between the directory read and the stat/rm below
// (process teardown can delete a job's files concurrently), so each
// per-file failure is tolerated rather than aborting the whole cleanup.
for (const file of contents) {
const file_path = path.join(clean_path, file);
try{
const stat = await fs.stat(file_path);
if(stat.uid == this.uid)
await fs.rm(file_path, { recursive: true, force: true });
}catch(e){
// File was deleted between reading the directory listing and
// checking/removing it — log and continue with the next entry.
logger.warn(`Error removing file ${file_path}: ${e}`)
}
}
}

49
tests/multi_request.py Normal file
View File

@ -0,0 +1,49 @@
"""
Description
Running multiple requests in parallel can cause the whole container to crash.
This happens due to a race condition in the cleanup: killing the
process removes the file, but by that point the file has already
been marked for deletion.
Resolution
Catching any errors resulting from individual file deletes in the
filesystem cleanup.
"""
import aiohttp
import asyncio
def get_request_data(message):
    """Build a piston execute-API payload whose Java program prints *message*.

    Returns a dict ready to be sent as the JSON body of a POST to
    /api/v2/execute.
    """
    java_source = (
        'public class HelloWorld { public static void main(String[] args) '
        '{ System.out.print("' + message + '"); }}'
    )
    payload = {
        'language': 'java',
        'version': '15.0.2',
        'files': [{'name': 'Test.java', 'content': java_source}],
        'stdin': '',
        'args': [],
        'compile_timeout': 10000,
        'run_timeout': 3000,
    }
    return payload
async def post_request(session, data):
    """POST *data* as JSON to the local execute endpoint; return parsed JSON."""
    url = 'http://127.0.0.1:2000/api/v2/execute'
    async with session.post(url, json=data) as resp:
        return await resp.json()
async def run_many_requests(number):
    """Fire *number* execute requests concurrently and print each response."""
    async with aiohttp.ClientSession() as session:
        futures = [
            asyncio.ensure_future(
                post_request(session, get_request_data(f"Request #{i}"))
            )
            for i in range(number)
        ]
        # Await all requests together so they genuinely run in parallel.
        for response in await asyncio.gather(*futures):
            print(response)
# Guard the entry point so importing this module does not fire requests.
if __name__ == '__main__':
    asyncio.run(run_many_requests(5))