Bug fixes

This commit is contained in:
parent 63dd925bb1
commit 1a1236dcbe
@@ -1,4 +1,4 @@
-FROM buildpack-deps@sha256:d56cd472000631b8faca51f40d4e3f1b20deffa588f9f207fa6c60efb62ba7c4 AS isolate
+FROM buildpack-deps:bookworm AS isolate
 RUN apt-get update && \
     apt-get install -y --no-install-recommends git libcap-dev && \
     rm -rf /var/lib/apt/lists/* && \
@@ -8,23 +8,20 @@ RUN apt-get update && \
     make -j$(nproc) install && \
     rm -rf /tmp/*
 
-FROM node:15.10.0-buster-slim
+FROM node:20-bookworm-slim
 
 ENV DEBIAN_FRONTEND=noninteractive
 
 RUN dpkg-reconfigure -p critical dash
-RUN for i in $(seq 1001 1500); do \
-    groupadd -g $i runner$i && \
-    useradd -M runner$i -g $i -u $i ; \
-    done
 RUN apt-get update && \
     apt-get install -y libxml2 gnupg tar coreutils util-linux libc6-dev \
     binutils build-essential locales libpcre3-dev libevent-dev libgmp3-dev \
     libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \
     libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \
     libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \
-    libsundials-dev libpcre2-dev && \
+    libsundials-dev libpcre2-dev libcap-dev && \
     rm -rf /var/lib/apt/lists/*
+RUN useradd -M piston
 COPY --from=isolate /usr/local/bin/isolate /usr/local/bin
 COPY --from=isolate /usr/local/etc/isolate /usr/local/etc/isolate
 
@@ -135,23 +135,19 @@ function get_job(body) {
             }
         }
 
-        compile_timeout = compile_timeout || rt.timeouts.compile;
-        run_timeout = run_timeout || rt.timeouts.run;
-        compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
-        run_memory_limit = run_memory_limit || rt.memory_limits.run;
         resolve(
             new Job({
                 runtime: rt,
-                args: args || [],
-                stdin: stdin || '',
+                args: args ?? [],
+                stdin: stdin ?? '',
                 files,
                 timeouts: {
-                    run: run_timeout,
-                    compile: compile_timeout,
+                    run: run_timeout ?? rt.timeouts.run,
+                    compile: compile_timeout ?? rt.timeouts.compile,
                 },
                 memory_limits: {
-                    run: run_memory_limit,
-                    compile: compile_memory_limit,
+                    run: run_memory_limit ?? rt.memory_limits.run,
+                    compile: compile_memory_limit ?? rt.memory_limits.compile,
                 },
            })
        );
 
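The move from || to ?? matters when a caller explicitly sends a falsy but valid value: ?? only falls back on null or undefined, so an explicit 0 or '' survives. A minimal sketch of the difference, with illustrative values that are not taken from the API:

    const run_timeout = 0;                 // caller explicitly asks for 0
    const fallback = 3000;                 // runtime default

    console.log(run_timeout || fallback);  // 3000, the 0 is discarded
    console.log(run_timeout ?? fallback);  // 0, only null/undefined fall back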
@@ -9,4 +9,4 @@ mkdir init && \
     echo 1 > init/cgroup.procs && \
     echo '+cpuset +memory' > cgroup.subtree_control && \
     echo "Initialized cgroup" && \
-    exec su -- piston -c 'ulimit -n 65536 && node'
+    exec su -- piston -c 'ulimit -n 65536 && node /piston_api/src'
@@ -35,10 +35,6 @@ expressWs(app);
         }
     }
 });
-fss.chmodSync(
-    path.join(config.data_directory, globals.data_directories.jobs),
-    0o711
-);
 
 logger.info('Loading packages');
 const pkgdir = path.join(
api/src/job.js (135 changed lines)
@@ -18,7 +18,7 @@ let box_id = 0;
 let remaining_job_spaces = config.max_concurrent_jobs;
 let job_queue = [];
 
-const get_next_box_id = () => (box_id + 1) % MAX_BOX_ID;
+const get_next_box_id = () => ++box_id % MAX_BOX_ID;
 
 class Job {
     #dirty_boxes;
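The old expression computed a new id but never stored it, so the shared counter never advanced; the pre-increment form mutates it on every call. A small sketch of the two behaviours (the 999 limit is a placeholder, not the project's MAX_BOX_ID):

    let box_id = 0;
    const MAX_BOX_ID = 999;

    const stale = () => (box_id + 1) % MAX_BOX_ID;   // box_id itself is never mutated
    stale(); stale();
    console.log(box_id);                             // still 0

    const fresh = () => ++box_id % MAX_BOX_ID;       // pre-increment advances the counter
    fresh(); fresh();
    console.log(box_id);                             // 2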
@@ -68,7 +68,7 @@ class Job {
                 const box = {
                     id: box_id,
                     metadata_file_path,
-                    dir: stdout,
+                    dir: `${stdout.trim()}/box`,
                 };
                 this.#dirty_boxes.push(box);
                 res(box);
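The new value trims the trailing newline from the path that isolate --init prints and points at its box/ subdirectory, which (as far as the isolate layout goes) is the sandbox's working directory. A rough illustration with a hypothetical path, not output captured from isolate:

    const stdout = '/var/local/lib/isolate/7\n';   // hypothetical --init output
    const dir = `${stdout.trim()}/box`;
    console.log(dir);                              // /var/local/lib/isolate/7/box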
@@ -117,9 +117,15 @@ class Job {
     }
 
     async safe_call(box, file, args, timeout, memory_limit, event_bus = null) {
-        var stdout = '';
-        var stderr = '';
-        var output = '';
+        let stdout = '';
+        let stderr = '';
+        let output = '';
+        let memory = null;
+        let code = null;
+        let signal = null;
+        let message = null;
+        let status = null;
+        let time = null;
 
         const proc = cp.spawn(
             ISOLATE_PATH,
@@ -133,14 +139,18 @@ class Job {
                 '/box/submission',
                 '-e',
                 `--dir=/runtime=${this.runtime.pkgdir}`,
+                `--dir=/etc:noexec`,
                 `--processes=${this.runtime.max_process_count}`,
                 `--open-files=${this.runtime.max_open_files}`,
-                `--fsize=${this.runtime.max_file_size}`,
-                `--time=${timeout}`,
+                `--fsize=${Math.floor(this.runtime.max_file_size / 1000)}`,
+                `--time=${timeout / 1000}`,
                 `--extra-time=0`,
-                ...(memory_limit >= 0 ? [`--cg-mem=${memory_limit}`] : []),
+                ...(memory_limit >= 0
+                    ? [`--cg-mem=${Math.floor(memory_limit / 1000)}`]
+                    : []),
                 ...(config.disable_networking ? [] : ['--share-net']),
                 '--',
+                '/bin/bash',
                 file,
                 ...args,
             ],
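The new divisions convert the API's limits, which the surrounding code appears to keep in milliseconds and bytes, into the units isolate's flags expect: --time takes seconds, while --fsize and --cg-mem take kilobytes. A hedged sketch of the mapping with made-up input values:

    // Hypothetical limits as the API stores them.
    const timeout = 3000;               // milliseconds
    const memory_limit = 128_000_000;   // bytes

    const isolate_flags = [
        `--time=${timeout / 1000}`,                       // "--time=3" (seconds)
        `--cg-mem=${Math.floor(memory_limit / 1000)}`,    // "--cg-mem=128000" (kilobytes)
    ];
    console.log(isolate_flags);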
@@ -174,7 +184,8 @@ class Job {
                     stderr.length + data.length >
                     this.runtime.output_max_size
                 ) {
-                    this.logger.info(`stderr length exceeded`);
+                    message = 'stderr length exceeded';
+                    this.logger.info(message);
                     try {
                         process.kill(proc.pid, 'SIGABRT');
                     } catch (e) {
@@ -197,7 +208,8 @@ class Job {
                     stdout.length + data.length >
                     this.runtime.output_max_size
                 ) {
-                    this.logger.info(`stdout length exceeded`);
+                    message = 'stdout length exceeded';
+                    this.logger.info(message);
                     try {
                         process.kill(proc.pid, 'SIGABRT');
                     } catch (e) {
@@ -213,18 +225,27 @@ class Job {
                 }
             });
 
-        let memory = null;
-        let code = null;
-        let signal = null;
-        let message = null;
-        let status = null;
-        let time = null;
+        const data = await new Promise((res, rej) => {
+            proc.on('close', () => {
+                res({
+                    stdout,
+                    stderr,
+                });
+            });
+
+            proc.on('error', err => {
+                rej({
+                    error: err,
+                    stdout,
+                    stderr,
+                });
+            });
+        });
 
         try {
-            const metadata_str = await fs.read_file(
-                box.metadata_file_path,
-                'utf-8'
-            );
+            const metadata_str = (
+                await fs.read_file(box.metadata_file_path)
+            ).toString();
             const metadata_lines = metadata_str.split('\n');
             for (const line of metadata_lines) {
                 if (!line) continue;
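With this change, safe_call waits for the sandboxed process to close (or error) before it reads isolate's metadata file, instead of parsing metadata in the same tick that wires up the handlers. The general shape of the pattern, sketched with plain Node APIs rather than the project's helpers:

    const cp = require('child_process');
    const fs = require('fs/promises');

    async function run_and_collect(cmd, args, metadata_path) {
        const proc = cp.spawn(cmd, args);

        // Resolve only once the child has exited; reject on spawn errors.
        await new Promise((res, rej) => {
            proc.on('close', res);
            proc.on('error', rej);
        });

        // Safe to read now: the metadata file has been fully written.
        return (await fs.readFile(metadata_path)).toString();
    }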
@@ -237,46 +258,46 @@ class Job {
                 }
                 switch (key) {
                     case 'cg-mem':
-                        memory =
-                            parse_int(value) ||
-                            (() => {
-                                throw new Error(
-                                    `Failed to parse memory usage, received value: ${value}`
-                                );
-                            })();
+                        try {
+                            memory = parse_int(value);
+                        } catch (e) {
+                            throw new Error(
+                                `Failed to parse memory usage, received value: ${value}`
+                            );
+                        }
                         break;
                     case 'exitcode':
-                        code =
-                            parse_int(value) ||
-                            (() => {
-                                throw new Error(
-                                    `Failed to parse exit code, received value: ${value}`
-                                );
-                            })();
+                        try {
+                            code = parse_int(value);
+                        } catch (e) {
+                            throw new Error(
+                                `Failed to parse exit code, received value: ${value}`
+                            );
+                        }
                         break;
                     case 'exitsig':
-                        signal =
-                            parse_int(value) ||
-                            (() => {
-                                throw new Error(
-                                    `Failed to parse exit signal, received value: ${value}`
-                                );
-                            })();
+                        try {
+                            signal = parse_int(value);
+                        } catch (e) {
+                            throw new Error(
+                                `Failed to parse exit signal, received value: ${value}`
+                            );
+                        }
                         break;
                     case 'message':
-                        message = value;
+                        message = message || value;
                         break;
                     case 'status':
                         status = value;
                         break;
                     case 'time':
-                        time =
-                            parse_float(value) ||
-                            (() => {
-                                throw new Error(
-                                    `Failed to parse cpu time, received value: ${value}`
-                                );
-                            })();
+                        try {
+                            time = parse_float(value);
+                        } catch (e) {
+                            throw new Error(
+                                `Failed to parse cpu time, received value: ${value}`
+                            );
+                        }
                         break;
                     default:
                         break;
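The old "parse or throw" idiom treated any falsy parse result as a failure, so a perfectly valid 0 (for example an exit code of 0) would have tripped the error branch; the try/catch form only throws when the parser itself throws. A small illustration, where strict_parse_int stands in for the project's parse_int helper:

    // Stand-in for the project's parse_int: throws on non-numeric input.
    const strict_parse_int = s => {
        const n = Number(s);
        if (!Number.isInteger(n)) throw new Error(`not an integer: ${s}`);
        return n;
    };

    // Old idiom: a parsed 0 is falsy, so the "failure" branch fires anyway.
    const old_style = v =>
        strict_parse_int(v) ||
        (() => {
            throw new Error('Failed to parse');
        })();

    // New idiom: only a real parse error reaches the throw.
    const new_style = v => {
        try {
            return strict_parse_int(v);
        } catch (e) {
            throw new Error('Failed to parse');
        }
    };

    console.log(new_style('0'));   // 0
    try {
        old_style('0');
    } catch (e) {
        console.log(e.message);    // 'Failed to parse', even though '0' parsed fine
    }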
@@ -288,10 +309,8 @@ class Job {
                     );
             }
 
-            proc.on('close', () => {
-                resolve({
-                    stdout,
-                    stderr,
+            return {
+                ...data,
                 code,
                 signal,
                 output,
@@ -299,23 +318,7 @@
                 message,
                 status,
                 time,
-            });
-            });
-
-            proc.on('error', err => {
-                reject({
-                    error: err,
-                    stdout,
-                    stderr,
-                    code,
-                    signal,
-                    output,
-                    memory,
-                    message,
-                    status,
-                    time,
-                });
-            });
+            };
         }
 
     async execute(box, event_bus = null) {
@@ -369,20 +372,22 @@ class Job {
             );
             emit_event_bus_result('compile', compile);
             compile_errored = compile.code !== 0;
+            if (!compile_errored) {
+                const old_box_dir = box.dir;
+                box = await this.#create_isolate_box();
+                await fs.rename(
+                    path.join(old_box_dir, 'submission'),
+                    path.join(box.dir, 'submission')
+                );
+            }
         }
 
         let run;
         if (!compile_errored) {
            this.logger.debug('Running');
-            const old_box_dir = box.dir;
-            const new_box = await this.#create_isolate_box();
-            await fs.rename(
-                path.join(old_box_dir, 'submission'),
-                path.join(new_box, 'submission')
-            );
             emit_event_bus_stage('run');
             run = await this.safe_call(
-                new_box,
+                box,
                 '/runtime/run',
                 [code_files[0].name, ...this.args],
                 this.timeouts.run,