Bug fixes

Omar Brikaa 2024-08-30 20:49:42 +03:00
parent 63dd925bb1
commit 1a1236dcbe
5 changed files with 100 additions and 106 deletions

View File

@@ -1,4 +1,4 @@
FROM buildpack-deps@sha256:d56cd472000631b8faca51f40d4e3f1b20deffa588f9f207fa6c60efb62ba7c4 AS isolate
FROM buildpack-deps:bookworm AS isolate
RUN apt-get update && \
apt-get install -y --no-install-recommends git libcap-dev && \
rm -rf /var/lib/apt/lists/* && \
@@ -8,23 +8,20 @@ RUN apt-get update && \
make -j$(nproc) install && \
rm -rf /tmp/*
FROM node:15.10.0-buster-slim
FROM node:20-bookworm-slim
ENV DEBIAN_FRONTEND=noninteractive
RUN dpkg-reconfigure -p critical dash
RUN for i in $(seq 1001 1500); do \
groupadd -g $i runner$i && \
useradd -M runner$i -g $i -u $i ; \
done
RUN apt-get update && \
apt-get install -y libxml2 gnupg tar coreutils util-linux libc6-dev \
binutils build-essential locales libpcre3-dev libevent-dev libgmp3-dev \
libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \
libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \
libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \
libsundials-dev libpcre2-dev && \
libsundials-dev libpcre2-dev libcap-dev && \
rm -rf /var/lib/apt/lists/*
RUN useradd -M piston
COPY --from=isolate /usr/local/bin/isolate /usr/local/bin
COPY --from=isolate /usr/local/etc/isolate /usr/local/etc/isolate

View File

@@ -135,23 +135,19 @@ function get_job(body) {
}
}
compile_timeout = compile_timeout || rt.timeouts.compile;
run_timeout = run_timeout || rt.timeouts.run;
compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
run_memory_limit = run_memory_limit || rt.memory_limits.run;
resolve(
new Job({
runtime: rt,
args: args || [],
stdin: stdin || '',
args: args ?? [],
stdin: stdin ?? '',
files,
timeouts: {
run: run_timeout,
compile: compile_timeout,
run: run_timeout ?? rt.timeouts.run,
compile: compile_timeout ?? rt.timeouts.compile,
},
memory_limits: {
run: run_memory_limit,
compile: compile_memory_limit,
run: run_memory_limit ?? rt.memory_limits.run,
compile: compile_memory_limit ?? rt.memory_limits.compile,
},
})
);
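The `||` → `??` change above matters when a request explicitly passes 0 (or an empty string for stdin): `||` treats every falsy value as missing and substitutes the runtime default, whereas `??` only falls back on null/undefined. A minimal sketch with made-up values:

    // || loses an explicit falsy request; ?? keeps it.
    const requested_timeout = 0;          // caller explicitly asks for 0 ms
    const default_timeout = 3000;
    requested_timeout || default_timeout; // 3000 – explicit value silently replaced
    requested_timeout ?? default_timeout; // 0    – explicit value preserved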

api/src/docker-entrypoint.sh Normal file → Executable file
View File

@@ -9,4 +9,4 @@ mkdir init && \
echo 1 > init/cgroup.procs && \
echo '+cpuset +memory' > cgroup.subtree_control && \
echo "Initialized cgroup" && \
exec su -- piston -c 'ulimit -n 65536 && node'
exec su -- piston -c 'ulimit -n 65536 && node /piston_api/src'

View File

@@ -35,10 +35,6 @@ expressWs(app);
}
}
});
fss.chmodSync(
path.join(config.data_directory, globals.data_directories.jobs),
0o711
);
logger.info('Loading packages');
const pkgdir = path.join(

View File

@@ -18,7 +18,7 @@ let box_id = 0;
let remaining_job_spaces = config.max_concurrent_jobs;
let job_queue = [];
const get_next_box_id = () => (box_id + 1) % MAX_BOX_ID;
const get_next_box_id = () => ++box_id % MAX_BOX_ID;
class Job {
#dirty_boxes;
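The previous `get_next_box_id` computed `(box_id + 1) % MAX_BOX_ID` but never stored the result, so the shared counter stayed at its initial value; the pre-increment form actually advances it. A sketch of the difference (the MAX_BOX_ID value here is assumed for illustration):

    let box_id = 0;
    const MAX_BOX_ID = 999;
    const old_next = () => (box_id + 1) % MAX_BOX_ID; // box_id is never updated
    old_next(); old_next(); old_next();               // always 1
    const new_next = () => ++box_id % MAX_BOX_ID;     // mutates box_id before using it
    new_next(); new_next(); new_next();               // 1, 2, 3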
@@ -68,7 +68,7 @@ class Job {
const box = {
id: box_id,
metadata_file_path,
dir: stdout,
dir: `${stdout.trim()}/box`,
};
this.#dirty_boxes.push(box);
res(box);
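`isolate --init` prints the sandbox's root directory followed by a newline, and the files a job actually uses live in its `box/` subdirectory, hence the trim and suffix. A sketch with an assumed isolate root path:

    const stdout = '/var/local/lib/isolate/42\n'; // example --init output
    const old_dir = stdout;                       // trailing newline, one level too high
    const new_dir = `${stdout.trim()}/box`;       // '/var/local/lib/isolate/42/box'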
@@ -117,9 +117,15 @@ class Job {
}
async safe_call(box, file, args, timeout, memory_limit, event_bus = null) {
var stdout = '';
var stderr = '';
var output = '';
let stdout = '';
let stderr = '';
let output = '';
let memory = null;
let code = null;
let signal = null;
let message = null;
let status = null;
let time = null;
const proc = cp.spawn(
ISOLATE_PATH,
@@ -133,14 +139,18 @@ class Job {
'/box/submission',
'-e',
`--dir=/runtime=${this.runtime.pkgdir}`,
`--dir=/etc:noexec`,
`--processes=${this.runtime.max_process_count}`,
`--open-files=${this.runtime.max_open_files}`,
`--fsize=${this.runtime.max_file_size}`,
`--time=${timeout}`,
`--fsize=${Math.floor(this.runtime.max_file_size / 1000)}`,
`--time=${timeout / 1000}`,
`--extra-time=0`,
...(memory_limit >= 0 ? [`--cg-mem=${memory_limit}`] : []),
...(memory_limit >= 0
? [`--cg-mem=${Math.floor(memory_limit / 1000)}`]
: []),
...(config.disable_networking ? [] : ['--share-net']),
'--',
'/bin/bash',
file,
...args,
],
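isolate expects `--time` in seconds and `--fsize`/`--cg-mem` in kilobytes, while the limits arriving here appear to be in milliseconds and bytes, hence the divisions by 1000; the run/compile scripts are also now invoked explicitly through `/bin/bash`. A sketch of the unit conversion, assuming millisecond/byte inputs:

    const timeout = 3000;             // ms from the API request
    const memory_limit = 128_000_000; // bytes from the API request
    const time_arg = `--time=${timeout / 1000}`;                   // isolate wants seconds
    const mem_arg = `--cg-mem=${Math.floor(memory_limit / 1000)}`; // isolate wants kilobytes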
@@ -174,7 +184,8 @@ class Job {
stderr.length + data.length >
this.runtime.output_max_size
) {
this.logger.info(`stderr length exceeded`);
message = 'stderr length exceeded';
this.logger.info(message);
try {
process.kill(proc.pid, 'SIGABRT');
} catch (e) {
@@ -197,7 +208,8 @@ class Job {
stdout.length + data.length >
this.runtime.output_max_size
) {
this.logger.info(`stdout length exceeded`);
message = 'stdout length exceeded';
this.logger.info(message);
try {
process.kill(proc.pid, 'SIGABRT');
} catch (e) {
@@ -213,18 +225,27 @@ class Job {
}
});
let memory = null;
let code = null;
let signal = null;
let message = null;
let status = null;
let time = null;
const data = await new Promise((res, rej) => {
proc.on('close', () => {
res({
stdout,
stderr,
});
});
proc.on('error', err => {
rej({
error: err,
stdout,
stderr,
});
});
});
try {
const metadata_str = await fs.read_file(
box.metadata_file_path,
'utf-8'
);
const metadata_str = (
await fs.read_file(box.metadata_file_path)
).toString();
const metadata_lines = metadata_str.split('\n');
for (const line of metadata_lines) {
if (!line) continue;
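The `close`/`error` handlers are now awaited before the metadata file is touched, so parsing only starts once isolate has exited and written its metadata, and stdout/stderr travel through the promise result instead of handlers registered at the end of the method. A condensed sketch of the ordering, using a hypothetical helper and stand-in names:

    const fs = require('fs/promises');
    // Wait for the child to exit, then read the metadata it produced.
    const wait_then_read_metadata = (proc, metadata_file_path) =>
        new Promise((resolve, reject) => {
            proc.on('close', resolve);   // isolate has exited and flushed its metadata
            proc.on('error', reject);
        }).then(() => fs.readFile(metadata_file_path, 'utf-8'));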
@@ -237,46 +258,46 @@
}
switch (key) {
case 'cg-mem':
memory =
parse_int(value) ||
(() => {
throw new Error(
`Failed to parse memory usage, received value: ${value}`
);
})();
try {
memory = parse_int(value);
} catch (e) {
throw new Error(
`Failed to parse memory usage, received value: ${value}`
);
}
break;
case 'exitcode':
code =
parse_int(value) ||
(() => {
throw new Error(
`Failed to parse exit code, received value: ${value}`
);
})();
try {
code = parse_int(value);
} catch (e) {
throw new Error(
`Failed to parse exit code, received value: ${value}`
);
}
break;
case 'exitsig':
signal =
parse_int(value) ||
(() => {
throw new Error(
`Failed to parse exit signal, received value: ${value}`
);
})();
try {
signal = parse_int(value);
} catch (e) {
throw new Error(
`Failed to parse exit signal, received value: ${value}`
);
}
break;
case 'message':
message = value;
message = message || value;
break;
case 'status':
status = value;
break;
case 'time':
time =
parse_float(value) ||
(() => {
throw new Error(
`Failed to parse cpu time, received value: ${value}`
);
})();
try {
time = parse_float(value);
} catch (e) {
throw new Error(
`Failed to parse cpu time, received value: ${value}`
);
}
break;
default:
break;
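The old `parse_int(value) || (() => { throw ... })()` pattern raised even when parsing succeeded with a falsy result, e.g. an exit code of 0 or zero memory usage, because `0 || x` evaluates `x`; the try/catch form only raises on an actual parse failure, and `message = message || value` keeps an earlier "length exceeded" message from being overwritten. A sketch, with a hypothetical `parse_int` that throws on bad input:

    const parse_int = v => {
        const n = Number.parseInt(v, 10);
        if (Number.isNaN(n)) throw new Error(`invalid integer: ${v}`);
        return n;
    };
    // Old pattern: parse_int('0') is 0 (falsy), so the throwing IIFE still runs.
    // code = parse_int('0') || (() => { throw new Error('Failed to parse exit code'); })();
    // New pattern: only a genuine parse error throws.
    let code;
    try {
        code = parse_int('0'); // 0 is kept
    } catch (e) {
        throw new Error('Failed to parse exit code, received value: 0');
    }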
@@ -288,34 +309,16 @@
);
}
proc.on('close', () => {
resolve({
stdout,
stderr,
code,
signal,
output,
memory,
message,
status,
time,
});
});
proc.on('error', err => {
reject({
error: err,
stdout,
stderr,
code,
signal,
output,
memory,
message,
status,
time,
});
});
return {
...data,
code,
signal,
output,
memory,
message,
status,
time,
};
}
async execute(box, event_bus = null) {
@@ -369,20 +372,22 @@
);
emit_event_bus_result('compile', compile);
compile_errored = compile.code !== 0;
if (!compile_errored) {
const old_box_dir = box.dir;
box = await this.#create_isolate_box();
await fs.rename(
path.join(old_box_dir, 'submission'),
path.join(box.dir, 'submission')
);
}
}
let run;
if (!compile_errored) {
this.logger.debug('Running');
const old_box_dir = box.dir;
const new_box = await this.#create_isolate_box();
await fs.rename(
path.join(old_box_dir, 'submission'),
path.join(new_box, 'submission')
);
emit_event_bus_stage('run');
run = await this.safe_call(
new_box,
box,
'/runtime/run',
[code_files[0].name, ...this.args],
this.timeouts.run,