Merge branch 'master' into pre-commit
commit ddb3703a0d
@@ -129,7 +129,7 @@ function get_job(body) {
         compile_timeout = compile_timeout || rt.timeouts.compile;
         run_timeout = run_timeout || rt.timeouts.run;
         compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
-        run_timeout = run_timeout || rt.timeouts.run;
+        run_memory_limit = run_memory_limit || rt.memory_limits.run;
         resolve(
             new Job({
                 runtime: rt,
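Note: this hunk fixes a copy-paste bug. The fallback for run_memory_limit was a second run_timeout assignment, so the memory limit never picked up its runtime default. A minimal sketch of the failure, with stand-in values:

```js
// Stand-in runtime object; the real defaults come from package metadata.
const rt = {
    timeouts: { compile: 10000, run: 3000 },
    memory_limits: { compile: -1, run: -1 },
};

let run_memory_limit = undefined; // not supplied in the request body

// Before the fix this line read `run_timeout = run_timeout || rt.timeouts.run;`,
// leaving run_memory_limit undefined forever.
run_memory_limit = run_memory_limit || rt.memory_limits.run;

console.log(run_memory_limit); // -1, the runtime default
```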
@@ -215,7 +215,7 @@ options.forEach(option => {
 
     const parsed_val = parser(env_val);
 
-    const value = parsed_val || option.default;
+    const value = env_val === undefined ? option.default : parsed_val;
 
     option.validators.for_each(validator => {
         let response = null;
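Note: `parsed_val || option.default` discards any falsy override (0, false, empty string); the ternary only falls back when the environment variable is genuinely unset. A self-contained sketch with a hypothetical boolean option, not the project's real options table:

```js
const option = { default: true };
const env_val = 'false';               // user explicitly disables the option
const parsed_val = env_val === 'true'; // parser yields boolean false

const broken = parsed_val || option.default;                       // true: override silently lost
const fixed = env_val === undefined ? option.default : parsed_val; // false: override respected
console.log({ broken, fixed });
```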
@@ -27,7 +27,7 @@ setInterval(() => {
 }, 10);
 
 class Job {
-    constructor({ runtime, files, args, stdin }) {
+    constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
         this.uuid = uuidv4();
         this.runtime = runtime;
         this.files = files.map((file, i) => ({
@@ -38,6 +38,9 @@ class Job {
         this.args = args;
         this.stdin = stdin;
+
+        this.timeouts = timeouts;
+        this.memory_limits = memory_limits;
 
         this.uid = config.runner_uid_min + uid;
         this.gid = config.runner_gid_min + gid;
 
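Note: together these two hunks move timeouts and memory limits off the shared runtime and onto each job, so one runtime can serve requests with different limits. A stripped-down stand-in for the real class, assuming the shapes used elsewhere in this diff:

```js
class Job {
    constructor({ runtime, timeouts, memory_limits }) {
        this.runtime = runtime;
        this.timeouts = timeouts;           // { compile, run }, per job
        this.memory_limits = memory_limits; // { compile, run }, per job
    }
}

const rt = { timeouts: { compile: 10000, run: 3000 } }; // runtime-wide defaults
const job = new Job({
    runtime: rt,
    timeouts: { compile: rt.timeouts.compile, run: 1000 }, // request overrode run
    memory_limits: { compile: -1, run: -1 },
});
console.log(job.timeouts); // { compile: 10000, run: 1000 }
```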
@@ -143,12 +146,15 @@ class Job {
             });
         }
 
-        const kill_timeout = set_timeout(async _ => {
-            logger.info(
-                `Timeout exceeded timeout=${timeout} uuid=${this.uuid}`
-            );
-            process.kill(proc.pid, 'SIGKILL');
-        }, timeout);
+        const kill_timeout =
+            (timeout >= 0 &&
+                set_timeout(async _ => {
+                    logger.info(
+                        `Timeout exceeded timeout=${timeout} uuid=${this.uuid}`
+                    );
+                    process.kill(proc.pid, 'SIGKILL');
+                }, timeout)) ||
+            null;
 
         proc.stderr.on('data', async data => {
             if (eventBus !== null) {
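Note: the rewritten expression arms the kill timer only for non-negative timeouts, so a negative value can mean "never kill". A self-contained sketch of the pattern (set_timeout in the diff is presumably Node's setTimeout under a snake_case alias):

```js
const set_timeout = setTimeout; // mirror the diff's alias

function arm_kill_timer(timeout, on_fire) {
    // Timeout handles are truthy, so `|| null` only applies when timeout < 0.
    return (timeout >= 0 && set_timeout(on_fire, timeout)) || null;
}

console.log(arm_kill_timer(-1, () => {})); // null: no timer armed
const handle = arm_kill_timer(50, () => console.log('would SIGKILL here'));
clearTimeout(handle); // cleanup path; clearTimeout(null) is also a safe no-op
```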
@@ -220,8 +226,8 @@ class Job {
             compile = await this.safe_call(
                 path.join(this.runtime.pkgdir, 'compile'),
                 this.files.map(x => x.name),
-                this.runtime.timeouts.compile,
-                this.runtime.memory_limits.compile
+                this.timeouts.compile,
+                this.memory_limits.compile
             );
         }
 
@@ -230,8 +236,8 @@ class Job {
         const run = await this.safe_call(
             path.join(this.runtime.pkgdir, 'run'),
             [this.files[0].name, ...this.args],
-            this.runtime.timeouts.run,
-            this.runtime.memory_limits.run
+            this.timeouts.run,
+            this.memory_limits.run
         );
 
         this.state = job_states.EXECUTED;
@@ -263,8 +269,8 @@ class Job {
         const { error, code, signal } = await this.safe_call(
             path.join(this.runtime.pkgdir, 'compile'),
             this.files.map(x => x.name),
-            this.runtime.timeouts.compile,
-            this.runtime.memory_limits.compile,
+            this.timeouts.compile,
+            this.memory_limits.compile,
             eventBus
         );
 
@@ -276,8 +282,8 @@ class Job {
         const { error, code, signal } = await this.safe_call(
             path.join(this.runtime.pkgdir, 'run'),
             [this.files[0].name, ...this.args],
-            this.runtime.timeouts.run,
-            this.runtime.memory_limits.run,
+            this.timeouts.run,
+            this.memory_limits.run,
             eventBus
         );
 
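Note: this and the three hunks above make the same substitution at every safe_call site: limits are read from the job rather than the runtime. A stub sketch of the lookup change:

```js
const job = {
    runtime: { timeouts: { compile: 10000 } }, // old source: one value per runtime
    timeouts: { compile: 15000 },              // new source: one value per job
};
console.log(job.runtime.timeouts.compile); // before: 10000 for every job
console.log(job.timeouts.compile);         // after: 15000 for this job only
```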
@@ -1,5 +1,8 @@
 {
     "language": "zig",
     "version": "0.8.0",
-    "aliases": ["zig"]
+    "aliases": ["zig"],
+    "limit_overrides": {
+        "compile_timeout": 15000
+    }
 }
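Note: limit_overrides lets a package stretch a default limit; zig's compiler needs a longer compile window than the stock default. A guess at how the override folds into the defaults when the runtime loads (the real merge lives in the runtime loader, not in this diff):

```js
const default_limits = { compile_timeout: 10000, run_timeout: 3000 }; // assumed defaults
const zig_metadata = { language: 'zig', limit_overrides: { compile_timeout: 15000 } };

const effective = { ...default_limits, ...zig_metadata.limit_overrides };
console.log(effective); // { compile_timeout: 15000, run_timeout: 3000 }
```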
piston
@@ -1,7 +1,10 @@
 #!/usr/bin/env bash
-cd "$(dirname "$0")"
 
-PISTON_ENV=$(cat .piston_env || echo dev)
+EXECUTION_PATH="$PWD"
+PISTON_PATH="$(dirname "$(realpath "$0")")"
+
+cd "$PISTON_PATH"
+PISTON_ENV=$(cat .piston_env 2> /dev/null || echo dev)
 
 docker_compose(){
     if [ -f "docker-compose.$PISTON_ENV.yaml" ]; then
@@ -39,11 +42,12 @@ case $1 in
         echo
         echo "Development Commands:"
 
-        if [ $PISTON_ENV == dev ]; then
+        if [ "$PISTON_ENV" == dev ]; then
 
         echo "  clean-pkgs                                Clean any package build artifacts on disk"
         echo "  clean-repo                                Remove all packages from local repo"
-        echo "  build-pkg <package> <version>             Build a package"
+        echo "  list-pkgs                                 Lists all packages that can be built"
+        echo "  build-pkg <package> <version> [builder]   Build a package [with desired builder image]"
         echo "  rebuild                                   Build and restart the docker container"
         echo "  lint                                      Lint the codebase using prettier"
@@ -63,6 +67,8 @@ case $1 in
     restart) docker_compose restart ;;
     start)
         init_precommit
+        rm -f .git/hooks/pre-commit
+        ln -s "$PISTON_PATH/pre-commit" "$PISTON_PATH/.git/hooks/pre-commit"
         docker_compose up -d
         ;;
     stop) docker_compose down ;;
@@ -75,6 +81,7 @@ case $1 in
 
     update)
         git pull
+        cd cli && npm i > /dev/null && cd -
         docker_compose pull
         docker_compose up -d
         ;;
@@ -82,12 +89,15 @@ case $1 in
     clean-pkgs) git clean -fqXd packages ;;
     clean-repo) git clean -fqXd repo ;;
 
+    list-pkgs) find packages -depth 2 | awk -F/ '$2 && $3{ print $2 "-" $3 }' | column ;;
+
     build-pkg)
         PKGSLUG="$2-$3"
+        BUILDER="${4:-piston-repo-builder}"
         echo "Building $PKGSLUG"
         echo "Ensuring latest builder image"
-        docker build repo -t piston-repo-builder
-        docker run --rm -v "$(realpath $(dirname "$0")):/piston" piston-repo-builder --no-server $PKGSLUG
+        docker build repo -t "$BUILDER"
+        docker run --rm -v "$PWD:/piston" "$BUILDER" --no-server "$PKGSLUG"
         ;;
 
     lint)
@@ -95,9 +105,8 @@ case $1 in
         npx prettier --ignore-unknown --write .
         ;;
     *)
-        cd cli
-        npm i > /dev/null
-        cd ../
-        node cli/index.js "$@"
+        [ -d ./cli/node_modules ] || npm i > /dev/null
+        cd "$EXECUTION_PATH"
+        node "${PISTON_PATH}/cli/index.js" "$@"
         ;;
 esac