Compare commits
No commits in common. "9f98f393a7c4a4743a6ba9ba78be0c12baf6b8dc" and "959830c6520970d01d12a055d8baad0f0e4ada15" have entirely different histories.
9f98f393a7 ... 959830c652
@@ -74,7 +74,6 @@ jobs:
 output_max_size: 1024
 max_process_count: 64
 max_open_files: 2048
-max_file_size: 1000000
 repo_url: http://localhost:8000/index

 write-mode: overwrite
@@ -102,12 +102,6 @@ const options = [
         default: 2048,
         validators: []
     },
-    {
-        key: 'max_file_size',
-        desc: 'Max file size in bytes for a file',
-        default: 1000000, //1MB
-        validators: []
-    },
     {
         key: 'repo_url',
         desc: 'URL of repo index',
@@ -16,11 +16,5 @@ module.exports = {
     },
     version: require('../package.json').version,
     platform,
-    pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
-    clean_directories: [
-        "/dev/shm",
-        "/run/lock",
-        "/tmp",
-        "/var/tmp"
-    ]
+    pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed
 };
@@ -74,8 +74,7 @@ class Job {
         const prlimit = [
             'prlimit',
             '--nproc=' + config.max_process_count,
-            '--nofile=' + config.max_open_files,
-            '--fsize=' + config.max_file_size
+            '--nofile=' + config.max_open_files
         ];

         const proc_call = [
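Only the prlimit argument array appears in this hunk; the spawn call that consumes it sits outside the diff. Below is a minimal, hypothetical sketch of how such an array is typically prepended to the actual command before spawning — the child_process usage and the demo command are illustrative and not taken from this repository.

```js
// Hypothetical sketch: wrapping a command with prlimit flags before spawning.
// The flag values mirror the config keys shown in the hunk above; everything
// else (the spawn call, the demo command) is illustrative only.
const cp = require('child_process');

const config = { max_process_count: 64, max_open_files: 2048, max_file_size: 1000000 };

const prlimit = [
    'prlimit',
    '--nproc=' + config.max_process_count,
    '--nofile=' + config.max_open_files,
    '--fsize=' + config.max_file_size // dropping this flag removes the per-process file size cap
];

// Stand-in for the job's real command line.
const proc_call = [...prlimit, 'bash', '-c', 'echo hello'];

const proc = cp.spawn(proc_call[0], proc_call.slice(1), { stdio: 'inherit' });
proc.on('exit', code => console.log('exited with', code));
```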
@@ -183,10 +182,12 @@ class Job {
         };
     }

-    async cleanup_processes(){
-        let processes = [1];
+    async cleanup() {
+        logger.info(`Cleaning up job uuid=${this.uuid}`);
+        await fs.rm(this.dir, { recursive: true, force: true });
+        let processes = [1]
         while(processes.length > 0){

             processes = await ps_list();
             processes = processes.filter(proc => proc.uid == this.uid);

@@ -210,32 +211,8 @@ class Job {
                 wait_pid(proc.pid);
             }
         }
-    }

-    async cleanup_filesystem(){
-
-        for (const clean_path of globals.clean_directories) {
-            const contents = await fs.readdir(clean_path);
-
-            for (const file of contents) {
-                const file_path = path.join(clean_path, file);
-                const stat = await fs.stat(file_path);
-                if(stat.uid == this.uid)
-                    await fs.rm(file_path, { recursive: true, force: true });
-            }
-
-        }
-
-        await fs.rm(this.dir, { recursive: true, force: true });
-    }
-
-    async cleanup() {
-        logger.info(`Cleaning up job uuid=${this.uuid}`);
-
-        await Promise.all([
-            this.cleanup_processes(),
-            this.cleanup_filesystem()
-        ]);
     }

 }
@@ -11,4 +11,3 @@ services:
             - ./data/piston:/piston
         tmpfs:
             - /piston/jobs:exec
-            - /tmp
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-
-# Put instructions to build your package in here
-PREFIX=$(realpath $(dirname $0))
-
-mkdir -p build
-
-cd build
-
-curl -OL "https://downloads.sourceforge.net/project/gnucobol/gnucobol/3.1/gnucobol-3.1.2.tar.xz"
-
-tar xf gnucobol-3.1.2.tar.xz --strip-components=1
-
-# === autoconf based ===
-./configure --prefix "$PREFIX" --without-db
-
-make -j$(nproc)
-make install -j$(nproc)
-cd ../
-rm -rf build
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-cobc -o binary --free -x -L lib "$@"
-chmod +x binary
-
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-export PATH=$PWD/bin:$PATH
-export LD_LIBRARY_PATH=$PWD/lib
-
@@ -1,5 +0,0 @@
-{
-    "language": "cobol",
-    "version": "3.1.2",
-    "aliases": ["cob"]
-}
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-shift
-./binary "$@"
-
@@ -1,8 +0,0 @@
-*> Test Program
-identification division.
-program-id. ok-test.
-
-procedure division.
-display "OK"
-goback.
-end program ok-test.
@@ -17,7 +17,7 @@ tar xzf gcc.tar.gz --strip-components=1
 cd ../obj

 # === autoconf based ===
-../build/configure --prefix "$PREFIX" --enable-languages=c,c++,d,fortran --disable-multilib --disable-bootstrap
+../build/configure --prefix "$PREFIX" --enable-languages=c,c++,d --disable-multilib --disable-bootstrap

 make -j$(nproc)
 make install -j$(nproc)
@@ -16,10 +16,6 @@ case "${PISTON_LANGUAGE}" in
         rename 's/.code$/\.d/' "$@" # Add .d extension
         gdc *.d
         ;;
-    fortran)
-        rename 's/.code$/\.f90/' "$@" # Add .f90 extension
-        gfortran *.f90
-        ;;
     *)
         echo "How did you get here? (${PISTON_LANGUAGE})"
         exit 1
@@ -2,4 +2,3 @@

 # Put 'export' statements here for environment variables
 export PATH=$PWD/bin:$PATH
-export LD_LIBRARY_PATH="$PWD/lib:$PWD/lib64" # Need this to properly link Fortran
@@ -13,10 +13,6 @@
         {
             "language": "d",
             "aliases": ["gdc"]
-        },
-        {
-            "language": "fortran",
-            "aliases": ["fortran", "f90"]
         }
     ]
 }
@@ -1,3 +0,0 @@
-program test
-    print "(a)", 'OK'
-end program test
@@ -2,12 +2,9 @@

 PREFIX=$(realpath $(dirname $0))

-mkdir -p build
-cd build
-
-# lolcode release
-curl -L "https://github.com/justinmeza/lci/archive/refs/tags/v0.11.2.tar.gz" -o lolcode.tar.gz
-tar xzf lolcode.tar.gz --strip-components=1
+# Cloning lolcode source
+git clone https://github.com/justinmeza/lci.git lolcode
+cd lolcode

 # Building and installing lolcode
 cmake -DCMAKE_INSTALL_PREFIX:STRING="$PREFIX" .
@@ -15,4 +12,4 @@ make -j$(nproc)
 make install -j$(nproc)

 # Cleaning up
-cd ../ && rm -rf build
+cd ../ && rm -rf lolcode
@@ -170,9 +170,9 @@ The container exposes an API on port 2000 by default.
 This is used by the CLI to carry out running jobs and package management.

 #### Runtimes Endpoint
-`GET /api/v2/runtimes`
+`GET /api/v1/runtimes`
 This endpoint will return the supported languages along with the current version and aliases. To execute
-code for a particular language using the `/api/v2/execute` endpoint, either the name or one of the aliases must
+code for a particular language using the `/api/v1/execute` endpoint, either the name or one of the aliases must
 be provided, along with the version.
 Multiple versions of the same language may be present at the same time, and may be selected when running a job.
 ```json
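The readme text in the hunk above says a runtime can be selected either by its name or by one of its aliases. The following is a small, hedged sketch of what that lookup could look like against the runtimes endpoint; the `/api/v2/runtimes` path follows the left-hand side of the hunk, and the response shape (an array of objects with `language`, `version`, and `aliases`) is an assumption, not confirmed by this diff.

```js
// Hypothetical sketch: resolving a runtime by name or alias.
// Uses the global fetch available in Node 18+ (or a browser).
const fetch_runtimes = async (base_url) => {
    const res = await fetch(`${base_url}/api/v2/runtimes`);
    return res.json(); // assumed: [{ language, version, aliases: [...] }, ...]
};

const resolve_runtime = (runtimes, name_or_alias) =>
    runtimes.find(rt =>
        rt.language === name_or_alias ||
        (rt.aliases || []).includes(name_or_alias)
    );

// Usage (assumes a local API on port 2000):
// fetch_runtimes('http://localhost:2000')
//     .then(rts => console.log(resolve_runtime(rts, 'f90')));
```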
@@ -199,7 +199,7 @@ Content-Type: application/json
 ```

 #### Execute Endpoint
-`POST /api/v2/execute`
+`POST /api/v1/execute`
 This endpoint requests execution of some arbitrary code.
 - `language` (**required**) The language to use for execution, must be a string and must be installed.
 - `version` (**required**) The version of the language to use for execution, must be a string containing a SemVer selector for the version or the specific version number to use.
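The readme lines above only confirm the `language` and `version` fields of the execute request. A minimal client-side sketch follows, assuming the `/api/v2/execute` path from the left-hand side of the hunk and a `files` array of name/content objects for the source code — that field is an assumption, not confirmed by this diff.

```js
// Hypothetical sketch of a request to the execute endpoint described above.
// Only `language` and `version` are documented in the lines shown here; the
// request body shape beyond that is assumed. Requires Node 18+ for fetch.
const execute = async (base_url, language, version, source) => {
    const res = await fetch(`${base_url}/api/v2/execute`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            language,
            version, // SemVer selector or exact version number
            files: [{ name: 'main', content: source }] // assumed field
        })
    });
    return res.json();
};

// Usage (assumes a local API on port 2000):
// execute('http://localhost:2000', 'python', '3.*', 'print("OK")')
//     .then(console.log);
```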
@@ -1,13 +1,6 @@
 FROM debian:buster-slim
 ENV DEBIAN_FRONTEND noninteractive
-RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-dev \
-    pkg-config zlib1g-dev libargon2-dev libsodium-dev libcurl4-openssl-dev \
-    sqlite3 libsqlite3-dev libonig-dev libxml2 libxml2-dev bc curl git \
-    linux-headers-amd64 perl xz-utils python3 python3-pip gnupg jq zlib1g-dev \
-    cmake cmake-doc extra-cmake-modules build-essential gcc binutils bash coreutils \
-    util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \
-    libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev \
-    libreadline-dev && \
+RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-dev pkg-config zlib1g-dev libargon2-dev libsodium-dev libcurl4-openssl-dev sqlite3 libsqlite3-dev libonig-dev libxml2 libxml2-dev bc curl git linux-headers-amd64 perl xz-utils python3 python3-pip gnupg jq zlib1g-dev cmake cmake-doc extra-cmake-modules build-essential gcc binutils bash coreutils util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev && \
     ln -sf /bin/bash /bin/sh && \
     rm -rf /var/lib/apt/lists/* && \
     update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2
@@ -1,12 +0,0 @@
-"""
-Description
-    Writing a large file to disk in the jobs directory, exhausting the
-    space will temporarly disable other jobs to be started.
-
-Discovered by
-    Discord Derpius#9144
-"""
-
-with open("beans","w") as f:
-    n = 2**24
-    f.write("I love beans\n"*n)
@@ -1,25 +0,0 @@
-"""
-Description
-    Files can be written into world writable directories without being removed,
-    potentially leading to disk space exhaustion
-
-    Run this test twice and there should be no output
-
-"""
-
-import os
-
-directories = [
-    "/dev/shm",
-    "/run/lock",
-    "/tmp",
-    "/var/tmp"
-]
-
-for dir in directories:
-    fpath = f"{dir}/bean"
-    if os.path.exists(fpath):
-        print(f"{fpath} exists")
-    else:
-        with open(fpath, "w") as f:
-            f.write("beannn")
@@ -1,6 +0,0 @@
-import os
-while True:
-    try:
-        os.fork()
-    except:
-        pass
@@ -1,8 +0,0 @@
-"""
-Description
-    Accessing external resources could be potentially dangerous
-
-"""
-
-import urllib.request
-contents = urllib.request.urlopen("https://emkc.org").read()
@@ -1,9 +0,0 @@
-# Exploit Tests
-
-This directory contains a collection of exploits which have already been patched
-
-Write exploits in any language supported by piston.
-
-Hopefully when running any files in this directory, piston will resist the attack.
-
-Leave a comment in the code describing how the exploit works.
@@ -1,2 +0,0 @@
-while True:
-    print("Piston is secure")