Compare commits
12 Commits
959830c652...9f98f393a7
Author | SHA1 | Date |
---|---|---|
JeffreyHuang06 | 9f98f393a7 | |
Thomas Hobson | 5509492a99 | |
Thomas Hobson | 0299810d6c | |
Thomas Hobson | 3bd73d07a9 | |
Thomas Hobson | a6bc24e22e | |
Thomas Hobson | 73391cf718 | |
Thomas Hobson | 6b138f2377 | |
Thomas Hobson | 724cbbaa9b | |
Dan Vargas | 3dfade7c91 | |
Dan Vargas | 08ea3b3740 | |
Victor Frazao | 3355ffafb2 | |
Victor Frazao | 65cbefa94e | |
@@ -74,6 +74,7 @@ jobs:
 output_max_size: 1024
 max_process_count: 64
 max_open_files: 2048
+max_file_size: 1000000
 repo_url: http://localhost:8000/index

 write-mode: overwrite
@@ -102,6 +102,12 @@ const options = [
         default: 2048,
         validators: []
     },
+    {
+        key: 'max_file_size',
+        desc: 'Max file size in bytes for a file',
+        default: 1000000, //1MB
+        validators: []
+    },
     {
         key: 'repo_url',
         desc: 'URL of repo index',
@@ -16,5 +16,11 @@ module.exports = {
     },
     version: require('../package.json').version,
     platform,
-    pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed
+    pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
+    clean_directories: [
+        "/dev/shm",
+        "/run/lock",
+        "/tmp",
+        "/var/tmp"
+    ]
 };
@@ -74,7 +74,8 @@ class Job {
         const prlimit = [
             'prlimit',
             '--nproc=' + config.max_process_count,
-            '--nofile=' + config.max_open_files
+            '--nofile=' + config.max_open_files,
+            '--fsize=' + config.max_file_size
         ];

         const proc_call = [
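Taken together with the new `max_file_size` default from the config hunks above, the prlimit wrapper now caps file size as well. As a rough illustration (a hand-written sketch using the default values shown earlier, not a line from this diff), the assembled command looks like the following, where `some_command` and its arguments are placeholders for the actual job invocation:

# Hedged sketch: prlimit(1) from util-linux applies the resource limits, then runs the job.
# `some_command arg1 arg2` is a placeholder, not taken from the diff.
prlimit --nproc=64 --nofile=2048 --fsize=1000000 some_command arg1 arg2

`--fsize` sets RLIMIT_FSIZE, so a job that tries to write past the limit is stopped (SIGXFSZ, or EFBIG on the write) instead of filling the disk.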
@@ -182,12 +183,10 @@ class Job {
         };
     }

-    async cleanup() {
-        logger.info(`Cleaning up job uuid=${this.uuid}`);
-        await fs.rm(this.dir, { recursive: true, force: true });
-        let processes = [1]
-
-        while(processes.length > 0){
+    async cleanup_processes(){
+        let processes = [1];
+
+        while(processes.length > 0){
             processes = await ps_list();
             processes = processes.filter(proc => proc.uid == this.uid);

@@ -211,8 +210,32 @@ class Job {
                 wait_pid(proc.pid);
             }
         }
+    }
+
+    async cleanup_filesystem(){
+        for (const clean_path of globals.clean_directories) {
+            const contents = await fs.readdir(clean_path);
+
+            for (const file of contents) {
+                const file_path = path.join(clean_path, file);
+                const stat = await fs.stat(file_path);
+                if(stat.uid == this.uid)
+                    await fs.rm(file_path, { recursive: true, force: true });
+            }
+        }
+
+        await fs.rm(this.dir, { recursive: true, force: true });
+    }
+
+    async cleanup() {
+        logger.info(`Cleaning up job uuid=${this.uuid}`);
+
+        await Promise.all([
+            this.cleanup_processes(),
+            this.cleanup_filesystem()
+        ]);
     }

 }
@@ -11,3 +11,4 @@ services:
             - ./data/piston:/piston
         tmpfs:
             - /piston/jobs:exec
+            - /tmp
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+# Put instructions to build your package in here
+PREFIX=$(realpath $(dirname $0))
+
+mkdir -p build
+
+cd build
+
+curl -OL "https://downloads.sourceforge.net/project/gnucobol/gnucobol/3.1/gnucobol-3.1.2.tar.xz"
+
+tar xf gnucobol-3.1.2.tar.xz --strip-components=1
+
+# === autoconf based ===
+./configure --prefix "$PREFIX" --without-db
+
+make -j$(nproc)
+make install -j$(nproc)
+cd ../
+rm -rf build
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+cobc -o binary --free -x -L lib "$@"
+chmod +x binary
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+export PATH=$PWD/bin:$PATH
+export LD_LIBRARY_PATH=$PWD/lib
@@ -0,0 +1,5 @@
+{
+    "language": "cobol",
+    "version": "3.1.2",
+    "aliases": ["cob"]
+}
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+shift
+./binary "$@"
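For orientation, the package scripts above chain together roughly as follows. This is a hand-written sketch, assuming the COBOL test program from the next hunk has been saved as `test.cob`; inside Piston the runner invokes these stages itself, so the filenames and manual calls here are assumptions.

# Hypothetical manual walk-through of the COBOL package scripts (filenames assumed)
source ./environment        # puts the package's bin/ and lib/ on PATH / LD_LIBRARY_PATH
bash ./compile test.cob     # cobc builds an executable named ./binary
bash ./run test.cob         # `shift` drops the filename, remaining args go to ./binary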
@@ -0,0 +1,8 @@
+*> Test Program
+identification division.
+program-id. ok-test.
+
+procedure division.
+display "OK"
+goback.
+end program ok-test.
@@ -17,7 +17,7 @@ tar xzf gcc.tar.gz --strip-components=1
 cd ../obj

 # === autoconf based ===
-../build/configure --prefix "$PREFIX" --enable-languages=c,c++,d --disable-multilib --disable-bootstrap
+../build/configure --prefix "$PREFIX" --enable-languages=c,c++,d,fortran --disable-multilib --disable-bootstrap

 make -j$(nproc)
 make install -j$(nproc)
@@ -16,6 +16,10 @@ case "${PISTON_LANGUAGE}" in
         rename 's/.code$/\.d/' "$@" # Add .d extension
         gdc *.d
         ;;
+    fortran)
+        rename 's/.code$/\.f90/' "$@" # Add .f90 extension
+        gfortran *.f90
+        ;;
     *)
         echo "How did you get here? (${PISTON_LANGUAGE})"
         exit 1
@@ -2,3 +2,4 @@

 # Put 'export' statements here for environment variables
 export PATH=$PWD/bin:$PATH
+export LD_LIBRARY_PATH="$PWD/lib:$PWD/lib64" # Need this to properly link Fortran
@@ -13,6 +13,10 @@
         {
             "language": "d",
             "aliases": ["gdc"]
+        },
+        {
+            "language": "fortran",
+            "aliases": ["fortran", "f90"]
         }
     ]
 }
@@ -0,0 +1,3 @@
+program test
+print "(a)", 'OK'
+end program test
@@ -2,9 +2,12 @@

 PREFIX=$(realpath $(dirname $0))

-# Cloning lolcode source
-git clone https://github.com/justinmeza/lci.git lolcode
-cd lolcode
+mkdir -p build
+cd build
+
+# lolcode release
+curl -L "https://github.com/justinmeza/lci/archive/refs/tags/v0.11.2.tar.gz" -o lolcode.tar.gz
+tar xzf lolcode.tar.gz --strip-components=1

 # Building and installing lolcode
 cmake -DCMAKE_INSTALL_PREFIX:STRING="$PREFIX" .
@@ -12,4 +15,4 @@ make -j$(nproc)
 make install -j$(nproc)

 # Cleaning up
-cd ../ && rm -rf lolcode
+cd ../ && rm -rf build
@@ -170,9 +170,9 @@ The container exposes an API on port 2000 by default.
 This is used by the CLI to carry out running jobs and package management.

 #### Runtimes Endpoint
-`GET /api/v1/runtimes`
+`GET /api/v2/runtimes`
 This endpoint will return the supported languages along with the current version and aliases. To execute
-code for a particular language using the `/api/v1/execute` endpoint, either the name or one of the aliases must
+code for a particular language using the `/api/v2/execute` endpoint, either the name or one of the aliases must
 be provided, along with the version.
 Multiple versions of the same language may be present at the same time, and may be selected when running a job.
 ```json
@@ -199,7 +199,7 @@ Content-Type: application/json
 ```

 #### Execute Endpoint
-`POST /api/v1/execute`
+`POST /api/v2/execute`
 This endpoint requests execution of some arbitrary code.
 - `language` (**required**) The language to use for execution, must be a string and must be installed.
 - `version` (**required**) The version of the language to use for execution, must be a string containing a SemVer selector for the version or the specific version number to use.
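For reference, hedged examples of calling the two renamed endpoints against a local container. The README context above states the API listens on port 2000 by default; only `language` and `version` are documented in these hunks, so the rest of the request body is an assumption rather than something taken from the diff.

# List installed runtimes
curl -s http://localhost:2000/api/v2/runtimes

# Request execution (the `files` field and its shape are assumed)
curl -s -X POST http://localhost:2000/api/v2/execute \
    -H "Content-Type: application/json" \
    -d '{"language": "cobol", "version": "3.1.2", "files": [{"name": "main.cob", "content": "..."}]}'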
@@ -1,6 +1,13 @@
 FROM debian:buster-slim
 ENV DEBIAN_FRONTEND noninteractive
-RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-dev pkg-config zlib1g-dev libargon2-dev libsodium-dev libcurl4-openssl-dev sqlite3 libsqlite3-dev libonig-dev libxml2 libxml2-dev bc curl git linux-headers-amd64 perl xz-utils python3 python3-pip gnupg jq zlib1g-dev cmake cmake-doc extra-cmake-modules build-essential gcc binutils bash coreutils util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev && \
+RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-dev \
+    pkg-config zlib1g-dev libargon2-dev libsodium-dev libcurl4-openssl-dev \
+    sqlite3 libsqlite3-dev libonig-dev libxml2 libxml2-dev bc curl git \
+    linux-headers-amd64 perl xz-utils python3 python3-pip gnupg jq zlib1g-dev \
+    cmake cmake-doc extra-cmake-modules build-essential gcc binutils bash coreutils \
+    util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \
+    libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev \
+    libreadline-dev && \
     ln -sf /bin/bash /bin/sh && \
     rm -rf /var/lib/apt/lists/* && \
     update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2
@@ -0,0 +1,12 @@
+"""
+Description
+Writing a large file to disk in the jobs directory, exhausting the
+space, will temporarily prevent other jobs from being started.
+
+Discovered by
+Discord Derpius#9144
+"""
+
+with open("beans","w") as f:
+    n = 2**24
+    f.write("I love beans\n"*n)
@@ -0,0 +1,25 @@
+"""
+Description
+Files can be written into world writable directories without being removed,
+potentially leading to disk space exhaustion
+
+Run this test twice and there should be no output
+
+"""
+
+import os
+
+directories = [
+    "/dev/shm",
+    "/run/lock",
+    "/tmp",
+    "/var/tmp"
+]
+
+for dir in directories:
+    fpath = f"{dir}/bean"
+    if os.path.exists(fpath):
+        print(f"{fpath} exists")
+    else:
+        with open(fpath, "w") as f:
+            f.write("beannn")
@@ -0,0 +1,6 @@
+import os
+while True:
+    try:
+        os.fork()
+    except:
+        pass
@@ -0,0 +1,8 @@
+"""
+Description
+Accessing external resources could be potentially dangerous
+
+"""
+
+import urllib.request
+contents = urllib.request.urlopen("https://emkc.org").read()
@@ -0,0 +1,9 @@
+# Exploit Tests
+
+This directory contains a collection of exploits which have already been patched.
+
+Write exploits in any language supported by piston.
+
+Hopefully, when running any of the files in this directory, piston will resist the attack.
+
+Leave a comment in the code describing how the exploit works.
@@ -0,0 +1,2 @@
+while True:
+    print("Piston is secure")