Merge branch 'engineer-man:master' into master

This commit is contained in:
Hydrazer 2021-09-30 10:18:55 -06:00 committed by GitHub
commit aa252e509b
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
70 changed files with 1748 additions and 260 deletions

1
.envrc Normal file
View file

@ -0,0 +1 @@
use_nix

View file

@ -0,0 +1,10 @@
---
name: Language Request
about: Template for requesting language support
title: Add [insert language name here]
labels: package
assignees: ''
---
Provide links to different compilers/interpreters that could be used to implement this language, and discuss pros/cons of each.

View file

@ -0,0 +1,10 @@
Checklist:
* [ ] The package builds locally with `./piston build-pkg [package] [version]`
* [ ] The package installs with `./piston ppman install [package]=[version]`
* [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
* [ ] Package files are placed in the correct directory
* [ ] No old package versions are removed
* [ ] All source files are deleted in the `build.sh` script
* [ ] `metadata.json`'s `language` and `version` fields match the directory path
* [ ] Any extensions the language may use are set as aliases
* [ ] Any alternative names the language is referred to are set as aliases.

View file

@ -1,4 +1,4 @@
name: 'Package Pull Requests' name: "Package Pull Requests"
on: on:
pull_request: pull_request:
@ -8,9 +8,30 @@ on:
- reopened - reopened
- synchronize - synchronize
paths: paths:
- 'packages/**' - "packages/**"
jobs: jobs:
check-pkg:
name: Validate README
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Get list of changed files
uses: lots0logs/gh-action-get-changed-files@2.1.4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Ensure README was updated
run: |
MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))
[[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
echo "README has supported languages missing: "
comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
exit 1
build-pkg: build-pkg:
name: Check that package builds name: Check that package builds
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -34,7 +55,9 @@ jobs:
run: | run: |
PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u) PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
echo "Packages: $PACKAGES" echo "Packages: $PACKAGES"
docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
docker build -t repo-builder repo
docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
ls -la packages ls -la packages
- name: Upload package as artifact - name: Upload package as artifact
@ -43,7 +66,6 @@ jobs:
name: packages name: packages
path: packages/*.pkg.tar.gz path: packages/*.pkg.tar.gz
test-pkg: test-pkg:
name: Test package name: Test package
runs-on: ubuntu-latest runs-on: ubuntu-latest
@ -69,7 +91,9 @@ jobs:
run: | run: |
ls -la ls -la
docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api docker.pkg.github.com/engineer-man/piston/api docker pull docker.pkg.github.com/engineer-man/piston/api
docker build -t piston-api api
docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
echo Waiting for API to start.. echo Waiting for API to start..
docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
@ -109,17 +133,8 @@ jobs:
done done
done done
- name: Dump logs - name: Dump logs
if: ${{ always() }} if: ${{ always() }}
run: | run: |
docker logs api docker logs api
docker logs repo docker logs repo

View file

@ -33,7 +33,9 @@ jobs:
run: | run: |
PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u) PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
echo "Packages: $PACKAGES" echo "Packages: $PACKAGES"
docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
docker build -t repo-builder repo
docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
ls -la packages ls -la packages
- name: Upload Packages - name: Upload Packages

1
.gitignore vendored
View file

@ -1 +1,2 @@
data/ data/
.piston_env

8
.readthedocs.yaml Normal file
View file

@ -0,0 +1,8 @@
version: 2
mkdocs:
configuration: mkdocs.yml
python:
version: 3.7
install:
- requirements: docs/requirements.txt

View file

@ -13,7 +13,7 @@ RUN apt-get update && \
libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \ libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \
libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \
libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \ libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \
libsundials-dev && \ libsundials-dev libpcre2-dev && \
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen

49
api/package-lock.json generated
View file

@ -12,6 +12,7 @@
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
"chownr": "^2.0.0", "chownr": "^2.0.0",
"express": "^4.17.1", "express": "^4.17.1",
"express-ws": "^5.0.2",
"is-docker": "^2.1.1", "is-docker": "^2.1.1",
"logplease": "^1.2.15", "logplease": "^1.2.15",
"nocamel": "HexF/nocamel#patch-1", "nocamel": "HexF/nocamel#patch-1",
@ -196,6 +197,20 @@
"node": ">= 0.10.0" "node": ">= 0.10.0"
} }
}, },
"node_modules/express-ws": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz",
"integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==",
"dependencies": {
"ws": "^7.4.6"
},
"engines": {
"node": ">=4.5.0"
},
"peerDependencies": {
"express": "^4.0.0 || ^5.0.0-alpha.1"
}
},
"node_modules/finalhandler": { "node_modules/finalhandler": {
"version": "1.1.2", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
@ -582,6 +597,26 @@
"node_modules/waitpid": { "node_modules/waitpid": {
"resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa" "resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa"
}, },
"node_modules/ws": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
"integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
"engines": {
"node": ">=8.3.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/yallist": { "node_modules/yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
@ -728,6 +763,14 @@
"vary": "~1.1.2" "vary": "~1.1.2"
} }
}, },
"express-ws": {
"version": "5.0.2",
"resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz",
"integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==",
"requires": {
"ws": "^7.4.6"
}
},
"finalhandler": { "finalhandler": {
"version": "1.1.2", "version": "1.1.2",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
@ -1010,6 +1053,12 @@
"version": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa", "version": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa",
"from": "waitpid@git+https://github.com/HexF/node-waitpid.git" "from": "waitpid@git+https://github.com/HexF/node-waitpid.git"
}, },
"ws": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
"integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
"requires": {}
},
"yallist": { "yallist": {
"version": "4.0.0", "version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",

View file

@ -1,12 +1,13 @@
{ {
"name": "piston-api", "name": "piston-api",
"version": "3.0.0", "version": "3.1.0",
"description": "API for piston - a high performance code execution engine", "description": "API for piston - a high performance code execution engine",
"main": "src/index.js", "main": "src/index.js",
"dependencies": { "dependencies": {
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
"chownr": "^2.0.0", "chownr": "^2.0.0",
"express": "^4.17.1", "express": "^4.17.1",
"express-ws": "^5.0.2",
"is-docker": "^2.1.1", "is-docker": "^2.1.1",
"logplease": "^1.2.15", "logplease": "^1.2.15",
"nocamel": "HexF/nocamel#patch-1", "nocamel": "HexF/nocamel#patch-1",

View file

@ -1,12 +1,126 @@
const express = require('express'); const express = require('express');
const router = express.Router(); const router = express.Router();
const events = require('events');
const config = require('../config'); const config = require('../config');
const runtime = require('../runtime'); const runtime = require('../runtime');
const { Job } = require('../job'); const { Job } = require('../job');
const package = require('../package'); const package = require('../package');
const logger = require('logplease').create('api/v2'); const logger = require('logplease').create('api/v2');
const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
// ref: https://man7.org/linux/man-pages/man7/signal.7.html
function get_job(body){
const {
language,
version,
args,
stdin,
files,
compile_memory_limit,
run_memory_limit,
run_timeout,
compile_timeout
} = body;
return new Promise((resolve, reject) => {
if (!language || typeof language !== 'string') {
return reject({
message: 'language is required as a string',
});
}
if (!version || typeof version !== 'string') {
return reject({
message: 'version is required as a string',
});
}
if (!files || !Array.isArray(files)) {
return reject({
message: 'files is required as an array',
});
}
for (const [i, file] of files.entries()) {
if (typeof file.content !== 'string') {
return reject({
message: `files[${i}].content is required as a string`,
});
}
}
if (compile_memory_limit) {
if (typeof compile_memory_limit !== 'number') {
return reject({
message: 'if specified, compile_memory_limit must be a number',
});
}
if (
config.compile_memory_limit >= 0 &&
(compile_memory_limit > config.compile_memory_limit ||
compile_memory_limit < 0)
) {
return reject({
message:
'compile_memory_limit cannot exceed the configured limit of ' +
config.compile_memory_limit,
});
}
}
if (run_memory_limit) {
if (typeof run_memory_limit !== 'number') {
return reject({
message: 'if specified, run_memory_limit must be a number',
});
}
if (
config.run_memory_limit >= 0 &&
(run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
) {
return reject({
message:
'run_memory_limit cannot exceed the configured limit of ' +
config.run_memory_limit,
});
}
}
const rt = runtime.get_latest_runtime_matching_language_version(
language,
version
);
if (rt === undefined) {
return reject({
message: `${language}-${version} runtime is unknown`,
});
}
resolve(new Job({
runtime: rt,
alias: language,
args: args || [],
stdin: stdin || "",
files,
timeouts: {
run: run_timeout || 3000,
compile: compile_timeout || 10000,
},
memory_limits: {
run: run_memory_limit || config.run_memory_limit,
compile: compile_memory_limit || config.compile_memory_limit,
}
}));
})
}
router.use((req, res, next) => { router.use((req, res, next) => {
if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) { if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) {
return next(); return next();
@ -21,118 +135,101 @@ router.use((req, res, next) => {
next(); next();
}); });
router.ws('/connect', async (ws, req) => {
let job = null;
let eventBus = new events.EventEmitter();
eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
ws.on("message", async (data) => {
try{
const msg = JSON.parse(data);
switch(msg.type){
case "init":
if(job === null){
job = await get_job(msg);
await job.prime();
ws.send(JSON.stringify({
type: "runtime",
language: job.runtime.language,
version: job.runtime.version.raw
}))
await job.execute_interactive(eventBus);
ws.close(4999, "Job Completed");
}else{
ws.close(4000, "Already Initialized");
}
break;
case "data":
if(job !== null){
if(msg.stream === "stdin"){
eventBus.emit("stdin", msg.data)
}else{
ws.close(4004, "Can only write to stdin")
}
}else{
ws.close(4003, "Not yet initialized")
}
break;
case "signal":
if(job !== null){
if(SIGNALS.includes(msg.signal)){
eventBus.emit("signal", msg.signal)
}else{
ws.close(4005, "Invalid signal")
}
}else{
ws.close(4003, "Not yet initialized")
}
break;
}
}catch(error){
ws.send(JSON.stringify({type: "error", message: error.message}))
ws.close(4002, "Notified Error")
// ws.close message is limited to 123 characters, so we notify over WS then close.
}
})
ws.on("close", async ()=>{
if(job !== null){
await job.cleanup()
}
})
setTimeout(()=>{
//Terminate the socket after 1 second, if not initialized.
if(job === null)
ws.close(4001, "Initialization Timeout");
}, 1000)
})
router.post('/execute', async (req, res) => { router.post('/execute', async (req, res) => {
const {
language,
version,
files,
stdin,
args,
run_timeout,
compile_timeout,
compile_memory_limit,
run_memory_limit,
} = req.body;
if (!language || typeof language !== 'string') { try{
return res.status(400).send({ const job = await get_job(req.body);
message: 'language is required as a string',
}); await job.prime();
const result = await job.execute();
await job.cleanup();
return res.status(200).send(result);
}catch(error){
return res.status(400).json(error);
} }
if (!version || typeof version !== 'string') {
return res.status(400).send({
message: 'version is required as a string',
});
}
if (!files || !Array.isArray(files)) {
return res.status(400).send({
message: 'files is required as an array',
});
}
for (const [i, file] of files.entries()) {
if (typeof file.content !== 'string') {
return res.status(400).send({
message: `files[${i}].content is required as a string`,
});
}
}
if (compile_memory_limit) {
if (typeof compile_memory_limit !== 'number') {
return res.status(400).send({
message: 'if specified, compile_memory_limit must be a number',
});
}
if (
config.compile_memory_limit >= 0 &&
(compile_memory_limit > config.compile_memory_limit ||
compile_memory_limit < 0)
) {
return res.status(400).send({
message:
'compile_memory_limit cannot exceed the configured limit of ' +
config.compile_memory_limit,
});
}
}
if (run_memory_limit) {
if (typeof run_memory_limit !== 'number') {
return res.status(400).send({
message: 'if specified, run_memory_limit must be a number',
});
}
if (
config.run_memory_limit >= 0 &&
(run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
) {
return res.status(400).send({
message:
'run_memory_limit cannot exceed the configured limit of ' +
config.run_memory_limit,
});
}
}
const rt = runtime.get_latest_runtime_matching_language_version(
language,
version
);
if (rt === undefined) {
return res.status(400).send({
message: `${language}-${version} runtime is unknown`,
});
}
const job = new Job({
runtime: rt,
alias: language,
files: files,
args: args || [],
stdin: stdin || '',
timeouts: {
run: run_timeout || 3000,
compile: compile_timeout || 10000,
},
memory_limits: {
run: run_memory_limit || config.run_memory_limit,
compile: compile_memory_limit || config.compile_memory_limit,
},
});
await job.prime();
const result = await job.execute();
await job.cleanup();
return res.status(200).send(result);
}); });
router.get('/runtimes', (req, res) => { router.get('/runtimes', (req, res) => {

View file

@ -2,6 +2,7 @@
require('nocamel'); require('nocamel');
const Logger = require('logplease'); const Logger = require('logplease');
const express = require('express'); const express = require('express');
const expressWs = require('express-ws');
const globals = require('./globals'); const globals = require('./globals');
const config = require('./config'); const config = require('./config');
const path = require('path'); const path = require('path');
@ -12,6 +13,9 @@ const runtime = require('./runtime');
const logger = Logger.create('index'); const logger = Logger.create('index');
const app = express(); const app = express();
expressWs(app);
(async () => { (async () => {
logger.info('Setting loglevel to', config.log_level); logger.info('Setting loglevel to', config.log_level);

View file

@ -59,6 +59,13 @@ class Job {
for (const file of this.files) { for (const file of this.files) {
let file_path = path.join(this.dir, file.name); let file_path = path.join(this.dir, file.name);
const rel = path.relative(this.dir, file_path);
if(rel.startsWith(".."))
throw Error(`File path "${file.name}" tries to escape parent directory: ${rel}`)
await fs.mkdir(path.dirname(file_path), {recursive: true, mode: 0o700})
await fs.chown(path.dirname(file_path), this.uid, this.gid);
await fs.write_file(file_path, file.content); await fs.write_file(file_path, file.content);
await fs.chown(file_path, this.uid, this.gid); await fs.chown(file_path, this.uid, this.gid);
@ -69,7 +76,7 @@ class Job {
logger.debug('Primed job'); logger.debug('Primed job');
} }
async safe_call(file, args, timeout, memory_limit) { async safe_call(file, args, timeout, memory_limit, eventBus = null) {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const nonetwork = config.disable_networking ? ['nosocket'] : []; const nonetwork = config.disable_networking ? ['nosocket'] : [];
@ -102,9 +109,21 @@ class Job {
detached: true, //give this process its own process group detached: true, //give this process its own process group
}); });
proc.stdin.write(this.stdin); if(eventBus === null){
proc.stdin.end(); proc.stdin.write(this.stdin);
proc.stdin.destroy(); proc.stdin.end();
proc.stdin.destroy();
}else{
eventBus.on("stdin", (data) => {
proc.stdin.write(data);
})
eventBus.on("kill", (signal) => {
proc.kill(signal)
})
}
const kill_timeout = set_timeout( const kill_timeout = set_timeout(
_ => proc.kill('SIGKILL'), _ => proc.kill('SIGKILL'),
@ -112,7 +131,9 @@ class Job {
); );
proc.stderr.on('data', data => { proc.stderr.on('data', data => {
if (stderr.length > config.output_max_size) { if(eventBus !== null) {
eventBus.emit("stderr", data);
} else if (stderr.length > config.output_max_size) {
proc.kill('SIGKILL'); proc.kill('SIGKILL');
} else { } else {
stderr += data; stderr += data;
@ -121,7 +142,9 @@ class Job {
}); });
proc.stdout.on('data', data => { proc.stdout.on('data', data => {
if (stdout.length > config.output_max_size) { if(eventBus !== null){
eventBus.emit("stdout", data);
} else if (stdout.length > config.output_max_size) {
proc.kill('SIGKILL'); proc.kill('SIGKILL');
} else { } else {
stdout += data; stdout += data;
@ -139,7 +162,7 @@ class Job {
proc.on('exit', (code, signal) => { proc.on('exit', (code, signal) => {
exit_cleanup(); exit_cleanup();
resolve({ stdout, stderr, code, signal, output }); resolve({stdout, stderr, code, signal, output });
}); });
proc.on('error', err => { proc.on('error', err => {
@ -196,6 +219,49 @@ class Job {
}; };
} }
async execute_interactive(eventBus){
if (this.state !== job_states.PRIMED) {
throw new Error(
'Job must be in primed state, current state: ' +
this.state.toString()
);
}
logger.info(
`Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
this.gid
} runtime=${this.runtime.toString()}`
);
if(this.runtime.compiled){
eventBus.emit("stage", "compile")
const {error, code, signal} = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'),
this.files.map(x => x.name),
this.timeouts.compile,
this.memory_limits.compile,
eventBus
)
eventBus.emit("exit", "compile", {error, code, signal})
}
logger.debug('Running');
eventBus.emit("stage", "run")
const {error, code, signal} = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'),
[this.files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run,
eventBus
);
eventBus.emit("exit", "run", {error, code, signal})
this.state = job_states.EXECUTED;
}
async cleanup_processes() { async cleanup_processes() {
let processes = [1]; let processes = [1];
@ -273,10 +339,8 @@ class Job {
async cleanup() { async cleanup() {
logger.info(`Cleaning up job uuid=${this.uuid}`); logger.info(`Cleaning up job uuid=${this.uuid}`);
await Promise.all([ await this.cleanup_processes();
this.cleanup_processes(), await this.cleanup_filesystem();
this.cleanup_filesystem(),
]);
} }
} }

1
builder/.gitignore vendored Normal file
View file

@ -0,0 +1 @@
build

2
builder/Dockerfile Normal file
View file

@ -0,0 +1,2 @@
FROM ghcr.io/engineer-man/piston:latest
ADD . /piston/packages/

61
builder/build.sh Executable file
View file

@ -0,0 +1,61 @@
#!/usr/bin/env bash
# Build a container using the spec file provided
help_msg(){
echo "Usage: $0 [specfile] [tag]"
echo
echo "$1"
exit 1
}
cleanup(){
echo "Exiting..."
docker stop builder_piston_instance && docker rm builder_piston_instance
}
fetch_packages(){
local port=$((5535 + $RANDOM % 60000))
mkdir build
# Start a piston container
docker run \
-v "$PWD/build":'/piston/packages' \
--tmpfs /piston/jobs \
-dit \
-p $port:2000 \
--name builder_piston_instance \
ghcr.io/engineer-man/piston
# Ensure the CLI is installed
cd ../cli
npm i
cd -
# Evalulate the specfile
../cli/index.js -u "http://127.0.0.1:$port" ppman spec $1
}
build_container(){
docker build -t $1 -f "$(dirname $0)/Dockerfile" "$PWD/build"
}
SPEC_FILE=$1
TAG=$2
[ -z "$SPEC_FILE" ] && help_msg "specfile is required"
[ -z "$TAG" ] && help_msg "tag is required"
[ -f "$SPEC_FILE" ] || help_msg "specfile does not exist"
which node || help_msg "nodejs is required"
which npm || help_msg "npm is required"
trap cleanup EXIT
fetch_packages $SPEC_FILE
build_container $TAG
echo "Start your custom piston container with"
echo "$ docker run --tmpfs /piston/jobs -dit -p 2000:2000 $TAG"

View file

@ -1,7 +1,10 @@
//const fetch = require('node-fetch');
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const chalk = require('chalk'); const chalk = require('chalk');
const WebSocket = require('ws');
const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
exports.command = ['execute <language> <file> [args..]']; exports.command = ['execute <language> <file> [args..]'];
exports.aliases = ['run']; exports.aliases = ['run'];
@ -35,17 +38,115 @@ exports.builder = {
alias: ['f'], alias: ['f'],
array: true, array: true,
desc: 'Additional files to add', desc: 'Additional files to add',
},
interactive: {
boolean: true,
alias: ['t'],
desc: 'Run interactively using WebSocket transport'
},
status: {
boolean: true,
alias: ['s'],
desc: 'Output additional status to stderr'
} }
}; };
exports.handler = async (argv) => { async function handle_interactive(files, argv){
const files = [...(argv.files || []),argv.file] const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
.map(file_path => {
return { const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
name: path.basename(file_path),
content: fs.readFileSync(file_path).toString() process.on("exit", ()=>{
}; ws.close();
}); process.stdin.end();
process.stdin.destroy();
process.exit();
})
for(const signal of SIGNALS){
process.on(signal, ()=>{
ws.send(JSON.stringify({type: 'signal', signal}))
})
}
ws.on('open', ()=>{
const request = {
type: "init",
language: argv.language,
version: argv['language_version'],
files: files,
args: argv.args,
compile_timeout: argv.ct,
run_timeout: argv.rt
}
ws.send(JSON.stringify(request))
log_message(chalk.white.bold("Connected"))
process.stdin.resume();
process.stdin.on("data", (data) => {
ws.send(JSON.stringify({
type: "data",
stream: "stdin",
data: data.toString()
}))
})
})
ws.on("close", (code, reason)=>{
log_message(
chalk.white.bold("Disconnected: "),
chalk.white.bold("Reason: "),
chalk.yellow(`"${reason}"`),
chalk.white.bold("Code: "),
chalk.yellow(`"${code}"`),
)
process.stdin.pause()
})
ws.on('message', function(data){
const msg = JSON.parse(data);
switch(msg.type){
case "runtime":
log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
break;
case "stage":
log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
break;
case "data":
if(msg.stream == "stdout") process.stdout.write(msg.data)
else if(msg.stream == "stderr") process.stderr.write(msg.data)
else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
break;
case "exit":
if(msg.signal === null)
log_message(
chalk.white.bold("Stage"),
chalk.yellow(msg.stage),
chalk.white.bold("exited with code"),
chalk.yellow(msg.code)
)
else
log_message(
chalk.white.bold("Stage"),
chalk.yellow(msg.stage),
chalk.white.bold("exited with signal"),
chalk.yellow(msg.signal)
)
break;
default:
log_message(chalk.red.bold("Unknown message:"), msg)
}
})
}
async function run_non_interactively(files, argv) {
const stdin = (argv.stdin && await new Promise((resolve, _) => { const stdin = (argv.stdin && await new Promise((resolve, _) => {
let data = ''; let data = '';
@ -99,3 +200,18 @@ exports.handler = async (argv) => {
step('Run', response.run); step('Run', response.run);
} }
exports.handler = async (argv) => {
const files = [...(argv.files || []),argv.file]
.map(file_path => {
return {
name: path.basename(file_path),
content: fs.readFileSync(file_path).toString()
};
});
if(argv.interactive) await handle_interactive(files, argv);
else await run_non_interactively(files, argv);
}

View file

@ -0,0 +1,160 @@
const chalk = require('chalk');
const fs = require('fs/promises');
const minimatch = require("minimatch");
const semver = require('semver');
exports.command = ['spec <specfile>'];
exports.aliases = ['s'];
exports.describe = 'Install the packages described in the spec file, uninstalling packages which aren\'t in the list'
function does_match(package, rule){
const nameMatch = minimatch(package.language, rule.package_selector);
const versionMatch = semver.satisfies(package.language_version, rule.version_selector)
return nameMatch && versionMatch;
}
exports.handler = async ({axios, specfile}) => {
const spec_contents = await fs.readFile(specfile);
const spec_lines = spec_contents.toString().split("\n");
const rules = [];
for(const line of spec_lines){
const rule = {
_raw: line.trim(),
comment: false,
package_selector: null,
version_selector: null,
negate: false
};
if(line.starts_with("#")){
rule.comment = true;
}else {
let l = line.trim();
if(line.starts_with("!")){
rule.negate = true;
l = line.slice(1).trim();
}
const [pkg, ver] = l.split(" ", 2);
rule.package_selector = pkg;
rule.version_selector = ver;
}
if(rule._raw.length != 0) rules.push(rule);
}
const packages_req = await axios.get('/api/v2/packages');
const packages = packages_req.data;
const installed = packages.filter(pkg => pkg.installed);
let ensure_packages = [];
for(const rule of rules){
if(rule.comment) continue;
const matches = [];
if(!rule.negate){
for(const package of packages){
if(does_match(package, rule))
matches.push(package)
}
const latest_matches = matches.filter(
pkg => {
const versions = matches
.filter(x=>x.language == pkg.language)
.map(x=>x.language_version).sort(semver.rcompare);
return versions[0] == pkg.language_version
}
);
for(const match of latest_matches){
if(!ensure_packages.find(pkg => pkg.language == match.language && pkg.language_version == match.language_version))
ensure_packages.push(match)
}
}else{
ensure_packages = ensure_packages.filter(
pkg => !does_match(pkg, rule)
)
}
}
const operations = [];
for(const package of ensure_packages){
if(!package.installed)
operations.push({
type: "install",
package: package.language,
version: package.language_version
});
}
for(const installed_package of installed){
if(!ensure_packages.find(
pkg => pkg.language == installed_package.language &&
pkg.language_version == installed_package.language_version
))
operations.push({
type: "uninstall",
package: installed_package.language,
version: installed_package.language_version
})
}
console.log(chalk.bold.yellow("Actions"))
for(const op of operations){
console.log((op.type == "install" ? chalk.green("Install") : chalk.red("Uninstall")) + ` ${op.package} ${op.version}`)
}
if(operations.length == 0){
console.log(chalk.gray("None"))
}
for(const op of operations){
if(op.type == "install"){
try{
const install = await axios.post(`/api/v2/packages`, {
language: op.package,
version: op.version
});
if(!install.data.language)
throw new Error(install.data.message); // Go to exception handler
console.log(chalk.bold.green("Installed"), op.package, op.version)
}catch(e){
console.log(chalk.bold.red("Failed to install") + ` ${op.package} ${op.version}:`, e.message)
}
}
else if(op.type == "uninstall"){
try{
const install = await axios.delete(`/api/v2/packages`, {
data: {
language: op.package,
version: op.version
}
});
if(!install.data.language)
throw new Error(install.data.message); // Go to exception handler
console.log(chalk.bold.green("Uninstalled"), op.package, op.version)
}catch(e){
console.log(chalk.bold.red("Failed to uninstall") + ` ${op.package} ${op.version}:`, e.message)
}
}
}
}

View file

@ -1,5 +1,5 @@
#!/usr/bin/env node #!/usr/bin/env node
require('nocamel');
const axios = require('axios').default; const axios = require('axios').default;
const axios_instance = argv => { const axios_instance = argv => {

189
cli/package-lock.json generated
View file

@ -9,8 +9,12 @@
"version": "1.0.0", "version": "1.0.0",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"axios": "^0.21.1", "axios": "^0.21.2",
"chalk": "^4.1.0", "chalk": "^4.1.0",
"minimatch": "^3.0.4",
"nocamel": "^1.0.2",
"semver": "^7.3.5",
"ws": "^7.5.3",
"yargs": "^16.2.0" "yargs": "^16.2.0"
} }
}, },
@ -34,11 +38,25 @@
} }
}, },
"node_modules/axios": { "node_modules/axios": {
"version": "0.21.1", "version": "0.21.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz",
"integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==",
"dependencies": { "dependencies": {
"follow-redirects": "^1.10.0" "follow-redirects": "^1.14.0"
}
},
"node_modules/balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"node_modules/brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"dependencies": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
} }
}, },
"node_modules/chalk": { "node_modules/chalk": {
@ -79,6 +97,11 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
}, },
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"node_modules/emoji-regex": { "node_modules/emoji-regex": {
"version": "8.0.0", "version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
@ -93,11 +116,22 @@
} }
}, },
"node_modules/follow-redirects": { "node_modules/follow-redirects": {
"version": "1.13.3", "version": "1.14.3",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz",
"integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==", "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==",
"funding": [
{
"type": "individual",
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"engines": { "engines": {
"node": ">=4.0" "node": ">=4.0"
},
"peerDependenciesMeta": {
"debug": {
"optional": true
}
} }
}, },
"node_modules/get-caller-file": { "node_modules/get-caller-file": {
@ -124,6 +158,33 @@
"node": ">=8" "node": ">=8"
} }
}, },
"node_modules/lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"dependencies": {
"yallist": "^4.0.0"
},
"engines": {
"node": ">=10"
}
},
"node_modules/minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"dependencies": {
"brace-expansion": "^1.1.7"
},
"engines": {
"node": "*"
}
},
"node_modules/nocamel": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/nocamel/-/nocamel-1.0.2.tgz",
"integrity": "sha512-CRkRSRLChj+H6e4lHS851QS6YGCoTETnSG/z+XGanxLSsTbBkvEeIWaIYMKzuBznFwWM0YcLGXsFyXg4xWYnWA=="
},
"node_modules/require-directory": { "node_modules/require-directory": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
@ -132,6 +193,20 @@
"node": ">=0.10.0" "node": ">=0.10.0"
} }
}, },
"node_modules/semver": {
"version": "7.3.5",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
"integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
"dependencies": {
"lru-cache": "^6.0.0"
},
"bin": {
"semver": "bin/semver.js"
},
"engines": {
"node": ">=10"
}
},
"node_modules/string-width": { "node_modules/string-width": {
"version": "4.2.2", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
@ -180,6 +255,26 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/ws": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
"integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
"engines": {
"node": ">=8.3.0"
},
"peerDependencies": {
"bufferutil": "^4.0.1",
"utf-8-validate": "^5.0.2"
},
"peerDependenciesMeta": {
"bufferutil": {
"optional": true
},
"utf-8-validate": {
"optional": true
}
}
},
"node_modules/y18n": { "node_modules/y18n": {
"version": "5.0.5", "version": "5.0.5",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",
@ -188,6 +283,11 @@
"node": ">=10" "node": ">=10"
} }
}, },
"node_modules/yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"node_modules/yargs": { "node_modules/yargs": {
"version": "16.2.0", "version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
@ -229,11 +329,25 @@
} }
}, },
"axios": { "axios": {
"version": "0.21.1", "version": "0.21.2",
"resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz",
"integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==",
"requires": { "requires": {
"follow-redirects": "^1.10.0" "follow-redirects": "^1.14.0"
}
},
"balanced-match": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz",
"integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="
},
"brace-expansion": {
"version": "1.1.11",
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz",
"integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==",
"requires": {
"balanced-match": "^1.0.0",
"concat-map": "0.0.1"
} }
}, },
"chalk": { "chalk": {
@ -268,6 +382,11 @@
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
}, },
"concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
"integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s="
},
"emoji-regex": { "emoji-regex": {
"version": "8.0.0", "version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
@ -279,9 +398,9 @@
"integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="
}, },
"follow-redirects": { "follow-redirects": {
"version": "1.13.3", "version": "1.14.3",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz",
"integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==" "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw=="
}, },
"get-caller-file": { "get-caller-file": {
"version": "2.0.5", "version": "2.0.5",
@ -298,11 +417,40 @@
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
}, },
"lru-cache": {
"version": "6.0.0",
"resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
"integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
"requires": {
"yallist": "^4.0.0"
}
},
"minimatch": {
"version": "3.0.4",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
"integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
"requires": {
"brace-expansion": "^1.1.7"
}
},
"nocamel": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/nocamel/-/nocamel-1.0.2.tgz",
"integrity": "sha512-CRkRSRLChj+H6e4lHS851QS6YGCoTETnSG/z+XGanxLSsTbBkvEeIWaIYMKzuBznFwWM0YcLGXsFyXg4xWYnWA=="
},
"require-directory": { "require-directory": {
"version": "2.1.1", "version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz", "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
"integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I=" "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I="
}, },
"semver": {
"version": "7.3.5",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.3.5.tgz",
"integrity": "sha512-PoeGJYh8HK4BTO/a9Tf6ZG3veo/A7ZVsYrSA6J8ny9nb3B1VrpkuN+z9OE5wfE5p6H4LchYZsegiQgbJD94ZFQ==",
"requires": {
"lru-cache": "^6.0.0"
}
},
"string-width": { "string-width": {
"version": "4.2.2", "version": "4.2.2",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
@ -339,11 +487,22 @@
"strip-ansi": "^6.0.0" "strip-ansi": "^6.0.0"
} }
}, },
"ws": {
"version": "7.5.3",
"resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
"integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
"requires": {}
},
"y18n": { "y18n": {
"version": "5.0.5", "version": "5.0.5",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",
"integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg==" "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg=="
}, },
"yallist": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
"integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
},
"yargs": { "yargs": {
"version": "16.2.0", "version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",

View file

@ -1,12 +1,16 @@
{ {
"name": "piston-cli", "name": "piston-cli",
"version": "1.0.0", "version": "1.1.0",
"description": "Piston Execution Engine CLI tools", "description": "Piston Execution Engine CLI tools",
"main": "index.js", "main": "index.js",
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"axios": "^0.21.1", "axios": "^0.21.2",
"chalk": "^4.1.0", "chalk": "^4.1.0",
"minimatch": "^3.0.4",
"nocamel": "^1.0.2",
"semver": "^7.3.5",
"ws": "^7.5.3",
"yargs": "^16.2.0" "yargs": "^16.2.0"
} }
} }

8
dev.pps Normal file
View file

@ -0,0 +1,8 @@
#!/usr/bin/env -S piston ppman spec
# Development Piston Packages
# Defines packages to be installed by developers
# All packages, latest version
# Don't use this when connected to public repo, in excess of 10GB
* *

View file

@ -1,24 +1,24 @@
version: '3.2' version: "3.2"
services: services:
api: api:
build: api build: api
container_name: piston_api container_name: piston_api
cap_add: cap_add:
- CAP_SYS_ADMIN - CAP_SYS_ADMIN
restart: always restart: always
ports: ports:
- 2000:2000 - 2000:2000
volumes: volumes:
- ./data/piston:/piston - ./data/piston:/piston
environment: environment:
- PISTON_REPO_URL=http://repo:8000/index - PISTON_REPO_URL=http://repo:8000/index
tmpfs: tmpfs:
- /piston/jobs:exec - /piston/jobs:exec
repo: # Local testing of packages repo: # Local testing of packages
build: repo build: repo
container_name: piston_repo container_name: piston_repo
command: ['dart-2.12.1'] # Only build dart command: ["--no-build"] # Don't build anything
volumes: volumes:
- .:/piston - .:/piston

234
docs/api-v2.md Normal file
View file

@ -0,0 +1,234 @@
# API
Piston exposes an API for managing packages and executing user-defined code.
The API is broken into two main sections - packages and jobs.
The API is exposed from the container, by default on port 2000, at `/api/v2/`.
All inputs are validated, and if an error occurs, a 4xx or 5xx status code is returned.
In this case, a JSON payload is sent back containing the error message as `message`
## Runtimes
### `GET /api/v2/runtimes`
Returns a list of available languages, including the version, runtime and aliases.
#### Response
- `[].language`: Name of the language
- `[].version`: Version of the runtime
- `[].aliases`: List of alternative names that can be used for the language
- `[].runtime` (_optional_): Name of the runtime used to run the language, only provided if alternative runtimes exist for the language
#### Example
```
GET /api/v2/runtimes
```
```json
HTTP/1.1 200 OK
Content-Type: application/json
[
{
"language": "bash",
"version": "5.1.0",
"aliases": ["sh"]
},
{
"language": "javascript",
"version": "15.10.0",
"aliases": ["node-javascript", "node-js", "javascript", "js"],
"runtime": "node"
}
]
```
## Execute
### `POST /api/v2/execute`
Runs the given code, using the given runtime and arguments, returning the result.
#### Request
- `language`: Name or alias of a language listed in [runtimes](#runtimes)
- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
- `files`: An array of files which should be uploaded into the job context
- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
- `files[].content`: Content of file to be written
- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
- `args` (_optional_): Arguments to pass to the program. Defaults to none
- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
#### Response
- `language`: Name (not alias) of the runtime used
- `version`: Version of the used runtime
- `run`: Results from the run stage
- `run.stdout`: stdout from run stage process
- `run.stderr`: stderr from run stage process
- `run.output`: stdout and stderr combined in order of data from run stage process
- `run.code`: Exit code from run process, or null if signal is not null
- `run.signal`: Signal from run process, or null if code is not null
- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
- `compile.stdout`: stdout from compile stage process
- `compile.stderr`: stderr from compile stage process
- `compile.output`: stdout and stderr combined in order of data from compile stage process
- `compile.code`: Exit code from compile process, or null if signal is not null
- `compile.signal`: Signal from compile process, or null if code is not null
#### Example
```json
POST /api/v2/execute
Content-Type: application/json
{
"language": "js",
"version": "15.10.0",
"files": [
{
"name": "my_cool_code.js",
"content": "console.log(process.argv)"
}
],
"stdin": "",
"args": ["1", "2", "3"],
"compile_timeout": 10000,
"run_timeout": 3000,
"compile_memory_limit": -1,
"run_memory_limit": -1
}
```
```json
HTTP/1.1 200 OK
Content-Type: application/json
{
"run": {
"stdout": "[\n '/piston/packages/node/15.10.0/bin/node',\n '/piston/jobs/e87afa0d-6c2a-40b8-a824-ffb9c5c6cb64/my_cool_code.js',\n '1',\n '2',\n '3'\n]\n",
"stderr": "",
"code": 0,
"signal": null,
"output": "[\n '/piston/packages/node/15.10.0/bin/node',\n '/piston/jobs/e87afa0d-6c2a-40b8-a824-ffb9c5c6cb64/my_cool_code.js',\n '1',\n '2',\n '3'\n]\n"
},
"language": "javascript",
"version": "15.10.0"
}
```
## Packages
### `GET /api/v2/packages`
Returns a list of all possible packages, and their installation status.
#### Response
- `[].language`: Name of the contained runtime
- `[].language_version`: Version of the contained runtime
- `[].installed`: Status on the package being installed
#### Example
```
GET /api/v2/packages
```
```json
HTTP/1.1 200 OK
Content-Type: application/json
[
{
"language": "node",
"language_version": "15.10.0",
"installed": true
},
{
"language": "bash",
"language_version": "5.1.0",
"installed": true
}
]
```
### `POST /api/v2/packages`
Install the given package.
#### Request
- `language`: Name of package from [package list](#get-apiv2packages)
- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
- `language`: Name of package installed
- `version`: Version of package installed
#### Example
```json
POST /api/v2/packages
Content-Type: application/json
{
"language": "bash",
"version": "5.x"
}
```
```json
HTTP/1.1 200 OK
Content-Type: application/json
{
"language": "bash",
"version": "5.1.0"
}
```
### `DELETE /api/v2/packages`
Uninstall the given package.
#### Request
- `language`: Name of package from [package list](#get-apiv2packages)
- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
- `language`: Name of package uninstalled
- `version`: Version of package uninstalled
#### Example
```json
DELETE /api/v2/packages
Content-Type: application/json
{
"language": "bash",
"version": "5.x"
}
```
```json
HTTP/1.1 200 OK
Content-Type: application/json
{
"language": "bash",
"version": "5.1.0"
}
```

147
docs/configuration.md Normal file
View file

@ -0,0 +1,147 @@
# Configuration
Piston provides many different configuration options to tweak Piston to meet your needs.
Configuration is specified through environment variables, prefixed with `PISTON_`.
## Log Level
```yaml
key: PISTON_LOG_LEVEL
default: INFO
```
Level of log output to provide.
One of `DEBUG`, `INFO`, `WARN`, `ERROR` or `NONE`
## Bind Address
```yaml
key: PISTON_BIND_ADDRESS
default: 0.0.0.0:2000
```
Port and IP address to bind the Piston API to.
<!-- prettier-ignore -->
!!! warning
Changing this value is not recommended.
This changes the bind address inside the container, and thus serves no purpose when running in a container
## Data Directory
```yaml
key: PISTON_DATA_DIRECTORY
default: /piston
```
Absolute path to piston related data, including packages and job contexts.
<!-- prettier-ignore -->
!!! warning
Changing this value is not recommended.
Some packages require absolute paths on disk at build time.
Due to this, some packages may break when changing this parameter.
## Runner GID/UID range
```yaml
key:
- PISTON_RUNNER_UID_MIN
- PISTON_RUNNER_UID_MAX
- PISTON_RUNNER_GID_MIN
- PISTON_RUNNER_GID_MAX
default:
- 1001
- 1500
- 1001
- 1500
```
UID and GID ranges to use when executing jobs.
<!-- prettier-ignore -->
!!! warning
Changing this value is not recommended.
The piston container creates 500 users and groups by default, and reserves user/group 1000 for running the API.
Any processes run by these users will be killed when cleaning up a job.
## Disable Networking
```yaml
key: PISTON_DISABLE_NETWORKING
default: true
```
Disallows access to `socket` syscalls, effectively disabling networking for jobs run by piston.
## Max Process Count
```yaml
key: PISTON_MAX_PROCESS_COUNT
default: 64
```
Maximum number of processes a job is allowed to have open.
Resists against exhausting the process table, causing a full system lockup.
## Output Max Size
```yaml
key: PISTON_OUTPUT_MAX_SIZE
default: 1024
```
Maximum size of stdio buffers for each job.
Resists against run-away output which could lead to memory exhaustion.
## Max Open Files
```yaml
key: PISTON_MAX_OPEN_FILES
default: 64
```
Maximum number of open files at a given time by a job.
Resists against writing many smaller files to exhaust inodes.
## Max File Size
```yaml
key: PISTON_MAX_FILE_SIZE
default: 10000000 #10MB
```
Maximum size for a singular file written to disk.
Resists against large file writes to exhaust disk space.
## Compile/Run memory limits
```yaml
key:
- PISTON_COMPILE_MEMORY_LIMIT
- PISTON_RUN_MEMORY_LIMIT
default: -1
```
Maximum memory allowed by a stage in bytes.
Use -1 for unlimited memory usage.
Useful for running memory-limited contests.
## Repository URL
```yaml
key: PISTON_REPO_URL
default: https://github.com/engineer-man/piston/releases/download/pkgs/index
```
URL for repository index, where packages will be downloaded from.

3
docs/index.md Normal file
View file

@ -0,0 +1,3 @@
# Piston
These docs are a WIP

1
docs/requirements.txt Normal file
View file

@ -0,0 +1 @@
mkdocs==1.1.2

View file

@ -1,4 +1,4 @@
Copyright (c) 2018-2021 Brian Seymour, EMKC Contributors Copyright (c) 2018-2021 Brian Seymour, Thomas Hobson, EMKC Contributors
Permission is hereby granted, free of charge, to any person obtaining a copy Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal of this software and associated documentation files (the "Software"), to deal

15
mkdocs.yml Normal file
View file

@ -0,0 +1,15 @@
site_name: Piston
nav:
- Home: index.md
- Configuration: configuration.md
- API: api-v2.md
theme:
name: readthedocs
highlightjs: true
hljs_languages:
- yaml
- json
markdown_extensions:
- admonition

View file

@ -1,7 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
# Grab the latest cow source from github # Grab the latest cow source from github
git clone -q https://github.com/BigZaphod/COW.git cow git clone -q https://github.com/Hydrazer/COW.git cow
# Generate the cow binary into bin # Generate the cow binary into bin
mkdir -p bin mkdir -p bin

View file

@ -1,6 +1,7 @@
#!/usr/bin/env bash #!/usr/bin/env bash
mv $1 $1.go mv $1 $1.go
filename=$1.go #filename=$1.go
filename=*.go
shift shift
GOCACHE=$PWD go run $filename "$@" GOCACHE=$PWD go run $filename "$@"

21
packages/julia/1.6.1/build.sh vendored Executable file
View file

@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Install location
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
# Download and extract Julia source
curl -L "https://github.com/JuliaLang/julia/releases/download/v1.6.1/julia-1.6.1.tar.gz" -o julia.tar.gz
tar xzf julia.tar.gz --strip-components=1
# Build
echo "JULIA_CPU_TARGET=generic;sandybridge,-xsaveopt,clone_all;haswell,-rdrnd,base(1)
prefix=$PREFIX" > Make.user
make -j$(nproc)
make install -j$(nproc)
# Cleanup
cd ..
rm -rf build

4
packages/julia/1.6.1/environment vendored Normal file
View file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Add Julia binary to path
export PATH=$PWD/bin:$PATH

5
packages/julia/1.6.1/metadata.json vendored Normal file
View file

@ -0,0 +1,5 @@
{
"language": "julia",
"version": "1.6.1",
"aliases": ["jl"]
}

4
packages/julia/1.6.1/run vendored Executable file
View file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Run without startup or history file
julia --startup-file=no --history-file=no "$@"

1
packages/julia/1.6.1/test.jl vendored Normal file
View file

@ -0,0 +1 @@
println("OK")

View file

@ -2,12 +2,15 @@
PREFIX=$(realpath $(dirname $0)) PREFIX=$(realpath $(dirname $0))
mkdir -p build/mono mkdir -p build/mono build/mono-basic
cd build cd build
curl "https://download.mono-project.com/sources/mono/mono-6.12.0.122.tar.xz" -o mono.tar.xz curl "https://download.mono-project.com/sources/mono/mono-6.12.0.122.tar.xz" -o mono.tar.xz
curl -L "https://github.com/mono/mono-basic/archive/refs/tags/4.7.tar.gz" -o mono-basic.tar.gz
tar xf mono.tar.xz --strip-components=1 -C mono tar xf mono.tar.xz --strip-components=1 -C mono
tar xf mono-basic.tar.gz --strip-components=1 -C mono-basic
# Compiling Mono
cd mono cd mono
./configure --prefix "$PREFIX" ./configure --prefix "$PREFIX"
@ -15,6 +18,15 @@ cd mono
make -j$(nproc) make -j$(nproc)
make install -j$(nproc) make install -j$(nproc)
export PATH="$PREFIX/bin:$PATH" # To be able to use mono commands
# Compiling mono-basic
cd ../mono-basic
./configure --prefix="$PREFIX"
make -j$(nproc) PLATFORM="linux" # Avoids conflict with the $PLATFORM variable we have
make install -j$(nproc) PLATFORM="linux"
# Remove redundant files
cd ../../ cd ../../
rm -rf build rm -rf build

View file

@ -1,4 +1,23 @@
#!/bin/bash #!/bin/bash
rename 's/$/\.cs/' "$@" # Add .cs extension check_errors () {
csc -out:out *.cs grep -q 'error [A-Z]\+[0-9]\+:' check.txt && cat check.txt 1>&2 || cat check.txt
rm check.txt
}
case "${PISTON_LANGUAGE}" in
csharp)
rename 's/$/\.cs/' "$@" # Add .cs extension
csc -out:out *.cs > check.txt
check_errors
;;
basic)
rename 's/$/\.vb/' "$@" # Add .vb extension
vbnc -out:out *.vb > check.txt
check_errors
;;
*)
echo "How did you get here? (${PISTON_LANGUAGE})"
exit 1
;;
esac

View file

@ -5,6 +5,10 @@
{ {
"language": "csharp", "language": "csharp",
"aliases": ["mono", "mono-csharp", "mono-c#", "mono-cs", "c#", "cs"] "aliases": ["mono", "mono-csharp", "mono-c#", "mono-cs", "c#", "cs"]
},
{
"language": "basic",
"aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"]
} }
] ]
} }

9
packages/mono/6.12.0/test.vb vendored Normal file
View file

@ -0,0 +1,9 @@
Imports System
Module Module1
Sub Main()
Console.WriteLine("OK")
End Sub
End Module

4
packages/node/16.3.0/build.sh vendored Executable file
View file

@ -0,0 +1,4 @@
#!/bin/bash
curl "https://nodejs.org/dist/v16.3.0/node-v16.3.0-linux-x64.tar.xz" -o node.tar.xz
tar xf node.tar.xz --strip-components=1
rm node.tar.xz

1
packages/node/16.3.0/environment vendored Normal file
View file

@ -0,0 +1 @@
export PATH=$PWD/bin:$PATH

10
packages/node/16.3.0/metadata.json vendored Normal file
View file

@ -0,0 +1,10 @@
{
"language": "node",
"version": "16.3.0",
"provides": [
{
"language": "javascript",
"aliases": ["node-javascript", "node-js", "javascript", "js"]
}
]
}

3
packages/node/16.3.0/run vendored Normal file
View file

@ -0,0 +1,3 @@
#!/bin/bash
node "$@"

1
packages/node/16.3.0/test.js vendored Normal file
View file

@ -0,0 +1 @@
console.log("OK")

6
packages/pwsh/7.1.4/build.sh vendored Executable file
View file

@ -0,0 +1,6 @@
#!/bin/bash
curl -L https://github.com/PowerShell/PowerShell/releases/download/v7.1.4/powershell-7.1.4-linux-x64.tar.gz -o powershell.tar.gz
tar zxf powershell.tar.gz
rm powershell.tar.gz
chmod +x pwsh

1
packages/pwsh/7.1.4/environment vendored Normal file
View file

@ -0,0 +1 @@
export PATH=$PWD:$PATH

10
packages/pwsh/7.1.4/metadata.json vendored Normal file
View file

@ -0,0 +1,10 @@
{
"language": "pwsh",
"version": "7.1.4",
"provides": [
{
"language": "powershell",
"aliases": ["ps", "pwsh", "ps1"]
}
]
}

3
packages/pwsh/7.1.4/run vendored Normal file
View file

@ -0,0 +1,3 @@
#!/bin/bash
pwsh "$@"

1
packages/pwsh/7.1.4/test.ps1 vendored Normal file
View file

@ -0,0 +1 @@
echo "OK"

16
packages/rscript/4.1.1/build.sh vendored Executable file
View file

@ -0,0 +1,16 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
mkdir build
cd build
curl https://cloud.r-project.org/src/base/R-4/R-4.1.1.tar.gz -o R.tar.gz
tar xzf R.tar.gz --strip-components 1
./configure --prefix="$PREFIX" --with-x=no
make -j$(nproc)
make install -j$(nproc)
cd ../
rm -rf build

1
packages/rscript/4.1.1/environment vendored Normal file
View file

@ -0,0 +1 @@
export PATH=$PWD/bin:$PATH

5
packages/rscript/4.1.1/metadata.json vendored Normal file
View file

@ -0,0 +1,5 @@
{
"language": "rscript",
"version": "4.1.1",
"aliases": ["r"]
}

2
packages/rscript/4.1.1/run vendored Normal file
View file

@ -0,0 +1,2 @@
#/bin/bash
Rscript "$@"

1
packages/rscript/4.1.1/test.r vendored Normal file
View file

@ -0,0 +1 @@
cat('OK')

View file

@ -6,8 +6,9 @@ export TMPDIR="$PWD"
# Put instructions to run the runtime # Put instructions to run the runtime
rename 's/$/\.v/' "$@" # Add .v extension filename=$1
rename 's/$/\.v/' $filename # Add .v extension
filename=$1.v
shift shift
v run $filename "$@" v run $filename.v "$@"

10
packages/zig/0.8.0/build.sh vendored Executable file
View file

@ -0,0 +1,10 @@
#!/usr/bin/env bash
mkdir -p bin
cd bin/
curl -L "https://ziglang.org/download/0.8.0/zig-linux-x86_64-0.8.0.tar.xz" -o zig.tar.xz
tar xf zig.tar.xz --strip-components=1
rm zig.tar.xz
cd ../

6
packages/zig/0.8.0/compile vendored Normal file
View file

@ -0,0 +1,6 @@
#!/usr/bin/env bash
# optimizing for small programs
rename 's/$/\.zig/' "$@" # Add .zig extension
zig build-exe -O ReleaseSafe --color off --cache-dir . --global-cache-dir . --name out *.zig

4
packages/zig/0.8.0/environment vendored Normal file
View file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# compiler path
export PATH=$PWD/bin:$PATH

5
packages/zig/0.8.0/metadata.json vendored Normal file
View file

@ -0,0 +1,5 @@
{
"language": "zig",
"version": "0.8.0",
"aliases": ["zig"]
}

4
packages/zig/0.8.0/run vendored Normal file
View file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
shift # Filename is only used in compile step, so we can take it out here
./out "$@"

6
packages/zig/0.8.0/test.zig vendored Normal file
View file

@ -0,0 +1,6 @@
const std = @import("std");
pub fn main() !void {
const stdout = std.io.getStdOut().writer();
try stdout.print("OK\n", .{});
}

83
piston
View file

@ -1,23 +1,82 @@
#!/usr/bin/env bash #!/usr/bin/env bash
cd "$(dirname "$0")"
PISTON_ENV=$(cat .piston_env || echo dev)
docker_compose(){
if [ -f "docker-compose.$PISTON_ENV.yaml" ]; then
docker-compose -f "docker-compose.$PISTON_ENV.yaml" "$@"
else
docker-compose "$@"
fi
}
case $1 in case $1 in
dev) help)
shift echo "=== Piston Management ==="
docker-compose -f docker-compose.dev.yaml "$@" echo "Current Environment: $PISTON_ENV"
;; echo
prod) echo "Commands:"
shift echo " select <environment> Select the environment"
docker-compose -f docker-compose.yaml "$@" echo " docker_compose <args...> Interact directly with the docker-compose for the selected environment"
echo
echo " start Starts piston"
echo " stop Stops piston"
echo " restart Restarts piston"
echo " bash Opens a bash shell for the piston_api container"
echo
echo " update Fetches and applies latest updates"
echo
echo " <args..> Passthrough to piston cli tool"
echo
echo "Development Commands:"
if [ $PISTON_ENV == dev ]; then
echo " clean-pkgs Clean any package build artifacts on disk"
echo " clean-repo Remove all packages from local repo"
echo " build-pkg <package> <version> Build a package"
echo " rebuild Build and restart the docker container"
else
echo "      Switch to development environment for more info"
echo " > piston select dev"
fi
;; ;;
select) echo "$2" > .piston_env ;;
docker_compose) shift; docker_compose "$@";;
restart) docker_compose restart ;;
start) docker_compose up -d ;;
stop) docker_compose down ;;
bash) docker_compose exec api /bin/bash ;;
rebuild) docker_compose build && docker_compose up -d ;;
update) update)
git pull git pull
docker-compose pull api docker_compose pull
docker-compose up -d api docker_compose up -d
;; ;;
clean-pkgs)
git clean -fqXd packages clean-pkgs) git clean -fqXd packages ;;
clean-repo) git clean -fqXd repo ;;
build-pkg)
PKGSLUG="$2-$3"
echo "Building $PKGSLUG"
echo "Ensuring latest builder image"
docker build repo -t piston-repo-builder
docker run -v "$(realpath $(dirname "$0")):/piston" piston-repo-builder --no-server $PKGSLUG
;; ;;
*) *)
cd cli
npm i > /dev/null
cd ../
node cli/index.js "$@" node cli/index.js "$@"
;; ;;
esac esac

14
public.pps Executable file
View file

@ -0,0 +1,14 @@
#!/usr/bin/env -S piston ppman spec
# Public Piston Packages
# Defines packages to be installed on the public piston installation
# All packages, latest version
* *
# Except python
!python *
# Install python 3.* and 2.*
python 3.*
python 2.*

138
readme.md
View file

@ -33,10 +33,26 @@
<a href="#Supported-Languages">Supported Languages</a> <a href="#Supported-Languages">Supported Languages</a>
<a href="#Principle-of-Operation">Principles</a> <a href="#Principle-of-Operation">Principles</a>
<a href="#Security">Security</a> <a href="#Security">Security</a>
<a href="#License">License</a> <a href="#License">License</a>
<a href="https://piston.readthedocs.io">Documentation</a>
</h4> </h4>
--- ---
<br>
# Notes About Hacktoberfest
While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs.
If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests.
We are accepting PRs for:
* Packages - updating package versions, adding new packages
* Documentation updates
* CLI/API improvements - please discuss these with us in the Discord first
Any queries or concerns, ping @HexF#0015 in the Discord.
<br> <br>
# About # About
@ -49,18 +65,24 @@
<br> <br>
It's used in numerous places including: It's used in numerous places including:
* [EMKC Challenges](https://emkc.org/challenges)
* [EMKC Weekly Contests](https://emkc.org/contests) - [EMKC Challenges](https://emkc.org/challenges)
* [Engineer Man Discord Server](https://discord.gg/engineerman) - [EMKC Weekly Contests](https://emkc.org/contests)
* Web IDEs - [Engineer Man Discord Server](https://discord.gg/engineerman)
* 200+ direct integrations - Web IDEs
- 200+ direct integrations
<br> <br>
### Official Extensions ### Official Extensions
The following are approved and endorsed extensions/utilities to the core Piston offering. The following are approved and endorsed extensions/utilities to the core Piston offering.
- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run. - [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language. - [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
<br> <br>
@ -72,13 +94,17 @@ The following are approved and endorsed extensions/utilities to the core Piston
<br> <br>
When using the public Piston API, use the following two URLs: When using the public Piston API, use the following two URLs:
``` ```
GET https://emkc.org/api/v2/piston/runtimes GET https://emkc.org/api/v2/piston/runtimes
POST https://emkc.org/api/v2/piston/execute POST https://emkc.org/api/v2/piston/execute
``` ```
> Important Note: The Piston API is rate limited to 5 requests per second. If you have a need for more requests than that > Important Note: The Piston API is rate limited to 5 requests per second. If you have a need for more requests than that
and it's for a good cause, please reach out to me (EngineerMan#0001) on [Discord](https://discord.gg/engineerman) > and it's for a good cause, please reach out to me (EngineerMan#0001) on [Discord](https://discord.gg/engineerman)
so we can discuss potentially getting you an unlimited key. > so we can discuss potentially getting you an unlimited key. What is and isn't a good cause is up to me, but, in general
> if your project is a) open source, b) helping people at no cost to them, and c) not likely to use tons of resources
> thereby impairing another's ability to enjoy Piston, you'll likely be granted a key.
<br> <br>
@ -90,7 +116,7 @@ so we can discuss potentially getting you an unlimited key.
- Docker - Docker
- Docker Compose - Docker Compose
- Node JS - Node JS (>= 13, preferably >= 15)
### After system dependencies are installed, clone this repository: ### After system dependencies are installed, clone this repository:
@ -129,6 +155,22 @@ docker run \
ghcr.io/engineer-man/piston ghcr.io/engineer-man/piston
``` ```
## Piston for testing packages locally
### Host System Package Dependencies
- Same as [All In One](#All-In-One)
### Installation
```sh
# Build the Docker containers
./piston start
# For more help
./piston help
```
<br> <br>
# Usage # Usage
@ -172,11 +214,13 @@ The container exposes an API on port 2000 by default.
This is used by the CLI to carry out running jobs and package management. This is used by the CLI to carry out running jobs and package management.
#### Runtimes Endpoint #### Runtimes Endpoint
`GET /api/v2/runtimes` `GET /api/v2/runtimes`
This endpoint will return the supported languages along with the current version and aliases. To execute This endpoint will return the supported languages along with the current version and aliases. To execute
code for a particular language using the `/api/v2/execute` endpoint, either the name or one of the aliases must code for a particular language using the `/api/v2/execute` endpoint, either the name or one of the aliases must
be provided, along with the version. be provided, along with the version.
Multiple versions of the same language may be present at the same time, and may be selected when running a job. Multiple versions of the same language may be present at the same time, and may be selected when running a job.
```json ```json
HTTP/1.1 200 OK HTTP/1.1 200 OK
Content-Type: application/json Content-Type: application/json
@ -201,47 +245,47 @@ Content-Type: application/json
``` ```
#### Execute Endpoint #### Execute Endpoint
`POST /api/v2/execute` `POST /api/v2/execute`
This endpoint requests execution of some arbitrary code. This endpoint requests execution of some arbitrary code.
- `language` (**required**) The language to use for execution, must be a string and must be installed. - `language` (**required**) The language to use for execution, must be a string and must be installed.
- `version` (**required**) The version of the language to use for execution, must be a string containing a SemVer selector for the version or the specific version number to use. - `version` (**required**) The version of the language to use for execution, must be a string containing a SemVer selector for the version or the specific version number to use.
- `files` (**required**) An array of files containing code or other data that should be used for execution. The first file in this array is considered the main file. - `files` (**required**) An array of files containing code or other data that should be used for execution. The first file in this array is considered the main file.
- `files[].name` (*optional*) The name of the file to upload, must be a string containing no path or left out. - `files[].name` (_optional_) The name of the file to upload, must be a string containing no path or left out.
- `files[].content` (**required**) The content of the files to upload, must be a string containing text to write. - `files[].content` (**required**) The content of the files to upload, must be a string containing text to write.
- `stdin` (*optional*) The text to pass as stdin to the program. Must be a string or left out. Defaults to blank string. - `stdin` (_optional_) The text to pass as stdin to the program. Must be a string or left out. Defaults to blank string.
- `args` (*optional*) The arguments to pass to the program. Must be an array or left out. Defaults to `[]`. - `args` (_optional_) The arguments to pass to the program. Must be an array or left out. Defaults to `[]`.
- `compile_timeout` (*optional*) The maximum time allowed for the compile stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `10000` (10 seconds). - `compile_timeout` (_optional_) The maximum time allowed for the compile stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `10000` (10 seconds).
- `run_timeout` (*optional*) The maximum time allowed for the run stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `3000` (3 seconds). - `run_timeout` (_optional_) The maximum time allowed for the run stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `3000` (3 seconds).
- `compile_memory_limit` (*optional*) The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit) - `compile_memory_limit` (_optional_) The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit)
- `run_memory_limit` (*optional*) The maximum amount of memory the run stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit) - `run_memory_limit` (_optional_) The maximum amount of memory the run stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit)
```json ```json
{ {
"language": "js", "language": "js",
"version": "15.10.0", "version": "15.10.0",
"files": [ "files": [
{ {
"name": "my_cool_code.js", "name": "my_cool_code.js",
"content": "console.log(process.argv)" "content": "console.log(process.argv)"
} }
], ],
"stdin": "", "stdin": "",
"args": [ "args": ["1", "2", "3"],
"1", "compile_timeout": 10000,
"2", "run_timeout": 3000,
"3" "compile_memory_limit": -1,
], "run_memory_limit": -1
"compile_timeout": 10000,
"run_timeout": 3000,
"compile_memory_limit": -1,
"run_memory_limit": -1
} }
``` ```
A typical response upon successful execution will contain 1 or 2 keys `run` and `compile`. A typical response upon successful execution will contain 1 or 2 keys `run` and `compile`.
`compile` will only be present if the language requested requires a compile stage. `compile` will only be present if the language requested requires a compile stage.
Each of these keys has an identical structure, containing both a `stdout` and `stderr` key, which is a string containing the text outputted during the stage into each buffer. Each of these keys has an identical structure, containing both a `stdout` and `stderr` key, which is a string containing the text outputted during the stage into each buffer.
It also contains the `code` and `signal` which was returned from each process. It also contains the `code` and `signal` which was returned from each process.
```json ```json
HTTP/1.1 200 OK HTTP/1.1 200 OK
Content-Type: application/json Content-Type: application/json
@ -260,6 +304,7 @@ Content-Type: application/json
``` ```
If a problem exists with the request, a `400` status code is returned and the reason in the `message` key. If a problem exists with the request, a `400` status code is returned and the reason in the `message` key.
```json ```json
HTTP/1.1 400 Bad Request HTTP/1.1 400 Bad Request
Content-Type: application/json Content-Type: application/json
@ -272,54 +317,66 @@ Content-Type: application/json
<br> <br>
# Supported Languages # Supported Languages
`awk`,
`bash`, `bash`,
`brainfuck`, `brainfuck`,
`c`,
`c++`,
`cjam`, `cjam`,
`clojure`, `clojure`,
`cobol`,
`coffeescript`, `coffeescript`,
`cow`, `cow`,
`crystal`, `crystal`,
`csharp`,
`d`,
`dart`, `dart`,
`dash`, `dash`,
`deno`,
`dotnet`, `dotnet`,
`dragon`, `dragon`,
`elixir`, `elixir`,
`emacs`, `emacs`,
`erlang`, `erlang`,
`gawk`, `fortran`,
`gcc`,
`go`, `go`,
`golfscript`, `golfscript`,
`groovy`, `groovy`,
`haskell`, `haskell`,
`java`, `java`,
`javascript`,
`jelly`, `jelly`,
`julia`, `julia`,
`kotlin`, `kotlin`,
`lisp`, `lisp`,
`lolcode`, `lolcode`,
`lua`, `lua`,
`mono`,
`nasm`, `nasm`,
`nasm64`,
`nim`, `nim`,
`node`,
`ocaml`, `ocaml`,
`octave`,
`osabie`, `osabie`,
`paradoc`, `paradoc`,
`pascal`, `pascal`,
`perl`, `perl`,
`php`, `php`,
`ponylang`, `ponylang`,
`powershell`,
`prolog`, `prolog`,
`pure`, `pure`,
`pyth`,
`python`, `python`,
`python2`,
`raku`,
`rockstar`, `rockstar`,
`rscript`,
`ruby`, `ruby`,
`rust`, `rust`,
`scala`, `scala`,
`swift`, `swift`,
`typescript`, `typescript`,
`basic`,
`vlang`, `vlang`,
`yeethon`, `yeethon`,
`zig`, `zig`,
@ -336,9 +393,11 @@ The source file is either ran or compiled and ran (in the case of languages like
<br> <br>
# Security # Security
Docker provides a great deal of security out of the box in that it's separate from the system. Docker provides a great deal of security out of the box in that it's separate from the system.
Piston takes additional steps to make it resistant to Piston takes additional steps to make it resistant to
various privilege escalation, denial-of-service, and resource saturation threats. These steps include: various privilege escalation, denial-of-service, and resource saturation threats. These steps include:
- Disabling outgoing network interaction - Disabling outgoing network interaction
- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.) - Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
- Capping max files at 2048 (resists various file based attacks) - Capping max files at 2048 (resists various file based attacks)
@ -351,4 +410,5 @@ various privilege escalation, denial-of-service, and resource saturation threats
<br> <br>
# License # License
Piston is licensed under the MIT license. Piston is licensed under the MIT license.

1
repo/.dockerignore Normal file
View file

@ -0,0 +1 @@
*.pkg.tar.gz

View file

@ -8,7 +8,8 @@ RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-d
util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \ util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \
libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev gfortran\ libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev gfortran\
libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev libfftw3-dev \ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev libfftw3-dev \
libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev && \ libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev \
libbz2-dev liblzma-dev libpcre2-dev && \
ln -sf /bin/bash /bin/sh && \ ln -sf /bin/bash /bin/sh && \
rm -rf /var/lib/apt/lists/* && \ rm -rf /var/lib/apt/lists/* && \
update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2 update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2
@ -17,4 +18,3 @@ ADD entrypoint.sh mkindex.sh /
ENTRYPOINT ["bash","/entrypoint.sh"] ENTRYPOINT ["bash","/entrypoint.sh"]
CMD ["--no-build"] CMD ["--no-build"]

View file

@ -1,5 +1,5 @@
{ pkgs ? import <nixpkgs> {} }: { pkgs ? import <nixpkgs> {} }:
pkgs.mkShell { pkgs.mkShell {
# nativeBuildInputs is usually what you want -- tools you need to run # nativeBuildInputs is usually what you want -- tools you need to run
nativeBuildInputs = with pkgs; [ nodejs-15_x jq ]; nativeBuildInputs = with pkgs; [ nodejs-15_x jq mkdocs ];
} }