Compare commits


No commits in common. "af5036d82c651f7de8c3eb2c615746c7c92116f5" and "0434877d03702845333eb2c6a4148dd6c9276806" have entirely different histories.

145 changed files with 1025 additions and 1919 deletions


@@ -4,6 +4,7 @@ about: Template for requesting language support
title: Add [insert language name here]
labels: package
assignees: ''
---
Provide links to different compilers/interpreters that could be used to implement this language, and discuss pros/cons of each.


@@ -1,11 +1,10 @@
Checklist:
- [ ] The package builds locally with `./piston build-pkg [package] [version]`
- [ ] The package installs with `./piston ppman install [package]=[version]`
- [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
- [ ] Package files are placed in the correct directory
- [ ] No old package versions are removed
- [ ] All source files are deleted in the `build.sh` script
- [ ] `metadata.json`'s `language` and `version` fields match the directory path
- [ ] Any extensions the language may use are set as aliases
- [ ] Any alternative names the language is referred to are set as aliases.
* [ ] The package builds locally with `./piston build-pkg [package] [version]`
* [ ] The package installs with `./piston ppman install [package]=[version]`
* [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
* [ ] Package files are placed in the correct directory
* [ ] No old package versions are removed
* [ ] All source files are deleted in the `build.sh` script
* [ ] `metadata.json`'s `language` and `version` fields match the directory path
* [ ] Any extensions the language may use are set as aliases
* [ ] Any alternative names the language is referred to are set as aliases.


@@ -1,38 +1,39 @@
name: Publish API image
on:
push:
branches:
- master
- v3
paths:
- api/**
jobs:
push_to_registry:
runs-on: ubuntu-latest
name: Build and Push Docker image to Github Packages
steps:
- name: Check out repo
uses: actions/checkout@v2
- name: Login to GitHub registry
uses: docker/login-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
- name: Login to ghcr.io
uses: docker/login-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: ghcr.io
- name: Build and push API
uses: docker/build-push-action@v2
with:
context: api
push: true
pull: true
tags: |
docker.pkg.github.com/engineer-man/piston/api
ghcr.io/engineer-man/piston


@@ -1,139 +1,140 @@
name: 'Package Pull Requests'
name: "Package Pull Requests"
on:
pull_request:
types:
- opened
- reopened
- synchronize
paths:
- 'packages/**'
pull_request:
types:
- opened
- edited
- reopened
- synchronize
paths:
- "packages/**"
jobs:
check-pkg:
name: Validate README
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Get list of changed files
uses: lots0logs/gh-action-get-changed-files@2.1.4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Ensure README was updated
run: |
MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))
[[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
echo "README has supported languages missing: "
comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
exit 1
echo "README has supported languages missing: "
comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
exit 1
build-pkg:
name: Check that package builds
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Login to GitHub registry
uses: docker/login-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
- name: Get list of changed files
uses: lots0logs/gh-action-get-changed-files@2.1.4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Packages
run: |
PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
echo "Packages: $PACKAGES"
docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
docker build -t repo-builder repo
docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
ls -la packages
- name: Build Packages
run: |
PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
echo "Packages: $PACKAGES"
docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
docker build -t repo-builder repo
docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
ls -la packages
- name: Upload package as artifact
uses: actions/upload-artifact@v2
with:
name: packages
path: packages/*.pkg.tar.gz
test-pkg:
name: Test package
runs-on: ubuntu-latest
needs: build-pkg
steps:
- uses: actions/checkout@v2
- uses: actions/download-artifact@v2
with:
name: packages
- name: Relocate downloaded packages
run: mv *.pkg.tar.gz packages/
- name: Login to GitHub registry
uses: docker/login-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
- name: Run tests
run: |
ls -la
docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
docker pull docker.pkg.github.com/engineer-man/piston/api
docker build -t piston-api api
docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
echo Waiting for API to start..
docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
echo Waiting for Index to start..
docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
echo Adjusting index
sed -i 's/repo/localhost/g' repo/index
echo Listing Packages
PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
echo $PACKAGES_JSON
echo Getting CLI ready
docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
do
echo "Testing $package"
PKG_PATH=$(sed 's|-|/|' <<< $package)
PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
echo "Installing..."
docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
echo "Installing..."
docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
TEST_SCRIPTS=packages/$PKG_PATH/test.*
echo "Tests: $TEST_SCRIPTS"
for tscript in $TEST_SCRIPTS
do
TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
echo Running $tscript with runtime=$TEST_RUNTIME
docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
cat test_output
grep "OK" test_output
done
done
- name: Dump logs
if: ${{ always() }}
run: |
docker logs api
docker logs repo


@@ -1,77 +1,78 @@
name: 'Package Pushed'
on:
push:
branches:
- master
- v3
paths:
- packages/**
jobs:
build-pkg:
name: Build package
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Login to GitHub registry
uses: docker/login-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
- name: Get list of changed files
uses: lots0logs/gh-action-get-changed-files@2.1.4
with:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Build Packages
run: |
PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
echo "Packages: $PACKAGES"
docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
docker build -t repo-builder repo
docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
ls -la packages
- name: Upload Packages
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: packages/*.pkg.tar.gz
tag: pkgs
overwrite: true
file_glob: true
create-index:
name: Create Index
runs-on: ubuntu-latest
needs: build-pkg
steps:
- name: "Download all release assets"
run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
- name: "Generate index file"
run: |
echo "" > index
BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
for pkg in *.pkg.tar.gz
do
PKGFILE=$(basename $pkg)
PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
- name: Build Packages
run: |
PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
echo "Packages: $PACKAGES"
docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
docker build -t repo-builder repo
docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
ls -la packages
- name: 'Download all release assets'
run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
- name: 'Generate index file'
run: |
echo "" > index
BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
for pkg in *.pkg.tar.gz
do
PKGFILE=$(basename $pkg)
PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
echo "Adding package $PKGNAME-$PKGVERSION"
done
- name: Upload index
uses: svenstaro/upload-release-action@v2
with:
repo_token: ${{ secrets.GITHUB_TOKEN }}
file: index
tag: pkgs
overwrite: true
file_glob: true


@@ -1,31 +1,31 @@
name: Publish Repo image
on:
push:
branches:
- master
- v3
paths:
- repo/**
jobs:
push_to_registry:
runs-on: ubuntu-latest
name: Build and Push Docker image to Github Packages
steps:
- name: Check out repo
uses: actions/checkout@v2
- name: Login to GitHub registry
uses: docker/login-action@v1
with:
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
registry: docker.pkg.github.com
- name: Build and push repo
uses: docker/build-push-action@v2
with:
context: repo
pull: true
push: true
tags: |
docker.pkg.github.com/engineer-man/piston/repo-builder

.gitignore

@@ -1,4 +1,3 @@
data/
.piston_env
node_modules
result


@@ -1,12 +0,0 @@
node_modules
data/
api/_piston
repo/build
packages/*/*/*
packages/*.pkg.tar.gz
!packages/*/*/metadata.json
!packages/*/*/build.sh
!packages/*/*/environment
!packages/*/*/run
!packages/*/*/compile
!packages/*/*/test.*


@@ -1,8 +1,8 @@
version: 2
mkdocs:
configuration: mkdocs.yml
python:
version: 3.7
install:
- requirements: docs/requirements.txt


@@ -1,12 +0,0 @@
# This "FROM" image is previously emitted by nix
FROM ghcr.io/engineer-man/piston:base-latest
ENV PISTON_FLAKE_PATH=/piston/packages
COPY runtimes/ /piston/packages/runtimes
COPY flake.nix flake.lock /piston/packages/
ARG RUNTIMESET=all
ENV PISTON_RUNTIME_SET=$RUNTIMESET
RUN piston-install

api/.gitignore

@@ -1 +1,2 @@
node_modules
_piston

api/.prettierignore

@@ -0,0 +1 @@
node_modules


@@ -51,8 +51,6 @@ with pkgs; rec {
do
echo "nixbld$i:x:$(( $i + 30000 )):30000:Nix build user $i:/var/empty:/run/current-system/sw/bin/nologin" >> etc/passwd
done
chmod 1777 {,var/}tmp/
'';
config = {
@@ -63,21 +61,6 @@ with pkgs; rec {
"SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
"GIT_SSL_CAINFO=/etc/ssl/certs/ca-bundle.crt"
"NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
"PATH=${lib.concatStringsSep ":" [
"/usr/local/sbin"
"/usr/local/bin"
"/usr/sbin"
"/usr/bin"
"/sbin"
"/bin"
"/root/.nix-profile/bin"
"/nix/var/nix/profiles/default/bin"
"/nix/var/nix/profiles/default/sbin"
]}"
"MANPATH=${lib.concatStringsSep ":" [
"/root/.nix-profile/share/man"
"/nix/var/nix/profiles/default/share/man"
]}"
];
ExposedPorts = {


@@ -3,54 +3,16 @@ const router = express.Router();
const events = require('events');
const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');
const SIGNALS = [
'SIGABRT',
'SIGALRM',
'SIGBUS',
'SIGCHLD',
'SIGCLD',
'SIGCONT',
'SIGEMT',
'SIGFPE',
'SIGHUP',
'SIGILL',
'SIGINFO',
'SIGINT',
'SIGIO',
'SIGIOT',
'SIGKILL',
'SIGLOST',
'SIGPIPE',
'SIGPOLL',
'SIGPROF',
'SIGPWR',
'SIGQUIT',
'SIGSEGV',
'SIGSTKFLT',
'SIGSTOP',
'SIGTSTP',
'SIGSYS',
'SIGTERM',
'SIGTRAP',
'SIGTTIN',
'SIGTTOU',
'SIGUNUSED',
'SIGURG',
'SIGUSR1',
'SIGUSR2',
'SIGVTALRM',
'SIGXCPU',
'SIGXFSZ',
'SIGWINCH',
];
const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
// ref: https://man7.org/linux/man-pages/man7/signal.7.html
function get_job(body) {
let {
function get_job(body){
const {
language,
args,
stdin,
@@ -58,7 +20,7 @@ function get_job(body) {
compile_memory_limit,
run_memory_limit,
run_timeout,
compile_timeout,
compile_timeout
} = body;
return new Promise((resolve, reject) => {
@@ -73,6 +35,7 @@ function get_job(body) {
message: 'files is required as an array',
});
}
for (const [i, file] of files.entries()) {
if (typeof file.content !== 'string') {
return reject({
@@ -131,65 +94,23 @@ function get_job(body) {
});
}
if (
rt.language !== 'file' &&
!files.some(file => !file.encoding || file.encoding === 'utf8')
) {
return reject({
message: 'files must include at least one utf8 encoded file',
});
}
for (const constraint of ['memory_limit', 'timeout']) {
for (const type of ['compile', 'run']) {
const constraint_name = `${type}_${constraint}`;
const constraint_value = body[constraint_name];
const configured_limit = rt[`${constraint}s`][type];
if (!constraint_value) {
continue;
}
if (typeof constraint_value !== 'number') {
return reject({
message: `If specified, ${constraint_name} must be a number`,
});
}
if (configured_limit <= 0) {
continue;
}
if (constraint_value > configured_limit) {
return reject({
message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`,
});
}
if (constraint_value < 0) {
return reject({
message: `${constraint_name} must be non-negative`,
});
}
resolve(new Job({
runtime: rt,
alias: language,
args: args || [],
stdin: stdin || "",
files,
timeouts: {
run: run_timeout || 3000,
compile: compile_timeout || 10000,
},
memory_limits: {
run: run_memory_limit || config.run_memory_limit,
compile: compile_memory_limit || config.compile_memory_limit,
}
}
}));
})
compile_timeout = compile_timeout || rt.timeouts.compile;
run_timeout = run_timeout || rt.timeouts.run;
compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
run_memory_limit = run_memory_limit || rt.memory_limits.run;
resolve(
new Job({
runtime: rt,
args: args || [],
stdin: stdin || '',
files,
timeouts: {
run: run_timeout,
compile: compile_timeout,
},
memory_limits: {
run: run_memory_limit,
compile: compile_memory_limit,
},
})
);
});
}
router.use((req, res, next) => {
@@ -207,104 +128,88 @@ router.use((req, res, next) => {
});
router.ws('/connect', async (ws, req) => {
let job = null;
let eventBus = new events.EventEmitter();
eventBus.on('stdout', data =>
ws.send(
JSON.stringify({
type: 'data',
stream: 'stdout',
data: data.toString(),
})
)
);
eventBus.on('stderr', data =>
ws.send(
JSON.stringify({
type: 'data',
stream: 'stderr',
data: data.toString(),
})
)
);
eventBus.on('stage', stage =>
ws.send(JSON.stringify({ type: 'stage', stage }))
);
eventBus.on('exit', (stage, status) =>
ws.send(JSON.stringify({ type: 'exit', stage, ...status }))
);
eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
ws.on('message', async data => {
try {
ws.on("message", async (data) => {
try{
const msg = JSON.parse(data);
switch (msg.type) {
case 'init':
if (job === null) {
switch(msg.type){
case "init":
if(job === null){
job = await get_job(msg);
await job.prime();
ws.send(
JSON.stringify({
type: 'runtime',
language: job.runtime.language,
version: job.runtime.version.raw,
})
);
ws.send(JSON.stringify({
type: "runtime",
language: job.runtime.language,
version: job.runtime.version.raw
}))
await job.execute_interactive(eventBus);
ws.close(4999, 'Job Completed');
} else {
ws.close(4000, 'Already Initialized');
ws.close(4999, "Job Completed");
}else{
ws.close(4000, "Already Initialized");
}
break;
case 'data':
if (job !== null) {
if (msg.stream === 'stdin') {
eventBus.emit('stdin', msg.data);
} else {
ws.close(4004, 'Can only write to stdin');
}
} else {
ws.close(4003, 'Not yet initialized');
case "data":
if(job !== null){
if(msg.stream === "stdin"){
eventBus.emit("stdin", msg.data)
}else{
ws.close(4004, "Can only write to stdin")
}
break;
case 'signal':
if (job !== null) {
if (SIGNALS.includes(msg.signal)) {
eventBus.emit('signal', msg.signal);
} else {
ws.close(4005, 'Invalid signal');
}
} else {
ws.close(4003, 'Not yet initialized');
}else{
ws.close(4003, "Not yet initialized")
}
break;
case "signal":
if(job !== null){
if(SIGNALS.includes(msg.signal)){
eventBus.emit("signal", msg.signal)
}else{
ws.close(4005, "Invalid signal")
}
break;
}else{
ws.close(4003, "Not yet initialized")
}
break;
}
} catch (error) {
ws.send(JSON.stringify({ type: 'error', message: error.message }));
ws.close(4002, 'Notified Error');
}catch(error){
ws.send(JSON.stringify({type: "error", message: error.message}))
ws.close(4002, "Notified Error")
// ws.close message is limited to 123 characters, so we notify over WS then close.
}
});
})
ws.on('close', async () => {
if (job !== null) {
await job.cleanup();
ws.on("close", async ()=>{
if(job !== null){
await job.cleanup()
}
});
})
setTimeout(() => {
setTimeout(()=>{
//Terminate the socket after 1 second, if not initialized.
if (job === null) ws.close(4001, 'Initialization Timeout');
}, 1000);
});
if(job === null)
ws.close(4001, "Initialization Timeout");
}, 1000)
})
router.post('/execute', async (req, res) => {
try {
try{
const job = await get_job(req.body);
await job.prime();
@@ -314,7 +219,7 @@ router.post('/execute', async (req, res) => {
await job.cleanup();
return res.status(200).send(result);
} catch (error) {
}catch(error){
return res.status(400).json(error);
}
});
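
For context, the `/connect` route above implements a small JSON protocol: the client opens with an `init` message (the same shape as an `/execute` request), may then stream `data` to stdin or send a `signal`, and receives `runtime`, `stage`, `data` and `exit` events, plus the close codes visible in the handler (4000-4005, 4999). A minimal client sketch, assuming a local API on the default port; the URL, file name and contents are illustrative, not from the diff:

```js
const WebSocket = require('ws'); // same client library the CLI uses

const ws = new WebSocket('ws://127.0.0.1:2000/api/v2/connect');

ws.on('open', () => {
    // The first message must be an init, mirroring the /execute payload
    ws.send(
        JSON.stringify({
            type: 'init',
            language: 'js',
            version: '*',
            files: [{ name: 'main.js', content: "console.log('hi');" }],
        })
    );
});

ws.on('message', raw => {
    const msg = JSON.parse(raw);
    if (msg.type === 'data') process[msg.stream].write(msg.data); // stdout or stderr
    if (msg.type === 'exit') console.error(`${msg.stage} exited`, msg.code, msg.signal);
});

ws.on('close', (code, reason) => console.error('closed', code, String(reason)));
```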


@@ -16,6 +16,8 @@ const logger = Logger.create('pistond');
const app = express();
expressWs(app);
(async () => {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);


@@ -5,105 +5,108 @@ const config = require('../config');
const Logger = require('logplease');
const logger = Logger.create('test');
const cp = require('child_process');
const runtime = require('../runtime');
const runtime = require("../runtime");
const { Job } = require('../job');
(async function () {
(async function(){
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
let runtimes_to_test;
let failed = false;
if (process.argv[2] === '--all') {
if(process.argv[2] === "--all"){
// load all
runtimes_to_test = JSON.parse(
cp.execSync(
`nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`
)
cp.execSync(`nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`)
);
} else {
}else{
runtimes_to_test = [process.argv[2]];
}
for (const runtime_name of runtimes_to_test) {
const runtime_path = `${config.flake_path}#pistonRuntimes.${runtime_name}`;
logger.info(`Testing runtime ${runtime_path}`);
logger.debug(`Loading runtime metadata`);
const metadata = JSON.parse(
cp.execSync(`nix eval --json ${runtime_path}.metadata --json`)
);
const metadata = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.metadata --json`));
logger.debug(`Loading runtime tests`);
const tests = JSON.parse(
cp.execSync(`nix eval --json ${runtime_path}.tests --json`)
);
const tests = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.tests --json`));
logger.debug(`Loading runtime`);
const testable_runtime = new runtime.Runtime({
...metadata,
...runtime.Runtime.compute_all_limits(
metadata.language,
metadata.limitOverrides
),
flake_path: runtime_path,
flake_path: runtime_path
});
testable_runtime.ensure_built();
logger.info(`Running tests`);
for (const test of tests) {
const files = [];
for (const file_name of Object.keys(test.files)) {
const file_content = test.files[file_name];
const this_file = {
name: file_name,
content: file_content,
content: file_content
};
if (file_name == test.main) files.unshift(this_file);
else files.push(this_file);
if(file_name == test.main)
files.unshift(this_file);
else
files.push(this_file);
}
const job = new Job({
runtime: testable_runtime,
args: test.args || [],
stdin: test.stdin || '',
stdin: test.stdin || "",
files,
timeouts: {
run: 3000,
compile: 10000,
compile: 10000
},
memory_limits: {
run: config.run_memory_limit,
compile: config.compile_memory_limit,
},
compile: config.compile_memory_limit
}
});
await job.prime();
const result = await job.execute();
await job.cleanup();
if (result.run.stdout.trim() !== 'OK') {
await job.prime()
const result = await job.execute()
await job.cleanup()
if(result.run.stdout.trim() !== "OK"){
failed = true;
logger.error('Test Failed:');
console.log(job, result);
} else {
logger.info('Test Passed');
logger.error("Test Failed:")
console.log(job, result)
}else{
logger.info("Test Passed")
}
}
}
if (failed) {
logger.error('One or more tests failed');
if(failed) {
logger.error("One or more tests failed")
process.exit(1);
} else {
logger.info('All tests passed');
}
else {
logger.info("All tests passed")
process.exit(0);
}
})();
})()


@@ -2,57 +2,6 @@ const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');
function parse_overrides(overrides) {
try {
return JSON.parse(overrides);
} catch (e) {
return null;
}
}
function validate_overrides(overrides, options) {
for (const language in overrides) {
for (const key in overrides[language]) {
if (
![
'max_process_count',
'max_open_files',
'max_file_size',
'compile_memory_limit',
'run_memory_limit',
'compile_timeout',
'run_timeout',
'output_max_size',
].includes(key)
) {
logger.error(`Invalid overridden option: ${key}`);
return false;
}
const option = options.find(o => o.key === key);
const parser = option.parser;
const raw = overrides[language][key];
const value = parser(raw);
const validators = option.validators;
for (const validator of validators) {
const response = validator(value, raw);
if (response !== true) {
logger.error(
`Failed to validate overridden option: ${key}`,
response
);
return false;
}
}
overrides[language][key] = value;
}
// Modifies the reference
options[
options.index_of(options.find(o => o.key === 'limit_overrides'))
] = overrides;
}
return true;
}
const options = [
{
key: 'log_level',
@@ -68,7 +17,7 @@ const options = [
{
key: 'bind_address',
desc: 'Address to bind REST API on',
default: `0.0.0.0:${process.env["PORT"] || 2000}`,
default: '0.0.0.0:2000',
validators: [],
},
{
@@ -142,30 +91,18 @@ const options = [
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_timeout',
desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_timeout',
desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_memory_limit',
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
desc:
'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_memory_limit',
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
desc:
'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
@@ -187,22 +124,8 @@ const options = [
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
validators: [x => x > 0 || `${x} cannot be negative`],
},
{
key: 'limit_overrides',
desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size',
default: {},
parser: parse_overrides,
validators: [
x => !!x || `Invalid JSON format for the overrides\n${x}`,
(overrides, _, options) =>
validate_overrides(overrides, options) ||
`Failed to validate the overrides`,
],
},
validators: [(x) => x > 0 || `${x} cannot be negative`]
}
];
logger.info(`Loading Configuration from environment`);
@@ -220,12 +143,12 @@ options.forEach(option => {
const parsed_val = parser(env_val);
const value = env_val === undefined ? option.default : parsed_val;
const value = env_val || option.default;
option.validators.for_each(validator => {
let response = null;
if (env_val) response = validator(parsed_val, env_val, options);
else response = validator(value, value, options);
if (env_val) response = validator(parsed_val, env_val);
else response = validator(value, value);
if (response !== true) {
errored = true;
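
For reference, the removed `limit_overrides` option took per-language exceptions as a JSON map. A hypothetical illustration of such a value and how it would be parsed; the `PISTON_LIMIT_OVERRIDES` variable name assumes the usual `PISTON_` prefix mapping for options (as seen with `PISTON_LOG_LEVEL` in the CI workflow above), and the language and numbers are made up:

```js
// Hypothetical environment value (not from the diff):
//   PISTON_LIMIT_OVERRIDES={"typescript":{"run_timeout":5000,"run_memory_limit":512000000}}
const raw = process.env.PISTON_LIMIT_OVERRIDES || '{}';

// parse_overrides above returns null instead of throwing on bad JSON
let overrides;
try {
    overrides = JSON.parse(raw);
} catch (e) {
    overrides = null;
}

// Each inner key must be one of the options whitelisted in validate_overrides
// (max_process_count, max_open_files, ..., output_max_size) or validation fails.
if (overrides) console.log(overrides.typescript?.run_timeout); // 5000
```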


@@ -1,12 +1,10 @@
const logplease = require('logplease');
const logger = logplease.create('job');
const logger = require('logplease').create('job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('./config');
const globals = require('./globals');
const fs = require('fs/promises');
const fss = require('fs');
const wait_pid = require('waitpid');
const job_states = {
@@ -18,34 +16,30 @@ const job_states = {
let uid = 0;
let gid = 0;
let remaining_job_spaces = config.max_concurrent_jobs;
let remainingJobSpaces = config.max_concurrent_jobs;
let jobQueue = [];
setInterval(() => {
setInterval(()=>{
// Every 10ms, try to resolve a new job if there is an available slot
if (jobQueue.length > 0 && remaining_job_spaces > 0) {
jobQueue.shift()();
if(jobQueue.length > 0 && remainingJobSpaces > 0){
jobQueue.shift()()
}
}, 10);
}, 10)
class Job {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
this.uuid = uuidv4();
this.logger = logplease.create(`job/${this.uuid}`);
this.runtime = runtime;
this.files = files.map((file, i) => ({
name: file.name || `file${i}.code`,
content: file.content,
encoding: ['base64', 'hex', 'utf8'].includes(file.encoding)
? file.encoding
: 'utf8',
}));
this.args = args;
this.stdin = stdin;
this.timeouts = timeouts;
this.memory_limits = memory_limits;
@@ -58,8 +52,6 @@ class Job {
uid %= config.runner_uid_max - config.runner_uid_min + 1;
gid %= config.runner_gid_max - config.runner_gid_min + 1;
this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`);
this.state = job_states.READY;
this.dir = path.join(
config.data_directory,
@@ -69,45 +61,39 @@
}
async prime() {
if (remaining_job_spaces < 1) {
this.logger.info(`Awaiting job slot`);
await new Promise(resolve => {
jobQueue.push(resolve);
});
if(remainingJobSpaces < 1){
logger.info(`Awaiting job slot uuid=${this.uuid}`)
await new Promise((resolve)=>{
jobQueue.push(resolve)
})
}
this.logger.info(`Priming job`);
remaining_job_spaces--;
this.logger.debug('Writing files to job cache');
logger.info(`Priming job uuid=${this.uuid}`);
remainingJobSpaces--;
logger.debug('Writing files to job cache');
this.logger.debug(`Transferring ownership`);
logger.debug(`Transferring ownership uid=${this.uid} gid=${this.gid}`);
await fs.mkdir(this.dir, { mode: 0o700 });
await fs.chown(this.dir, this.uid, this.gid);
for (const file of this.files) {
const file_path = path.join(this.dir, file.name);
let file_path = path.join(this.dir, file.name);
const rel = path.relative(this.dir, file_path);
const file_content = Buffer.from(file.content, file.encoding);
if (rel.startsWith('..'))
throw Error(
`File path "${file.name}" tries to escape parent directory: ${rel}`
);
if(rel.startsWith(".."))
throw Error(`File path "${file.name}" tries to escape parent directory: ${rel}`)
await fs.mkdir(path.dirname(file_path), {
recursive: true,
mode: 0o700,
});
await fs.mkdir(path.dirname(file_path), {recursive: true, mode: 0o700})
await fs.chown(path.dirname(file_path), this.uid, this.gid);
await fs.write_file(file_path, file_content);
await fs.write_file(file_path, file.content);
await fs.chown(file_path, this.uid, this.gid);
}
this.state = job_states.PRIMED;
this.logger.debug('Primed job');
logger.debug('Primed job');
}
async safe_call(file, args, timeout, memory_limit, eventBus = null) {
@@ -116,29 +102,26 @@ class Job {
const prlimit = [
'prlimit',
'--nproc=' + this.runtime.max_process_count,
'--nofile=' + this.runtime.max_open_files,
'--fsize=' + this.runtime.max_file_size,
'--nproc=' + config.max_process_count,
'--nofile=' + config.max_open_files,
'--fsize=' + config.max_file_size,
];
if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit);
}
const proc_call = [
'nice',
...prlimit,
...nonetwork,
'bash',
file,
...args,
];
const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];
var stdout = '';
var stderr = '';
var output = '';
const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
env: {
...this.runtime.env_vars,
PISTON_LANGUAGE: this.runtime.language,
},
stdio: 'pipe',
cwd: this.dir,
uid: this.uid,
@@ -146,34 +129,36 @@
detached: true, //give this process its own process group
});
if (eventBus === null) {
if(eventBus === null){
proc.stdin.write(this.stdin);
proc.stdin.end();
proc.stdin.destroy();
} else {
eventBus.on('stdin', data => {
}else{
eventBus.on("stdin", (data) => {
proc.stdin.write(data);
});
})
eventBus.on('kill', signal => {
proc.kill(signal);
});
eventBus.on("kill", (signal) => {
proc.kill(signal)
})
}
const kill_timeout =
(timeout >= 0 &&
set_timeout(async _ => {
this.logger.info(`Timeout exceeded timeout=${timeout}`);
process.kill(proc.pid, 'SIGKILL');
}, timeout)) ||
null;
const kill_timeout = set_timeout(
async _ => {
logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`)
process.kill(proc.pid, 'SIGKILL')
},
timeout
);
proc.stderr.on('data', async data => {
if (eventBus !== null) {
eventBus.emit('stderr', data);
} else if (stderr.length > this.runtime.output_max_size) {
this.logger.info(`stderr length exceeded`);
process.kill(proc.pid, 'SIGKILL');
if(eventBus !== null) {
eventBus.emit("stderr", data);
} else if (stderr.length > config.output_max_size) {
logger.info(`stderr length exceeded uuid=${this.uuid}`)
process.kill(proc.pid, 'SIGKILL')
} else {
stderr += data;
output += data;
@@ -181,35 +166,35 @@
});
proc.stdout.on('data', async data => {
if (eventBus !== null) {
eventBus.emit('stdout', data);
} else if (stdout.length > this.runtime.output_max_size) {
this.logger.info(`stdout length exceeded`);
process.kill(proc.pid, 'SIGKILL');
if(eventBus !== null){
eventBus.emit("stdout", data);
} else if (stdout.length > config.output_max_size) {
logger.info(`stdout length exceeded uuid=${this.uuid}`)
process.kill(proc.pid, 'SIGKILL')
} else {
stdout += data;
output += data;
}
});
const exit_cleanup = () => {
const exit_cleanup = async () => {
clear_timeout(kill_timeout);
proc.stderr.destroy();
proc.stdout.destroy();
this.cleanup_processes();
this.logger.debug(`Finished exit cleanup`);
await this.cleanup_processes()
logger.debug(`Finished exit cleanup uuid=${this.uuid}`)
};
proc.on('exit', (code, signal) => {
exit_cleanup();
proc.on('exit', async (code, signal) => {
await exit_cleanup();
resolve({ stdout, stderr, code, signal, output });
resolve({stdout, stderr, code, signal, output });
});
proc.on('error', err => {
exit_cleanup();
proc.on('error', async err => {
await exit_cleanup();
reject({ error: err, stdout, stderr, output });
});
@@ -224,13 +209,13 @@
);
}
this.logger.info(`Executing job runtime=${this.runtime.toString()}`);
logger.info(
`Executing job uuid=${this.uuid} uid=${this.uid} gid=${
this.gid
} runtime=${this.runtime.toString()}`
);
const code_files =
(this.runtime.language === 'file' && this.files) ||
this.files.filter(file => file.encoding == 'utf8');
this.logger.debug('Compiling');
logger.debug('Compiling');
let compile;
@@ -243,11 +228,11 @@
);
}
this.logger.debug('Running');
logger.debug('Running');
const run = await this.safe_call(
this.runtime.run,
[code_files[0].name, ...this.args],
[this.files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run
);
@@ -262,7 +247,7 @@
};
}
async execute_interactive(eventBus) {
async execute_interactive(eventBus){
if (this.state !== job_states.PRIMED) {
throw new Error(
'Job must be in primed state, current state: ' +
@@ -270,98 +255,84 @@
);
}
this.logger.info(
`Interactively executing job runtime=${this.runtime.toString()}`
logger.info(
`Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
this.gid
} runtime=${this.runtime.toString()}`
);
const code_files =
(this.runtime.language === 'file' && this.files) ||
this.files.filter(file => file.encoding == 'utf8');
if (this.runtime.compiled) {
eventBus.emit('stage', 'compile');
const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'),
code_files.map(x => x.name),
if(this.runtime.compiled){
eventBus.emit("stage", "compile")
const {error, code, signal} = await this.safe_call(
this.runtime.compile,
this.files.map(x => x.name),
this.timeouts.compile,
this.memory_limits.compile,
eventBus
);
)
eventBus.emit('exit', 'compile', { error, code, signal });
eventBus.emit("exit", "compile", {error, code, signal})
}
this.logger.debug('Running');
eventBus.emit('stage', 'run');
const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'),
[code_files[0].name, ...this.args],
logger.debug('Running');
eventBus.emit("stage", "run")
const {error, code, signal} = await this.safe_call(
this.runtime.run,
[this.files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run,
eventBus
);
eventBus.emit('exit', 'run', { error, code, signal });
eventBus.emit("exit", "run", {error, code, signal})
this.state = job_states.EXECUTED;
}
cleanup_processes(dont_wait = []) {
async cleanup_processes(dont_wait = []) {
let processes = [1];
const to_wait = [];
this.logger.debug(`Cleaning up processes`);
logger.debug(`Cleaning up processes uuid=${this.uuid}`)
while (processes.length > 0) {
processes = [];
processes = []
const proc_ids = fss.readdir_sync('/proc');
processes = proc_ids.map(proc_id => {
if (isNaN(proc_id)) return -1;
try {
const proc_status = fss.read_file_sync(
path.join('/proc', proc_id, 'status')
);
const proc_lines = proc_status.to_string().split('\n');
const state_line = proc_lines.find(line =>
line.starts_with('State:')
);
const uid_line = proc_lines.find(line =>
line.starts_with('Uid:')
);
const proc_ids = await fs.readdir("/proc");
processes = await Promise.all(proc_ids.map(async (proc_id) => {
if(isNaN(proc_id)) return -1;
try{
const proc_status = await fs.read_file(path.join("/proc",proc_id,"status"));
const proc_lines = proc_status.to_string().split("\n")
const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
if(ruid == this.uid || euid == this.uid)
return parse_int(proc_id)
const [_1, state, user_friendly] = state_line.split(/\s+/);
if (state == 'Z')
// Zombie process, just needs to be waited on
return -1;
// We should kill in all other state (Sleep, Stopped & Running)
if (ruid == this.uid || euid == this.uid)
return parse_int(proc_id);
} catch {
return -1;
}catch{
return -1
}
return -1;
});
return -1
}))
processes = processes.filter(p => p > 0)
if(processes.length > 0)
logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`)
processes = processes.filter(p => p > 0);
if (processes.length > 0)
this.logger.debug(`Got processes to kill: ${processes}`);
for (const proc of processes) {
// First stop the processes, but keep their resources allocated so they can't re-fork
try {
process.kill(proc, 'SIGSTOP');
} catch (e) {
} catch {
// Could already be dead
this.logger.debug(
`Got error while SIGSTOPping process ${proc}:`,
e
);
}
}
@@ -371,27 +342,14 @@ class Job {
process.kill(proc, 'SIGKILL');
} catch {
// Could already be dead and just needs to be waited on
this.logger.debug(
`Got error while SIGKILLing process ${proc}:`,
e
);
}
to_wait.push(proc);
if(!dont_wait.includes(proc))
wait_pid(proc);
}
}
this.logger.debug(
`Finished kill-loop, calling wait_pid to end any zombie processes`
);
for (const proc of to_wait) {
if (dont_wait.includes(proc)) continue;
wait_pid(proc);
}
this.logger.debug(`Cleaned up processes`);
logger.debug(`Cleaned up processes uuid=${this.uuid}`)
}
async cleanup_filesystem() {
@@ -412,7 +370,7 @@
}
} catch (e) {
// File was somehow deleted in the time that we read the dir to when we checked the file
this.logger.warn(`Error removing file ${file_path}: ${e}`);
logger.warn(`Error removing file ${file_path}: ${e}`);
}
}
}
@@ -421,15 +379,15 @@
}
async cleanup() {
this.logger.info(`Cleaning up job`);
logger.info(`Cleaning up job uuid=${this.uuid}`);
this.cleanup_processes(); // Run process janitor, just in case there are any residual processes somehow
await this.cleanup_filesystem();
remaining_job_spaces++;
remainingJobSpaces++;
}
}
module.exports = {
Job,
};
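
Both sides of this diff keep the same external contract for `Job`: construct it, `prime()` it (which waits for one of the `max_concurrent_jobs` slots and writes the files), run `execute()` or `execute_interactive(eventBus)`, then `cleanup()`. A sketch of that lifecycle, following the constructor shape used by the test runner earlier; the runtime instance and file contents are placeholders:

```js
const { Job } = require('./job'); // path as in the api source tree

async function run_once(runtime /* a loaded Runtime instance */) {
    const job = new Job({
        runtime,
        args: [],
        stdin: '',
        files: [{ name: 'main.js', content: "console.log('OK');" }],
        timeouts: { run: 3000, compile: 10000 }, // milliseconds
        memory_limits: { run: -1, compile: -1 }, // -1 means no limit
    });

    await job.prime();                  // waits for a slot, writes and chowns files
    const result = await job.execute(); // compile stage (if any), then run stage
    await job.cleanup();                // kill stray processes, remove the job dir
    return result.run.stdout;
}
```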


@@ -7,36 +7,14 @@ const path = require('path');
const runtimes = [];
class Runtime {
constructor({
language,
version,
aliases,
runtime,
run,
compile,
packageSupport,
flake_path,
timeouts,
memory_limits,
max_process_count,
max_open_files,
max_file_size,
output_max_size,
}) {
constructor({ language, version, aliases, runtime, run, compile, packageSupport, flake_path }) {
this.language = language;
this.runtime = runtime;
this.timeouts = timeouts;
this.memory_limits = memory_limits;
this.max_process_count = max_process_count;
this.max_open_files = max_open_files;
this.max_file_size = max_file_size;
this.output_max_size = output_max_size;
this.aliases = aliases;
this.version = version;
this.version = version;
this.run = run;
this.compile = compile;
@@ -44,120 +22,58 @@ class Runtime {
this.package_support = packageSupport;
}
static compute_single_limit(
language_name,
limit_name,
language_limit_overrides
) {
return (
(config.limit_overrides[language_name] &&
config.limit_overrides[language_name][limit_name]) ||
(language_limit_overrides &&
language_limit_overrides[limit_name]) ||
config[limit_name]
);
}
static compute_all_limits(language_name, language_limit_overrides) {
return {
timeouts: {
compile: this.compute_single_limit(
language_name,
'compile_timeout',
language_limit_overrides
),
run: this.compute_single_limit(
language_name,
'run_timeout',
language_limit_overrides
),
},
memory_limits: {
compile: this.compute_single_limit(
language_name,
'compile_memory_limit',
language_limit_overrides
),
run: this.compute_single_limit(
language_name,
'run_memory_limit',
language_limit_overrides
),
},
max_process_count: this.compute_single_limit(
language_name,
'max_process_count',
language_limit_overrides
),
max_open_files: this.compute_single_limit(
language_name,
'max_open_files',
language_limit_overrides
),
max_file_size: this.compute_single_limit(
language_name,
'max_file_size',
language_limit_overrides
),
output_max_size: this.compute_single_limit(
language_name,
'output_max_size',
language_limit_overrides
),
};
}
ensure_built() {
ensure_built(){
logger.info(`Ensuring ${this} is built`);
const flake_path = this.flake_path;
function _ensure_built(key) {
function _ensure_built(key){
const command = `nix build ${flake_path}.metadata.${key} --no-link`;
cp.execSync(command, { stdio: 'pipe' });
cp.execSync(command, {stdio: "pipe"})
}
_ensure_built('run');
if (this.compiled) _ensure_built('compile');
_ensure_built("run");
if(this.compiled) _ensure_built("compile");
logger.debug(`Finished ensuring ${this} is installed`)
logger.debug(`Finished ensuring ${this} is installed`);
}
static load_runtime(flake_key) {
logger.info(`Loading ${flake_key}`);
static load_runtime(flake_key){
logger.info(`Loading ${flake_key}`)
const flake_path = `${config.flake_path}#pistonRuntimeSets.${config.runtime_set}.${flake_key}`;
const metadata_command = `nix eval --json ${flake_path}.metadata`;
const metadata = JSON.parse(cp.execSync(metadata_command));
const this_runtime = new Runtime({
...metadata,
...Runtime.compute_all_limits(
metadata.language,
metadata.limitOverrides
),
flake_path,
flake_path
});
this_runtime.ensure_built();
runtimes.push(this_runtime);
logger.debug(`Package ${flake_key} was loaded`);
}
get compiled() {
return this.compile !== null;
}
get id() {
get id(){
return runtimes.indexOf(this);
}
toString() {
return `${this.language}-${this.version}`;
}
}
module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.load_runtime = Runtime.load_runtime;

cli/.gitignore

@@ -0,0 +1 @@
node_modules


@@ -3,44 +3,8 @@ const path = require('path');
const chalk = require('chalk');
const WebSocket = require('ws');
const SIGNALS = [
'SIGABRT',
'SIGALRM',
'SIGBUS',
'SIGCHLD',
'SIGCLD',
'SIGCONT',
'SIGEMT',
'SIGFPE',
'SIGHUP',
'SIGILL',
'SIGINFO',
'SIGINT',
'SIGIO',
'SIGIOT',
'SIGLOST',
'SIGPIPE',
'SIGPOLL',
'SIGPROF',
'SIGPWR',
'SIGQUIT',
'SIGSEGV',
'SIGSTKFLT',
'SIGTSTP',
'SIGSYS',
'SIGTERM',
'SIGTRAP',
'SIGTTIN',
'SIGTTOU',
'SIGUNUSED',
'SIGURG',
'SIGUSR1',
'SIGUSR2',
'SIGVTALRM',
'SIGXCPU',
'SIGXFSZ',
'SIGWINCH',
];
const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
exports.command = ['execute <language> <file> [args..]'];
exports.aliases = ['run'];
@@ -51,18 +15,18 @@ exports.builder = {
string: true,
desc: 'Set the version of the language to use',
alias: ['l'],
default: '*',
default: '*'
},
stdin: {
boolean: true,
desc: 'Read input from stdin and pass to executor',
alias: ['i'],
alias: ['i']
},
run_timeout: {
alias: ['rt', 'r'],
number: true,
desc: 'Milliseconds before killing run process',
default: 3000,
default: 3000
},
compile_timeout: {
alias: ['ct', 'c'],
@@ -78,126 +42,117 @@ exports.builder = {
interactive: {
boolean: true,
alias: ['t'],
desc: 'Run interactively using WebSocket transport',
desc: 'Run interactively using WebSocket transport'
},
status: {
boolean: true,
alias: ['s'],
desc: 'Output additional status to stderr',
},
desc: 'Output additional status to stderr'
}
};
async function handle_interactive(files, argv) {
const ws = new WebSocket(
argv.pistonUrl.replace('http', 'ws') + '/api/v2/connect'
);
async function handle_interactive(files, argv){
const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
const log_message =
process.stderr.isTTY && argv.status ? console.error : () => {};
const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
process.on('exit', () => {
process.on("exit", ()=>{
ws.close();
process.stdin.end();
process.stdin.destroy();
process.exit();
});
process.exit();
})
for (const signal of SIGNALS) {
process.on(signal, () => {
ws.send(JSON.stringify({ type: 'signal', signal }));
});
for(const signal of SIGNALS){
process.on(signal, ()=>{
ws.send(JSON.stringify({type: 'signal', signal}))
})
}
ws.on('open', () => {
ws.on('open', ()=>{
const request = {
type: 'init',
type: "init",
language: argv.language,
version: argv['language_version'],
files: files,
args: argv.args,
compile_timeout: argv.ct,
run_timeout: argv.rt,
};
run_timeout: argv.rt
}
ws.send(JSON.stringify(request));
log_message(chalk.white.bold('Connected'));
ws.send(JSON.stringify(request))
log_message(chalk.white.bold("Connected"))
process.stdin.resume();
process.stdin.on('data', data => {
ws.send(
JSON.stringify({
type: 'data',
stream: 'stdin',
data: data.toString(),
})
);
});
});
process.stdin.on("data", (data) => {
ws.send(JSON.stringify({
type: "data",
stream: "stdin",
data: data.toString()
}))
})
})
ws.on('close', (code, reason) => {
ws.on("close", (code, reason)=>{
log_message(
chalk.white.bold('Disconnected: '),
chalk.white.bold('Reason: '),
chalk.white.bold("Disconnected: "),
chalk.white.bold("Reason: "),
chalk.yellow(`"${reason}"`),
chalk.white.bold('Code: '),
chalk.yellow(`"${code}"`)
);
process.stdin.pause();
});
chalk.white.bold("Code: "),
chalk.yellow(`"${code}"`),
)
process.stdin.pause()
})
ws.on('message', function (data) {
ws.on('message', function(data){
const msg = JSON.parse(data);
switch (msg.type) {
case 'runtime':
log_message(
chalk.bold.white('Runtime:'),
chalk.yellow(`${msg.language} ${msg.version}`)
);
switch(msg.type){
case "runtime":
log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
break;
case 'stage':
log_message(
chalk.bold.white('Stage:'),
chalk.yellow(msg.stage)
);
case "stage":
log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
break;
case 'data':
if (msg.stream == 'stdout') process.stdout.write(msg.data);
else if (msg.stream == 'stderr') process.stderr.write(msg.data);
else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data);
case "data":
if(msg.stream == "stdout") process.stdout.write(msg.data)
else if(msg.stream == "stderr") process.stderr.write(msg.data)
else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
break;
case 'exit':
if (msg.signal === null)
case "exit":
if(msg.signal === null)
log_message(
chalk.white.bold('Stage'),
chalk.white.bold("Stage"),
chalk.yellow(msg.stage),
chalk.white.bold('exited with code'),
chalk.white.bold("exited with code"),
chalk.yellow(msg.code)
);
)
else
log_message(
chalk.white.bold('Stage'),
chalk.white.bold("Stage"),
chalk.yellow(msg.stage),
chalk.white.bold('exited with signal'),
chalk.white.bold("exited with signal"),
chalk.yellow(msg.signal)
);
break;
)
break;
default:
log_message(chalk.red.bold('Unknown message:'), msg);
log_message(chalk.red.bold("Unknown message:"), msg)
}
});
})
}
async function run_non_interactively(files, argv) {
const stdin =
(argv.stdin &&
(await new Promise((resolve, _) => {
let data = '';
process.stdin.on('data', d => (data += d));
process.stdin.on('end', _ => resolve(data));
}))) ||
'';
const stdin = (argv.stdin && await new Promise((resolve, _) => {
let data = '';
process.stdin.on('data', d => data += d);
process.stdin.on('end', _ => resolve(data));
})) || '';
const request = {
language: argv.language,
@@ -206,7 +161,7 @@ async function run_non_interactively(files, argv) {
args: argv.args,
stdin,
compile_timeout: argv.ct,
run_timeout: argv.rt,
run_timeout: argv.rt
};
let { data: response } = await argv.axios.post('/api/v2/execute', request);
@@ -215,13 +170,13 @@ async function run_non_interactively(files, argv) {
console.log(chalk.bold(`== ${name} ==`));
if (ctx.stdout) {
console.log(chalk.bold(`STDOUT`));
console.log(ctx.stdout.replace(/\n/g, '\n '));
console.log(chalk.bold(`STDOUT`))
console.log(ctx.stdout.replace(/\n/g,'\n '))
}
if (ctx.stderr) {
console.log(chalk.bold(`STDERR`));
console.log(ctx.stderr.replace(/\n/g, '\n '));
console.log(chalk.bold(`STDERR`))
console.log(ctx.stderr.replace(/\n/g,'\n '))
}
if (ctx.code) {
@@ -232,9 +187,12 @@ async function run_non_interactively(files, argv) {
}
if (ctx.signal) {
console.log(chalk.bold(`Signal:`), chalk.bold.yellow(ctx.signal));
console.log(
chalk.bold(`Signal:`),
chalk.bold.yellow(ctx.signal)
);
}
};
}
if (response.compile) {
step('Compile', response.compile);
@@ -243,23 +201,17 @@ async function run_non_interactively(files, argv) {
step('Run', response.run);
}
exports.handler = async argv => {
const files = [...(argv.files || []), argv.file].map(file_path => {
const buffer = fs.readFileSync(file_path);
const encoding =
(buffer
.toString()
.split('')
.some(x => x.charCodeAt(0) >= 128) &&
'base64') ||
'utf8';
return {
name: path.basename(file_path),
content: buffer.toString(encoding),
encoding,
};
});
exports.handler = async (argv) => {
const files = [...(argv.files || []),argv.file]
.map(file_path => {
return {
name: path.basename(file_path),
content: fs.readFileSync(file_path).toString()
};
});
if (argv.interactive) await handle_interactive(files, argv);
if(argv.interactive) await handle_interactive(files, argv);
else await run_non_interactively(files, argv);
};
}


@@ -6,8 +6,8 @@ const axios_instance = argv => {
argv.axios = axios.create({
baseURL: argv['piston-url'],
headers: {
'Content-Type': 'application/json',
},
'Content-Type': 'application/json'
}
});
return argv;
@@ -18,11 +18,12 @@ require('yargs')(process.argv.slice(2))
alias: ['u'],
default: 'http://127.0.0.1:2000',
desc: 'Piston API URL',
string: true,
string: true
})
.middleware(axios_instance)
.scriptName('piston')
.commandDir('commands')
.demandCommand()
.help()
.wrap(72).argv;
.wrap(72)
.argv;


@@ -17,10 +17,10 @@ Returns a list of available languages, including the version, runtime and aliases
#### Response
- `[].language`: Name of the language
- `[].version`: Version of the runtime
- `[].aliases`: List of alternative names that can be used for the language
- `[].runtime` (_optional_): Name of the runtime used to run the language, only provided if alternative runtimes exist for the language
#### Example
@ -55,35 +55,34 @@ Runs the given code, using the given runtime and arguments, returning the result
#### Request
- `language`: Name or alias of a language listed in [runtimes](#runtimes)
- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
- `files`: An array of files which should be uploaded into the job context
- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
- `files[].content`: Content of file to be written
- `files[].encoding` (_optional_): The encoding scheme used for the file content. One of `base64`, `hex` or `utf8`. Defaults to `utf8`.
- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
- `args` (_optional_): Arguments to pass to the program. Defaults to none
- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
- `language`: Name or alias of a language listed in [runtimes](#runtimes)
- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
- `files`: An array of files which should be uploaded into the job context
- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
- `files[].content`: Content of file to be written
- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
- `args` (_optional_): Arguments to pass to the program. Defaults to none
- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
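For reference, a minimal sketch of a request assembled from the fields above and posted to the `/api/v2/execute` path the CLI uses; the `python`/`3.x` values are placeholders rather than part of this diff:

```bash
# Only language, version and files are required; everything else defaults.
curl -s -X POST http://127.0.0.1:2000/api/v2/execute \
    -H 'Content-Type: application/json' \
    -d '{
        "language": "python",
        "version": "3.x",
        "files": [{ "name": "main.py", "content": "print(input())" }],
        "stdin": "OK"
    }'
```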
#### Response
- `language`: Name (not alias) of the runtime used
- `version`: Version of the used runtime
- `run`: Results from the run stage
- `run.stdout`: stdout from run stage process
- `run.stderr`: stderr from run stage process
- `run.output`: stdout and stderr combined in order of data from run stage process
- `run.code`: Exit code from run process, or null if signal is not null
- `run.signal`: Signal from run process, or null if code is not null
- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
- `compile.stdout`: stdout from compile stage process
- `compile.stderr`: stderr from compile stage process
- `compile.output`: stdout and stderr combined in order of data from compile stage process
- `compile.code`: Exit code from compile process, or null if signal is not null
- `compile.signal`: Signal from compile process, or null if code is not null
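Putting those response fields together, a short sketch of telling a clean exit apart from a signal kill; `request.json` stands in for a body like the one sketched under the request fields:

```bash
# Exactly one of run.code and run.signal is null, per the fields above.
response=$(curl -s -X POST http://127.0.0.1:2000/api/v2/execute \
    -H 'Content-Type: application/json' -d @request.json)
signal=$(echo "$response" | jq -r '.run.signal')
if [ "$signal" != "null" ]; then
    echo "run stage was killed by signal $signal"
else
    echo "run stage exited with code $(echo "$response" | jq -r '.run.code')"
fi
```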
#### Example
@ -134,9 +133,9 @@ Returns a list of all possible packages, and their installation status.
#### Response
- `[].language`: Name of the contained runtime
- `[].language_version`: Version of the contained runtime
- `[].installed`: Status on the package being installed
#### Example
@ -168,13 +167,13 @@ Install the given package.
#### Request
- `language`: Name of package from [package list](#get-apiv2packages)
- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
- `language`: Name of package installed
- `version`: Version of package installed
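As a sketch, installing one of the packages touched elsewhere in this diff; the POST route on `/api/v2/packages` is an assumption, since the endpoint heading sits outside this hunk:

```bash
# Request body and response both use the language/version pair above.
curl -s -X POST http://127.0.0.1:2000/api/v2/packages \
    -H 'Content-Type: application/json' \
    -d '{ "language": "crystal", "version": "0.36.1" }'
```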
#### Example
@ -204,13 +203,13 @@ Uninstall the given package.
#### Request
- `language`: Name of package from [package list](#get-apiv2packages)
- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
- `language`: Name of package uninstalled
- `version`: Version of package uninstalled
#### Example


@ -50,15 +50,15 @@ Absolute path to piston related data, including packages and job contexts.
```yaml
key:
- PISTON_RUNNER_UID_MIN
- PISTON_RUNNER_UID_MAX
- PISTON_RUNNER_GID_MIN
- PISTON_RUNNER_GID_MAX
default:
- 1001
- 1500
- 1001
- 1500
```
UID and GID ranges to use when executing jobs.
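Like the other settings on this page, these keys are read as plain environment variables; a minimal sketch of widening the pool, with example values:

```bash
# Each job executes under a UID/GID drawn from these ranges, so wider
# ranges leave more head-room for concurrent jobs. Values are examples.
export PISTON_RUNNER_UID_MIN=1001
export PISTON_RUNNER_UID_MAX=2500   # default 1500
export PISTON_RUNNER_GID_MIN=1001
export PISTON_RUNNER_GID_MAX=2500   # default 1500
```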
@ -86,11 +86,11 @@ key: PISTON_MAX_PROCESS_COUNT
default: 64
```
Maximum number of processes a job is allowed to have open.
Guards against exhausting the process table, which would cause a full system lockup.
## Output Max Size
```yaml
key: PISTON_OUTPUT_MAX_SIZE
@ -123,27 +123,12 @@ Maximum size for a singular file written to disk.
Guards against large file writes that would exhaust disk space.
## Compile/Run timeouts
```yaml
key:
- PISTON_COMPILE_TIMEOUT
default: 10000
key:
- PISTON_RUN_TIMEOUT
default: 3000
```
The maximum time, in milliseconds, that a stage is allowed to take.
Use -1 for unlimited time.
## Compile/Run memory limits
```yaml
key:
- PISTON_COMPILE_MEMORY_LIMIT
- PISTON_RUN_MEMORY_LIMIT
default: -1
```
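Combined with the timeout keys above, a short sketch of capping both stages for a constrained deployment; the 256 MiB figure is an arbitrary example:

```bash
# Timeouts are wall-clock milliseconds (defaults quoted above); memory
# limits are bytes, where the default of -1 means no limit.
export PISTON_COMPILE_TIMEOUT=10000
export PISTON_RUN_TIMEOUT=3000
export PISTON_COMPILE_MEMORY_LIMIT=$((256 * 1024 * 1024))
export PISTON_RUN_MEMORY_LIMIT=$((256 * 1024 * 1024))
```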
@ -169,19 +154,3 @@ default: 64
```
Maximum number of jobs to run concurrently.
## Limit overrides
```yaml
key: PISTON_LIMIT_OVERRIDES
default: {}
```
Per-language overrides/exceptions for each of `max_process_count`, `max_open_files`, `max_file_size`,
`compile_memory_limit`, `run_memory_limit`, `compile_timeout`, `run_timeout`, `output_max_size`. Defined as follows:
```
PISTON_LIMIT_OVERRIDES={"c++":{"max_process_count":128}}
```
This will give `c++` a max_process_count of 128 regardless of the configuration.


@ -1 +1 @@
mkdocs==1.2.3
mkdocs==1.1.2


@ -21,7 +21,6 @@
compile? null,
packages? null,
aliases? [],
limitOverrides? {},
tests
}: let
compileFile = if compile != null then
@ -29,7 +28,7 @@
else null;
runFile = pkgs.writeShellScript "run" run;
metadata = {
inherit language version runtime aliases limitOverrides;
inherit language version runtime aliases;
run = runFile;
compile = compileFile;
packageSupport = packages != null;


@ -1,15 +1,15 @@
site_name: Piston
nav:
- Home: index.md
- Configuration: configuration.md
- API: api-v2.md
theme:
name: readthedocs
highlightjs: true
hljs_languages:
- yaml
- json
markdown_extensions:
- admonition

package-lock.json generated

@ -1,32 +0,0 @@
{
"name": "piston",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"devDependencies": {
"prettier": "2.4.1"
}
},
"node_modules/prettier": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
"integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
"dev": true,
"bin": {
"prettier": "bin-prettier.js"
},
"engines": {
"node": ">=10.13.0"
}
}
},
"dependencies": {
"prettier": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
"integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
"dev": true
}
}
}


@ -1,5 +0,0 @@
{
"devDependencies": {
"prettier": "2.4.1"
}
}


@ -1,15 +0,0 @@
#!/usr/bin/env bash
# source python 2.7
source ../../python/2.7.18/build.sh
# clone befunge repo
git clone -q 'https://github.com/programble/befungee' befunge93
# go inside befunge93 so we can checkout
cd befunge93
# checkout the version 0.2.0
git checkout tags/v0.2.0
cd ..


@ -1,5 +0,0 @@
{
"language": "befunge93",
"version": "0.2.0",
"aliases": ["b93"]
}


@ -1,4 +0,0 @@
#!/usr/bin/env bash
# run the befunge program with the file name
python2.7 "$BEFUNGE93_PATH"/befungee.py "$1"


@ -1 +0,0 @@
64+"KO">:#,_@


@ -1,20 +0,0 @@
#!/usr/bin/env bash
# build prolog 8.2.4 as dependency
source ../../prolog/8.2.4/build.sh
# curl brachylog 1.0.0
curl -L "https://github.com/JCumin/Brachylog/archive/refs/tags/v1.0-ascii.tar.gz" -o brachylog.tar.gz
tar xzf brachylog.tar.gz --strip-components=1
rm brachylog.tar.gz
# move swi prolog to working directory
cp bin/swipl swipl
# give execution permission to swipl
chmod +x swipl
# add some code to brachylog.pl so we don't have to escape backslashes while using the interactive mode
echo '
:-feature(argv, [Code, Stdin]), run_from_atom(Code, Stdin, _), halt.' >> prolog_parser/brachylog.pl


@ -1,5 +0,0 @@
{
"language": "brachylog",
"version": "1.0.0",
"aliases": []
}


@ -1,19 +0,0 @@
#!/usr/bin/env bash
# save the file for later
file="$1"
# remove the file from $@
shift
# save stdin as $@ joined by newlines
stdin=`printf "%s\n" "$@"`
# save code as the contents of $file
code=`cat "$file"`
# go to the directory where brachylog.pl is so the imports work
cd "$BRACHYLOG_PATH"/prolog_parser
# run swi prolog with code and stdin
swipl -f brachylog.pl "$code" "$stdin"


@ -1 +0,0 @@
"OK"w


@ -1,5 +1,5 @@
{
"language": "cjam",
"version": "0.6.5",
"aliases": []
"language": "cjam",
"version": "0.6.5",
"aliases": []
}

packages/crystal/0.36.1/build.sh vendored Executable file

@ -0,0 +1,7 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
curl -L "https://github.com/crystal-lang/crystal/releases/download/0.36.1/crystal-0.36.1-1-linux-x86_64.tar.gz" -o crystal.tar.gz
tar xzf crystal.tar.gz --strip-components=1
rm crystal.tar.gz

packages/crystal/0.36.1/compile vendored Normal file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Compile crystal files into out file
crystal build "$@" -o out --no-color && \
chmod +x out

packages/crystal/0.36.1/environment vendored Normal file

@ -0,0 +1 @@
export PATH=$PWD/bin:$PATH

packages/crystal/0.36.1/metadata.json vendored Normal file

@ -0,0 +1,5 @@
{
"language": "crystal",
"version": "0.36.1",
"aliases": ["crystal", "cr"]
}

packages/crystal/0.36.1/run vendored Normal file

@ -0,0 +1,4 @@
#!/bin/bash
shift # Filename is only used to compile
./out "$@"

packages/crystal/0.36.1/test.cr vendored Normal file

@ -0,0 +1 @@
puts("OK")

packages/dart/2.12.1/build.sh vendored Executable file

@ -0,0 +1,11 @@
#!/usr/bin/env bash
curl -L "https://storage.googleapis.com/dart-archive/channels/stable/release/2.12.1/sdk/dartsdk-linux-x64-release.zip" -o dart.zip
unzip dart.zip
rm dart.zip
cp -r dart-sdk/* .
rm -rf dart-sdk
chmod -R +rx bin


@ -2,4 +2,3 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
export BRACHYLOG_PATH=$PWD

packages/dart/2.12.1/metadata.json vendored Normal file

@ -0,0 +1,5 @@
{
"language": "dart",
"version": "2.12.1",
"aliases": []
}

packages/dart/2.12.1/run vendored Normal file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put instructions to run the runtime
dart run "$@"

packages/dart/2.12.1/test.dart vendored Normal file

@ -0,0 +1,3 @@
void main() {
print('OK');
}

packages/dash/0.5.11/build.sh vendored Executable file

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Put instructions to build your package in here
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl "http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.11.tar.gz" -o dash.tar.gz
tar xzf dash.tar.gz --strip-components=1
./configure --prefix "$PREFIX" &&
make -j$(nproc) &&
make install -j$(nproc)
cd ../
rm -rf build


@ -2,4 +2,3 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
export BEFUNGE93_PATH=$PWD/befunge93

packages/dash/0.5.11/metadata.json vendored Normal file

@ -0,0 +1,5 @@
{
"language": "dash",
"version": "0.5.11",
"aliases": ["dash"]
}

packages/dash/0.5.11/run vendored Normal file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put instructions to run the runtime
dash "$@"

packages/dash/0.5.11/test.dash vendored Normal file

@ -0,0 +1 @@
echo "OK"

packages/deno/1.7.5/build.sh vendored Executable file

@ -0,0 +1,5 @@
curl -L https://github.com/denoland/deno/releases/download/v1.7.5/deno-x86_64-unknown-linux-gnu.zip --output deno.zip
unzip -o deno.zip
rm deno.zip
chmod +x deno

packages/deno/1.7.5/environment vendored Normal file

@ -0,0 +1 @@
export PATH=$PWD:$PATH

packages/deno/1.7.5/metadata.json vendored Normal file

@ -0,0 +1,14 @@
{
"language": "deno",
"version": "1.7.5",
"provides": [
{
"language": "typescript",
"aliases": ["deno-ts","deno"]
},
{
"language": "javascript",
"aliases": ["deno-js"]
}
]
}

packages/deno/1.7.5/run vendored Normal file

@ -0,0 +1,2 @@
#!/bin/bash
DENO_DIR=$PWD deno run "$@"

packages/deno/1.7.5/test.deno.ts vendored Normal file

@ -0,0 +1 @@
console.log("OK")

packages/dotnet/5.0.201/build.sh vendored Executable file → Normal file

@ -7,10 +7,8 @@ rm dotnet.tar.gz
# Cache nuget packages
export DOTNET_CLI_HOME=$PWD
./dotnet new console -o cache_application
./dotnet new console -lang F# -o fs_cache_application
./dotnet new console -lang VB -o vb_cache_application
# This calls a restore on the global-packages index ($DOTNET_CLI_HOME/.nuget/packages)
# If we want to allow more packages, we could add them to this cache_application
rm -rf cache_application fs_cache_application vb_cache_application
# Get rid of it, we don't actually need the application - just the restore
rm -rf cache_application


@ -1,36 +1,15 @@
#!/usr/bin/env bash
[ "${PISTON_LANGUAGE}" == "fsi" ] && exit 0
export DOTNET_CLI_HOME=$PWD
export HOME=$PWD
rename 's/$/\.cs/' "$@" # Add .cs extension
dotnet build --help > /dev/null # Shut the thing up
case "${PISTON_LANGUAGE}" in
basic.net)
rename 's/$/\.vb/' "$@" # Add .vb extension
dotnet new console -lang VB -o . --no-restore
rm Program.vb
;;
fsharp.net)
first_file=$1
shift
rename 's/$/\.fs/' "$@" # Add .fs extension
dotnet new console -lang F# -o . --no-restore
mv $first_file Program.fs # For some reason F#.net doesn't work unless the file name is Program.fs
;;
csharp.net)
rename 's/$/\.cs/' "$@" # Add .cs extension
dotnet new console -o . --no-restore
rm Program.cs
;;
*)
echo "How did you get here? (${PISTON_LANGUAGE})"
exit 1
;;
dotnet new console -o . --no-restore
rm Program.cs
esac
dotnet restore --source $DOTNET_ROOT/.nuget/packages
dotnet build --no-restore


@ -2,5 +2,4 @@
# Put 'export' statements here for environment variables
export DOTNET_ROOT=$PWD
export PATH=$DOTNET_ROOT:$PATH
export FSI_PATH=$(find $(pwd) -name fsi.dll)


@ -1,66 +1,5 @@
{
"language": "dotnet",
"version": "5.0.201",
"provides": [
{
"language": "basic.net",
"aliases": [
"basic",
"visual-basic",
"visual-basic.net",
"vb",
"vb.net",
"vb-dotnet",
"dotnet-vb",
"basic-dotnet",
"dotnet-basic"
],
"limit_overrides": { "max_process_count": 128 }
},
{
"language": "fsharp.net",
"aliases": [
"fsharp",
"fs",
"f#",
"fs.net",
"f#.net",
"fsharp-dotnet",
"fs-dotnet",
"f#-dotnet",
"dotnet-fsharp",
"dotnet-fs",
"dotnet-fs"
],
"limit_overrides": { "max_process_count": 128 }
},
{
"language": "csharp.net",
"aliases": [
"csharp",
"c#",
"cs",
"c#.net",
"cs.net",
"c#-dotnet",
"cs-dotnet",
"csharp-dotnet",
"dotnet-c#",
"dotnet-cs",
"dotnet-csharp"
],
"limit_overrides": { "max_process_count": 128 }
},
{
"language": "fsi",
"aliases": [
"fsx",
"fsharp-interactive",
"f#-interactive",
"dotnet-fsi",
"fsi-dotnet",
"fsi.net"
]
}
]
"aliases": ["cs", "csharp"]
}


@ -3,23 +3,5 @@
# Put instructions to run the runtime
export DOTNET_CLI_HOME=$PWD
case "${PISTON_LANGUAGE}" in
basic.net)
;&
fsharp.net)
;&
csharp.net)
shift
dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
;;
fsi)
FILENAME=$1
rename 's/$/\.fsx/' $FILENAME # Add .fsx extension
shift
dotnet $FSI_PATH $FILENAME.fsx "$@"
;;
*)
echo "How did you get here? (${PISTON_LANGUAGE})"
exit 1
;;
esac
shift
dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"


@ -1,6 +0,0 @@
open System
[<EntryPoint>]
let main argv =
    printfn "OK"
    0


@ -1 +0,0 @@
printfn "OK"


@ -1,9 +0,0 @@
Imports System
Module Module1
Sub Main()
Console.WriteLine("OK")
End Sub
End Module


@ -1,5 +1,5 @@
{
"language": "dragon",
"version": "1.9.8",
"aliases": []
"language": "dragon",
"version": "1.9.8",
"aliases": []
}

packages/elixir/1.11.3/build.sh vendored Executable file

@ -0,0 +1,25 @@
#!/bin/bash
source ../../erlang/23.0.0/build.sh
export PATH=$PWD/bin:$PATH
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl -L "https://github.com/elixir-lang/elixir/archive/v1.11.3.tar.gz" -o elixir.tar.gz
tar xzf elixir.tar.gz --strip-components=1
rm elixir.tar.gz
./configure --prefix "$PREFIX"
make -j$(nproc)
cd ..
cp -r build/bin .
cp -r build/lib .
rm -rf build

packages/elixir/1.11.3/environment vendored Normal file

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Put 'export' statements here for environment variables
export LC_ALL=en_US.UTF-8
export PATH=$PWD/bin:$PATH

packages/elixir/1.11.3/metadata.json vendored Normal file

@ -0,0 +1,5 @@
{
"language": "elixir",
"version": "1.11.3",
"aliases": ["elixir", "exs"]
}

packages/elixir/1.11.3/run vendored Normal file

@ -0,0 +1,4 @@
#!/bin/bash
# Put instructions to run the runtime
elixir "$@"

packages/elixir/1.11.3/test.exs vendored Normal file

@ -0,0 +1 @@
IO.puts("OK")

packages/erlang/23.0.0/build.sh vendored Executable file

@ -0,0 +1,21 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl "http://erlang.org/download/otp_src_23.0.tar.gz" -o erlang.tar.gz
tar xzf erlang.tar.gz --strip-components=1
rm erlang.tar.gz
export ERL_TOP=$(pwd)
./configure --prefix "$PREFIX"
make -j$(nproc)
make install -j$(nproc)
cd ..
rm -rf build

packages/erlang/23.0.0/environment vendored Normal file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH

packages/erlang/23.0.0/metadata.json vendored Normal file

@ -0,0 +1,5 @@
{
"language": "erlang",
"version": "23.0.0",
"aliases": ["erlang", "erl", "escript"]
}

packages/erlang/23.0.0/run vendored Normal file

@ -0,0 +1,4 @@
#!/bin/bash
# Put instructions to run the runtime
escript "$@"

packages/erlang/23.0.0/test.erl vendored Normal file

@ -0,0 +1,3 @@
main(_) ->
io:format("OK~n").


@ -1,5 +1,5 @@
{
"language": "forte",
"version": "1.0.0",
"aliases": ["forter"]
"language": "forte",
"version": "1.0.0",
"aliases": ["forter"]
}

packages/gawk/5.1.0/build.sh vendored Normal file

@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Put instructions to build your package in here
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl "https://ftp.gnu.org/gnu/gawk/gawk-5.1.0.tar.gz" -o gawk.tar.gz
tar xzf gawk.tar.gz --strip-components=1
# === autoconf based ===
./configure --prefix "$PREFIX"
make -j$(nproc)
make install -j$(nproc)
cd ../
rm -rf build

packages/gawk/5.1.0/environment vendored Normal file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH

packages/gawk/5.1.0/metadata.json vendored Normal file

@ -0,0 +1,10 @@
{
"language": "gawk",
"version": "5.1.0",
"provides": [
{
"language": "awk",
"aliases": ["gawk"]
}
]
}

packages/gawk/5.1.0/run vendored Normal file

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put instructions to run the runtime
gawk-5.1.0 -f "$@"

packages/gawk/5.1.0/test.awk vendored Normal file

@ -0,0 +1 @@
{print "OK"}


@ -3,7 +3,7 @@
"version": "10.2.0",
"provides": [
{
"language": "c",
"language":"c",
"aliases": ["gcc"]
},
{


@ -1,5 +1,5 @@
{
"language": "golfscript",
"version": "1.0.0",
"aliases": ["golfscript"]
"language": "golfscript",
"version": "1.0.0",
"aliases": ["golfscript"]
}


@ -1,5 +1,5 @@
{
"language": "groovy",
"version": "3.0.7",
"aliases": ["groovy", "gvy"]
"language": "groovy",
"version": "3.0.7",
"aliases": ["groovy", "gvy"]
}


@ -1,14 +0,0 @@
#!/usr/bin/env bash
cp ../../haskell/9.0.1/build.sh ./haskell-build.sh
sed -Ei 's/9\.0\.1/8\.10\.7/g' ./haskell-build.sh
source ./haskell-build.sh
# compile Husk from source
git clone -q "https://github.com/barbuz/husk.git"
cd husk
../bin/ghc -O2 Husk
# cleanup
cd ..
rm -f haskell-build.sh


@ -1,6 +0,0 @@
#!/usr/bin/env bash
# haskell and husk path
export PATH=$PWD/bin:$PATH
export HUSK_PATH=$PWD/husk
export LANG=en_US.UTF8


@ -1,5 +0,0 @@
{
"language": "husk",
"version": "1.0.0",
"aliases": []
}


@ -1,10 +0,0 @@
#!/usr/bin/env bash
# Store the current path because we'll need it to run the program file
PROGRAM_PATH=$PWD
# For now, Husk can only be run within the folder that has the imported modules
cd $HUSK_PATH
# Run Husk from file in unicode format with the given args
./Husk -uf "${PROGRAM_PATH}/${@}"


@ -1 +0,0 @@
"OK


@ -1,5 +1,5 @@
{
"language": "japt",
"version": "2.0.0",
"aliases": ["japt"]
}
"language": "japt",
"version": "2.0.0",
"aliases": ["japt"]
}


@ -1,6 +0,0 @@
#!/usr/bin/env bash
curl -L "https://github.com/llvm/llvm-project/releases/download/llvmorg-12.0.1/clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz" -o llvm-ir.tar.xz
tar xf llvm-ir.tar.xz clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-16.04/bin --strip-components=1
rm llvm-ir.tar.xz


@ -1,4 +0,0 @@
#!/usr/bin/env bash
llc "$@" -o binary.s
clang binary.s -o binary


@ -1,2 +0,0 @@
#!/usr/bin/env bash
export PATH=$PWD/bin:$PATH


@ -1,5 +0,0 @@
{
"language": "llvm_ir",
"version": "12.0.1",
"aliases": ["llvm", "llvm-ir", "ll"]
}


@ -1,4 +0,0 @@
#!/usr/bin/env bash
shift
binary "$@"


@ -1,10 +0,0 @@
@.str = private unnamed_addr constant [3 x i8] c"OK\00"
declare i32 @puts(i8* nocapture) nounwind
define i32 @main() {
%cast210 = getelementptr [3 x i8],[3 x i8]* @.str, i64 0, i64 0
call i32 @puts(i8* %cast210)
ret i32 0
}

Some files were not shown because too many files have changed in this diff.