Compare commits

af5036d82c651f7de8c3eb2c615746c7c92116f5..0434877d03702845333eb2c6a4148dd6c9276806

No commits in common. "af5036d82c651f7de8c3eb2c615746c7c92116f5" and "0434877d03702845333eb2c6a4148dd6c9276806" have entirely different histories.

145 changed files with 1025 additions and 1919 deletions


@@ -4,6 +4,7 @@ about: Template for requesting language support
title: Add [insert language name here]
labels: package
assignees: ''
---
+
Provide links to different compilers/interpreters that could be used to implement this language, and discuss pros/cons of each.


@@ -1,11 +1,10 @@
Checklist:
-
-- [ ] The package builds locally with `./piston build-pkg [package] [version]`
-- [ ] The package installs with `./piston ppman install [package]=[version]`
-- [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
-- [ ] Package files are placed in the correct directory
-- [ ] No old package versions are removed
-- [ ] All source files are deleted in the `build.sh` script
-- [ ] `metadata.json`'s `language` and `version` fields match the directory path
-- [ ] Any extensions the language may use are set as aliases
-- [ ] Any alternative names the language is referred to are set as aliases.
+* [ ] The package builds locally with `./piston build-pkg [package] [version]`
+* [ ] The package installs with `./piston ppman install [package]=[version]`
+* [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
+* [ ] Package files are placed in the correct directory
+* [ ] No old package versions are removed
+* [ ] All source files are deleted in the `build.sh` script
+* [ ] `metadata.json`'s `language` and `version` fields match the directory path
+* [ ] Any extensions the language may use are set as aliases
+* [ ] Any alternative names the language is referred to are set as aliases.


@@ -1,38 +1,39 @@
name: Publish API image
on:
    push:
        branches:
            - master
            - v3
        paths:
            - api/**
jobs:
    push_to_registry:
        runs-on: ubuntu-latest
        name: Build and Push Docker image to Github Packages
        steps:
            - name: Check out repo
              uses: actions/checkout@v2
            - name: Login to GitHub registry
              uses: docker/login-action@v1
              with:
                  username: ${{ github.actor }}
                  password: ${{ secrets.GITHUB_TOKEN }}
                  registry: docker.pkg.github.com
            - name: Login to ghcr.io
              uses: docker/login-action@v1
              with:
                  username: ${{ github.actor }}
                  password: ${{ secrets.GITHUB_TOKEN }}
                  registry: ghcr.io
            - name: Build and push API
              uses: docker/build-push-action@v2
              with:
                  context: api
                  push: true
                  pull: true
                  tags: |
                      docker.pkg.github.com/engineer-man/piston/api
                      ghcr.io/engineer-man/piston


@@ -1,139 +1,140 @@
-name: 'Package Pull Requests'
+name: "Package Pull Requests"
on:
    pull_request:
        types:
            - opened
+            - edited
            - reopened
            - synchronize
        paths:
-            - 'packages/**'
+            - "packages/**"

jobs:
    check-pkg:
        name: Validate README
        runs-on: ubuntu-latest
        steps:
            - name: Checkout
              uses: actions/checkout@v2

            - name: Get list of changed files
              uses: lots0logs/gh-action-get-changed-files@2.1.4
              with:
                  token: ${{ secrets.GITHUB_TOKEN }}

            - name: Ensure README was updated
              run: |
                  MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))

                  [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0

                  echo "README has supported languages missing: "
                  comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
                  exit 1

    build-pkg:
        name: Check that package builds
        runs-on: ubuntu-latest
        steps:
            - name: Checkout
              uses: actions/checkout@v2

            - name: Login to GitHub registry
              uses: docker/login-action@v1
              with:
                  username: ${{ github.actor }}
                  password: ${{ secrets.GITHUB_TOKEN }}
                  registry: docker.pkg.github.com

            - name: Get list of changed files
              uses: lots0logs/gh-action-get-changed-files@2.1.4
              with:
                  token: ${{ secrets.GITHUB_TOKEN }}

            - name: Build Packages
              run: |
-                  PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
+                  PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
                  echo "Packages: $PACKAGES"
                  docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
                  docker build -t repo-builder repo
                  docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
                  ls -la packages

            - name: Upload package as artifact
              uses: actions/upload-artifact@v2
              with:
                  name: packages
                  path: packages/*.pkg.tar.gz

    test-pkg:
        name: Test package
        runs-on: ubuntu-latest
        needs: build-pkg
        steps:
            - uses: actions/checkout@v2

            - uses: actions/download-artifact@v2
              with:
                  name: packages

            - name: Relocate downloaded packages
              run: mv *.pkg.tar.gz packages/

            - name: Login to GitHub registry
              uses: docker/login-action@v1
              with:
                  username: ${{ github.actor }}
                  password: ${{ secrets.GITHUB_TOKEN }}
                  registry: docker.pkg.github.com

            - name: Run tests
              run: |
                  ls -la
                  docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
                  docker pull docker.pkg.github.com/engineer-man/piston/api
                  docker build -t piston-api api
                  docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
                  echo Waiting for API to start..
                  docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
                  echo Waiting for Index to start..
                  docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
                  echo Adjusting index
                  sed -i 's/repo/localhost/g' repo/index
                  echo Listing Packages
                  PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
                  echo $PACKAGES_JSON
                  echo Getting CLI ready
                  docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
                  for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
                  do
                      echo "Testing $package"
                      PKG_PATH=$(sed 's|-|/|' <<< $package)
                      PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
                      PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
                      echo "Installing..."
                      docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
                      TEST_SCRIPTS=packages/$PKG_PATH/test.*
                      echo "Tests: $TEST_SCRIPTS"
                      for tscript in $TEST_SCRIPTS
                      do
                          TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
                          echo Running $tscript with runtime=$TEST_RUNTIME
                          docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
                          cat test_output
                          grep "OK" test_output
                      done
                  done

            - name: Dump logs
              if: ${{ always() }}
              run: |
                  docker logs api
                  docker logs repo
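The test convention this job relies on: each package ships test.<runtime> files whose successful run prints OK, which the loop above greps for. A hypothetical test file for a Node-based package, under that convention:

    // packages/<language>/<version>/test.js (hypothetical path)
    // The CI loop above runs this file and greps its output for "OK".
    console.log('OK');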


@@ -1,77 +1,78 @@
name: 'Package Pushed'
on:
    push:
        branches:
            - master
            - v3
        paths:
            - packages/**
jobs:
    build-pkg:
        name: Build package
        runs-on: ubuntu-latest
        steps:
            - name: Checkout
              uses: actions/checkout@v2

-            - name: Login to GitHub registry
-              uses: docker/login-action@v1
-              with:
-                  username: ${{ github.actor }}
-                  password: ${{ secrets.GITHUB_TOKEN }}
-                  registry: docker.pkg.github.com
-
-            - name: Login to GitHub registry
-              uses: docker/login-action@v1
-              with:
-                  username: ${{ github.actor }}
-                  password: ${{ secrets.GITHUB_TOKEN }}
-                  registry: docker.pkg.github.com
-
            - name: Get list of changed files
              uses: lots0logs/gh-action-get-changed-files@2.1.4
              with:
                  token: ${{ secrets.GITHUB_TOKEN }}

            - name: Build Packages
              run: |
-                  PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
+                  PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
                  echo "Packages: $PACKAGES"
                  docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
                  docker build -t repo-builder repo
                  docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
                  ls -la packages

            - name: Upload Packages
              uses: svenstaro/upload-release-action@v2
              with:
                  repo_token: ${{ secrets.GITHUB_TOKEN }}
                  file: packages/*.pkg.tar.gz
                  tag: pkgs
                  overwrite: true
                  file_glob: true

    create-index:
        name: Create Index
        runs-on: ubuntu-latest
        needs: build-pkg
        steps:
-            - name: 'Download all release assets'
+            - name: "Download all release assets"
              run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
-            - name: 'Generate index file'
+            - name: "Generate index file"
              run: |
                  echo "" > index
                  BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
                  for pkg in *.pkg.tar.gz
                  do
                      PKGFILE=$(basename $pkg)
                      PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
                      PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
                      PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
                      PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
                      echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
                      echo "Adding package $PKGNAME-$PKGVERSION"
                  done
            - name: Upload index
              uses: svenstaro/upload-release-action@v2
              with:
                  repo_token: ${{ secrets.GITHUB_TOKEN }}
                  file: index
                  tag: pkgs
                  overwrite: true
                  file_glob: true
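For reference, each line the "Generate index file" step appends has the shape name,version,sha256,download-url. With a hypothetical archive python-3.9.4.pkg.tar.gz, the entry would be:

    python,3.9.4,<sha256 of the archive>,https://github.com/engineer-man/piston/releases/download/pkgs/python-3.9.4.pkg.tar.gz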


@@ -1,31 +1,31 @@
name: Publish Repo image
on:
    push:
        branches:
            - master
            - v3
        paths:
            - repo/**
jobs:
    push_to_registry:
        runs-on: ubuntu-latest
        name: Build and Push Docker image to Github Packages
        steps:
            - name: Check out repo
              uses: actions/checkout@v2
            - name: Login to GitHub registry
              uses: docker/login-action@v1
              with:
                  username: ${{ github.actor }}
                  password: ${{ secrets.GITHUB_TOKEN }}
                  registry: docker.pkg.github.com
            - name: Build and push repo
              uses: docker/build-push-action@v2
              with:
                  context: repo
                  pull: true
                  push: true
                  tags: |
                      docker.pkg.github.com/engineer-man/piston/repo-builder

.gitignore

@@ -1,4 +1,3 @@
data/
.piston_env
-node_modules
result


@@ -1,12 +0,0 @@
-node_modules
-data/
-api/_piston
-repo/build
-packages/*/*/*
-packages/*.pkg.tar.gz
-!packages/*/*/metadata.json
-!packages/*/*/build.sh
-!packages/*/*/environment
-!packages/*/*/run
-!packages/*/*/compile
-!packages/*/*/test.*


@@ -1,8 +1,8 @@
version: 2

mkdocs:
    configuration: mkdocs.yml

python:
    version: 3.7
    install:
        - requirements: docs/requirements.txt


@@ -1,12 +0,0 @@
-# This "FROM" image is previously emitted by nix
-FROM ghcr.io/engineer-man/piston:base-latest
-
-ENV PISTON_FLAKE_PATH=/piston/packages
-COPY runtimes/ /piston/packages/runtimes
-COPY flake.nix flake.lock /piston/packages/
-
-ARG RUNTIMESET=all
-ENV PISTON_RUNTIME_SET=$RUNTIMESET
-
-RUN piston-install

api/.gitignore

@@ -1 +1,2 @@
+node_modules
_piston

api/.prettierignore (new file)

@@ -0,0 +1 @@
+node_modules


@@ -51,8 +51,6 @@ with pkgs; rec {
        do
            echo "nixbld$i:x:$(( $i + 30000 )):30000:Nix build user $i:/var/empty:/run/current-system/sw/bin/nologin" >> etc/passwd
        done
-
-        chmod 1777 {,var/}tmp/
    '';

    config = {

@@ -63,21 +61,6 @@ with pkgs; rec {
            "SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
            "GIT_SSL_CAINFO=/etc/ssl/certs/ca-bundle.crt"
            "NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
-            "PATH=${lib.concatStringsSep ":" [
-                "/usr/local/sbin"
-                "/usr/local/bin"
-                "/usr/sbin"
-                "/usr/bin"
-                "/sbin"
-                "/bin"
-                "/root/.nix-profile/bin"
-                "/nix/var/nix/profiles/default/bin"
-                "/nix/var/nix/profiles/default/sbin"
-            ]}"
-            "MANPATH=${lib.concatStringsSep ":" [
-                "/root/.nix-profile/share/man"
-                "/nix/var/nix/profiles/default/share/man"
-            ]}"
        ];
        ExposedPorts = {


@@ -3,54 +3,16 @@ const router = express.Router();
const router = express.Router();

const events = require('events');

+const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');

-const SIGNALS = [
-    'SIGABRT',
-    'SIGALRM',
-    'SIGBUS',
-    'SIGCHLD',
-    'SIGCLD',
-    'SIGCONT',
-    'SIGEMT',
-    'SIGFPE',
-    'SIGHUP',
-    'SIGILL',
-    'SIGINFO',
-    'SIGINT',
-    'SIGIO',
-    'SIGIOT',
-    'SIGKILL',
-    'SIGLOST',
-    'SIGPIPE',
-    'SIGPOLL',
-    'SIGPROF',
-    'SIGPWR',
-    'SIGQUIT',
-    'SIGSEGV',
-    'SIGSTKFLT',
-    'SIGSTOP',
-    'SIGTSTP',
-    'SIGSYS',
-    'SIGTERM',
-    'SIGTRAP',
-    'SIGTTIN',
-    'SIGTTOU',
-    'SIGUNUSED',
-    'SIGURG',
-    'SIGUSR1',
-    'SIGUSR2',
-    'SIGVTALRM',
-    'SIGXCPU',
-    'SIGXFSZ',
-    'SIGWINCH',
-];
+const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
// ref: https://man7.org/linux/man-pages/man7/signal.7.html

-function get_job(body) {
-    let {
+function get_job(body){
+    const {
        language,
        args,
        stdin,

@@ -58,7 +20,7 @@ function get_job(body) {
        compile_memory_limit,
        run_memory_limit,
        run_timeout,
-        compile_timeout,
+        compile_timeout
    } = body;

    return new Promise((resolve, reject) => {

@@ -73,6 +35,7 @@ function get_job(body) {
                message: 'files is required as an array',
            });
        }
+
        for (const [i, file] of files.entries()) {
            if (typeof file.content !== 'string') {
                return reject({

@@ -131,65 +94,23 @@ function get_job(body) {
            });
        }

-        if (
-            rt.language !== 'file' &&
-            !files.some(file => !file.encoding || file.encoding === 'utf8')
-        ) {
-            return reject({
-                message: 'files must include at least one utf8 encoded file',
-            });
-        }
-
-        for (const constraint of ['memory_limit', 'timeout']) {
-            for (const type of ['compile', 'run']) {
-                const constraint_name = `${type}_${constraint}`;
-                const constraint_value = body[constraint_name];
-                const configured_limit = rt[`${constraint}s`][type];
-                if (!constraint_value) {
-                    continue;
-                }
-                if (typeof constraint_value !== 'number') {
-                    return reject({
-                        message: `If specified, ${constraint_name} must be a number`,
-                    });
-                }
-                if (configured_limit <= 0) {
-                    continue;
-                }
-                if (constraint_value > configured_limit) {
-                    return reject({
-                        message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`,
-                    });
-                }
-                if (constraint_value < 0) {
-                    return reject({
-                        message: `${constraint_name} must be non-negative`,
-                    });
-                }
-            }
-        }
-
-        compile_timeout = compile_timeout || rt.timeouts.compile;
-        run_timeout = run_timeout || rt.timeouts.run;
-        compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
-        run_memory_limit = run_memory_limit || rt.memory_limits.run;
-        resolve(
-            new Job({
-                runtime: rt,
-                args: args || [],
-                stdin: stdin || '',
-                files,
-                timeouts: {
-                    run: run_timeout,
-                    compile: compile_timeout,
-                },
-                memory_limits: {
-                    run: run_memory_limit,
-                    compile: compile_memory_limit,
-                },
-            })
-        );
-    });
+        resolve(new Job({
+            runtime: rt,
+            alias: language,
+            args: args || [],
+            stdin: stdin || "",
+            files,
+            timeouts: {
+                run: run_timeout || 3000,
+                compile: compile_timeout || 10000,
+            },
+            memory_limits: {
+                run: run_memory_limit || config.run_memory_limit,
+                compile: compile_memory_limit || config.compile_memory_limit,
+            }
+        }));
+    })
}

router.use((req, res, next) => {

@@ -207,104 +128,88 @@ router.use((req, res, next) => {
});

router.ws('/connect', async (ws, req) => {
    let job = null;
    let eventBus = new events.EventEmitter();

-    eventBus.on('stdout', data =>
-        ws.send(
-            JSON.stringify({
-                type: 'data',
-                stream: 'stdout',
-                data: data.toString(),
-            })
-        )
-    );
-    eventBus.on('stderr', data =>
-        ws.send(
-            JSON.stringify({
-                type: 'data',
-                stream: 'stderr',
-                data: data.toString(),
-            })
-        )
-    );
-    eventBus.on('stage', stage =>
-        ws.send(JSON.stringify({ type: 'stage', stage }))
-    );
-    eventBus.on('exit', (stage, status) =>
-        ws.send(JSON.stringify({ type: 'exit', stage, ...status }))
-    );
+    eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
+    eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
+    eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
+    eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))

-    ws.on('message', async data => {
-        try {
+    ws.on("message", async (data) => {
+
+        try{
            const msg = JSON.parse(data);

-            switch (msg.type) {
-                case 'init':
-                    if (job === null) {
+            switch(msg.type){
+                case "init":
+                    if(job === null){
                        job = await get_job(msg);

                        await job.prime();

-                        ws.send(
-                            JSON.stringify({
-                                type: 'runtime',
-                                language: job.runtime.language,
-                                version: job.runtime.version.raw,
-                            })
-                        );
+                        ws.send(JSON.stringify({
+                            type: "runtime",
+                            language: job.runtime.language,
+                            version: job.runtime.version.raw
+                        }))

                        await job.execute_interactive(eventBus);

-                        ws.close(4999, 'Job Completed');
-                    } else {
-                        ws.close(4000, 'Already Initialized');
+                        ws.close(4999, "Job Completed");
+
+                    }else{
+                        ws.close(4000, "Already Initialized");
                    }
                    break;
-                case 'data':
-                    if (job !== null) {
-                        if (msg.stream === 'stdin') {
-                            eventBus.emit('stdin', msg.data);
-                        } else {
-                            ws.close(4004, 'Can only write to stdin');
-                        }
-                    } else {
-                        ws.close(4003, 'Not yet initialized');
-                    }
-                    break;
-                case 'signal':
-                    if (job !== null) {
-                        if (SIGNALS.includes(msg.signal)) {
-                            eventBus.emit('signal', msg.signal);
-                        } else {
-                            ws.close(4005, 'Invalid signal');
-                        }
-                    } else {
-                        ws.close(4003, 'Not yet initialized');
-                    }
-                    break;
+            case "data":
+                if(job !== null){
+                    if(msg.stream === "stdin"){
+                        eventBus.emit("stdin", msg.data)
+                    }else{
+                        ws.close(4004, "Can only write to stdin")
+                    }
+                }else{
+                    ws.close(4003, "Not yet initialized")
+                }
+                break;
+            case "signal":
+                if(job !== null){
+                    if(SIGNALS.includes(msg.signal)){
+                        eventBus.emit("signal", msg.signal)
+                    }else{
+                        ws.close(4005, "Invalid signal")
+                    }
+                }else{
+                    ws.close(4003, "Not yet initialized")
+                }
+                break;
            }
-        } catch (error) {
-            ws.send(JSON.stringify({ type: 'error', message: error.message }));
-            ws.close(4002, 'Notified Error');
+
+        }catch(error){
+            ws.send(JSON.stringify({type: "error", message: error.message}))
+            ws.close(4002, "Notified Error")
            // ws.close message is limited to 123 characters, so we notify over WS then close.
        }
-    });
+    })

-    ws.on('close', async () => {
-        if (job !== null) {
-            await job.cleanup();
+    ws.on("close", async ()=>{
+        if(job !== null){
+            await job.cleanup()
        }
-    });
+    })

-    setTimeout(() => {
+    setTimeout(()=>{
        //Terminate the socket after 1 second, if not initialized.
-        if (job === null) ws.close(4001, 'Initialization Timeout');
-    }, 1000);
-});
+        if(job === null)
+            ws.close(4001, "Initialization Timeout");
+    }, 1000)
+})

router.post('/execute', async (req, res) => {
-    try {
+    try{
        const job = await get_job(req.body);

        await job.prime();

@@ -314,7 +219,7 @@ router.post('/execute', async (req, res) => {
        await job.cleanup();

        return res.status(200).send(result);
-    } catch (error) {
+    }catch(error){
        return res.status(400).json(error);
    }
});
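Both sides of this diff implement the same WebSocket protocol for interactive execution. A minimal client sketch follows, assuming an API on localhost:2000 (the port used in the CI steps above) and a hypothetical 'node' runtime; the message shapes are taken directly from the handlers in this file:

    // Hedged sketch of a client for the /api/v2/connect endpoint shown above.
    const WebSocket = require('ws');

    const ws = new WebSocket('ws://localhost:2000/api/v2/connect');

    ws.on('open', () => {
        // 'init' must arrive within 1 second of connecting, or the server
        // closes the socket with code 4001 (Initialization Timeout).
        ws.send(
            JSON.stringify({
                type: 'init',
                language: 'node', // hypothetical runtime name
                version: '*',
                files: [{ name: 'echo.js', content: 'process.stdin.pipe(process.stdout);' }],
                args: [],
            })
        );
    });

    ws.on('message', raw => {
        const msg = JSON.parse(raw);
        if (msg.type === 'runtime') {
            // Job is primed; stdin is forwarded through 'data' messages.
            ws.send(JSON.stringify({ type: 'data', stream: 'stdin', data: 'hello\n' }));
        } else if (msg.type === 'data') {
            process.stdout.write(`[${msg.stream}] ${msg.data}`);
        } else if (msg.type === 'exit') {
            console.log(`${msg.stage} stage exited`, msg.code, msg.signal);
        }
    });

    // The server closes with 4999 ('Job Completed') when execution finishes.
    ws.on('close', (code, reason) => console.log('closed', code, reason.toString()));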


@@ -16,6 +16,8 @@ const logger = Logger.create('pistond');
const app = express();
expressWs(app);

(async () => {
    logger.info('Setting loglevel to', config.log_level);
    Logger.setLogLevel(config.log_level);


@@ -5,105 +5,108 @@ const config = require('../config');
const Logger = require('logplease');
const logger = Logger.create('test');
const cp = require('child_process');
-const runtime = require('../runtime');
+const runtime = require("../runtime");
const { Job } = require('../job');

-(async function () {
+(async function(){
    logger.info('Setting loglevel to', config.log_level);
    Logger.setLogLevel(config.log_level);

    let runtimes_to_test;
    let failed = false;

-    if (process.argv[2] === '--all') {
+    if(process.argv[2] === "--all"){
        // load all
        runtimes_to_test = JSON.parse(
-            cp.execSync(
-                `nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`
-            )
+            cp.execSync(`nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`)
        );
-    } else {
+    }else{
        runtimes_to_test = [process.argv[2]];
    }

    for (const runtime_name of runtimes_to_test) {
        const runtime_path = `${config.flake_path}#pistonRuntimes.${runtime_name}`;
        logger.info(`Testing runtime ${runtime_path}`);

        logger.debug(`Loading runtime metadata`);
-        const metadata = JSON.parse(
-            cp.execSync(`nix eval --json ${runtime_path}.metadata --json`)
-        );
+        const metadata = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.metadata --json`));

        logger.debug(`Loading runtime tests`);
-        const tests = JSON.parse(
-            cp.execSync(`nix eval --json ${runtime_path}.tests --json`)
-        );
+        const tests = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.tests --json`));

        logger.debug(`Loading runtime`);
        const testable_runtime = new runtime.Runtime({
            ...metadata,
-            ...runtime.Runtime.compute_all_limits(
-                metadata.language,
-                metadata.limitOverrides
-            ),
-            flake_path: runtime_path,
+            flake_path: runtime_path
        });

        testable_runtime.ensure_built();

        logger.info(`Running tests`);

        for (const test of tests) {
            const files = [];

            for (const file_name of Object.keys(test.files)) {
                const file_content = test.files[file_name];
                const this_file = {
                    name: file_name,
-                    content: file_content,
+                    content: file_content
                };

-                if (file_name == test.main) files.unshift(this_file);
-                else files.push(this_file);
+                if(file_name == test.main)
+                    files.unshift(this_file);
+                else
+                    files.push(this_file);
            }

            const job = new Job({
                runtime: testable_runtime,
                args: test.args || [],
-                stdin: test.stdin || '',
+                stdin: test.stdin || "",
                files,
                timeouts: {
                    run: 3000,
-                    compile: 10000,
+                    compile: 10000
                },
                memory_limits: {
                    run: config.run_memory_limit,
-                    compile: config.compile_memory_limit,
-                },
+                    compile: config.compile_memory_limit
+                }
            });

-            await job.prime();
-            const result = await job.execute();
-            await job.cleanup();
+            await job.prime()
+            const result = await job.execute()
+            await job.cleanup()

-            if (result.run.stdout.trim() !== 'OK') {
+            if(result.run.stdout.trim() !== "OK"){
                failed = true;
-                logger.error('Test Failed:');
-                console.log(job, result);
-            } else {
-                logger.info('Test Passed');
+                logger.error("Test Failed:")
+                console.log(job, result)
+            }else{
+                logger.info("Test Passed")
            }
        }
    }

-    if (failed) {
-        logger.error('One or more tests failed');
+    if(failed) {
+        logger.error("One or more tests failed")
        process.exit(1);
-    } else {
-        logger.info('All tests passed');
+    }
+    else {
+        logger.info("All tests passed")
        process.exit(0);
    }
-})();
+})()


@@ -2,57 +2,6 @@ const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');

-function parse_overrides(overrides) {
-    try {
-        return JSON.parse(overrides);
-    } catch (e) {
-        return null;
-    }
-}
-
-function validate_overrides(overrides, options) {
-    for (const language in overrides) {
-        for (const key in overrides[language]) {
-            if (
-                ![
-                    'max_process_count',
-                    'max_open_files',
-                    'max_file_size',
-                    'compile_memory_limit',
-                    'run_memory_limit',
-                    'compile_timeout',
-                    'run_timeout',
-                    'output_max_size',
-                ].includes(key)
-            ) {
-                logger.error(`Invalid overridden option: ${key}`);
-                return false;
-            }
-            const option = options.find(o => o.key === key);
-            const parser = option.parser;
-            const raw = overrides[language][key];
-            const value = parser(raw);
-            const validators = option.validators;
-            for (const validator of validators) {
-                const response = validator(value, raw);
-                if (response !== true) {
-                    logger.error(
-                        `Failed to validate overridden option: ${key}`,
-                        response
-                    );
-                    return false;
-                }
-            }
-            overrides[language][key] = value;
-        }
-        // Modifies the reference
-        options[
-            options.index_of(options.find(o => o.key === 'limit_overrides'))
-        ] = overrides;
-    }
-    return true;
-}
-
const options = [
    {
        key: 'log_level',

@@ -68,7 +17,7 @@ const options = [
    {
        key: 'bind_address',
        desc: 'Address to bind REST API on',
-        default: `0.0.0.0:${process.env["PORT"] || 2000}`,
+        default: '0.0.0.0:2000',
        validators: [],
    },
    {

@@ -142,30 +91,18 @@ const options = [
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
-    {
-        key: 'compile_timeout',
-        desc: 'Max time allowed for compile stage in milliseconds',
-        default: 10000, // 10 seconds
-        parser: parse_int,
-        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
-    },
-    {
-        key: 'run_timeout',
-        desc: 'Max time allowed for run stage in milliseconds',
-        default: 3000, // 3 seconds
-        parser: parse_int,
-        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
-    },
    {
        key: 'compile_memory_limit',
-        desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
+        desc:
+            'Max memory usage for compile stage in bytes (set to -1 for no limit)',
        default: -1, // no limit
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'run_memory_limit',
-        desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
+        desc:
+            'Max memory usage for run stage in bytes (set to -1 for no limit)',
        default: -1, // no limit
        parser: parse_int,
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],

@@ -187,22 +124,8 @@ const options = [
        desc: 'Maximum number of concurrent jobs to run at one time',
        default: 64,
        parser: parse_int,
-        validators: [x => x > 0 || `${x} cannot be negative`],
-    },
-    {
-        key: 'limit_overrides',
-        desc: 'Per-language exceptions in JSON format for each of:\
-            max_process_count, max_open_files, max_file_size, compile_memory_limit,\
-            run_memory_limit, compile_timeout, run_timeout, output_max_size',
-        default: {},
-        parser: parse_overrides,
-        validators: [
-            x => !!x || `Invalid JSON format for the overrides\n${x}`,
-            (overrides, _, options) =>
-                validate_overrides(overrides, options) ||
-                `Failed to validate the overrides`,
-        ],
-    },
+        validators: [(x) => x > 0 || `${x} cannot be negative`]
+    }
];

logger.info(`Loading Configuration from environment`);

@@ -220,12 +143,12 @@ options.forEach(option => {
    const parsed_val = parser(env_val);

-    const value = env_val === undefined ? option.default : parsed_val;
+    const value = env_val || option.default;

    option.validators.for_each(validator => {
        let response = null;
-        if (env_val) response = validator(parsed_val, env_val, options);
-        else response = validator(value, value, options);
+        if (env_val) response = validator(parsed_val, env_val);
+        else response = validator(value, value);

        if (response !== true) {
            errored = true;
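The left side's limit_overrides option takes a JSON object keyed by language name, with inner keys restricted to the whitelist in validate_overrides. A sketch of a valid override object; the PISTON_LIMIT_OVERRIDES variable name is an assumption, since the environment-prefix mapping is not part of this hunk:

    // Hedged sketch: per-language limit overrides as accepted by
    // parse_overrides/validate_overrides on the left-hand side of this diff.
    const limit_overrides = {
        typescript: {
            // hypothetical language entry
            compile_timeout: 20000, // ms; raises the 10000 ms global default
            run_memory_limit: 512000000, // bytes
        },
    };

    // Passed through the environment as JSON (env var name assumed):
    process.env.PISTON_LIMIT_OVERRIDES = JSON.stringify(limit_overrides);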


@@ -1,12 +1,10 @@
-const logplease = require('logplease');
-const logger = logplease.create('job');
+const logger = require('logplease').create('job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('./config');
const globals = require('./globals');
const fs = require('fs/promises');
-const fss = require('fs');
const wait_pid = require('waitpid');

const job_states = {

@@ -18,34 +16,30 @@ const job_states = {
let uid = 0;
let gid = 0;

-let remaining_job_spaces = config.max_concurrent_jobs;
+let remainingJobSpaces = config.max_concurrent_jobs;
let jobQueue = [];

-setInterval(() => {
+setInterval(()=>{
    // Every 10ms try resolve a new job, if there is an available slot
-    if (jobQueue.length > 0 && remaining_job_spaces > 0) {
-        jobQueue.shift()();
+    if(jobQueue.length > 0 && remainingJobSpaces > 0){
+        jobQueue.shift()()
    }
-}, 10);
+}, 10)

class Job {
    constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
        this.uuid = uuidv4();
-        this.logger = logplease.create(`job/${this.uuid}`);
-
        this.runtime = runtime;
        this.files = files.map((file, i) => ({
            name: file.name || `file${i}.code`,
            content: file.content,
-            encoding: ['base64', 'hex', 'utf8'].includes(file.encoding)
-                ? file.encoding
-                : 'utf8',
        }));

        this.args = args;
        this.stdin = stdin;
        this.timeouts = timeouts;
        this.memory_limits = memory_limits;

@@ -58,8 +52,6 @@ class Job {
        uid %= config.runner_uid_max - config.runner_uid_min + 1;
        gid %= config.runner_gid_max - config.runner_gid_min + 1;

-        this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`);
-
        this.state = job_states.READY;
        this.dir = path.join(
            config.data_directory,

@@ -69,45 +61,39 @@ class Job {
    }

    async prime() {
-        if (remaining_job_spaces < 1) {
-            this.logger.info(`Awaiting job slot`);
-            await new Promise(resolve => {
-                jobQueue.push(resolve);
-            });
+        if(remainingJobSpaces < 1){
+            logger.info(`Awaiting job slot uuid=${this.uuid}`)
+            await new Promise((resolve)=>{
+                jobQueue.push(resolve)
+            })
        }

-        this.logger.info(`Priming job`);
-        remaining_job_spaces--;
+        logger.info(`Priming job uuid=${this.uuid}`);
+        remainingJobSpaces--;

-        this.logger.debug('Writing files to job cache');
+        logger.debug('Writing files to job cache');

-        this.logger.debug(`Transfering ownership`);
+        logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);

        await fs.mkdir(this.dir, { mode: 0o700 });
        await fs.chown(this.dir, this.uid, this.gid);

        for (const file of this.files) {
-            const file_path = path.join(this.dir, file.name);
+            let file_path = path.join(this.dir, file.name);
            const rel = path.relative(this.dir, file_path);
-            const file_content = Buffer.from(file.content, file.encoding);

-            if (rel.startsWith('..'))
-                throw Error(
-                    `File path "${file.name}" tries to escape parent directory: ${rel}`
-                );
+            if(rel.startsWith(".."))
+                throw Error(`File path "${file.name}" tries to escape parent directory: ${rel}`)

-            await fs.mkdir(path.dirname(file_path), {
-                recursive: true,
-                mode: 0o700,
-            });
+            await fs.mkdir(path.dirname(file_path), {recursive: true, mode: 0o700})
            await fs.chown(path.dirname(file_path), this.uid, this.gid);

-            await fs.write_file(file_path, file_content);
+            await fs.write_file(file_path, file.content);
            await fs.chown(file_path, this.uid, this.gid);
        }

        this.state = job_states.PRIMED;

-        this.logger.debug('Primed job');
+        logger.debug('Primed job');
    }

    async safe_call(file, args, timeout, memory_limit, eventBus = null) {

@@ -116,29 +102,26 @@ class Job {
            const prlimit = [
                'prlimit',
-                '--nproc=' + this.runtime.max_process_count,
-                '--nofile=' + this.runtime.max_open_files,
-                '--fsize=' + this.runtime.max_file_size,
+                '--nproc=' + config.max_process_count,
+                '--nofile=' + config.max_open_files,
+                '--fsize=' + config.max_file_size,
            ];

            if (memory_limit >= 0) {
                prlimit.push('--as=' + memory_limit);
            }

-            const proc_call = [
-                'nice',
-                ...prlimit,
-                ...nonetwork,
-                'bash',
-                file,
-                ...args,
-            ];
+            const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];

            var stdout = '';
            var stderr = '';
            var output = '';

            const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
-                env: {
-                    ...this.runtime.env_vars,
-                    PISTON_LANGUAGE: this.runtime.language,
-                },
                stdio: 'pipe',
                cwd: this.dir,
                uid: this.uid,

@@ -146,34 +129,36 @@ class Job {
                detached: true, //give this process its own process group
            });

-            if (eventBus === null) {
+            if(eventBus === null){
                proc.stdin.write(this.stdin);
                proc.stdin.end();
                proc.stdin.destroy();
-            } else {
-                eventBus.on('stdin', data => {
+            }else{
+                eventBus.on("stdin", (data) => {
                    proc.stdin.write(data);
-                });
+                })

-                eventBus.on('kill', signal => {
-                    proc.kill(signal);
-                });
+                eventBus.on("kill", (signal) => {
+                    proc.kill(signal)
+                })
            }

-            const kill_timeout =
-                (timeout >= 0 &&
-                    set_timeout(async _ => {
-                        this.logger.info(`Timeout exceeded timeout=${timeout}`);
-                        process.kill(proc.pid, 'SIGKILL');
-                    }, timeout)) ||
-                null;
+            const kill_timeout = set_timeout(
+                async _ => {
+                    logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`)
+                    process.kill(proc.pid, 'SIGKILL')
+                },
+                timeout
+            );

            proc.stderr.on('data', async data => {
-                if (eventBus !== null) {
-                    eventBus.emit('stderr', data);
-                } else if (stderr.length > this.runtime.output_max_size) {
-                    this.logger.info(`stderr length exceeded`);
-                    process.kill(proc.pid, 'SIGKILL');
+                if(eventBus !== null) {
+                    eventBus.emit("stderr", data);
+                } else if (stderr.length > config.output_max_size) {
+                    logger.info(`stderr length exceeded uuid=${this.uuid}`)
+                    process.kill(proc.pid, 'SIGKILL')
                } else {
                    stderr += data;
                    output += data;

@@ -181,35 +166,35 @@ class Job {
            });

            proc.stdout.on('data', async data => {
-                if (eventBus !== null) {
-                    eventBus.emit('stdout', data);
-                } else if (stdout.length > this.runtime.output_max_size) {
-                    this.logger.info(`stdout length exceeded`);
-                    process.kill(proc.pid, 'SIGKILL');
+                if(eventBus !== null){
+                    eventBus.emit("stdout", data);
+                } else if (stdout.length > config.output_max_size) {
+                    logger.info(`stdout length exceeded uuid=${this.uuid}`)
+                    process.kill(proc.pid, 'SIGKILL')
                } else {
                    stdout += data;
                    output += data;
                }
            });

-            const exit_cleanup = () => {
+            const exit_cleanup = async () => {
                clear_timeout(kill_timeout);

                proc.stderr.destroy();
                proc.stdout.destroy();

-                this.cleanup_processes();
-                this.logger.debug(`Finished exit cleanup`);
+                await this.cleanup_processes()
+                logger.debug(`Finished exit cleanup uuid=${this.uuid}`)
            };

-            proc.on('exit', (code, signal) => {
-                exit_cleanup();
+            proc.on('exit', async (code, signal) => {
+                await exit_cleanup();

-                resolve({ stdout, stderr, code, signal, output });
+                resolve({stdout, stderr, code, signal, output });
            });

-            proc.on('error', err => {
-                exit_cleanup();
+            proc.on('error', async err => {
+                await exit_cleanup();

                reject({ error: err, stdout, stderr, output });
            });

@@ -224,13 +209,13 @@ class Job {
            );
        }

-        this.logger.info(`Executing job runtime=${this.runtime.toString()}`);
-
-        const code_files =
-            (this.runtime.language === 'file' && this.files) ||
-            this.files.filter(file => file.encoding == 'utf8');
-
-        this.logger.debug('Compiling');
+        logger.info(
+            `Executing job uuid=${this.uuid} uid=${this.uid} gid=${
+                this.gid
+            } runtime=${this.runtime.toString()}`
+        );
+
+        logger.debug('Compiling');

        let compile;

@@ -243,11 +228,11 @@ class Job {
            );
        }

-        this.logger.debug('Running');
+        logger.debug('Running');

        const run = await this.safe_call(
            this.runtime.run,
-            [code_files[0].name, ...this.args],
+            [this.files[0].name, ...this.args],
            this.timeouts.run,
            this.memory_limits.run
        );

@@ -262,7 +247,7 @@ class Job {
        };
    }

-    async execute_interactive(eventBus) {
+    async execute_interactive(eventBus){
        if (this.state !== job_states.PRIMED) {
            throw new Error(
                'Job must be in primed state, current state: ' +

@@ -270,98 +255,84 @@ class Job {
            );
        }

-        this.logger.info(
-            `Interactively executing job runtime=${this.runtime.toString()}`
-        );
-
-        const code_files =
-            (this.runtime.language === 'file' && this.files) ||
-            this.files.filter(file => file.encoding == 'utf8');
-
-        if (this.runtime.compiled) {
-            eventBus.emit('stage', 'compile');
-            const { error, code, signal } = await this.safe_call(
-                path.join(this.runtime.pkgdir, 'compile'),
-                code_files.map(x => x.name),
+        logger.info(
+            `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
+                this.gid
+            } runtime=${this.runtime.toString()}`
+        );
+
+        if(this.runtime.compiled){
+            eventBus.emit("stage", "compile")
+            const {error, code, signal} = await this.safe_call(
+                this.runtime.compile,
+                this.files.map(x => x.name),
                this.timeouts.compile,
                this.memory_limits.compile,
                eventBus
-            );
+            )

-            eventBus.emit('exit', 'compile', { error, code, signal });
+            eventBus.emit("exit", "compile", {error, code, signal})
        }

-        this.logger.debug('Running');
-        eventBus.emit('stage', 'run');
-        const { error, code, signal } = await this.safe_call(
-            path.join(this.runtime.pkgdir, 'run'),
-            [code_files[0].name, ...this.args],
+        logger.debug('Running');
+        eventBus.emit("stage", "run")
+        const {error, code, signal} = await this.safe_call(
+            this.runtime.run,
+            [this.files[0].name, ...this.args],
            this.timeouts.run,
            this.memory_limits.run,
            eventBus
        );

-        eventBus.emit('exit', 'run', { error, code, signal });
+        eventBus.emit("exit", "run", {error, code, signal})

        this.state = job_states.EXECUTED;
    }

-    cleanup_processes(dont_wait = []) {
+    async cleanup_processes(dont_wait = []) {
        let processes = [1];
-        const to_wait = [];

-        this.logger.debug(`Cleaning up processes`);
+        logger.debug(`Cleaning up processes uuid=${this.uuid}`)

        while (processes.length > 0) {
-            processes = [];
-            const proc_ids = fss.readdir_sync('/proc');
-
-            processes = proc_ids.map(proc_id => {
-                if (isNaN(proc_id)) return -1;
-                try {
-                    const proc_status = fss.read_file_sync(
-                        path.join('/proc', proc_id, 'status')
-                    );
-                    const proc_lines = proc_status.to_string().split('\n');
-                    const state_line = proc_lines.find(line =>
-                        line.starts_with('State:')
-                    );
-                    const uid_line = proc_lines.find(line =>
-                        line.starts_with('Uid:')
-                    );
-                    const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
-
-                    const [_1, state, user_friendly] = state_line.split(/\s+/);
-
-                    if (state == 'Z')
-                        // Zombie process, just needs to be waited
-                        return -1;
-                    // We should kill in all other state (Sleep, Stopped & Running)
-
-                    if (ruid == this.uid || euid == this.uid)
-                        return parse_int(proc_id);
-                } catch {
-                    return -1;
-                }
-
-                return -1;
-            });
-
-            processes = processes.filter(p => p > 0);
-
-            if (processes.length > 0)
-                this.logger.debug(`Got processes to kill: ${processes}`);
+            processes = []
+
+            const proc_ids = await fs.readdir("/proc");
+
+            processes = await Promise.all(proc_ids.map(async (proc_id) => {
+                if(isNaN(proc_id)) return -1;
+                try{
+                    const proc_status = await fs.read_file(path.join("/proc",proc_id,"status"));
+                    const proc_lines = proc_status.to_string().split("\n")
+                    const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
+                    const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
+
+                    if(ruid == this.uid || euid == this.uid)
+                        return parse_int(proc_id)
+
+                }catch{
+                    return -1
+                }
+
+                return -1
+            }))
+
+            processes = processes.filter(p => p > 0)
+
+            if(processes.length > 0)
+                logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`)

            for (const proc of processes) {
                // First stop the processes, but keep their resources allocated so they cant re-fork
                try {
                    process.kill(proc, 'SIGSTOP');
-                } catch (e) {
+                } catch {
                    // Could already be dead
-                    this.logger.debug(
-                        `Got error while SIGSTOPping process ${proc}:`,
-                        e
-                    );
                }
            }

@@ -371,27 +342,14 @@ class Job {
                    process.kill(proc, 'SIGKILL');
                } catch {
                    // Could already be dead and just needs to be waited on
-                    this.logger.debug(
-                        `Got error while SIGKILLing process ${proc}:`,
-                        e
-                    );
                }

-                to_wait.push(proc);
+                if(!dont_wait.includes(proc))
+                    wait_pid(proc);
            }
        }

-        this.logger.debug(
-            `Finished kill-loop, calling wait_pid to end any zombie processes`
-        );
-
-        for (const proc of to_wait) {
-            if (dont_wait.includes(proc)) continue;
-            wait_pid(proc);
-        }
-
-        this.logger.debug(`Cleaned up processes`);
+        logger.debug(`Cleaned up processes uuid=${this.uuid}`)
    }

    async cleanup_filesystem() {

@@ -412,7 +370,7 @@ class Job {
                }
            } catch (e) {
                // File was somehow deleted in the time that we read the dir to when we checked the file
-                this.logger.warn(`Error removing file ${file_path}: ${e}`);
+                logger.warn(`Error removing file ${file_path}: ${e}`);
            }
        }
    }

@@ -421,15 +379,15 @@ class Job {
    }

    async cleanup() {
-        this.logger.info(`Cleaning up job`);
+        logger.info(`Cleaning up job uuid=${this.uuid}`);

-        this.cleanup_processes(); // Run process janitor, just incase there are any residual processes somehow
        await this.cleanup_filesystem();

-        remaining_job_spaces++;
+        remainingJobSpaces++;
    }
}

module.exports = {
    Job,
};
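On both sides, safe_call wraps the runtime's bash script in prlimit, with nice and the nonetwork wrapper added on the left. A sketch of that assembly follows; the limit values are placeholders, since the real ones come from the per-runtime fields (left) or the global config (right):

    // Hedged sketch of the argv assembled by safe_call in this diff.
    function build_proc_call(file, args, memory_limit, limits, nonetwork) {
        const prlimit = [
            'prlimit',
            '--nproc=' + limits.max_process_count,
            '--nofile=' + limits.max_open_files,
            '--fsize=' + limits.max_file_size,
        ];

        // The address-space cap is only added when a limit was requested.
        if (memory_limit >= 0) {
            prlimit.push('--as=' + memory_limit);
        }

        // 'nice' is prepended on the left-hand side of this diff only.
        return ['nice', ...prlimit, ...nonetwork, 'bash', file, ...args];
    }

    // e.g. build_proc_call('run', ['file0.code'], -1,
    //     { max_process_count: 64, max_open_files: 2048, max_file_size: 10000000 }, []);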


@@ -7,36 +7,14 @@ const path = require('path');
const path = require('path');

const runtimes = [];

class Runtime {
-    constructor({
-        language,
-        version,
-        aliases,
-        runtime,
-        run,
-        compile,
-        packageSupport,
-        flake_path,
-        timeouts,
-        memory_limits,
-        max_process_count,
-        max_open_files,
-        max_file_size,
-        output_max_size,
-    }) {
+    constructor({ language, version, aliases, runtime, run, compile, packageSupport, flake_path }) {
        this.language = language;
        this.runtime = runtime;

-        this.timeouts = timeouts;
-        this.memory_limits = memory_limits;
-        this.max_process_count = max_process_count;
-        this.max_open_files = max_open_files;
-        this.max_file_size = max_file_size;
-        this.output_max_size = output_max_size;
-
        this.aliases = aliases;
        this.version = version;

        this.run = run;
        this.compile = compile;

@@ -44,120 +22,58 @@ class Runtime {
        this.package_support = packageSupport;
    }

-    static compute_single_limit(
-        language_name,
-        limit_name,
-        language_limit_overrides
-    ) {
-        return (
-            (config.limit_overrides[language_name] &&
-                config.limit_overrides[language_name][limit_name]) ||
-            (language_limit_overrides &&
-                language_limit_overrides[limit_name]) ||
-            config[limit_name]
-        );
-    }
-
-    static compute_all_limits(language_name, language_limit_overrides) {
-        return {
-            timeouts: {
-                compile: this.compute_single_limit(
-                    language_name,
-                    'compile_timeout',
-                    language_limit_overrides
-                ),
-                run: this.compute_single_limit(
-                    language_name,
-                    'run_timeout',
-                    language_limit_overrides
-                ),
-            },
-            memory_limits: {
-                compile: this.compute_single_limit(
-                    language_name,
-                    'compile_memory_limit',
-                    language_limit_overrides
-                ),
-                run: this.compute_single_limit(
-                    language_name,
-                    'run_memory_limit',
-                    language_limit_overrides
-                ),
-            },
-            max_process_count: this.compute_single_limit(
-                language_name,
-                'max_process_count',
-                language_limit_overrides
-            ),
-            max_open_files: this.compute_single_limit(
-                language_name,
-                'max_open_files',
-                language_limit_overrides
-            ),
-            max_file_size: this.compute_single_limit(
-                language_name,
-                'max_file_size',
-                language_limit_overrides
-            ),
-            output_max_size: this.compute_single_limit(
-                language_name,
-                'output_max_size',
-                language_limit_overrides
-            ),
-        };
-    }
-
-    ensure_built() {
+    ensure_built(){
        logger.info(`Ensuring ${this} is built`);

        const flake_path = this.flake_path;

-        function _ensure_built(key) {
+        function _ensure_built(key){
            const command = `nix build ${flake_path}.metadata.${key} --no-link`;
-            cp.execSync(command, { stdio: 'pipe' });
+            cp.execSync(command, {stdio: "pipe"})
        }

-        _ensure_built('run');
-        if (this.compiled) _ensure_built('compile');
+        _ensure_built("run");
+        if(this.compiled) _ensure_built("compile");

-        logger.debug(`Finished ensuring ${this} is installed`);
+        logger.debug(`Finished ensuring ${this} is installed`)
    }

-    static load_runtime(flake_key) {
-        logger.info(`Loading ${flake_key}`);
+    static load_runtime(flake_key){
+        logger.info(`Loading ${flake_key}`)
        const flake_path = `${config.flake_path}#pistonRuntimeSets.${config.runtime_set}.${flake_key}`;
        const metadata_command = `nix eval --json ${flake_path}.metadata`;
        const metadata = JSON.parse(cp.execSync(metadata_command));

        const this_runtime = new Runtime({
            ...metadata,
-            ...Runtime.compute_all_limits(
-                metadata.language,
-                metadata.limitOverrides
-            ),
-            flake_path,
+            flake_path
        });

        this_runtime.ensure_built();

        runtimes.push(this_runtime);

        logger.debug(`Package ${flake_key} was loaded`);
    }

    get compiled() {
        return this.compile !== null;
    }

-    get id() {
+    get id(){
        return runtimes.indexOf(this);
    }

    toString() {
        return `${this.language}-${this.version}`;
    }
}

module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.load_runtime = Runtime.load_runtime;
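The removed compute_single_limit helper resolves each limit through a three-level fallback: an admin override in config.limit_overrides, then the package's own limitOverrides from its metadata, then the global config default. A standalone sketch of that chain, with config passed in as a parameter instead of imported:

    // Hedged sketch of the fallback order used by compute_single_limit above.
    function compute_single_limit(language_name, limit_name, language_limit_overrides, config) {
        return (
            (config.limit_overrides[language_name] &&
                config.limit_overrides[language_name][limit_name]) ||
            (language_limit_overrides && language_limit_overrides[limit_name]) ||
            config[limit_name]
        );
    }

    // e.g. with config = { limit_overrides: {}, run_timeout: 3000 } and no package
    // overrides: compute_single_limit('awk', 'run_timeout', null, config) === 3000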

cli/.gitignore (new file)

@@ -0,0 +1 @@
+node_modules


@ -3,44 +3,8 @@ const path = require('path');
const chalk = require('chalk'); const chalk = require('chalk');
const WebSocket = require('ws'); const WebSocket = require('ws');
const SIGNALS = [ const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
'SIGABRT',
'SIGALRM',
'SIGBUS',
'SIGCHLD',
'SIGCLD',
'SIGCONT',
'SIGEMT',
'SIGFPE',
'SIGHUP',
'SIGILL',
'SIGINFO',
'SIGINT',
'SIGIO',
'SIGIOT',
'SIGLOST',
'SIGPIPE',
'SIGPOLL',
'SIGPROF',
'SIGPWR',
'SIGQUIT',
'SIGSEGV',
'SIGSTKFLT',
'SIGTSTP',
'SIGSYS',
'SIGTERM',
'SIGTRAP',
'SIGTTIN',
'SIGTTOU',
'SIGUNUSED',
'SIGURG',
'SIGUSR1',
'SIGUSR2',
'SIGVTALRM',
'SIGXCPU',
'SIGXFSZ',
'SIGWINCH',
];
exports.command = ['execute <language> <file> [args..]'];
exports.aliases = ['run'];
@ -51,18 +15,18 @@ exports.builder = {
        string: true,
        desc: 'Set the version of the language to use',
        alias: ['l'],
        default: '*',
    },
    stdin: {
        boolean: true,
        desc: 'Read input from stdin and pass to executor',
        alias: ['i'],
    },
    run_timeout: {
        alias: ['rt', 'r'],
        number: true,
        desc: 'Milliseconds before killing run process',
        default: 3000,
    },
    compile_timeout: {
        alias: ['ct', 'c'],
@ -78,126 +42,117 @@ exports.builder = {
    interactive: {
        boolean: true,
        alias: ['t'],
        desc: 'Run interactively using WebSocket transport',
    },
    status: {
        boolean: true,
        alias: ['s'],
        desc: 'Output additional status to stderr',
    },
};
async function handle_interactive(files, argv) {
    const ws = new WebSocket(
        argv.pistonUrl.replace('http', 'ws') + '/api/v2/connect'
    );
    const log_message =
        process.stderr.isTTY && argv.status ? console.error : () => {};
    process.on('exit', () => {
        ws.close();
        process.stdin.end();
        process.stdin.destroy();
        process.exit();
    });
    for (const signal of SIGNALS) {
        process.on(signal, () => {
            ws.send(JSON.stringify({ type: 'signal', signal }));
        });
    }
    ws.on('open', () => {
        const request = {
            type: 'init',
            language: argv.language,
            version: argv['language_version'],
            files: files,
            args: argv.args,
            compile_timeout: argv.ct,
            run_timeout: argv.rt,
        };
        ws.send(JSON.stringify(request));
        log_message(chalk.white.bold('Connected'));
        process.stdin.resume();
        process.stdin.on('data', data => {
            ws.send(
                JSON.stringify({
                    type: 'data',
                    stream: 'stdin',
                    data: data.toString(),
                })
            );
        });
    });
    ws.on('close', (code, reason) => {
        log_message(
            chalk.white.bold('Disconnected: '),
            chalk.white.bold('Reason: '),
            chalk.yellow(`"${reason}"`),
            chalk.white.bold('Code: '),
            chalk.yellow(`"${code}"`)
        );
        process.stdin.pause();
    });
    ws.on('message', function (data) {
        const msg = JSON.parse(data);
        switch (msg.type) {
            case 'runtime':
                log_message(
                    chalk.bold.white('Runtime:'),
                    chalk.yellow(`${msg.language} ${msg.version}`)
                );
                break;
            case 'stage':
                log_message(
                    chalk.bold.white('Stage:'),
                    chalk.yellow(msg.stage)
                );
                break;
            case 'data':
                if (msg.stream == 'stdout') process.stdout.write(msg.data);
                else if (msg.stream == 'stderr') process.stderr.write(msg.data);
                else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data);
                break;
            case 'exit':
                if (msg.signal === null)
                    log_message(
                        chalk.white.bold('Stage'),
                        chalk.yellow(msg.stage),
                        chalk.white.bold('exited with code'),
                        chalk.yellow(msg.code)
                    );
                else
                    log_message(
                        chalk.white.bold('Stage'),
                        chalk.yellow(msg.stage),
                        chalk.white.bold('exited with signal'),
                        chalk.yellow(msg.signal)
                    );
                break;
            default:
                log_message(chalk.red.bold('Unknown message:'), msg);
        }
    });
}
async function run_non_interactively(files, argv) {
    const stdin =
        (argv.stdin &&
            (await new Promise((resolve, _) => {
                let data = '';
                process.stdin.on('data', d => (data += d));
                process.stdin.on('end', _ => resolve(data));
            }))) ||
        '';
    const request = {
        language: argv.language,
@ -206,7 +161,7 @@ async function run_non_interactively(files, argv) {
        args: argv.args,
        stdin,
        compile_timeout: argv.ct,
        run_timeout: argv.rt,
    };
    let { data: response } = await argv.axios.post('/api/v2/execute', request);
@ -215,13 +170,13 @@ async function run_non_interactively(files, argv) {
        console.log(chalk.bold(`== ${name} ==`));
        if (ctx.stdout) {
            console.log(chalk.bold(`STDOUT`));
            console.log(ctx.stdout.replace(/\n/g, '\n '));
        }
        if (ctx.stderr) {
            console.log(chalk.bold(`STDERR`));
            console.log(ctx.stderr.replace(/\n/g, '\n '));
        }
        if (ctx.code) {
@ -232,9 +187,12 @@ async function run_non_interactively(files, argv) {
        }
        if (ctx.signal) {
            console.log(chalk.bold(`Signal:`), chalk.bold.yellow(ctx.signal));
        }
    };
    if (response.compile) {
        step('Compile', response.compile);
@ -243,23 +201,17 @@ async function run_non_interactively(files, argv) {
    step('Run', response.run);
}
exports.handler = async argv => {
    const files = [...(argv.files || []), argv.file].map(file_path => {
        const buffer = fs.readFileSync(file_path);
        const encoding =
            (buffer
                .toString()
                .split('')
                .some(x => x.charCodeAt(0) >= 128) &&
                'base64') ||
            'utf8';
        return {
            name: path.basename(file_path),
            content: buffer.toString(encoding),
            encoding,
        };
    });
    if (argv.interactive) await handle_interactive(files, argv);
    else await run_non_interactively(files, argv);
};
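As a usage sketch of the two code paths above (the `python` language and `test.py` file name are illustrative, not taken from this diff):

```bash
# Interactive: stream stdin/stdout over the WebSocket transport,
# with additional status written to stderr:
piston run python test.py -t -s

# Non-interactive: read stdin up front, then POST to /api/v2/execute:
echo "42" | piston run python test.py -i
```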

View File

@ -6,8 +6,8 @@ const axios_instance = argv => {
    argv.axios = axios.create({
        baseURL: argv['piston-url'],
        headers: {
            'Content-Type': 'application/json',
        },
    });
    return argv;
@ -18,11 +18,12 @@ require('yargs')(process.argv.slice(2))
        alias: ['u'],
        default: 'http://127.0.0.1:2000',
        desc: 'Piston API URL',
        string: true,
    })
    .middleware(axios_instance)
    .scriptName('piston')
    .commandDir('commands')
    .demandCommand()
    .help()
    .wrap(72).argv;

View File

@ -17,10 +17,10 @@ Returns a list of available languages, including the version, runtime and aliases.
#### Response

- `[].language`: Name of the language
- `[].version`: Version of the runtime
- `[].aliases`: List of alternative names that can be used for the language
- `[].runtime` (_optional_): Name of the runtime used to run the language, only provided if alternative runtimes exist for the language

#### Example
@ -55,35 +55,34 @@ Runs the given code, using the given runtime and arguments, returning the result
#### Request

- `language`: Name or alias of a language listed in [runtimes](#runtimes)
- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
- `files`: An array of files which should be uploaded into the job context
- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
- `files[].content`: Content of file to be written
- `files[].encoding` (_optional_): The encoding scheme used for the file content. One of `base64`, `hex` or `utf8`. Defaults to `utf8`.
- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
- `args` (_optional_): Arguments to pass to the program. Defaults to none
- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
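A request assembled from these fields might look as follows; this is a hedged sketch, with illustrative payload values and the CLI's default URL of `http://127.0.0.1:2000` assumed:

```bash
curl -s http://127.0.0.1:2000/api/v2/execute \
    -H 'Content-Type: application/json' \
    -d '{
        "language": "python",
        "version": "*",
        "files": [
            { "name": "test.py", "content": "print(\"OK\")", "encoding": "utf8" }
        ],
        "stdin": "",
        "args": [],
        "run_timeout": 3000
    }'
```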
#### Response

- `language`: Name (not alias) of the runtime used
- `version`: Version of the used runtime
- `run`: Results from the run stage
- `run.stdout`: stdout from run stage process
- `run.stderr`: stderr from run stage process
- `run.output`: stdout and stderr combined in order of data from run stage process
- `run.code`: Exit code from run process, or null if signal is not null
- `run.signal`: Signal from run process, or null if code is not null
- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
- `compile.stdout`: stdout from compile stage process
- `compile.stderr`: stderr from compile stage process
- `compile.output`: stdout and stderr combined in order of data from compile stage process
- `compile.code`: Exit code from compile process, or null if signal is not null
- `compile.signal`: Signal from compile process, or null if code is not null

#### Example
@ -134,9 +133,9 @@ Returns a list of all possible packages, and their installation status.
#### Response

- `[].language`: Name of the contained runtime
- `[].language_version`: Version of the contained runtime
- `[].installed`: Status on the package being installed

#### Example
@ -168,13 +167,13 @@ Install the given package.
#### Request

- `language`: Name of package from [package list](#get-apiv2packages)
- `version`: SemVer version selector for package from [package list](#get-apiv2packages)

#### Response

- `language`: Name of package installed
- `version`: Version of package installed

#### Example
@ -204,13 +203,13 @@ Uninstall the given package.
#### Request

- `language`: Name of package from [package list](#get-apiv2packages)
- `version`: SemVer version selector for package from [package list](#get-apiv2packages)

#### Response

- `language`: Name of package uninstalled
- `version`: Version of package uninstalled

#### Example

View File

@ -50,15 +50,15 @@ Absolute path to piston related data, including packages and job contexts.
```yaml
key:
    - PISTON_RUNNER_UID_MIN
    - PISTON_RUNNER_UID_MAX
    - PISTON_RUNNER_GID_MIN
    - PISTON_RUNNER_GID_MAX
default:
    - 1001
    - 1500
    - 1001
    - 1500
```
UID and GID ranges to use when executing jobs.
@ -86,11 +86,11 @@ key: PISTON_MAX_PROCESS_COUNT
default: 64
```
Maximum number of processes a job is allowed to have open.
Resists against exhausting the process table, causing a full system lockup.
## Output Max Size
```yaml
key: PISTON_OUTPUT_MAX_SIZE
@ -123,27 +123,12 @@ Maximum size for a singular file written to disk.
Resists against large file writes to exhaust disk space.
## Compile/Run timeouts
```yaml
key:
- PISTON_COMPILE_TIMEOUT
default: 10000
key:
- PISTON_RUN_TIMEOUT
default: 3000
```
The maximum time that is allowed to be taken by a stage in milliseconds.
Use -1 for unlimited time.
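For example, a deployment could relax the compile stage while keeping the default run limit (values illustrative, keys and defaults as documented above):

```bash
# Allow compiles up to 15 s; keep the 3 s default for runs.
export PISTON_COMPILE_TIMEOUT=15000
export PISTON_RUN_TIMEOUT=3000
```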
## Compile/Run memory limits
```yaml
key:
    - PISTON_COMPILE_MEMORY_LIMIT
    - PISTON_RUN_MEMORY_LIMIT
default: -1
```
@ -169,19 +154,3 @@ default: 64
```
Maximum number of jobs to run concurrently.
## Limit overrides
```yaml
key: PISTON_LIMIT_OVERRIDES
default: {}
```
Per-language overrides/exceptions for each of `max_process_count`, `max_open_files`, `max_file_size`,
`compile_memory_limit`, `run_memory_limit`, `compile_timeout`, `run_timeout`, `output_max_size`. Defined as follows:
```
PISTON_LIMIT_OVERRIDES={"c++":{"max_process_count":128}}
```
This will give `c++` a max_process_count of 128 regardless of the configuration.

View File

@ -1 +1 @@
mkdocs==1.2.3

View File

@ -21,7 +21,6 @@
compile? null,
packages? null,
aliases? [],
limitOverrides? {},
tests
}: let
compileFile = if compile != null then
@ -29,7 +28,7 @@
else null;
runFile = pkgs.writeShellScript "run" run;
metadata = {
    inherit language version runtime aliases limitOverrides;
    run = runFile;
    compile = compileFile;
    packageSupport = packages != null;

View File

@ -1,15 +1,15 @@
site_name: Piston
nav:
    - Home: index.md
    - Configuration: configuration.md
    - API: api-v2.md
theme:
    name: readthedocs
    highlightjs: true
    hljs_languages:
        - yaml
        - json
markdown_extensions:
    - admonition

32
package-lock.json generated
View File

@ -1,32 +0,0 @@
{
"name": "piston",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"devDependencies": {
"prettier": "2.4.1"
}
},
"node_modules/prettier": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
"integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
"dev": true,
"bin": {
"prettier": "bin-prettier.js"
},
"engines": {
"node": ">=10.13.0"
}
}
},
"dependencies": {
"prettier": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
"integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
"dev": true
}
}
}

View File

@ -1,5 +0,0 @@
{
"devDependencies": {
"prettier": "2.4.1"
}
}

View File

@ -1,15 +0,0 @@
#!/usr/bin/env bash
# source python 2.7
source ../../python/2.7.18/build.sh
# clone befunge repo
git clone -q 'https://github.com/programble/befungee' befunge93
# go inside befunge93 so we can checkout
cd befunge93
# checkout the version 0.2.0
git checkout tags/v0.2.0
cd ..

View File

@ -1,5 +0,0 @@
{
"language": "befunge93",
"version": "0.2.0",
"aliases": ["b93"]
}

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
# run the befunge program with the file name
python2.7 "$BEFUNGE93_PATH"/befungee.py "$1"

View File

@ -1 +0,0 @@
64+"KO">:#,_@

View File

@ -1,20 +0,0 @@
#!/usr/bin/env bash
# build prolog 8.2.4 as dependency
source ../../prolog/8.2.4/build.sh
# curl brachylog 1.0.0
curl -L "https://github.com/JCumin/Brachylog/archive/refs/tags/v1.0-ascii.tar.gz" -o brachylog.tar.gz
tar xzf brachylog.tar.gz --strip-components=1
rm brachylog.tar.gz
# move swi prolog to working directory
cp bin/swipl swipl
# give execution permission to swipl
chmod +x swipl
# add some code to brachylog.pl so we don't have to escape backslashes while using the interactive mode
echo '
:-feature(argv, [Code, Stdin]), run_from_atom(Code, Stdin, _), halt.' >> prolog_parser/brachylog.pl

View File

@ -1,5 +0,0 @@
{
"language": "brachylog",
"version": "1.0.0",
"aliases": []
}

View File

@ -1,19 +0,0 @@
#!/usr/bin/env bash
# save the file for later
file="$1"
# remove the file from $@
shift
# save stdin as $@ joined by newlines
stdin=`printf "%s\n" "$@"`
# save code as the contents of $file
code=`cat "$file"`
# go to the directory where brachylog.pl is so the imports work
cd "$BRACHYLOG_PATH"/prolog_parser
# run swi prolog with code and stdin
swipl -f brachylog.pl "$code" "$stdin"

View File

@ -1 +0,0 @@
"OK"w

View File

@ -1,5 +1,5 @@
{
    "language": "cjam",
    "version": "0.6.5",
    "aliases": []
}

7
packages/crystal/0.36.1/build.sh vendored Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
curl -L "https://github.com/crystal-lang/crystal/releases/download/0.36.1/crystal-0.36.1-1-linux-x86_64.tar.gz" -o crystal.tar.gz
tar xzf crystal.tar.gz --strip-components=1
rm crystal.tar.gz

5
packages/crystal/0.36.1/compile vendored Normal file
View File

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Compile crystal files into out file
crystal build "$@" -o out --no-color && \
chmod +x out

1
packages/crystal/0.36.1/environment vendored Normal file
View File

@ -0,0 +1 @@
export PATH=$PWD/bin:$PATH

5
packages/crystal/0.36.1/metadata.json vendored Normal file
View File

@ -0,0 +1,5 @@
{
"language": "crystal",
"version": "0.36.1",
"aliases": ["crystal", "cr"]
}

4
packages/crystal/0.36.1/run vendored Normal file
View File

@ -0,0 +1,4 @@
#!/bin/bash
shift # Filename is only used to compile
./out "$@"

1
packages/crystal/0.36.1/test.cr vendored Normal file
View File

@ -0,0 +1 @@
puts("OK")

11
packages/dart/2.12.1/build.sh vendored Executable file
View File

@ -0,0 +1,11 @@
#!/usr/bin/env bash
curl -L "https://storage.googleapis.com/dart-archive/channels/stable/release/2.12.1/sdk/dartsdk-linux-x64-release.zip" -o dart.zip
unzip dart.zip
rm dart.zip
cp -r dart-sdk/* .
rm -rf dart-sdk
chmod -R +rx bin

View File

@ -2,4 +2,3 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
export BRACHYLOG_PATH=$PWD

5
packages/dart/2.12.1/metadata.json vendored Normal file
View File

@ -0,0 +1,5 @@
{
"language": "dart",
"version": "2.12.1",
"aliases": []
}

4
packages/dart/2.12.1/run vendored Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put instructions to run the runtime
dart run "$@"

3
packages/dart/2.12.1/test.dart vendored Normal file
View File

@ -0,0 +1,3 @@
void main() {
print('OK');
}

19
packages/dash/0.5.11/build.sh vendored Executable file
View File

@ -0,0 +1,19 @@
#!/usr/bin/env bash
# Put instructions to build your package in here
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl "http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.11.tar.gz" -o dash.tar.gz
tar xzf dash.tar.gz --strip-components=1
./configure --prefix "$PREFIX" &&
make -j$(nproc) &&
make install -j$(nproc)
cd ../
rm -rf build

View File

@ -2,4 +2,3 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
export BEFUNGE93_PATH=$PWD/befunge93

5
packages/dash/0.5.11/metadata.json vendored Normal file
View File

@ -0,0 +1,5 @@
{
"language": "dash",
"version": "0.5.11",
"aliases": ["dash"]
}

4
packages/dash/0.5.11/run vendored Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put instructions to run the runtime
dash "$@"

1
packages/dash/0.5.11/test.dash vendored Normal file
View File

@ -0,0 +1 @@
echo "OK"

5
packages/deno/1.7.5/build.sh vendored Executable file
View File

@ -0,0 +1,5 @@
curl -L https://github.com/denoland/deno/releases/download/v1.7.5/deno-x86_64-unknown-linux-gnu.zip --output deno.zip
unzip -o deno.zip
rm deno.zip
chmod +x deno

1
packages/deno/1.7.5/environment vendored Normal file
View File

@ -0,0 +1 @@
export PATH=$PWD:$PATH

14
packages/deno/1.7.5/metadata.json vendored Normal file
View File

@ -0,0 +1,14 @@
{
"language": "deno",
"version": "1.7.5",
"provides": [
{
"language": "typescript",
"aliases": ["deno-ts","deno"]
},
{
"language": "javascript",
"aliases": ["deno-js"]
}
]
}

2
packages/deno/1.7.5/run vendored Normal file
View File

@ -0,0 +1,2 @@
#!/bin/bash
DENO_DIR=$PWD deno run "$@"

1
packages/deno/1.7.5/test.deno.ts vendored Normal file
View File

@ -0,0 +1 @@
console.log("OK")

6
packages/dotnet/5.0.201/build.sh vendored Executable file → Normal file
View File

@ -7,10 +7,8 @@ rm dotnet.tar.gz
# Cache nuget packages
export DOTNET_CLI_HOME=$PWD
./dotnet new console -o cache_application
./dotnet new console -lang F# -o fs_cache_application
./dotnet new console -lang VB -o vb_cache_application
# This calls a restore on the global-packages index ($DOTNET_CLI_HOME/.nuget/packages)
# If we want to allow more packages, we could add them to this cache_application
rm -rf cache_application fs_cache_application vb_cache_application
# Get rid of it, we don't actually need the application - just the restore

View File

@ -1,36 +1,15 @@
#!/usr/bin/env bash
[ "${PISTON_LANGUAGE}" == "fsi" ] && exit 0
export DOTNET_CLI_HOME=$PWD
export HOME=$PWD
dotnet build --help > /dev/null # Shut the thing up
case "${PISTON_LANGUAGE}" in
    basic.net)
        rename 's/$/\.vb/' "$@" # Add .vb extension
        dotnet new console -lang VB -o . --no-restore
        rm Program.vb
        ;;
    fsharp.net)
        first_file=$1
        shift
        rename 's/$/\.fs/' "$@" # Add .fs extension
        dotnet new console -lang F# -o . --no-restore
        mv $first_file Program.fs # For some reason F#.net doesn't work unless the file name is Program.fs
        ;;
    csharp.net)
        rename 's/$/\.cs/' "$@" # Add .cs extension
        dotnet new console -o . --no-restore
        rm Program.cs
        ;;
    *)
        echo "How did you get here? (${PISTON_LANGUAGE})"
        exit 1
        ;;
esac
dotnet restore --source $DOTNET_ROOT/.nuget/packages
dotnet build --no-restore

View File

@ -2,5 +2,4 @@
# Put 'export' statements here for environment variables
export DOTNET_ROOT=$PWD
export PATH=$DOTNET_ROOT:$PATH
export FSI_PATH=$(find $(pwd) -name fsi.dll)

View File

@ -1,66 +1,5 @@
{
    "language": "dotnet",
    "version": "5.0.201",
    "provides": [
        {
            "language": "basic.net",
            "aliases": [
                "basic",
                "visual-basic",
                "visual-basic.net",
                "vb",
                "vb.net",
                "vb-dotnet",
                "dotnet-vb",
                "basic-dotnet",
                "dotnet-basic"
            ],
            "limit_overrides": { "max_process_count": 128 }
        },
        {
            "language": "fsharp.net",
            "aliases": [
                "fsharp",
                "fs",
                "f#",
                "fs.net",
                "f#.net",
                "fsharp-dotnet",
                "fs-dotnet",
                "f#-dotnet",
                "dotnet-fsharp",
                "dotnet-fs"
            ],
            "limit_overrides": { "max_process_count": 128 }
        },
        {
            "language": "csharp.net",
            "aliases": [
                "csharp",
                "c#",
                "cs",
                "c#.net",
                "cs.net",
                "c#-dotnet",
                "cs-dotnet",
                "csharp-dotnet",
                "dotnet-c#",
                "dotnet-cs",
                "dotnet-csharp"
            ],
            "limit_overrides": { "max_process_count": 128 }
        },
        {
            "language": "fsi",
            "aliases": [
                "fsx",
                "fsharp-interactive",
                "f#-interactive",
                "dotnet-fsi",
                "fsi-dotnet",
                "fsi.net"
            ]
        }
    ]
}

View File

@ -3,23 +3,5 @@
# Put instructions to run the runtime
export DOTNET_CLI_HOME=$PWD
case "${PISTON_LANGUAGE}" in
    basic.net)
        ;&
    fsharp.net)
        ;&
    csharp.net)
        shift
        dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
        ;;
    fsi)
        FILENAME=$1
        rename 's/$/\.fsx/' $FILENAME # Add .fsx extension
        shift
        dotnet $FSI_PATH $FILENAME.fsx "$@"
        ;;
    *)
        echo "How did you get here? (${PISTON_LANGUAGE})"
        exit 1
        ;;
esac

View File

@ -1,6 +0,0 @@
open System
[<EntryPoint>]
let main argv =
printfn "OK"
0

View File

@ -1 +0,0 @@
printfn "OK"

View File

@ -1,9 +0,0 @@
Imports System
Module Module1
Sub Main()
Console.WriteLine("OK")
End Sub
End Module

View File

@ -1,5 +1,5 @@
{
    "language": "dragon",
    "version": "1.9.8",
    "aliases": []
}

25
packages/elixir/1.11.3/build.sh vendored Executable file
View File

@ -0,0 +1,25 @@
#!/bin/bash
source ../../erlang/23.0.0/build.sh
export PATH=$PWD/bin:$PATH
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl -L "https://github.com/elixir-lang/elixir/archive/v1.11.3.tar.gz" -o elixir.tar.gz
tar xzf elixir.tar.gz --strip-components=1
rm elixir.tar.gz
./configure --prefix "$PREFIX"
make -j$(nproc)
cd ..
cp -r build/bin .
cp -r build/lib .
rm -rf build

5
packages/elixir/1.11.3/environment vendored Normal file
View File

@ -0,0 +1,5 @@
#!/usr/bin/env bash
# Put 'export' statements here for environment variables
export LC_ALL=en_US.UTF-8
export PATH=$PWD/bin:$PATH

5
packages/elixir/1.11.3/metadata.json vendored Normal file
View File

@ -0,0 +1,5 @@
{
"language": "elixir",
"version": "1.11.3",
"aliases": ["elixir", "exs"]
}

4
packages/elixir/1.11.3/run vendored Normal file
View File

@ -0,0 +1,4 @@
#!/bin/bash
# Put instructions to run the runtime
elixir "$@"

1
packages/elixir/1.11.3/test.exs vendored Normal file
View File

@ -0,0 +1 @@
IO.puts("OK")

21
packages/erlang/23.0.0/build.sh vendored Executable file
View File

@ -0,0 +1,21 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl "http://erlang.org/download/otp_src_23.0.tar.gz" -o erlang.tar.gz
tar xzf erlang.tar.gz --strip-components=1
rm erlang.tar.gz
export ERL_TOP=$(pwd)
./configure --prefix "$PREFIX"
make -j$(nproc)
make install -j$(nproc)
cd ..
rm -rf build

4
packages/erlang/23.0.0/environment vendored Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH

5
packages/erlang/23.0.0/metadata.json vendored Normal file
View File

@ -0,0 +1,5 @@
{
"language": "erlang",
"version": "23.0.0",
"aliases": ["erlang", "erl", "escript"]
}

4
packages/erlang/23.0.0/run vendored Normal file
View File

@ -0,0 +1,4 @@
#!/bin/bash
# Put instructions to run the runtime
escript "$@"

3
packages/erlang/23.0.0/test.erl vendored Normal file
View File

@ -0,0 +1,3 @@
main(_) ->
io:format("OK~n").

View File

@ -1,5 +1,5 @@
{
    "language": "forte",
    "version": "1.0.0",
    "aliases": ["forter"]
}

21
packages/gawk/5.1.0/build.sh vendored Normal file
View File

@ -0,0 +1,21 @@
#!/usr/bin/env bash
# Put instructions to build your package in here
PREFIX=$(realpath $(dirname $0))
mkdir -p build
cd build
curl "https://ftp.gnu.org/gnu/gawk/gawk-5.1.0.tar.gz" -o gawk.tar.gz
tar xzf gawk.tar.gz --strip-components=1
# === autoconf based ===
./configure --prefix "$PREFIX"
make -j$(nproc)
make install -j$(nproc)
cd ../
rm -rf build

4
packages/gawk/5.1.0/environment vendored Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH

10
packages/gawk/5.1.0/metadata.json vendored Normal file
View File

@ -0,0 +1,10 @@
{
"language": "gawk",
"version": "5.1.0",
"provides": [
{
"language": "awk",
"aliases": ["gawk"]
}
]
}

4
packages/gawk/5.1.0/run vendored Normal file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Put instructions to run the runtime
gawk-5.1.0 -f "$@"

1
packages/gawk/5.1.0/test.awk vendored Normal file
View File

@ -0,0 +1 @@
{print "OK"}

View File

@ -3,7 +3,7 @@
"version": "10.2.0", "version": "10.2.0",
"provides": [ "provides": [
{ {
"language": "c", "language":"c",
"aliases": ["gcc"] "aliases": ["gcc"]
}, },
{ {

View File

@ -1,5 +1,5 @@
{
    "language": "golfscript",
    "version": "1.0.0",
    "aliases": ["golfscript"]
}

View File

@ -1,5 +1,5 @@
{
    "language": "groovy",
    "version": "3.0.7",
    "aliases": ["groovy", "gvy"]
}

View File

@ -1,14 +0,0 @@
#!/usr/bin/env bash
cp ../../haskell/9.0.1/build.sh ./haskell-build.sh
sed -Ei 's/9\.0\.1/8\.10\.7/g' ./haskell-build.sh
source ./haskell-build.sh
# compile Husk from source
git clone -q "https://github.com/barbuz/husk.git"
cd husk
../bin/ghc -O2 Husk
# cleanup
cd ..
rm -f haskell-build.sh

View File

@ -1,6 +0,0 @@
#!/usr/bin/env bash
# haskell and husk path
export PATH=$PWD/bin:$PATH
export HUSK_PATH=$PWD/husk
export LANG=en_US.UTF8

View File

@ -1,5 +0,0 @@
{
"language": "husk",
"version": "1.0.0",
"aliases": []
}

View File

@ -1,10 +0,0 @@
#!/usr/bin/env bash
# Store the current path because we'll need it to run the program file
PROGRAM_PATH=$PWD
# For now, Husk can only be run within the folder that has the imported modules
cd $HUSK_PATH
# Run Husk from file in unicode format with the given args
./Husk -uf "${PROGRAM_PATH}/${@}"

View File

@ -1 +0,0 @@
"OK

View File

@ -1,5 +1,5 @@
{
    "language": "japt",
    "version": "2.0.0",
    "aliases": ["japt"]
}

View File

@ -1,6 +0,0 @@
#!/usr/bin/env bash
curl -L "https://github.com/llvm/llvm-project/releases/download/llvmorg-12.0.1/clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz" -o llvm-ir.tar.xz
tar xf llvm-ir.tar.xz clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-16.04/bin --strip-components=1
rm llvm-ir.tar.xz

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
llc "$@" -o binary.s
clang binary.s -o binary

View File

@ -1,2 +0,0 @@
#!/usr/bin/env bash
export PATH=$PWD/bin:$PATH

View File

@ -1,5 +0,0 @@
{
"language": "llvm_ir",
"version": "12.0.1",
"aliases": ["llvm", "llvm-ir", "ll"]
}

View File

@ -1,4 +0,0 @@
#!/usr/bin/env bash
shift
binary "$@"

View File

@ -1,10 +0,0 @@
@.str = private unnamed_addr constant [2 x i8] c"OK"
declare i32 @puts(i8* nocapture) nounwind
define i32 @main() {
%cast210 = getelementptr [2 x i8],[2 x i8]* @.str, i64 0, i64 0
call i32 @puts(i8* %cast210)
ret i32 0
}

Some files were not shown because too many files have changed in this diff.