diff --git a/.github/ISSUE_TEMPLATE/language-request.md b/.github/ISSUE_TEMPLATE/language-request.md
index 5ae2661..3f42d90 100644
--- a/.github/ISSUE_TEMPLATE/language-request.md
+++ b/.github/ISSUE_TEMPLATE/language-request.md
@@ -4,7 +4,6 @@ about: Template for requesting language support
title: Add [insert language name here]
labels: package
assignees: ''
-
---
Provide links to different compilers/interpreters that could be used to implement this language, and discuss pros/cons of each.
diff --git a/.github/PULL_REQUEST_TEMPLATE/package.md b/.github/PULL_REQUEST_TEMPLATE/package.md
index 6cd3c98..da59fe0 100644
--- a/.github/PULL_REQUEST_TEMPLATE/package.md
+++ b/.github/PULL_REQUEST_TEMPLATE/package.md
@@ -1,10 +1,11 @@
Checklist:
-* [ ] The package builds locally with `./piston build-pkg [package] [version]`
-* [ ] The package installs with `./piston ppman install [package]=[version]`
-* [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
-* [ ] Package files are placed in the correct directory
-* [ ] No old package versions are removed
-* [ ] All source files are deleted in the `build.sh` script
-* [ ] `metadata.json`'s `language` and `version` fields match the directory path
-* [ ] Any extensions the language may use are set as aliases
-* [ ] Any alternative names the language is referred to are set as aliases.
+
+- [ ] The package builds locally with `./piston build-pkg [package] [version]`
+- [ ] The package installs with `./piston ppman install [package]=[version]`
+- [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
+- [ ] Package files are placed in the correct directory
+- [ ] No old package versions are removed
+- [ ] All source files are deleted in the `build.sh` script
+- [ ] `metadata.json`'s `language` and `version` fields match the directory path
+- [ ] Any extensions the language may use are set as aliases
+- [ ] Any alternative names the language is referred to are set as aliases.
diff --git a/.github/workflows/api-push.yaml b/.github/workflows/api-push.yaml
index bcf0472..dec3bce 100644
--- a/.github/workflows/api-push.yaml
+++ b/.github/workflows/api-push.yaml
@@ -1,39 +1,38 @@
name: Publish API image
on:
- push:
- branches:
- - master
- - v3
- paths:
- - api/**
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - api/**
-
jobs:
- push_to_registry:
- runs-on: ubuntu-latest
- name: Build and Push Docker image to Github Packages
- steps:
- - name: Check out repo
- uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
- - name: Login to ghcr.io
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: ghcr.io
+ push_to_registry:
+ runs-on: ubuntu-latest
+ name: Build and Push Docker image to Github Packages
+ steps:
+ - name: Check out repo
+ uses: actions/checkout@v2
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
+ - name: Login to ghcr.io
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: ghcr.io
- - name: Build and push API
- uses: docker/build-push-action@v2
- with:
- context: api
- push: true
- pull: true
- tags: |
- docker.pkg.github.com/engineer-man/piston/api
- ghcr.io/engineer-man/piston
+ - name: Build and push API
+ uses: docker/build-push-action@v2
+ with:
+ context: api
+ push: true
+ pull: true
+ tags: |
+ docker.pkg.github.com/engineer-man/piston/api
+ ghcr.io/engineer-man/piston
diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml
index bb264a3..d5bfe78 100644
--- a/.github/workflows/package-pr.yaml
+++ b/.github/workflows/package-pr.yaml
@@ -1,140 +1,139 @@
-name: "Package Pull Requests"
+name: 'Package Pull Requests'
on:
- pull_request:
- types:
- - opened
- - edited
- - reopened
- - synchronize
- paths:
- - "packages/**"
+ pull_request:
+ types:
+ - opened
+ - reopened
+ - synchronize
+ paths:
+ - 'packages/**'
jobs:
- check-pkg:
- name: Validate README
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ check-pkg:
+ name: Validate README
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- - name: Ensure README was updated
- run: |
-        MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))
+ - name: Ensure README was updated
+ run: |
+            MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))
- [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
+ [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
- echo "README has supported languages missing: "
-        comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
- exit 1
+ echo "README has supported languages missing: "
+            comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
+ exit 1
- build-pkg:
- name: Check that package builds
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
+ build-pkg:
+ name: Check that package builds
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- - name: Build Packages
- run: |
- PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
- echo "Packages: $PACKAGES"
- docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
- docker build -t repo-builder repo
- docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
- ls -la packages
+ - name: Build Packages
+ run: |
+ PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
+ echo "Packages: $PACKAGES"
+ docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+ docker build -t repo-builder repo
+ docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
+ ls -la packages
- - name: Upload package as artifact
- uses: actions/upload-artifact@v2
- with:
- name: packages
- path: packages/*.pkg.tar.gz
+ - name: Upload package as artifact
+ uses: actions/upload-artifact@v2
+ with:
+ name: packages
+ path: packages/*.pkg.tar.gz
- test-pkg:
- name: Test package
- runs-on: ubuntu-latest
- needs: build-pkg
- steps:
- - uses: actions/checkout@v2
+ test-pkg:
+ name: Test package
+ runs-on: ubuntu-latest
+ needs: build-pkg
+ steps:
+ - uses: actions/checkout@v2
- - uses: actions/download-artifact@v2
- with:
- name: packages
+ - uses: actions/download-artifact@v2
+ with:
+ name: packages
- - name: Relocate downloaded packages
- run: mv *.pkg.tar.gz packages/
+ - name: Relocate downloaded packages
+ run: mv *.pkg.tar.gz packages/
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Run tests
- run: |
- ls -la
- docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
- docker pull docker.pkg.github.com/engineer-man/piston/api
- docker build -t piston-api api
- docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
- echo Waiting for API to start..
- docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
+ - name: Run tests
+ run: |
+ ls -la
+ docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
+ docker pull docker.pkg.github.com/engineer-man/piston/api
+ docker build -t piston-api api
+ docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
+ echo Waiting for API to start..
+ docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
- echo Waiting for Index to start..
- docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
+ echo Waiting for Index to start..
+ docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
- echo Adjusting index
- sed -i 's/repo/localhost/g' repo/index
+ echo Adjusting index
+ sed -i 's/repo/localhost/g' repo/index
- echo Listing Packages
- PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
- echo $PACKAGES_JSON
+ echo Listing Packages
+ PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
+ echo $PACKAGES_JSON
- echo Getting CLI ready
- docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
+ echo Getting CLI ready
+ docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
- for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
- do
- echo "Testing $package"
- PKG_PATH=$(sed 's|-|/|' <<< $package)
- PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
- PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
+ for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
+ do
+ echo "Testing $package"
+ PKG_PATH=$(sed 's|-|/|' <<< $package)
+ PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
+ PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
- echo "Installing..."
- docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
+ echo "Installing..."
+ docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
- TEST_SCRIPTS=packages/$PKG_PATH/test.*
- echo "Tests: $TEST_SCRIPTS"
+ TEST_SCRIPTS=packages/$PKG_PATH/test.*
+ echo "Tests: $TEST_SCRIPTS"
- for tscript in $TEST_SCRIPTS
- do
- TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
- echo Running $tscript with runtime=$TEST_RUNTIME
- docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
- cat test_output
- grep "OK" test_output
- done
- done
+ for tscript in $TEST_SCRIPTS
+ do
+ TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
+ echo Running $tscript with runtime=$TEST_RUNTIME
+ docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
+ cat test_output
+ grep "OK" test_output
+ done
+ done
- - name: Dump logs
- if: ${{ always() }}
- run: |
- docker logs api
- docker logs repo
+ - name: Dump logs
+ if: ${{ always() }}
+ run: |
+ docker logs api
+ docker logs repo
diff --git a/.github/workflows/package-push.yaml b/.github/workflows/package-push.yaml
index bbb44af..9de6051 100644
--- a/.github/workflows/package-push.yaml
+++ b/.github/workflows/package-push.yaml
@@ -1,78 +1,77 @@
name: 'Package Pushed'
on:
- push:
- branches:
- - master
- - v3
- paths:
- - packages/**
-
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - packages/**
jobs:
- build-pkg:
- name: Build package
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
-
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ build-pkg:
+ name: Build package
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Build Packages
- run: |
- PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
- echo "Packages: $PACKAGES"
- docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
- docker build -t repo-builder repo
- docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
- ls -la packages
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Upload Packages
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: packages/*.pkg.tar.gz
- tag: pkgs
- overwrite: true
- file_glob: true
- create-index:
- name: Create Index
- runs-on: ubuntu-latest
- needs: build-pkg
- steps:
- - name: "Download all release assets"
- run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
- - name: "Generate index file"
- run: |
- echo "" > index
- BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
- for pkg in *.pkg.tar.gz
- do
- PKGFILE=$(basename $pkg)
- PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
- PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
- PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
- echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
- echo "Adding package $PKGNAME-$PKGVERSION"
- done
- - name: Upload index
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: index
- tag: pkgs
- overwrite: true
- file_glob: true
+ - name: Build Packages
+ run: |
+ PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
+ echo "Packages: $PACKAGES"
+ docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+ docker build -t repo-builder repo
+ docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
+ ls -la packages
+
+ - name: Upload Packages
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: packages/*.pkg.tar.gz
+ tag: pkgs
+ overwrite: true
+ file_glob: true
+ create-index:
+ name: Create Index
+ runs-on: ubuntu-latest
+ needs: build-pkg
+ steps:
+ - name: 'Download all release assets'
+ run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
+ - name: 'Generate index file'
+ run: |
+ echo "" > index
+ BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
+ for pkg in *.pkg.tar.gz
+ do
+ PKGFILE=$(basename $pkg)
+ PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
+
+ PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
+ PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
+ PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
+ echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
+ echo "Adding package $PKGNAME-$PKGVERSION"
+ done
+ - name: Upload index
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: index
+ tag: pkgs
+ overwrite: true
+ file_glob: true
diff --git a/.github/workflows/repo-push.yaml b/.github/workflows/repo-push.yaml
index b5a603c..c887b01 100644
--- a/.github/workflows/repo-push.yaml
+++ b/.github/workflows/repo-push.yaml
@@ -1,31 +1,31 @@
name: Publish Repo image
on:
- push:
- branches:
- - master
- - v3
- paths:
- - repo/**
-
-jobs:
- push_to_registry:
- runs-on: ubuntu-latest
- name: Build and Push Docker image to Github Packages
- steps:
- - name: Check out repo
- uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - repo/**
- - name: Build and push repo
- uses: docker/build-push-action@v2
- with:
- context: repo
- pull: true
- push: true
- tags: |
- docker.pkg.github.com/engineer-man/piston/repo-builder
\ No newline at end of file
+jobs:
+ push_to_registry:
+ runs-on: ubuntu-latest
+ name: Build and Push Docker image to Github Packages
+ steps:
+ - name: Check out repo
+ uses: actions/checkout@v2
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
+
+ - name: Build and push repo
+ uses: docker/build-push-action@v2
+ with:
+ context: repo
+ pull: true
+ push: true
+ tags: |
+ docker.pkg.github.com/engineer-man/piston/repo-builder
diff --git a/.gitignore b/.gitignore
index b706dd9..6d3d2cb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
data/
.piston_env
-result
\ No newline at end of file
+node_modules
+result
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..bb310ab
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,12 @@
+node_modules
+data/
+api/_piston
+repo/build
+packages/*/*/*
+packages/*.pkg.tar.gz
+!packages/*/*/metadata.json
+!packages/*/*/build.sh
+!packages/*/*/environment
+!packages/*/*/run
+!packages/*/*/compile
+!packages/*/*/test.*
diff --git a/api/.prettierrc.yaml b/.prettierrc.yaml
similarity index 100%
rename from api/.prettierrc.yaml
rename to .prettierrc.yaml
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index e651ad5..70f2b5e 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,8 +1,8 @@
version: 2
mkdocs:
- configuration: mkdocs.yml
+ configuration: mkdocs.yml
python:
- version: 3.7
- install:
- - requirements: docs/requirements.txt
+ version: 3.7
+ install:
+ - requirements: docs/requirements.txt
diff --git a/Dockerfile.withset b/Dockerfile.withset
new file mode 100644
index 0000000..4fd9a0b
--- /dev/null
+++ b/Dockerfile.withset
@@ -0,0 +1,12 @@
+# This "FROM" image is previously emitted by nix
+FROM ghcr.io/engineer-man/piston:base-latest
+
+ENV PISTON_FLAKE_PATH=/piston/packages
+COPY runtimes/ /piston/packages/runtimes
+COPY flake.nix flake.lock /piston/packages/
+
+
+ARG RUNTIMESET=all
+ENV PISTON_RUNTIME_SET=$RUNTIMESET
+
+RUN piston-install
\ No newline at end of file
diff --git a/api/.gitignore b/api/.gitignore
index adbd330..4b5a9b8 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -1,2 +1 @@
-node_modules
-_piston
\ No newline at end of file
+_piston
diff --git a/api/.prettierignore b/api/.prettierignore
deleted file mode 100644
index 3c3629e..0000000
--- a/api/.prettierignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/api/default.nix b/api/default.nix
index c194587..a4d4868 100644
--- a/api/default.nix
+++ b/api/default.nix
@@ -51,6 +51,8 @@ with pkgs; rec {
do
echo "nixbld$i:x:$(( $i + 30000 )):30000:Nix build user $i:/var/empty:/run/current-system/sw/bin/nologin" >> etc/passwd
done
+
+ chmod 1777 {,var/}tmp/
'';
config = {
@@ -61,6 +63,21 @@ with pkgs; rec {
"SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
"GIT_SSL_CAINFO=/etc/ssl/certs/ca-bundle.crt"
"NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
+ "PATH=${lib.concatStringsSep ":" [
+ "/usr/local/sbin"
+ "/usr/local/bin"
+ "/usr/sbin"
+ "/usr/bin"
+ "/sbin"
+ "/bin"
+ "/root/.nix-profile/bin"
+ "/nix/var/nix/profiles/default/bin"
+ "/nix/var/nix/profiles/default/sbin"
+ ]}"
+ "MANPATH=${lib.concatStringsSep ":" [
+ "/root/.nix-profile/share/man"
+ "/nix/var/nix/profiles/default/share/man"
+ ]}"
];
ExposedPorts = {
diff --git a/api/src/api/v2.js b/api/src/api/v2.js
index 13d31bf..86294b4 100644
--- a/api/src/api/v2.js
+++ b/api/src/api/v2.js
@@ -3,16 +3,54 @@ const router = express.Router();
const events = require('events');
-const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');
-const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
+const SIGNALS = [
+ 'SIGABRT',
+ 'SIGALRM',
+ 'SIGBUS',
+ 'SIGCHLD',
+ 'SIGCLD',
+ 'SIGCONT',
+ 'SIGEMT',
+ 'SIGFPE',
+ 'SIGHUP',
+ 'SIGILL',
+ 'SIGINFO',
+ 'SIGINT',
+ 'SIGIO',
+ 'SIGIOT',
+ 'SIGKILL',
+ 'SIGLOST',
+ 'SIGPIPE',
+ 'SIGPOLL',
+ 'SIGPROF',
+ 'SIGPWR',
+ 'SIGQUIT',
+ 'SIGSEGV',
+ 'SIGSTKFLT',
+ 'SIGSTOP',
+ 'SIGTSTP',
+ 'SIGSYS',
+ 'SIGTERM',
+ 'SIGTRAP',
+ 'SIGTTIN',
+ 'SIGTTOU',
+ 'SIGUNUSED',
+ 'SIGURG',
+ 'SIGUSR1',
+ 'SIGUSR2',
+ 'SIGVTALRM',
+ 'SIGXCPU',
+ 'SIGXFSZ',
+ 'SIGWINCH',
+];
// ref: https://man7.org/linux/man-pages/man7/signal.7.html
-function get_job(body){
- const {
+function get_job(body) {
+ let {
language,
args,
stdin,
@@ -20,7 +58,7 @@ function get_job(body){
compile_memory_limit,
run_memory_limit,
run_timeout,
- compile_timeout
+ compile_timeout,
} = body;
return new Promise((resolve, reject) => {
@@ -35,7 +73,6 @@ function get_job(body){
message: 'files is required as an array',
});
}
-
for (const [i, file] of files.entries()) {
if (typeof file.content !== 'string') {
return reject({
@@ -94,23 +131,65 @@ function get_job(body){
});
}
- resolve(new Job({
- runtime: rt,
- alias: language,
- args: args || [],
- stdin: stdin || "",
- files,
- timeouts: {
- run: run_timeout || 3000,
- compile: compile_timeout || 10000,
- },
- memory_limits: {
- run: run_memory_limit || config.run_memory_limit,
- compile: compile_memory_limit || config.compile_memory_limit,
- }
- }));
- })
+ if (
+ rt.language !== 'file' &&
+ !files.some(file => !file.encoding || file.encoding === 'utf8')
+ ) {
+ return reject({
+ message: 'files must include at least one utf8 encoded file',
+ });
+ }
+ for (const constraint of ['memory_limit', 'timeout']) {
+ for (const type of ['compile', 'run']) {
+ const constraint_name = `${type}_${constraint}`;
+ const constraint_value = body[constraint_name];
+ const configured_limit = rt[`${constraint}s`][type];
+ if (!constraint_value) {
+ continue;
+ }
+ if (typeof constraint_value !== 'number') {
+ return reject({
+ message: `If specified, ${constraint_name} must be a number`,
+ });
+ }
+ if (configured_limit <= 0) {
+ continue;
+ }
+ if (constraint_value > configured_limit) {
+ return reject({
+ message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`,
+ });
+ }
+ if (constraint_value < 0) {
+ return reject({
+ message: `${constraint_name} must be non-negative`,
+ });
+ }
+ }
+ }
+
+ compile_timeout = compile_timeout || rt.timeouts.compile;
+ run_timeout = run_timeout || rt.timeouts.run;
+ compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
+ run_memory_limit = run_memory_limit || rt.memory_limits.run;
+ resolve(
+ new Job({
+ runtime: rt,
+ args: args || [],
+ stdin: stdin || '',
+ files,
+ timeouts: {
+ run: run_timeout,
+ compile: compile_timeout,
+ },
+ memory_limits: {
+ run: run_memory_limit,
+ compile: compile_memory_limit,
+ },
+ })
+ );
+ });
}
router.use((req, res, next) => {
@@ -128,88 +207,104 @@ router.use((req, res, next) => {
});
router.ws('/connect', async (ws, req) => {
-
let job = null;
let eventBus = new events.EventEmitter();
- eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
- eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
- eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
- eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
+ eventBus.on('stdout', data =>
+ ws.send(
+ JSON.stringify({
+ type: 'data',
+ stream: 'stdout',
+ data: data.toString(),
+ })
+ )
+ );
+ eventBus.on('stderr', data =>
+ ws.send(
+ JSON.stringify({
+ type: 'data',
+ stream: 'stderr',
+ data: data.toString(),
+ })
+ )
+ );
+ eventBus.on('stage', stage =>
+ ws.send(JSON.stringify({ type: 'stage', stage }))
+ );
+ eventBus.on('exit', (stage, status) =>
+ ws.send(JSON.stringify({ type: 'exit', stage, ...status }))
+ );
- ws.on("message", async (data) => {
-
- try{
+ ws.on('message', async data => {
+ try {
const msg = JSON.parse(data);
- switch(msg.type){
- case "init":
- if(job === null){
+ switch (msg.type) {
+ case 'init':
+ if (job === null) {
job = await get_job(msg);
await job.prime();
- ws.send(JSON.stringify({
- type: "runtime",
- language: job.runtime.language,
- version: job.runtime.version.raw
- }))
+ ws.send(
+ JSON.stringify({
+ type: 'runtime',
+ language: job.runtime.language,
+ version: job.runtime.version.raw,
+ })
+ );
await job.execute_interactive(eventBus);
- ws.close(4999, "Job Completed");
-
- }else{
- ws.close(4000, "Already Initialized");
+ ws.close(4999, 'Job Completed');
+ } else {
+ ws.close(4000, 'Already Initialized');
}
break;
- case "data":
- if(job !== null){
- if(msg.stream === "stdin"){
- eventBus.emit("stdin", msg.data)
- }else{
- ws.close(4004, "Can only write to stdin")
+ case 'data':
+ if (job !== null) {
+ if (msg.stream === 'stdin') {
+ eventBus.emit('stdin', msg.data);
+ } else {
+ ws.close(4004, 'Can only write to stdin');
+ }
+ } else {
+ ws.close(4003, 'Not yet initialized');
}
- }else{
- ws.close(4003, "Not yet initialized")
- }
- break;
- case "signal":
- if(job !== null){
- if(SIGNALS.includes(msg.signal)){
- eventBus.emit("signal", msg.signal)
- }else{
- ws.close(4005, "Invalid signal")
+ break;
+ case 'signal':
+ if (job !== null) {
+ if (SIGNALS.includes(msg.signal)) {
+ eventBus.emit('signal', msg.signal);
+ } else {
+ ws.close(4005, 'Invalid signal');
+ }
+ } else {
+ ws.close(4003, 'Not yet initialized');
}
- }else{
- ws.close(4003, "Not yet initialized")
- }
- break;
+ break;
}
-
- }catch(error){
- ws.send(JSON.stringify({type: "error", message: error.message}))
- ws.close(4002, "Notified Error")
+ } catch (error) {
+ ws.send(JSON.stringify({ type: 'error', message: error.message }));
+ ws.close(4002, 'Notified Error');
// ws.close message is limited to 123 characters, so we notify over WS then close.
}
- })
+ });
- ws.on("close", async ()=>{
- if(job !== null){
- await job.cleanup()
+ ws.on('close', async () => {
+ if (job !== null) {
+ await job.cleanup();
}
- })
+ });
- setTimeout(()=>{
+ setTimeout(() => {
//Terminate the socket after 1 second, if not initialized.
- if(job === null)
- ws.close(4001, "Initialization Timeout");
- }, 1000)
-})
+ if (job === null) ws.close(4001, 'Initialization Timeout');
+ }, 1000);
+});
router.post('/execute', async (req, res) => {
-
- try{
+ try {
const job = await get_job(req.body);
await job.prime();
@@ -219,7 +314,7 @@ router.post('/execute', async (req, res) => {
await job.cleanup();
return res.status(200).send(result);
- }catch(error){
+ } catch (error) {
return res.status(400).json(error);
}
});
diff --git a/api/src/bin/pistond.js b/api/src/bin/pistond.js
index 9016eee..c56cc99 100755
--- a/api/src/bin/pistond.js
+++ b/api/src/bin/pistond.js
@@ -16,8 +16,6 @@ const logger = Logger.create('pistond');
const app = express();
expressWs(app);
-
-
(async () => {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
diff --git a/api/src/bin/test.js b/api/src/bin/test.js
index 134ce2a..16de55d 100755
--- a/api/src/bin/test.js
+++ b/api/src/bin/test.js
@@ -5,108 +5,105 @@ const config = require('../config');
const Logger = require('logplease');
const logger = Logger.create('test');
const cp = require('child_process');
-const runtime = require("../runtime");
+const runtime = require('../runtime');
const { Job } = require('../job');
-(async function(){
+(async function () {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
-
-
let runtimes_to_test;
let failed = false;
- if(process.argv[2] === "--all"){
+ if (process.argv[2] === '--all') {
// load all
runtimes_to_test = JSON.parse(
- cp.execSync(`nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`)
+ cp.execSync(
+ `nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`
+ )
);
- }else{
+ } else {
runtimes_to_test = [process.argv[2]];
}
-
-
for (const runtime_name of runtimes_to_test) {
-
-
const runtime_path = `${config.flake_path}#pistonRuntimes.${runtime_name}`;
logger.info(`Testing runtime ${runtime_path}`);
logger.debug(`Loading runtime metadata`);
- const metadata = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.metadata --json`));
+ const metadata = JSON.parse(
+ cp.execSync(`nix eval --json ${runtime_path}.metadata --json`)
+ );
logger.debug(`Loading runtime tests`);
- const tests = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.tests --json`));
+ const tests = JSON.parse(
+ cp.execSync(`nix eval --json ${runtime_path}.tests --json`)
+ );
logger.debug(`Loading runtime`);
const testable_runtime = new runtime.Runtime({
...metadata,
- flake_path: runtime_path
+ ...runtime.Runtime.compute_all_limits(
+ metadata.language,
+ metadata.limitOverrides
+ ),
+ flake_path: runtime_path,
});
testable_runtime.ensure_built();
-
logger.info(`Running tests`);
for (const test of tests) {
-
const files = [];
for (const file_name of Object.keys(test.files)) {
const file_content = test.files[file_name];
const this_file = {
name: file_name,
- content: file_content
+ content: file_content,
};
- if(file_name == test.main)
- files.unshift(this_file);
- else
- files.push(this_file);
-
+ if (file_name == test.main) files.unshift(this_file);
+ else files.push(this_file);
}
-
const job = new Job({
runtime: testable_runtime,
args: test.args || [],
- stdin: test.stdin || "",
+ stdin: test.stdin || '',
files,
timeouts: {
run: 3000,
- compile: 10000
+ compile: 10000,
},
memory_limits: {
run: config.run_memory_limit,
- compile: config.compile_memory_limit
- }
+ compile: config.compile_memory_limit,
+ },
});
- await job.prime()
- const result = await job.execute()
- await job.cleanup()
-
- if(result.run.stdout.trim() !== "OK"){
+ await job.prime();
+ const result = await job.execute();
+ await job.cleanup();
+
+ if (result.run.stdout.trim() !== 'OK') {
failed = true;
- logger.error("Test Failed:")
- console.log(job, result)
- }else{
- logger.info("Test Passed")
+ logger.error('Test Failed:');
+ console.log(job, result);
+ } else {
+ logger.info('Test Passed');
}
}
}
- if(failed) {
- logger.error("One or more tests failed")
+ if (failed) {
+ logger.error('One or more tests failed');
process.exit(1);
- }
- else {
- logger.info("All tests passed")
+ } else {
+ logger.info('All tests passed');
process.exit(0);
}
-})()
\ No newline at end of file
+})();
diff --git a/api/src/config.js b/api/src/config.js
index fb83228..897a938 100644
--- a/api/src/config.js
+++ b/api/src/config.js
@@ -2,6 +2,57 @@ const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');
+function parse_overrides(overrides) {
+ try {
+ return JSON.parse(overrides);
+ } catch (e) {
+ return null;
+ }
+}
+
+function validate_overrides(overrides, options) {
+ for (const language in overrides) {
+ for (const key in overrides[language]) {
+ if (
+ ![
+ 'max_process_count',
+ 'max_open_files',
+ 'max_file_size',
+ 'compile_memory_limit',
+ 'run_memory_limit',
+ 'compile_timeout',
+ 'run_timeout',
+ 'output_max_size',
+ ].includes(key)
+ ) {
+ logger.error(`Invalid overridden option: ${key}`);
+ return false;
+ }
+ const option = options.find(o => o.key === key);
+ const parser = option.parser;
+ const raw = overrides[language][key];
+ const value = parser(raw);
+ const validators = option.validators;
+ for (const validator of validators) {
+ const response = validator(value, raw);
+ if (response !== true) {
+ logger.error(
+ `Failed to validate overridden option: ${key}`,
+ response
+ );
+ return false;
+ }
+ }
+ overrides[language][key] = value;
+ }
+ // Modifies the reference
+ options[
+ options.index_of(options.find(o => o.key === 'limit_overrides'))
+ ] = overrides;
+ }
+ return true;
+}
+
const options = [
{
key: 'log_level',
@@ -17,7 +68,7 @@ const options = [
{
key: 'bind_address',
desc: 'Address to bind REST API on',
- default: '0.0.0.0:2000',
+ default: `0.0.0.0:${process.env["PORT"] || 2000}`,
validators: [],
},
{
@@ -91,18 +142,30 @@ const options = [
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
+ {
+ key: 'compile_timeout',
+ desc: 'Max time allowed for compile stage in milliseconds',
+ default: 10000, // 10 seconds
+ parser: parse_int,
+ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
+ },
+ {
+ key: 'run_timeout',
+ desc: 'Max time allowed for run stage in milliseconds',
+ default: 3000, // 3 seconds
+ parser: parse_int,
+ validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
+ },
{
key: 'compile_memory_limit',
- desc:
- 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
+ desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_memory_limit',
- desc:
- 'Max memory usage for run stage in bytes (set to -1 for no limit)',
+ desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
@@ -124,8 +187,22 @@ const options = [
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
- validators: [(x) => x > 0 || `${x} cannot be negative`]
- }
+ validators: [x => x > 0 || `${x} cannot be negative`],
+ },
+ {
+ key: 'limit_overrides',
+ desc: 'Per-language exceptions in JSON format for each of:\
+ max_process_count, max_open_files, max_file_size, compile_memory_limit,\
+ run_memory_limit, compile_timeout, run_timeout, output_max_size',
+ default: {},
+ parser: parse_overrides,
+ validators: [
+ x => !!x || `Invalid JSON format for the overrides\n${x}`,
+ (overrides, _, options) =>
+ validate_overrides(overrides, options) ||
+ `Failed to validate the overrides`,
+ ],
+ },
];
logger.info(`Loading Configuration from environment`);
@@ -143,12 +220,12 @@ options.forEach(option => {
const parsed_val = parser(env_val);
- const value = env_val || option.default;
+ const value = env_val === undefined ? option.default : parsed_val;
option.validators.for_each(validator => {
let response = null;
- if (env_val) response = validator(parsed_val, env_val);
- else response = validator(value, value);
+ if (env_val) response = validator(parsed_val, env_val, options);
+ else response = validator(value, value, options);
if (response !== true) {
errored = true;
diff --git a/api/src/job.js b/api/src/job.js
index bffd0ea..c562693 100644
--- a/api/src/job.js
+++ b/api/src/job.js
@@ -1,10 +1,12 @@
-const logger = require('logplease').create('job');
+const logplease = require('logplease');
+const logger = logplease.create('job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('./config');
const globals = require('./globals');
const fs = require('fs/promises');
+const fss = require('fs');
const wait_pid = require('waitpid');
const job_states = {
@@ -16,30 +18,34 @@ const job_states = {
let uid = 0;
let gid = 0;
-let remainingJobSpaces = config.max_concurrent_jobs;
+let remaining_job_spaces = config.max_concurrent_jobs;
let jobQueue = [];
-
-setInterval(()=>{
+setInterval(() => {
// Every 10ms try resolve a new job, if there is an available slot
- if(jobQueue.length > 0 && remainingJobSpaces > 0){
- jobQueue.shift()()
+ if (jobQueue.length > 0 && remaining_job_spaces > 0) {
+ jobQueue.shift()();
}
-}, 10)
-
-
+}, 10);
class Job {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
this.uuid = uuidv4();
+
+ this.logger = logplease.create(`job/${this.uuid}`);
+
this.runtime = runtime;
this.files = files.map((file, i) => ({
name: file.name || `file${i}.code`,
content: file.content,
+ encoding: ['base64', 'hex', 'utf8'].includes(file.encoding)
+ ? file.encoding
+ : 'utf8',
}));
this.args = args;
this.stdin = stdin;
+
this.timeouts = timeouts;
this.memory_limits = memory_limits;
@@ -52,6 +58,8 @@ class Job {
uid %= config.runner_uid_max - config.runner_uid_min + 1;
gid %= config.runner_gid_max - config.runner_gid_min + 1;
+ this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`);
+
this.state = job_states.READY;
this.dir = path.join(
config.data_directory,
@@ -61,39 +69,45 @@ class Job {
}
async prime() {
- if(remainingJobSpaces < 1){
- logger.info(`Awaiting job slot uuid=${this.uuid}`)
- await new Promise((resolve)=>{
- jobQueue.push(resolve)
- })
+ if (remaining_job_spaces < 1) {
+ this.logger.info(`Awaiting job slot`);
+ await new Promise(resolve => {
+ jobQueue.push(resolve);
+ });
}
- logger.info(`Priming job uuid=${this.uuid}`);
- remainingJobSpaces--;
- logger.debug('Writing files to job cache');
+ this.logger.info(`Priming job`);
+ remaining_job_spaces--;
+ this.logger.debug('Writing files to job cache');
- logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);
+ this.logger.debug(`Transfering ownership`);
await fs.mkdir(this.dir, { mode: 0o700 });
await fs.chown(this.dir, this.uid, this.gid);
for (const file of this.files) {
- let file_path = path.join(this.dir, file.name);
+ const file_path = path.join(this.dir, file.name);
const rel = path.relative(this.dir, file_path);
+ const file_content = Buffer.from(file.content, file.encoding);
- if(rel.startsWith(".."))
- throw Error(`File path "${file.name}" tries to escape parent directory: ${rel}`)
+ if (rel.startsWith('..'))
+ throw Error(
+ `File path "${file.name}" tries to escape parent directory: ${rel}`
+ );
- await fs.mkdir(path.dirname(file_path), {recursive: true, mode: 0o700})
+ await fs.mkdir(path.dirname(file_path), {
+ recursive: true,
+ mode: 0o700,
+ });
await fs.chown(path.dirname(file_path), this.uid, this.gid);
- await fs.write_file(file_path, file.content);
+ await fs.write_file(file_path, file_content);
await fs.chown(file_path, this.uid, this.gid);
}
this.state = job_states.PRIMED;
- logger.debug('Primed job');
+ this.logger.debug('Primed job');
}
async safe_call(file, args, timeout, memory_limit, eventBus = null) {
@@ -102,26 +116,29 @@ class Job {
const prlimit = [
'prlimit',
- '--nproc=' + config.max_process_count,
- '--nofile=' + config.max_open_files,
- '--fsize=' + config.max_file_size,
+ '--nproc=' + this.runtime.max_process_count,
+ '--nofile=' + this.runtime.max_open_files,
+ '--fsize=' + this.runtime.max_file_size,
];
if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit);
}
- const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];
+ const proc_call = [
+ 'nice',
+ ...prlimit,
+ ...nonetwork,
+ 'bash',
+ file,
+ ...args,
+ ];
var stdout = '';
var stderr = '';
var output = '';
const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
- env: {
- ...this.runtime.env_vars,
- PISTON_LANGUAGE: this.runtime.language,
- },
stdio: 'pipe',
cwd: this.dir,
uid: this.uid,
@@ -129,36 +146,34 @@ class Job {
detached: true, //give this process its own process group
});
- if(eventBus === null){
+ if (eventBus === null) {
proc.stdin.write(this.stdin);
proc.stdin.end();
proc.stdin.destroy();
- }else{
- eventBus.on("stdin", (data) => {
+ } else {
+ eventBus.on('stdin', data => {
proc.stdin.write(data);
- })
+ });
- eventBus.on("kill", (signal) => {
- proc.kill(signal)
- })
+ eventBus.on('kill', signal => {
+ proc.kill(signal);
+ });
}
-
-
- const kill_timeout = set_timeout(
- async _ => {
- logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`)
- process.kill(proc.pid, 'SIGKILL')
- },
- timeout
- );
+ const kill_timeout =
+ (timeout >= 0 &&
+ set_timeout(async _ => {
+ this.logger.info(`Timeout exceeded timeout=${timeout}`);
+ process.kill(proc.pid, 'SIGKILL');
+ }, timeout)) ||
+ null;
proc.stderr.on('data', async data => {
- if(eventBus !== null) {
- eventBus.emit("stderr", data);
- } else if (stderr.length > config.output_max_size) {
- logger.info(`stderr length exceeded uuid=${this.uuid}`)
- process.kill(proc.pid, 'SIGKILL')
+ if (eventBus !== null) {
+ eventBus.emit('stderr', data);
+ } else if (stderr.length > this.runtime.output_max_size) {
+ this.logger.info(`stderr length exceeded`);
+ process.kill(proc.pid, 'SIGKILL');
} else {
stderr += data;
output += data;
@@ -166,35 +181,35 @@ class Job {
});
proc.stdout.on('data', async data => {
- if(eventBus !== null){
- eventBus.emit("stdout", data);
- } else if (stdout.length > config.output_max_size) {
- logger.info(`stdout length exceeded uuid=${this.uuid}`)
- process.kill(proc.pid, 'SIGKILL')
+ if (eventBus !== null) {
+ eventBus.emit('stdout', data);
+ } else if (stdout.length > this.runtime.output_max_size) {
+ this.logger.info(`stdout length exceeded`);
+ process.kill(proc.pid, 'SIGKILL');
} else {
stdout += data;
output += data;
}
});
- const exit_cleanup = async () => {
+ const exit_cleanup = () => {
clear_timeout(kill_timeout);
proc.stderr.destroy();
proc.stdout.destroy();
- await this.cleanup_processes()
- logger.debug(`Finished exit cleanup uuid=${this.uuid}`)
+ this.cleanup_processes();
+ this.logger.debug(`Finished exit cleanup`);
};
- proc.on('exit', async (code, signal) => {
- await exit_cleanup();
+ proc.on('exit', (code, signal) => {
+ exit_cleanup();
- resolve({stdout, stderr, code, signal, output });
+ resolve({ stdout, stderr, code, signal, output });
});
- proc.on('error', async err => {
- await exit_cleanup();
+ proc.on('error', err => {
+ exit_cleanup();
reject({ error: err, stdout, stderr, output });
});
@@ -209,13 +224,13 @@ class Job {
);
}
- logger.info(
- `Executing job uuid=${this.uuid} uid=${this.uid} gid=${
- this.gid
- } runtime=${this.runtime.toString()}`
- );
+ this.logger.info(`Executing job runtime=${this.runtime.toString()}`);
- logger.debug('Compiling');
+ const code_files =
+ (this.runtime.language === 'file' && this.files) ||
+ this.files.filter(file => file.encoding == 'utf8');
+
+ this.logger.debug('Compiling');
let compile;
@@ -228,11 +243,11 @@ class Job {
);
}
- logger.debug('Running');
+ this.logger.debug('Running');
const run = await this.safe_call(
this.runtime.run,
- [this.files[0].name, ...this.args],
+ [code_files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run
);
@@ -247,7 +262,7 @@ class Job {
};
}
- async execute_interactive(eventBus){
+ async execute_interactive(eventBus) {
if (this.state !== job_states.PRIMED) {
throw new Error(
'Job must be in primed state, current state: ' +
@@ -255,84 +270,98 @@ class Job {
);
}
- logger.info(
- `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
- this.gid
- } runtime=${this.runtime.toString()}`
+ this.logger.info(
+ `Interactively executing job runtime=${this.runtime.toString()}`
);
- if(this.runtime.compiled){
- eventBus.emit("stage", "compile")
- const {error, code, signal} = await this.safe_call(
- this.runtime.compile,
- this.files.map(x => x.name),
+ const code_files =
+ (this.runtime.language === 'file' && this.files) ||
+ this.files.filter(file => file.encoding == 'utf8');
+
+ if (this.runtime.compiled) {
+ eventBus.emit('stage', 'compile');
+ const { error, code, signal } = await this.safe_call(
+ path.join(this.runtime.pkgdir, 'compile'),
+ code_files.map(x => x.name),
this.timeouts.compile,
this.memory_limits.compile,
eventBus
- )
+ );
- eventBus.emit("exit", "compile", {error, code, signal})
+ eventBus.emit('exit', 'compile', { error, code, signal });
}
- logger.debug('Running');
- eventBus.emit("stage", "run")
- const {error, code, signal} = await this.safe_call(
- this.runtime.run,
- [this.files[0].name, ...this.args],
+ this.logger.debug('Running');
+ eventBus.emit('stage', 'run');
+ const { error, code, signal } = await this.safe_call(
+ path.join(this.runtime.pkgdir, 'run'),
+ [code_files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run,
eventBus
);
- eventBus.emit("exit", "run", {error, code, signal})
+ eventBus.emit('exit', 'run', { error, code, signal });
-
this.state = job_states.EXECUTED;
}
- async cleanup_processes(dont_wait = []) {
+ cleanup_processes(dont_wait = []) {
let processes = [1];
- logger.debug(`Cleaning up processes uuid=${this.uuid}`)
+ const to_wait = [];
+ this.logger.debug(`Cleaning up processes`);
while (processes.length > 0) {
- processes = []
+ processes = [];
+ const proc_ids = fss.readdir_sync('/proc');
- const proc_ids = await fs.readdir("/proc");
-
-
- processes = await Promise.all(proc_ids.map(async (proc_id) => {
- if(isNaN(proc_id)) return -1;
- try{
- const proc_status = await fs.read_file(path.join("/proc",proc_id,"status"));
- const proc_lines = proc_status.to_string().split("\n")
- const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
+ processes = proc_ids.map(proc_id => {
+ if (isNaN(proc_id)) return -1;
+ try {
+ const proc_status = fss.read_file_sync(
+ path.join('/proc', proc_id, 'status')
+ );
+ const proc_lines = proc_status.to_string().split('\n');
+ const state_line = proc_lines.find(line =>
+ line.starts_with('State:')
+ );
+ const uid_line = proc_lines.find(line =>
+ line.starts_with('Uid:')
+ );
const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
-
-
- if(ruid == this.uid || euid == this.uid)
- return parse_int(proc_id)
- }catch{
- return -1
+ const [_1, state, user_friendly] = state_line.split(/\s+/);
+
+ if (state == 'Z')
+ // Zombie process, just needs to be waited
+ return -1;
+ // We should kill in all other state (Sleep, Stopped & Running)
+
+ if (ruid == this.uid || euid == this.uid)
+ return parse_int(proc_id);
+ } catch {
+ return -1;
}
- return -1
- }))
-
- processes = processes.filter(p => p > 0)
-
- if(processes.length > 0)
- logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`)
+ return -1;
+ });
+ processes = processes.filter(p => p > 0);
+ if (processes.length > 0)
+ this.logger.debug(`Got processes to kill: ${processes}`);
for (const proc of processes) {
// First stop the processes, but keep their resources allocated so they cant re-fork
try {
process.kill(proc, 'SIGSTOP');
- } catch {
+ } catch (e) {
// Could already be dead
+ this.logger.debug(
+ `Got error while SIGSTOPping process ${proc}:`,
+ e
+ );
}
}
@@ -342,14 +371,27 @@ class Job {
process.kill(proc, 'SIGKILL');
} catch {
// Could already be dead and just needs to be waited on
+ this.logger.debug(
+ `Got error while SIGKILLing process ${proc}:`,
+ e
+ );
}
- if(!dont_wait.includes(proc))
- wait_pid(proc);
+ to_wait.push(proc);
}
}
- logger.debug(`Cleaned up processes uuid=${this.uuid}`)
+ this.logger.debug(
+ `Finished kill-loop, calling wait_pid to end any zombie processes`
+ );
+
+ for (const proc of to_wait) {
+ if (dont_wait.includes(proc)) continue;
+
+ wait_pid(proc);
+ }
+
+ this.logger.debug(`Cleaned up processes`);
}
async cleanup_filesystem() {
@@ -370,7 +412,7 @@ class Job {
}
} catch (e) {
// File was somehow deleted in the time that we read the dir to when we checked the file
- logger.warn(`Error removing file ${file_path}: ${e}`);
+ this.logger.warn(`Error removing file ${file_path}: ${e}`);
}
}
}
@@ -379,15 +421,15 @@ class Job {
}
async cleanup() {
- logger.info(`Cleaning up job uuid=${this.uuid}`);
+ this.logger.info(`Cleaning up job`);
+ this.cleanup_processes(); // Run process janitor, just incase there are any residual processes somehow
await this.cleanup_filesystem();
- remainingJobSpaces++;
+ remaining_job_spaces++;
}
}
-
module.exports = {
Job,
};
diff --git a/api/src/runtime.js b/api/src/runtime.js
index 02c416b..b239426 100644
--- a/api/src/runtime.js
+++ b/api/src/runtime.js
@@ -7,14 +7,36 @@ const path = require('path');
const runtimes = [];
-
class Runtime {
- constructor({ language, version, aliases, runtime, run, compile, packageSupport, flake_path }) {
+ constructor({
+ language,
+ version,
+ aliases,
+ runtime,
+ run,
+ compile,
+ packageSupport,
+ flake_path,
+ timeouts,
+ memory_limits,
+ max_process_count,
+ max_open_files,
+ max_file_size,
+ output_max_size,
+ }) {
this.language = language;
this.runtime = runtime;
+
+ this.timeouts = timeouts;
+ this.memory_limits = memory_limits;
+ this.max_process_count = max_process_count;
+ this.max_open_files = max_open_files;
+ this.max_file_size = max_file_size;
+ this.output_max_size = output_max_size;
+
this.aliases = aliases;
- this.version = version;
-
+ this.version = version;
+
this.run = run;
this.compile = compile;
@@ -22,58 +44,120 @@ class Runtime {
this.package_support = packageSupport;
}
- ensure_built(){
+ static compute_single_limit(
+ language_name,
+ limit_name,
+ language_limit_overrides
+ ) {
+ return (
+ (config.limit_overrides[language_name] &&
+ config.limit_overrides[language_name][limit_name]) ||
+ (language_limit_overrides &&
+ language_limit_overrides[limit_name]) ||
+ config[limit_name]
+ );
+ }
+
+ static compute_all_limits(language_name, language_limit_overrides) {
+ return {
+ timeouts: {
+ compile: this.compute_single_limit(
+ language_name,
+ 'compile_timeout',
+ language_limit_overrides
+ ),
+ run: this.compute_single_limit(
+ language_name,
+ 'run_timeout',
+ language_limit_overrides
+ ),
+ },
+ memory_limits: {
+ compile: this.compute_single_limit(
+ language_name,
+ 'compile_memory_limit',
+ language_limit_overrides
+ ),
+ run: this.compute_single_limit(
+ language_name,
+ 'run_memory_limit',
+ language_limit_overrides
+ ),
+ },
+ max_process_count: this.compute_single_limit(
+ language_name,
+ 'max_process_count',
+ language_limit_overrides
+ ),
+ max_open_files: this.compute_single_limit(
+ language_name,
+ 'max_open_files',
+ language_limit_overrides
+ ),
+ max_file_size: this.compute_single_limit(
+ language_name,
+ 'max_file_size',
+ language_limit_overrides
+ ),
+ output_max_size: this.compute_single_limit(
+ language_name,
+ 'output_max_size',
+ language_limit_overrides
+ ),
+ };
+ }
+
+ ensure_built() {
logger.info(`Ensuring ${this} is built`);
const flake_path = this.flake_path;
- function _ensure_built(key){
+ function _ensure_built(key) {
const command = `nix build ${flake_path}.metadata.${key} --no-link`;
- cp.execSync(command, {stdio: "pipe"})
+ cp.execSync(command, { stdio: 'pipe' });
}
- _ensure_built("run");
- if(this.compiled) _ensure_built("compile");
-
- logger.debug(`Finished ensuring ${this} is installed`)
+ _ensure_built('run');
+ if (this.compiled) _ensure_built('compile');
+ logger.debug(`Finished ensuring ${this} is installed`);
}
- static load_runtime(flake_key){
- logger.info(`Loading ${flake_key}`)
+ static load_runtime(flake_key) {
+ logger.info(`Loading ${flake_key}`);
const flake_path = `${config.flake_path}#pistonRuntimeSets.${config.runtime_set}.${flake_key}`;
const metadata_command = `nix eval --json ${flake_path}.metadata`;
const metadata = JSON.parse(cp.execSync(metadata_command));
-
+
const this_runtime = new Runtime({
...metadata,
- flake_path
+ ...Runtime.compute_all_limits(
+ metadata.language,
+ metadata.limitOverrides
+ ),
+ flake_path,
});
this_runtime.ensure_built();
runtimes.push(this_runtime);
-
-
- logger.debug(`Package ${flake_key} was loaded`);
+ logger.debug(`Package ${flake_key} was loaded`);
}
get compiled() {
return this.compile !== null;
}
- get id(){
+ get id() {
return runtimes.indexOf(this);
}
toString() {
return `${this.language}-${this.version}`;
}
-
}
module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.load_runtime = Runtime.load_runtime;
-
diff --git a/cli/.gitignore b/cli/.gitignore
deleted file mode 100644
index b512c09..0000000
--- a/cli/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
\ No newline at end of file
diff --git a/cli/commands/execute.js b/cli/commands/execute.js
index abb1f63..0d906bc 100644
--- a/cli/commands/execute.js
+++ b/cli/commands/execute.js
@@ -3,8 +3,44 @@ const path = require('path');
const chalk = require('chalk');
const WebSocket = require('ws');
-const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
-
+const SIGNALS = [
+ 'SIGABRT',
+ 'SIGALRM',
+ 'SIGBUS',
+ 'SIGCHLD',
+ 'SIGCLD',
+ 'SIGCONT',
+ 'SIGEMT',
+ 'SIGFPE',
+ 'SIGHUP',
+ 'SIGILL',
+ 'SIGINFO',
+ 'SIGINT',
+ 'SIGIO',
+ 'SIGIOT',
+ 'SIGLOST',
+ 'SIGPIPE',
+ 'SIGPOLL',
+ 'SIGPROF',
+ 'SIGPWR',
+ 'SIGQUIT',
+ 'SIGSEGV',
+ 'SIGSTKFLT',
+ 'SIGTSTP',
+ 'SIGSYS',
+ 'SIGTERM',
+ 'SIGTRAP',
+ 'SIGTTIN',
+ 'SIGTTOU',
+ 'SIGUNUSED',
+ 'SIGURG',
+ 'SIGUSR1',
+ 'SIGUSR2',
+ 'SIGVTALRM',
+ 'SIGXCPU',
+ 'SIGXFSZ',
+ 'SIGWINCH',
+];
exports.command = ['execute [args..]'];
exports.aliases = ['run'];
@@ -15,18 +51,18 @@ exports.builder = {
string: true,
desc: 'Set the version of the language to use',
alias: ['l'],
- default: '*'
+ default: '*',
},
stdin: {
boolean: true,
desc: 'Read input from stdin and pass to executor',
- alias: ['i']
+ alias: ['i'],
},
run_timeout: {
alias: ['rt', 'r'],
number: true,
desc: 'Milliseconds before killing run process',
- default: 3000
+ default: 3000,
},
compile_timeout: {
alias: ['ct', 'c'],
@@ -42,117 +78,126 @@ exports.builder = {
interactive: {
boolean: true,
alias: ['t'],
- desc: 'Run interactively using WebSocket transport'
+ desc: 'Run interactively using WebSocket transport',
},
status: {
boolean: true,
alias: ['s'],
- desc: 'Output additional status to stderr'
- }
+ desc: 'Output additional status to stderr',
+ },
};
-async function handle_interactive(files, argv){
- const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
+async function handle_interactive(files, argv) {
+ const ws = new WebSocket(
+ argv.pistonUrl.replace('http', 'ws') + '/api/v2/connect'
+ );
- const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
+ const log_message =
+ process.stderr.isTTY && argv.status ? console.error : () => {};
- process.on("exit", ()=>{
+ process.on('exit', () => {
ws.close();
process.stdin.end();
process.stdin.destroy();
- process.exit();
- })
+ process.exit();
+ });
- for(const signal of SIGNALS){
- process.on(signal, ()=>{
- ws.send(JSON.stringify({type: 'signal', signal}))
- })
+ for (const signal of SIGNALS) {
+ process.on(signal, () => {
+ ws.send(JSON.stringify({ type: 'signal', signal }));
+ });
}
-
-
- ws.on('open', ()=>{
+ ws.on('open', () => {
const request = {
- type: "init",
+ type: 'init',
language: argv.language,
version: argv['language_version'],
files: files,
args: argv.args,
compile_timeout: argv.ct,
- run_timeout: argv.rt
- }
+ run_timeout: argv.rt,
+ };
- ws.send(JSON.stringify(request))
- log_message(chalk.white.bold("Connected"))
+ ws.send(JSON.stringify(request));
+ log_message(chalk.white.bold('Connected'));
process.stdin.resume();
- process.stdin.on("data", (data) => {
- ws.send(JSON.stringify({
- type: "data",
- stream: "stdin",
- data: data.toString()
- }))
- })
- })
+ process.stdin.on('data', data => {
+ ws.send(
+ JSON.stringify({
+ type: 'data',
+ stream: 'stdin',
+ data: data.toString(),
+ })
+ );
+ });
+ });
- ws.on("close", (code, reason)=>{
+ ws.on('close', (code, reason) => {
log_message(
- chalk.white.bold("Disconnected: "),
- chalk.white.bold("Reason: "),
+ chalk.white.bold('Disconnected: '),
+ chalk.white.bold('Reason: '),
chalk.yellow(`"${reason}"`),
- chalk.white.bold("Code: "),
- chalk.yellow(`"${code}"`),
- )
- process.stdin.pause()
- })
+ chalk.white.bold('Code: '),
+ chalk.yellow(`"${code}"`)
+ );
+ process.stdin.pause();
+ });
- ws.on('message', function(data){
+ ws.on('message', function (data) {
const msg = JSON.parse(data);
-
- switch(msg.type){
- case "runtime":
- log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
+
+ switch (msg.type) {
+ case 'runtime':
+ log_message(
+ chalk.bold.white('Runtime:'),
+ chalk.yellow(`${msg.language} ${msg.version}`)
+ );
break;
- case "stage":
- log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
+ case 'stage':
+ log_message(
+ chalk.bold.white('Stage:'),
+ chalk.yellow(msg.stage)
+ );
break;
- case "data":
- if(msg.stream == "stdout") process.stdout.write(msg.data)
- else if(msg.stream == "stderr") process.stderr.write(msg.data)
- else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
+ case 'data':
+ if (msg.stream == 'stdout') process.stdout.write(msg.data);
+ else if (msg.stream == 'stderr') process.stderr.write(msg.data);
+ else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data);
break;
- case "exit":
- if(msg.signal === null)
+ case 'exit':
+ if (msg.signal === null)
log_message(
- chalk.white.bold("Stage"),
+ chalk.white.bold('Stage'),
chalk.yellow(msg.stage),
- chalk.white.bold("exited with code"),
+ chalk.white.bold('exited with code'),
chalk.yellow(msg.code)
- )
+ );
else
log_message(
- chalk.white.bold("Stage"),
+ chalk.white.bold('Stage'),
chalk.yellow(msg.stage),
- chalk.white.bold("exited with signal"),
+ chalk.white.bold('exited with signal'),
chalk.yellow(msg.signal)
- )
- break;
+ );
+ break;
default:
- log_message(chalk.red.bold("Unknown message:"), msg)
+ log_message(chalk.red.bold('Unknown message:'), msg);
}
- })
-
+ });
}
async function run_non_interactively(files, argv) {
-
-
- const stdin = (argv.stdin && await new Promise((resolve, _) => {
- let data = '';
- process.stdin.on('data', d => data += d);
- process.stdin.on('end', _ => resolve(data));
- })) || '';
+ const stdin =
+ (argv.stdin &&
+ (await new Promise((resolve, _) => {
+ let data = '';
+ process.stdin.on('data', d => (data += d));
+ process.stdin.on('end', _ => resolve(data));
+ }))) ||
+ '';
const request = {
language: argv.language,
@@ -161,7 +206,7 @@ async function run_non_interactively(files, argv) {
args: argv.args,
stdin,
compile_timeout: argv.ct,
- run_timeout: argv.rt
+ run_timeout: argv.rt,
};
let { data: response } = await argv.axios.post('/api/v2/execute', request);
@@ -170,13 +215,13 @@ async function run_non_interactively(files, argv) {
console.log(chalk.bold(`== ${name} ==`));
if (ctx.stdout) {
- console.log(chalk.bold(`STDOUT`))
- console.log(ctx.stdout.replace(/\n/g,'\n '))
+ console.log(chalk.bold(`STDOUT`));
+ console.log(ctx.stdout.replace(/\n/g, '\n '));
}
if (ctx.stderr) {
- console.log(chalk.bold(`STDERR`))
- console.log(ctx.stderr.replace(/\n/g,'\n '))
+ console.log(chalk.bold(`STDERR`));
+ console.log(ctx.stderr.replace(/\n/g, '\n '));
}
if (ctx.code) {
@@ -187,12 +232,9 @@ async function run_non_interactively(files, argv) {
}
if (ctx.signal) {
- console.log(
- chalk.bold(`Signal:`),
- chalk.bold.yellow(ctx.signal)
- );
+ console.log(chalk.bold(`Signal:`), chalk.bold.yellow(ctx.signal));
}
- }
+ };
if (response.compile) {
step('Compile', response.compile);
@@ -201,17 +243,23 @@ async function run_non_interactively(files, argv) {
step('Run', response.run);
}
-exports.handler = async (argv) => {
- const files = [...(argv.files || []),argv.file]
- .map(file_path => {
- return {
- name: path.basename(file_path),
- content: fs.readFileSync(file_path).toString()
- };
- });
+exports.handler = async argv => {
+ const files = [...(argv.files || []), argv.file].map(file_path => {
+ const buffer = fs.readFileSync(file_path);
+ const encoding =
+ (buffer
+ .toString()
+ .split('')
+ .some(x => x.charCodeAt(0) >= 128) &&
+ 'base64') ||
+ 'utf8';
+ return {
+ name: path.basename(file_path),
+ content: buffer.toString(encoding),
+ encoding,
+ };
+ });
- if(argv.interactive) await handle_interactive(files, argv);
+ if (argv.interactive) await handle_interactive(files, argv);
else await run_non_interactively(files, argv);
-}
-
-
+};
diff --git a/cli/index.js b/cli/index.js
index c0c25ee..340cdab 100755
--- a/cli/index.js
+++ b/cli/index.js
@@ -6,8 +6,8 @@ const axios_instance = argv => {
argv.axios = axios.create({
baseURL: argv['piston-url'],
headers: {
- 'Content-Type': 'application/json'
- }
+ 'Content-Type': 'application/json',
+ },
});
return argv;
@@ -18,12 +18,11 @@ require('yargs')(process.argv.slice(2))
alias: ['u'],
default: 'http://127.0.0.1:2000',
desc: 'Piston API URL',
- string: true
+ string: true,
})
.middleware(axios_instance)
.scriptName('piston')
.commandDir('commands')
.demandCommand()
.help()
- .wrap(72)
- .argv;
+ .wrap(72).argv;
diff --git a/docs/api-v2.md b/docs/api-v2.md
index 111b514..b25e142 100644
--- a/docs/api-v2.md
+++ b/docs/api-v2.md
@@ -17,10 +17,10 @@ Returns a list of available languages, including the version, runtime and aliase
#### Response
-- `[].language`: Name of the language
-- `[].version`: Version of the runtime
-- `[].aliases`: List of alternative names that can be used for the language
-- `[].runtime` (_optional_): Name of the runtime used to run the langage, only provided if alternative runtimes exist for the language
+- `[].language`: Name of the language
+- `[].version`: Version of the runtime
+- `[].aliases`: List of alternative names that can be used for the language
+- `[].runtime` (_optional_): Name of the runtime used to run the language, only provided if alternative runtimes exist for the language
#### Example
@@ -55,34 +55,35 @@ Runs the given code, using the given runtime and arguments, returning the result
#### Request
-- `language`: Name or alias of a language listed in [runtimes](#runtimes)
-- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
-- `files`: An array of files which should be uploaded into the job context
-- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
-- `files[].content`: Content of file to be written
-- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
-- `args` (_optional_): Arguments to pass to the program. Defaults to none
-- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
-- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
-- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
-- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
+- `language`: Name or alias of a language listed in [runtimes](#runtimes)
+- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
+- `files`: An array of files which should be uploaded into the job context
+- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
+- `files[].content`: Content of file to be written
+- `files[].encoding` (_optional_): The encoding scheme used for the file content. One of `base64`, `hex` or `utf8`. Defaults to `utf8`.
+- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
+- `args` (_optional_): Arguments to pass to the program. Defaults to none
+- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
+- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
+- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
+- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
#### Response
-- `language`: Name (not alias) of the runtime used
-- `version`: Version of the used runtime
-- `run`: Results from the run stage
-- `run.stdout`: stdout from run stage process
-- `run.stderr`: stderr from run stage process
-- `run.output`: stdout and stderr combined in order of data from run stage process
-- `run.code`: Exit code from run process, or null if signal is not null
-- `run.signal`: Signal from run process, or null if code is not null
-- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
-- `compile.stdout`: stdout from compile stage process
-- `compile.stderr`: stderr from compile stage process
-- `compile.output`: stdout and stderr combined in order of data from compile stage process
-- `compile.code`: Exit code from compile process, or null if signal is not null
-- `compile.signal`: Signal from compile process, or null if code is not null
+- `language`: Name (not alias) of the runtime used
+- `version`: Version of the used runtime
+- `run`: Results from the run stage
+- `run.stdout`: stdout from run stage process
+- `run.stderr`: stderr from run stage process
+- `run.output`: stdout and stderr combined in order of data from run stage process
+- `run.code`: Exit code from run process, or null if signal is not null
+- `run.signal`: Signal from run process, or null if code is not null
+- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
+- `compile.stdout`: stdout from compile stage process
+- `compile.stderr`: stderr from compile stage process
+- `compile.output`: stdout and stderr combined in order of data from compile stage process
+- `compile.code`: Exit code from compile process, or null if signal is not null
+- `compile.signal`: Signal from compile process, or null if code is not null
#### Example
@@ -133,9 +134,9 @@ Returns a list of all possible packages, and whether their installation status.
#### Response
-- `[].language`: Name of the contained runtime
-- `[].language_version`: Version of the contained runtime
-- `[].installed`: Status on the package being installed
+- `[].language`: Name of the contained runtime
+- `[].language_version`: Version of the contained runtime
+- `[].installed`: Status on the package being installed
#### Example
@@ -167,13 +168,13 @@ Install the given package.
#### Request
-- `language`: Name of package from [package list](#get-apiv2packages)
-- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
+- `language`: Name of package from [package list](#get-apiv2packages)
+- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
-- `language`: Name of package installed
-- `version`: Version of package installed
+- `language`: Name of package installed
+- `version`: Version of package installed
#### Example
@@ -203,13 +204,13 @@ Uninstall the given package.
#### Request
-- `language`: Name of package from [package list](#get-apiv2packages)
-- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
+- `language`: Name of package from [package list](#get-apiv2packages)
+- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
-- `language`: Name of package uninstalled
-- `version`: Version of package uninstalled
+- `language`: Name of package uninstalled
+- `version`: Version of package uninstalled
#### Example
diff --git a/docs/configuration.md b/docs/configuration.md
index 16a5df0..1a6f5bd 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -50,15 +50,15 @@ Absolute path to piston related data, including packages and job contexts.
```yaml
key:
- - PISTON_RUNNER_UID_MIN
- - PISTON_RUNNER_UID_MAX
- - PISTON_RUNNER_GID_MIN
- - PISTON_RUNNER_GID_MAX
+ - PISTON_RUNNER_UID_MIN
+ - PISTON_RUNNER_UID_MAX
+ - PISTON_RUNNER_GID_MIN
+ - PISTON_RUNNER_GID_MAX
default:
- - 1001
- - 1500
- - 1001
- - 1500
+ - 1001
+ - 1500
+ - 1001
+ - 1500
```
UID and GID ranges to use when executing jobs.
@@ -86,11 +86,11 @@ key: PISTON_MAX_PROCESS_COUNT
default: 64
```
-Maximum number of processess allowed to to have open for a job.
+Maximum number of processes allowed to have open for a job.
Resists against exhausting the process table, causing a full system lockup.
-## Output Max Side
+## Output Max Size
```yaml
key: PISTON_OUTPUT_MAX_SIZE
@@ -123,12 +123,27 @@ Maximum size for a singular file written to disk.
Resists against large file writes to exhaust disk space.
+## Compile/Run timeouts
+
+```yaml
+key:
+ - PISTON_COMPILE_TIMEOUT
+default: 10000
+
+key:
+ - PISTON_RUN_TIMEOUT
+default: 3000
+```
+
+The maximum time that is allowed to be taken by a stage in milliseconds.
+Use -1 for unlimited time.
+
## Compile/Run memory limits
```yaml
key:
- - PISTON_COMPILE_MEMORY_LIMIT
- - PISTON_RUN_MEMORY_LIMIT
+ - PISTON_COMPILE_MEMORY_LIMIT
+ - PISTON_RUN_MEMORY_LIMIT
default: -1
```
@@ -154,3 +169,19 @@ default: 64
```
Maximum number of jobs to run concurrently.
+
+## Limit overrides
+
+```yaml
+key: PISTON_LIMIT_OVERRIDES
+default: {}
+```
+
+Per-language overrides/exceptions for each of `max_process_count`, `max_open_files`, `max_file_size`,
+`compile_memory_limit`, `run_memory_limit`, `compile_timeout`, `run_timeout`, `output_max_size`. Defined as follows:
+
+```
+PISTON_LIMIT_OVERRIDES={"c++":{"max_process_count":128}}
+```
+
+This will give `c++` a max_process_count of 128 regardless of the configuration.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 53dbf05..33ce51e 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1 +1 @@
-mkdocs==1.1.2
\ No newline at end of file
+mkdocs==1.2.3
\ No newline at end of file
diff --git a/flake.nix b/flake.nix
index 78b2ee1..fde9cee 100644
--- a/flake.nix
+++ b/flake.nix
@@ -21,6 +21,7 @@
compile? null,
packages? null,
aliases? [],
+ limitOverrides? {},
tests
}: let
compileFile = if compile != null then
@@ -28,7 +29,7 @@
else null;
runFile = pkgs.writeShellScript "run" run;
metadata = {
- inherit language version runtime aliases;
+ inherit language version runtime aliases limitOverrides;
run = runFile;
compile = compileFile;
packageSupport = packages != null;
diff --git a/mkdocs.yml b/mkdocs.yml
index 148ba91..a6ef999 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,15 +1,15 @@
site_name: Piston
nav:
- - Home: index.md
- - Configuration: configuration.md
- - API: api-v2.md
+ - Home: index.md
+ - Configuration: configuration.md
+ - API: api-v2.md
theme:
- name: readthedocs
- highlightjs: true
- hljs_languages:
- - yaml
- - json
+ name: readthedocs
+ highlightjs: true
+ hljs_languages:
+ - yaml
+ - json
markdown_extensions:
- - admonition
+ - admonition
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 0000000..5c51a1d
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,32 @@
+{
+ "name": "piston",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "devDependencies": {
+ "prettier": "2.4.1"
+ }
+ },
+ "node_modules/prettier": {
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
+ "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
+ "dev": true,
+ "bin": {
+ "prettier": "bin-prettier.js"
+ },
+ "engines": {
+ "node": ">=10.13.0"
+ }
+ }
+ },
+ "dependencies": {
+ "prettier": {
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
+ "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
+ "dev": true
+ }
+ }
+}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000..8f07606
--- /dev/null
+++ b/package.json
@@ -0,0 +1,5 @@
+{
+ "devDependencies": {
+ "prettier": "2.4.1"
+ }
+}
diff --git a/packages/befunge93/0.2.0/build.sh b/packages/befunge93/0.2.0/build.sh
new file mode 100644
index 0000000..de6bdbf
--- /dev/null
+++ b/packages/befunge93/0.2.0/build.sh
@@ -0,0 +1,15 @@
+#!/usr/bin/env bash
+
+# source python 2.7
+source ../../python/2.7.18/build.sh
+
+# clone befunge repo
+git clone -q 'https://github.com/programble/befungee' befunge93
+
+# go inside befunge93 so we can checkout
+cd befunge93
+
+# checkout the version 0.2.0
+git checkout tags/v0.2.0
+
+cd ..
\ No newline at end of file
diff --git a/packages/dash/0.5.11/environment b/packages/befunge93/0.2.0/environment
similarity index 74%
rename from packages/dash/0.5.11/environment
rename to packages/befunge93/0.2.0/environment
index 780b668..43f2db1 100644
--- a/packages/dash/0.5.11/environment
+++ b/packages/befunge93/0.2.0/environment
@@ -2,3 +2,4 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
+export BEFUNGE93_PATH=$PWD/befunge93
\ No newline at end of file
diff --git a/packages/befunge93/0.2.0/metadata.json b/packages/befunge93/0.2.0/metadata.json
new file mode 100644
index 0000000..16c2643
--- /dev/null
+++ b/packages/befunge93/0.2.0/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "befunge93",
+ "version": "0.2.0",
+ "aliases": ["b93"]
+}
diff --git a/packages/befunge93/0.2.0/run b/packages/befunge93/0.2.0/run
new file mode 100644
index 0000000..3d95114
--- /dev/null
+++ b/packages/befunge93/0.2.0/run
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+# run the befunge program with the file name
+python2.7 "$BEFUNGE93_PATH"/befungee.py "$1"
\ No newline at end of file
diff --git a/packages/befunge93/0.2.0/test.b93 b/packages/befunge93/0.2.0/test.b93
new file mode 100644
index 0000000..4cb5175
--- /dev/null
+++ b/packages/befunge93/0.2.0/test.b93
@@ -0,0 +1 @@
+64+"KO">:#,_@
\ No newline at end of file
diff --git a/packages/brachylog/1.0.0/build.sh b/packages/brachylog/1.0.0/build.sh
new file mode 100644
index 0000000..8f35ef2
--- /dev/null
+++ b/packages/brachylog/1.0.0/build.sh
@@ -0,0 +1,20 @@
+#!/usr/bin/env bash
+
+# build prolog 8.2.4 as dependency
+source ../../prolog/8.2.4/build.sh
+
+# curl brachylog 1.0.0
+curl -L "https://github.com/JCumin/Brachylog/archive/refs/tags/v1.0-ascii.tar.gz" -o brachylog.tar.gz
+tar xzf brachylog.tar.gz --strip-components=1
+rm brachylog.tar.gz
+
+# move swi prolog to working directory
+cp bin/swipl swipl
+
+# give execution permission to swipl
+chmod +x swipl
+
+# add some code to brachylog.pl so we don't have to escape backslashes while using the interactive mode
+echo '
+
+:-feature(argv, [Code, Stdin]), run_from_atom(Code, Stdin, _), halt.' >> prolog_parser/brachylog.pl
\ No newline at end of file
diff --git a/packages/dart/2.12.1/environment b/packages/brachylog/1.0.0/environment
similarity index 80%
rename from packages/dart/2.12.1/environment
rename to packages/brachylog/1.0.0/environment
index 780b668..7012c44 100644
--- a/packages/dart/2.12.1/environment
+++ b/packages/brachylog/1.0.0/environment
@@ -2,3 +2,4 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
+export BRACHYLOG_PATH=$PWD
\ No newline at end of file
diff --git a/packages/brachylog/1.0.0/metadata.json b/packages/brachylog/1.0.0/metadata.json
new file mode 100644
index 0000000..d5df839
--- /dev/null
+++ b/packages/brachylog/1.0.0/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "brachylog",
+ "version": "1.0.0",
+ "aliases": []
+}
diff --git a/packages/brachylog/1.0.0/run b/packages/brachylog/1.0.0/run
new file mode 100644
index 0000000..16ce3cd
--- /dev/null
+++ b/packages/brachylog/1.0.0/run
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+# save the file for later
+file="$1"
+
+# remove the file from $@
+shift
+
+# save stdin as $@ joined by newlines
+stdin=`printf "%s\n" "$@"`
+
+# save code as the contents of $file
+code=`cat "$file"`
+
+# go to the directory where brachylog.pl is so the imports work
+cd "$BRACHYLOG_PATH"/prolog_parser
+
+# run swi prolog with code and stdin
+swipl -f brachylog.pl "$code" "$stdin"
\ No newline at end of file
diff --git a/packages/brachylog/1.0.0/test.brachylog b/packages/brachylog/1.0.0/test.brachylog
new file mode 100644
index 0000000..d90d668
--- /dev/null
+++ b/packages/brachylog/1.0.0/test.brachylog
@@ -0,0 +1 @@
+"OK"w
\ No newline at end of file
diff --git a/packages/cjam/0.6.5/metadata.json b/packages/cjam/0.6.5/metadata.json
index af510fd..bd25bde 100644
--- a/packages/cjam/0.6.5/metadata.json
+++ b/packages/cjam/0.6.5/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "cjam",
- "version": "0.6.5",
- "aliases": []
+ "language": "cjam",
+ "version": "0.6.5",
+ "aliases": []
}
diff --git a/packages/crystal/0.36.1/build.sh b/packages/crystal/0.36.1/build.sh
deleted file mode 100755
index ba10f3f..0000000
--- a/packages/crystal/0.36.1/build.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-PREFIX=$(realpath $(dirname $0))
-
-curl -L "https://github.com/crystal-lang/crystal/releases/download/0.36.1/crystal-0.36.1-1-linux-x86_64.tar.gz" -o crystal.tar.gz
-tar xzf crystal.tar.gz --strip-components=1
-rm crystal.tar.gz
diff --git a/packages/crystal/0.36.1/compile b/packages/crystal/0.36.1/compile
deleted file mode 100644
index afbad6c..0000000
--- a/packages/crystal/0.36.1/compile
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-# Compile crystal files into out file
-crystal build "$@" -o out --no-color && \
-chmod +x out
diff --git a/packages/crystal/0.36.1/environment b/packages/crystal/0.36.1/environment
deleted file mode 100644
index bd0ff98..0000000
--- a/packages/crystal/0.36.1/environment
+++ /dev/null
@@ -1 +0,0 @@
-export PATH=$PWD/bin:$PATH
\ No newline at end of file
diff --git a/packages/crystal/0.36.1/metadata.json b/packages/crystal/0.36.1/metadata.json
deleted file mode 100644
index ee995eb..0000000
--- a/packages/crystal/0.36.1/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "crystal",
- "version": "0.36.1",
- "aliases": ["crystal", "cr"]
-}
diff --git a/packages/crystal/0.36.1/run b/packages/crystal/0.36.1/run
deleted file mode 100644
index 6955ba9..0000000
--- a/packages/crystal/0.36.1/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-shift # Filename is only used to compile
-./out "$@"
diff --git a/packages/crystal/0.36.1/test.cr b/packages/crystal/0.36.1/test.cr
deleted file mode 100644
index 3fca0dd..0000000
--- a/packages/crystal/0.36.1/test.cr
+++ /dev/null
@@ -1 +0,0 @@
-puts("OK")
\ No newline at end of file
diff --git a/packages/dart/2.12.1/build.sh b/packages/dart/2.12.1/build.sh
deleted file mode 100755
index d202d7f..0000000
--- a/packages/dart/2.12.1/build.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/usr/bin/env bash
-
-curl -L "https://storage.googleapis.com/dart-archive/channels/stable/release/2.12.1/sdk/dartsdk-linux-x64-release.zip" -o dart.zip
-
-unzip dart.zip
-rm dart.zip
-
-cp -r dart-sdk/* .
-rm -rf dart-sdk
-
-chmod -R +rx bin
diff --git a/packages/dart/2.12.1/metadata.json b/packages/dart/2.12.1/metadata.json
deleted file mode 100644
index cec3d77..0000000
--- a/packages/dart/2.12.1/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "dart",
- "version": "2.12.1",
- "aliases": []
-}
diff --git a/packages/dart/2.12.1/run b/packages/dart/2.12.1/run
deleted file mode 100644
index aae792a..0000000
--- a/packages/dart/2.12.1/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-# Put instructions to run the runtime
-dart run "$@"
diff --git a/packages/dart/2.12.1/test.dart b/packages/dart/2.12.1/test.dart
deleted file mode 100644
index 27e87b2..0000000
--- a/packages/dart/2.12.1/test.dart
+++ /dev/null
@@ -1,3 +0,0 @@
-void main() {
- print('OK');
-}
\ No newline at end of file
diff --git a/packages/dash/0.5.11/build.sh b/packages/dash/0.5.11/build.sh
deleted file mode 100755
index 202d5aa..0000000
--- a/packages/dash/0.5.11/build.sh
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-# Put instructions to build your package in here
-PREFIX=$(realpath $(dirname $0))
-
-mkdir -p build
-
-cd build
-
-curl "http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.11.tar.gz" -o dash.tar.gz
-tar xzf dash.tar.gz --strip-components=1
-
-./configure --prefix "$PREFIX" &&
-make -j$(nproc) &&
-make install -j$(nproc)
-
-cd ../
-
-rm -rf build
diff --git a/packages/dash/0.5.11/metadata.json b/packages/dash/0.5.11/metadata.json
deleted file mode 100644
index a2e5be5..0000000
--- a/packages/dash/0.5.11/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "dash",
- "version": "0.5.11",
- "aliases": ["dash"]
-}
diff --git a/packages/dash/0.5.11/run b/packages/dash/0.5.11/run
deleted file mode 100644
index 6a8a1da..0000000
--- a/packages/dash/0.5.11/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-# Put instructions to run the runtime
-dash "$@"
diff --git a/packages/dash/0.5.11/test.dash b/packages/dash/0.5.11/test.dash
deleted file mode 100644
index 727518f..0000000
--- a/packages/dash/0.5.11/test.dash
+++ /dev/null
@@ -1 +0,0 @@
-echo "OK"
\ No newline at end of file
diff --git a/packages/deno/1.7.5/build.sh b/packages/deno/1.7.5/build.sh
deleted file mode 100755
index 165d3b6..0000000
--- a/packages/deno/1.7.5/build.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-curl -L https://github.com/denoland/deno/releases/download/v1.7.5/deno-x86_64-unknown-linux-gnu.zip --output deno.zip
-unzip -o deno.zip
-rm deno.zip
-
-chmod +x deno
diff --git a/packages/deno/1.7.5/environment b/packages/deno/1.7.5/environment
deleted file mode 100644
index 98fd770..0000000
--- a/packages/deno/1.7.5/environment
+++ /dev/null
@@ -1 +0,0 @@
-export PATH=$PWD:$PATH
\ No newline at end of file
diff --git a/packages/deno/1.7.5/metadata.json b/packages/deno/1.7.5/metadata.json
deleted file mode 100644
index d30608b..0000000
--- a/packages/deno/1.7.5/metadata.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
- "language": "deno",
- "version": "1.7.5",
- "provides": [
- {
- "language": "typescript",
- "aliases": ["deno-ts","deno"]
- },
- {
- "language": "javascript",
- "aliases": ["deno-js"]
- }
- ]
-}
diff --git a/packages/deno/1.7.5/run b/packages/deno/1.7.5/run
deleted file mode 100644
index d1b196f..0000000
--- a/packages/deno/1.7.5/run
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/bin/bash
-DENO_DIR=$PWD deno run "$@"
\ No newline at end of file
diff --git a/packages/deno/1.7.5/test.deno.ts b/packages/deno/1.7.5/test.deno.ts
deleted file mode 100644
index 56ed4a0..0000000
--- a/packages/deno/1.7.5/test.deno.ts
+++ /dev/null
@@ -1 +0,0 @@
-console.log("OK")
\ No newline at end of file
diff --git a/packages/dotnet/5.0.201/build.sh b/packages/dotnet/5.0.201/build.sh
old mode 100644
new mode 100755
index c685668..6318b07
--- a/packages/dotnet/5.0.201/build.sh
+++ b/packages/dotnet/5.0.201/build.sh
@@ -7,8 +7,10 @@ rm dotnet.tar.gz
# Cache nuget packages
export DOTNET_CLI_HOME=$PWD
./dotnet new console -o cache_application
+./dotnet new console -lang F# -o fs_cache_application
+./dotnet new console -lang VB -o vb_cache_application
# This calls a restore on the global-packages index ($DOTNET_CLI_HOME/.nuget/packages)
# If we want to allow more packages, we could add them to this cache_application
-rm -rf cache_application
-# Get rid of it, we don't actually need the application - just the restore
\ No newline at end of file
+rm -rf cache_application fs_cache_application vb_cache_application
+# Get rid of it, we don't actually need the application - just the restore
diff --git a/packages/dotnet/5.0.201/compile b/packages/dotnet/5.0.201/compile
index 8bfcc27..1c34213 100644
--- a/packages/dotnet/5.0.201/compile
+++ b/packages/dotnet/5.0.201/compile
@@ -1,15 +1,36 @@
#!/usr/bin/env bash
+[ "${PISTON_LANGUAGE}" == "fsi" ] && exit 0
+
export DOTNET_CLI_HOME=$PWD
export HOME=$PWD
-rename 's/$/\.cs/' "$@" # Add .cs extension
-
dotnet build --help > /dev/null # Shut the thing up
-dotnet new console -o . --no-restore
-rm Program.cs
+case "${PISTON_LANGUAGE}" in
+ basic.net)
+ rename 's/$/\.vb/' "$@" # Add .vb extension
+ dotnet new console -lang VB -o . --no-restore
+ rm Program.vb
+ ;;
+ fsharp.net)
+ first_file=$1
+ shift
+ rename 's/$/\.fs/' "$@" # Add .fs extension
+ dotnet new console -lang F# -o . --no-restore
+ mv $first_file Program.fs # For some reason F#.net doesn't work unless the file name is Program.fs
+ ;;
+ csharp.net)
+ rename 's/$/\.cs/' "$@" # Add .cs extension
+ dotnet new console -o . --no-restore
+ rm Program.cs
+ ;;
+ *)
+ echo "How did you get here? (${PISTON_LANGUAGE})"
+ exit 1
+ ;;
+esac
dotnet restore --source $DOTNET_ROOT/.nuget/packages
-dotnet build --no-restore
\ No newline at end of file
+dotnet build --no-restore
diff --git a/packages/dotnet/5.0.201/environment b/packages/dotnet/5.0.201/environment
index 596d56e..468463d 100644
--- a/packages/dotnet/5.0.201/environment
+++ b/packages/dotnet/5.0.201/environment
@@ -2,4 +2,5 @@
# Put 'export' statements here for environment variables
export DOTNET_ROOT=$PWD
-export PATH=$DOTNET_ROOT:$PATH
\ No newline at end of file
+export PATH=$DOTNET_ROOT:$PATH
+export FSI_PATH=$(find $(pwd) -name fsi.dll)
diff --git a/packages/dotnet/5.0.201/metadata.json b/packages/dotnet/5.0.201/metadata.json
index 619265d..7c73c58 100644
--- a/packages/dotnet/5.0.201/metadata.json
+++ b/packages/dotnet/5.0.201/metadata.json
@@ -1,5 +1,66 @@
{
"language": "dotnet",
"version": "5.0.201",
- "aliases": ["cs", "csharp"]
+ "provides": [
+ {
+ "language": "basic.net",
+ "aliases": [
+ "basic",
+ "visual-basic",
+ "visual-basic.net",
+ "vb",
+ "vb.net",
+ "vb-dotnet",
+ "dotnet-vb",
+ "basic-dotnet",
+ "dotnet-basic"
+ ],
+ "limit_overrides": { "max_process_count": 128 }
+ },
+ {
+ "language": "fsharp.net",
+ "aliases": [
+ "fsharp",
+ "fs",
+ "f#",
+ "fs.net",
+ "f#.net",
+ "fsharp-dotnet",
+ "fs-dotnet",
+ "f#-dotnet",
+ "dotnet-fsharp",
+ "dotnet-fs",
+ "dotnet-fs"
+ ],
+ "limit_overrides": { "max_process_count": 128 }
+ },
+ {
+ "language": "csharp.net",
+ "aliases": [
+ "csharp",
+ "c#",
+ "cs",
+ "c#.net",
+ "cs.net",
+ "c#-dotnet",
+ "cs-dotnet",
+ "csharp-dotnet",
+ "dotnet-c#",
+ "dotnet-cs",
+ "dotnet-csharp"
+ ],
+ "limit_overrides": { "max_process_count": 128 }
+ },
+ {
+ "language": "fsi",
+ "aliases": [
+ "fsx",
+ "fsharp-interactive",
+ "f#-interactive",
+ "dotnet-fsi",
+ "fsi-dotnet",
+ "fsi.net"
+ ]
+ }
+ ]
}
diff --git a/packages/dotnet/5.0.201/run b/packages/dotnet/5.0.201/run
index 774a08a..6b5c995 100644
--- a/packages/dotnet/5.0.201/run
+++ b/packages/dotnet/5.0.201/run
@@ -3,5 +3,23 @@
# Put instructions to run the runtime
export DOTNET_CLI_HOME=$PWD
-shift
-dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
\ No newline at end of file
+case "${PISTON_LANGUAGE}" in
+ basic.net)
+ ;&
+ fsharp.net)
+ ;&
+ csharp.net)
+ shift
+ dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
+ ;;
+ fsi)
+ FILENAME=$1
+ rename 's/$/\.fsx/' $FILENAME # Add .fsx extension
+ shift
+ dotnet $FSI_PATH $FILENAME.fsx "$@"
+ ;;
+ *)
+ echo "How did you get here? (${PISTON_LANGUAGE})"
+ exit 1
+ ;;
+esac
diff --git a/packages/dotnet/5.0.201/test.fs b/packages/dotnet/5.0.201/test.fs
new file mode 100644
index 0000000..006ac10
--- /dev/null
+++ b/packages/dotnet/5.0.201/test.fs
@@ -0,0 +1,6 @@
+open System
+
+[&lt;EntryPoint&gt;]
+let main argv =
+ printfn "OK"
+ 0
diff --git a/packages/dotnet/5.0.201/test.fsx b/packages/dotnet/5.0.201/test.fsx
new file mode 100644
index 0000000..33d166f
--- /dev/null
+++ b/packages/dotnet/5.0.201/test.fsx
@@ -0,0 +1 @@
+printfn "OK"
diff --git a/packages/dotnet/5.0.201/test.vb b/packages/dotnet/5.0.201/test.vb
new file mode 100644
index 0000000..291042e
--- /dev/null
+++ b/packages/dotnet/5.0.201/test.vb
@@ -0,0 +1,9 @@
+Imports System
+
+Module Module1
+
+ Sub Main()
+ Console.WriteLine("OK")
+ End Sub
+
+End Module
diff --git a/packages/dragon/1.9.8/metadata.json b/packages/dragon/1.9.8/metadata.json
index 86cfc4c..3fbc015 100644
--- a/packages/dragon/1.9.8/metadata.json
+++ b/packages/dragon/1.9.8/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "dragon",
- "version": "1.9.8",
- "aliases": []
+ "language": "dragon",
+ "version": "1.9.8",
+ "aliases": []
}
diff --git a/packages/elixir/1.11.3/build.sh b/packages/elixir/1.11.3/build.sh
deleted file mode 100755
index 9328d8c..0000000
--- a/packages/elixir/1.11.3/build.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-
-source ../../erlang/23.0.0/build.sh
-
-export PATH=$PWD/bin:$PATH
-
-PREFIX=$(realpath $(dirname $0))
-
-mkdir -p build
-
-cd build
-
-curl -L "https://github.com/elixir-lang/elixir/archive/v1.11.3.tar.gz" -o elixir.tar.gz
-tar xzf elixir.tar.gz --strip-components=1
-rm elixir.tar.gz
-
-./configure --prefix "$PREFIX"
-make -j$(nproc)
-
-cd ..
-
-cp -r build/bin .
-cp -r build/lib .
-
-rm -rf build
diff --git a/packages/elixir/1.11.3/environment b/packages/elixir/1.11.3/environment
deleted file mode 100644
index ea24603..0000000
--- a/packages/elixir/1.11.3/environment
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-# Put 'export' statements here for environment variables
-export LC_ALL=en_US.UTF-8
-export PATH=$PWD/bin:$PATH
diff --git a/packages/elixir/1.11.3/metadata.json b/packages/elixir/1.11.3/metadata.json
deleted file mode 100644
index 9c1a2fc..0000000
--- a/packages/elixir/1.11.3/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "elixir",
- "version": "1.11.3",
- "aliases": ["elixir", "exs"]
-}
diff --git a/packages/elixir/1.11.3/run b/packages/elixir/1.11.3/run
deleted file mode 100644
index 9b3ba9c..0000000
--- a/packages/elixir/1.11.3/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-# Put instructions to run the runtime
-elixir "$@"
\ No newline at end of file
diff --git a/packages/elixir/1.11.3/test.exs b/packages/elixir/1.11.3/test.exs
deleted file mode 100644
index bc837c6..0000000
--- a/packages/elixir/1.11.3/test.exs
+++ /dev/null
@@ -1 +0,0 @@
-IO.puts("OK")
\ No newline at end of file
diff --git a/packages/erlang/23.0.0/build.sh b/packages/erlang/23.0.0/build.sh
deleted file mode 100755
index f282bc2..0000000
--- a/packages/erlang/23.0.0/build.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-PREFIX=$(realpath $(dirname $0))
-
-mkdir -p build
-
-cd build
-
-curl "http://erlang.org/download/otp_src_23.0.tar.gz" -o erlang.tar.gz
-tar xzf erlang.tar.gz --strip-components=1
-rm erlang.tar.gz
-
-export ERL_TOP=$(pwd)
-./configure --prefix "$PREFIX"
-make -j$(nproc)
-make install -j$(nproc)
-
-cd ..
-
-rm -rf build
-
diff --git a/packages/erlang/23.0.0/environment b/packages/erlang/23.0.0/environment
deleted file mode 100644
index 780b668..0000000
--- a/packages/erlang/23.0.0/environment
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-# Put 'export' statements here for environment variables
-export PATH=$PWD/bin:$PATH
diff --git a/packages/erlang/23.0.0/metadata.json b/packages/erlang/23.0.0/metadata.json
deleted file mode 100644
index e82b4b3..0000000
--- a/packages/erlang/23.0.0/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "erlang",
- "version": "23.0.0",
- "aliases": ["erlang", "erl", "escript"]
-}
diff --git a/packages/erlang/23.0.0/run b/packages/erlang/23.0.0/run
deleted file mode 100644
index 135d9f4..0000000
--- a/packages/erlang/23.0.0/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-# Put instructions to run the runtime
-escript "$@"
\ No newline at end of file
diff --git a/packages/erlang/23.0.0/test.erl b/packages/erlang/23.0.0/test.erl
deleted file mode 100644
index d898d2c..0000000
--- a/packages/erlang/23.0.0/test.erl
+++ /dev/null
@@ -1,3 +0,0 @@
-
-main(_) ->
- io:format("OK~n").
\ No newline at end of file
diff --git a/packages/forte/1.0.0/metadata.json b/packages/forte/1.0.0/metadata.json
index fd4ec12..f7f4137 100644
--- a/packages/forte/1.0.0/metadata.json
+++ b/packages/forte/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "forte",
- "version": "1.0.0",
- "aliases": ["forter"]
+ "language": "forte",
+ "version": "1.0.0",
+ "aliases": ["forter"]
}
diff --git a/packages/gawk/5.1.0/build.sh b/packages/gawk/5.1.0/build.sh
deleted file mode 100644
index 81fb998..0000000
--- a/packages/gawk/5.1.0/build.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/usr/bin/env bash
-
-# Put instructions to build your package in here
-PREFIX=$(realpath $(dirname $0))
-
-mkdir -p build
-
-cd build
-
-curl "https://ftp.gnu.org/gnu/gawk/gawk-5.1.0.tar.gz" -o gawk.tar.gz
-
-tar xzf gawk.tar.gz --strip-components=1
-
-# === autoconf based ===
-./configure --prefix "$PREFIX"
-
-make -j$(nproc)
-make install -j$(nproc)
-cd ../
-rm -rf build
-
diff --git a/packages/gawk/5.1.0/environment b/packages/gawk/5.1.0/environment
deleted file mode 100644
index 780b668..0000000
--- a/packages/gawk/5.1.0/environment
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-# Put 'export' statements here for environment variables
-export PATH=$PWD/bin:$PATH
diff --git a/packages/gawk/5.1.0/metadata.json b/packages/gawk/5.1.0/metadata.json
deleted file mode 100644
index 1ae8c16..0000000
--- a/packages/gawk/5.1.0/metadata.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
- "language": "gawk",
- "version": "5.1.0",
- "provides": [
- {
- "language": "awk",
- "aliases": ["gawk"]
- }
- ]
-}
diff --git a/packages/gawk/5.1.0/run b/packages/gawk/5.1.0/run
deleted file mode 100644
index 5134ddf..0000000
--- a/packages/gawk/5.1.0/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-# Put instructions to run the runtime
-gawk-5.1.0 -f "$@"
diff --git a/packages/gawk/5.1.0/test.awk b/packages/gawk/5.1.0/test.awk
deleted file mode 100644
index 25e1bd3..0000000
--- a/packages/gawk/5.1.0/test.awk
+++ /dev/null
@@ -1 +0,0 @@
-{print "OK"}
\ No newline at end of file
diff --git a/packages/gcc/10.2.0/metadata.json b/packages/gcc/10.2.0/metadata.json
index f969bf5..367de7c 100644
--- a/packages/gcc/10.2.0/metadata.json
+++ b/packages/gcc/10.2.0/metadata.json
@@ -3,7 +3,7 @@
"version": "10.2.0",
"provides": [
{
- "language":"c",
+ "language": "c",
"aliases": ["gcc"]
},
{
diff --git a/packages/golfscript/1.0.0/metadata.json b/packages/golfscript/1.0.0/metadata.json
index 4ef3a62..cb4f356 100644
--- a/packages/golfscript/1.0.0/metadata.json
+++ b/packages/golfscript/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "golfscript",
- "version": "1.0.0",
- "aliases": ["golfscript"]
+ "language": "golfscript",
+ "version": "1.0.0",
+ "aliases": ["golfscript"]
}
diff --git a/packages/groovy/3.0.7/metadata.json b/packages/groovy/3.0.7/metadata.json
index b790007..34ab93d 100644
--- a/packages/groovy/3.0.7/metadata.json
+++ b/packages/groovy/3.0.7/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "groovy",
- "version": "3.0.7",
- "aliases": ["groovy", "gvy"]
+ "language": "groovy",
+ "version": "3.0.7",
+ "aliases": ["groovy", "gvy"]
}
diff --git a/packages/husk/1.0.0/build.sh b/packages/husk/1.0.0/build.sh
new file mode 100644
index 0000000..e2ee19c
--- /dev/null
+++ b/packages/husk/1.0.0/build.sh
@@ -0,0 +1,14 @@
+#!/usr/bin/env bash
+
+cp ../../haskell/9.0.1/build.sh ./haskell-build.sh
+sed -Ei 's/9\.0\.1/8\.10\.7/g' ./haskell-build.sh
+source ./haskell-build.sh
+
+# compile Husk from source
+git clone -q "https://github.com/barbuz/husk.git"
+cd husk
+../bin/ghc -O2 Husk
+
+# cleanup
+cd ..
+rm -f haskell-build.sh
\ No newline at end of file
diff --git a/packages/husk/1.0.0/environment b/packages/husk/1.0.0/environment
new file mode 100644
index 0000000..8b8421d
--- /dev/null
+++ b/packages/husk/1.0.0/environment
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# haskell and husk path
+export PATH=$PWD/bin:$PATH
+export HUSK_PATH=$PWD/husk
+export LANG=en_US.UTF8
diff --git a/packages/husk/1.0.0/metadata.json b/packages/husk/1.0.0/metadata.json
new file mode 100644
index 0000000..69c5b7e
--- /dev/null
+++ b/packages/husk/1.0.0/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "husk",
+ "version": "1.0.0",
+ "aliases": []
+}
diff --git a/packages/husk/1.0.0/run b/packages/husk/1.0.0/run
new file mode 100644
index 0000000..1fa65a0
--- /dev/null
+++ b/packages/husk/1.0.0/run
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+# Store the current path because we'll need it to run the program file
+PROGRAM_PATH=$PWD
+
+# For now, Husk can only be run within the folder that has the imported modules
+cd $HUSK_PATH
+
+# Run Husk from file in unicode format with the given args
+./Husk -uf "${PROGRAM_PATH}/${@}"
\ No newline at end of file
diff --git a/packages/husk/1.0.0/test.husk b/packages/husk/1.0.0/test.husk
new file mode 100644
index 0000000..d096585
--- /dev/null
+++ b/packages/husk/1.0.0/test.husk
@@ -0,0 +1 @@
+"OK
\ No newline at end of file
diff --git a/packages/japt/2.0.0/metadata.json b/packages/japt/2.0.0/metadata.json
index 7a3e5aa..ef0ff8d 100644
--- a/packages/japt/2.0.0/metadata.json
+++ b/packages/japt/2.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "japt",
- "version": "2.0.0",
- "aliases": ["japt"]
-}
\ No newline at end of file
+ "language": "japt",
+ "version": "2.0.0",
+ "aliases": ["japt"]
+}
diff --git a/packages/llvm_ir/12.0.1/build.sh b/packages/llvm_ir/12.0.1/build.sh
new file mode 100755
index 0000000..7afd41a
--- /dev/null
+++ b/packages/llvm_ir/12.0.1/build.sh
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+curl -L "https://github.com/llvm/llvm-project/releases/download/llvmorg-12.0.1/clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz" -o llvm-ir.tar.xz
+
+tar xf llvm-ir.tar.xz clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-/bin --strip-components=1
+
+rm llvm-ir.tar.xz
diff --git a/packages/llvm_ir/12.0.1/compile b/packages/llvm_ir/12.0.1/compile
new file mode 100755
index 0000000..082fb5c
--- /dev/null
+++ b/packages/llvm_ir/12.0.1/compile
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+llc "$@" -o binary.s
+clang binary.s -o binary
diff --git a/packages/llvm_ir/12.0.1/environment b/packages/llvm_ir/12.0.1/environment
new file mode 100644
index 0000000..85a25b6
--- /dev/null
+++ b/packages/llvm_ir/12.0.1/environment
@@ -0,0 +1,2 @@
+#!/usr/bin/env bash
+export PATH=$PWD/bin:$PATH
diff --git a/packages/llvm_ir/12.0.1/metadata.json b/packages/llvm_ir/12.0.1/metadata.json
new file mode 100644
index 0000000..50dfbbc
--- /dev/null
+++ b/packages/llvm_ir/12.0.1/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "llvm_ir",
+ "version": "12.0.1",
+ "aliases": ["llvm", "llvm-ir", "ll"]
+}
diff --git a/packages/llvm_ir/12.0.1/run b/packages/llvm_ir/12.0.1/run
new file mode 100644
index 0000000..94b3b0b
--- /dev/null
+++ b/packages/llvm_ir/12.0.1/run
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+shift
+binary "$@"
diff --git a/packages/llvm_ir/12.0.1/test.ll b/packages/llvm_ir/12.0.1/test.ll
new file mode 100644
index 0000000..be7d5d3
--- /dev/null
+++ b/packages/llvm_ir/12.0.1/test.ll
@@ -0,0 +1,10 @@
+@.str = private unnamed_addr constant [2 x i8] c"OK"
+
+declare i32 @puts(i8* nocapture) nounwind
+
+define i32 @main() {
+ %cast210 = getelementptr [2 x i8],[2 x i8]* @.str, i64 0, i64 0
+
+ call i32 @puts(i8* %cast210)
+ ret i32 0
+}
diff --git a/packages/mono/6.12.0/compile b/packages/mono/6.12.0/compile
index e3ae230..5246bc2 100644
--- a/packages/mono/6.12.0/compile
+++ b/packages/mono/6.12.0/compile
@@ -1,20 +1,13 @@
#!/bin/bash
-check_errors () {
- grep -q 'error [A-Z]\+[0-9]\+:' check.txt && cat check.txt 1>&2 || cat check.txt
- rm check.txt
-}
-
case "${PISTON_LANGUAGE}" in
csharp)
rename 's/$/\.cs/' "$@" # Add .cs extension
- csc -out:out *.cs > check.txt
- check_errors
+ csc -out:out *.cs
;;
basic)
rename 's/$/\.vb/' "$@" # Add .vb extension
- vbnc -out:out *.vb > check.txt
- check_errors
+ vbnc -out:out *.vb
;;
*)
echo "How did you get here? (${PISTON_LANGUAGE})"
diff --git a/packages/mono/6.12.0/metadata.json b/packages/mono/6.12.0/metadata.json
index 4d09ae7..3f483a4 100644
--- a/packages/mono/6.12.0/metadata.json
+++ b/packages/mono/6.12.0/metadata.json
@@ -8,7 +8,13 @@
},
{
"language": "basic",
- "aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"]
+ "aliases": [
+ "vb",
+ "mono-vb",
+ "mono-basic",
+ "visual-basic",
+ "visual basic"
+ ]
}
]
}
diff --git a/packages/ocaml/4.12.0/metadata.json b/packages/ocaml/4.12.0/metadata.json
index ddbfb89..6c2f733 100644
--- a/packages/ocaml/4.12.0/metadata.json
+++ b/packages/ocaml/4.12.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "ocaml",
- "version": "4.12.0",
- "aliases": ["ocaml", "ml"]
+ "language": "ocaml",
+ "version": "4.12.0",
+ "aliases": ["ocaml", "ml"]
}
diff --git a/packages/octave/6.2.0/metadata.json b/packages/octave/6.2.0/metadata.json
index ab9dbb1..0b209ce 100644
--- a/packages/octave/6.2.0/metadata.json
+++ b/packages/octave/6.2.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "octave",
- "version": "6.2.0",
- "aliases": ["matlab", "m"]
+ "language": "octave",
+ "version": "6.2.0",
+ "aliases": ["matlab", "m"]
}
diff --git a/packages/pyth/1.0.0/metadata.json b/packages/pyth/1.0.0/metadata.json
index bcddb7a..e9bbfe9 100644
--- a/packages/pyth/1.0.0/metadata.json
+++ b/packages/pyth/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "pyth",
- "version": "1.0.0",
- "aliases": ["pyth"]
+ "language": "pyth",
+ "version": "1.0.0",
+ "aliases": ["pyth"]
}
diff --git a/packages/racket/8.3.0/build.sh b/packages/racket/8.3.0/build.sh
new file mode 100644
index 0000000..187aef4
--- /dev/null
+++ b/packages/racket/8.3.0/build.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+# curl racket 8.3 linux installation shell file
+curl -L 'https://download.racket-lang.org/installers/8.3/racket-8.3-x86_64-linux-cs.sh' -o racket.sh
+
+# provide settings "no" "4" and "" to racket.sh
+echo "no
+4
+" | sh racket.sh
+
diff --git a/packages/racket/8.3.0/environment b/packages/racket/8.3.0/environment
new file mode 100644
index 0000000..1a30eef
--- /dev/null
+++ b/packages/racket/8.3.0/environment
@@ -0,0 +1,5 @@
+#!/bin/bash
+
+# Path to racket binary
+export PATH=$PWD/bin:$PATH
+export RACKET_PATH=$PWD/racket
\ No newline at end of file
diff --git a/packages/racket/8.3.0/metadata.json b/packages/racket/8.3.0/metadata.json
new file mode 100644
index 0000000..14b902c
--- /dev/null
+++ b/packages/racket/8.3.0/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "racket",
+ "version": "8.3.0",
+ "aliases": ["rkt"]
+}
diff --git a/packages/racket/8.3.0/run b/packages/racket/8.3.0/run
new file mode 100644
index 0000000..227bfaa
--- /dev/null
+++ b/packages/racket/8.3.0/run
@@ -0,0 +1,3 @@
+#!/bin/bash
+
+"$RACKET_PATH"/bin/racket "$@"
\ No newline at end of file
diff --git a/packages/racket/8.3.0/test.rkt b/packages/racket/8.3.0/test.rkt
new file mode 100644
index 0000000..7e0825c
--- /dev/null
+++ b/packages/racket/8.3.0/test.rkt
@@ -0,0 +1,3 @@
+#lang racket
+
+(display "OK")
\ No newline at end of file
diff --git a/packages/raku/6.100.0/metadata.json b/packages/raku/6.100.0/metadata.json
index 7cda1ed..e1fbad8 100644
--- a/packages/raku/6.100.0/metadata.json
+++ b/packages/raku/6.100.0/metadata.json
@@ -2,4 +2,4 @@
"language": "raku",
"version": "6.100.0",
"aliases": ["raku", "rakudo", "perl6", "p6", "pl6"]
-}
\ No newline at end of file
+}
diff --git a/packages/retina/1.2.0/build.sh b/packages/retina/1.2.0/build.sh
new file mode 100644
index 0000000..9712458
--- /dev/null
+++ b/packages/retina/1.2.0/build.sh
@@ -0,0 +1,22 @@
+#!/usr/bin/env bash
+
+# get dotnet 2.2.8 as a dependency for retina
+curl "https://download.visualstudio.microsoft.com/download/pr/022d9abf-35f0-4fd5-8d1c-86056df76e89/477f1ebb70f314054129a9f51e9ec8ec/dotnet-sdk-2.2.207-linux-x64.tar.gz" -Lo dotnet.tar.gz
+tar xzf dotnet.tar.gz --strip-components=1
+rm dotnet.tar.gz
+
+export DOTNET_CLI_HOME=$PWD
+./dotnet new console -o cache_application
+
+rm -rf cache_application
+
+# curl retina version 1.2.0
+curl -L "https://github.com/m-ender/retina/releases/download/v1.2.0/retina-linux-x64.tar.gz" -o retina.tar.xz
+tar xf retina.tar.xz --strip-components=1
+rm retina.tar.xz
+
+# move the libhostfxr.so file to the current directory so we don't have to set DOTNET_ROOT
+mv host/fxr/2.2.8/libhostfxr.so libhostfxr.so
+
+# give execute permissions to retina
+chmod +x Retina
\ No newline at end of file
diff --git a/packages/retina/1.2.0/environment b/packages/retina/1.2.0/environment
new file mode 100644
index 0000000..d9b5a4d
--- /dev/null
+++ b/packages/retina/1.2.0/environment
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+export PATH=$PWD/bin:$PATH
+export RETINA_PATH=$PWD
\ No newline at end of file
diff --git a/packages/retina/1.2.0/metadata.json b/packages/retina/1.2.0/metadata.json
new file mode 100644
index 0000000..f632d6b
--- /dev/null
+++ b/packages/retina/1.2.0/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "retina",
+ "version": "1.2.0",
+ "aliases": ["ret"]
+}
diff --git a/packages/retina/1.2.0/run b/packages/retina/1.2.0/run
new file mode 100644
index 0000000..bb64b05
--- /dev/null
+++ b/packages/retina/1.2.0/run
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+# retina doesn't take interactive stdin and doesn't take args either
+
+# save the file for later
+file="$1"
+
+# remove file from $@
+shift
+
+# join arguments on newline
+stdin=`printf "%s\n" "$@"`
+
+# pass stdin into Retina command with file as argument
+echo "$stdin" | "$RETINA_PATH"/Retina "$file"
\ No newline at end of file
diff --git a/packages/retina/1.2.0/test.ret b/packages/retina/1.2.0/test.ret
new file mode 100644
index 0000000..c4b21da
--- /dev/null
+++ b/packages/retina/1.2.0/test.ret
@@ -0,0 +1 @@
+K`OK
\ No newline at end of file
diff --git a/packages/rust/1.56.1/build.sh b/packages/rust/1.56.1/build.sh
new file mode 100644
index 0000000..6be556f
--- /dev/null
+++ b/packages/rust/1.56.1/build.sh
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+curl -OL "https://static.rust-lang.org/dist/rust-1.56.1-x86_64-unknown-linux-gnu.tar.gz"
+tar xzvf rust-1.56.1-x86_64-unknown-linux-gnu.tar.gz
+rm rust-1.56.1-x86_64-unknown-linux-gnu.tar.gz
diff --git a/packages/rust/1.56.1/compile b/packages/rust/1.56.1/compile
new file mode 100644
index 0000000..201318a
--- /dev/null
+++ b/packages/rust/1.56.1/compile
@@ -0,0 +1,6 @@
+#!/usr/bin/env bash
+
+# https://stackoverflow.com/questions/38041331/rust-compiler-cant-find-crate-for-std
+# Rust compiler needs to find the stdlib to link against
+rustc -o binary -L ${RUST_INSTALL_LOC}/rustc/lib -L ${RUST_INSTALL_LOC}/rust-std-x86_64-unknown-linux-gnu/lib/rustlib/x86_64-unknown-linux-gnu/lib "$@"
+chmod +x binary
diff --git a/packages/rust/1.56.1/environment b/packages/rust/1.56.1/environment
new file mode 100644
index 0000000..c28b1b7
--- /dev/null
+++ b/packages/rust/1.56.1/environment
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+# Put 'export' statements here for environment variables
+export PATH=$PWD/rust-1.56.1-x86_64-unknown-linux-gnu/rustc/bin/:$PATH
+export RUST_INSTALL_LOC=$PWD/rust-1.56.1-x86_64-unknown-linux-gnu
diff --git a/packages/rust/1.56.1/metadata.json b/packages/rust/1.56.1/metadata.json
new file mode 100644
index 0000000..1f7319a
--- /dev/null
+++ b/packages/rust/1.56.1/metadata.json
@@ -0,0 +1,7 @@
+{
+ "language": "rust",
+ "version": "1.56.1",
+ "aliases": [
+ "rs"
+ ]
+}
diff --git a/packages/rust/1.56.1/run b/packages/rust/1.56.1/run
new file mode 100644
index 0000000..d377dd9
--- /dev/null
+++ b/packages/rust/1.56.1/run
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+shift
+./binary "$@"
diff --git a/packages/rust/1.56.1/test.rs b/packages/rust/1.56.1/test.rs
new file mode 100644
index 0000000..9561664
--- /dev/null
+++ b/packages/rust/1.56.1/test.rs
@@ -0,0 +1,3 @@
+fn main() {
+ println!("OK");
+}
diff --git a/packages/typescript/4.2.3/run b/packages/typescript/4.2.3/run
index 1d26f3f..5a8c60e 100644
--- a/packages/typescript/4.2.3/run
+++ b/packages/typescript/4.2.3/run
@@ -2,7 +2,7 @@
# Put instructions to run the runtime
-CODE=$(sed 's/ts$/js/' <<<"$1")
+CODE=$1.js
shift
node $CODE "$@"
diff --git a/packages/typescript/4.2.3/test.ts b/packages/typescript/4.2.3/test.ts
index 56ed4a0..e106678 100644
--- a/packages/typescript/4.2.3/test.ts
+++ b/packages/typescript/4.2.3/test.ts
@@ -1 +1 @@
-console.log("OK")
\ No newline at end of file
+console.log('OK');
diff --git a/packages/vyxal/2.4.1/build.sh b/packages/vyxal/2.4.1/build.sh
new file mode 100644
index 0000000..e7ce729
--- /dev/null
+++ b/packages/vyxal/2.4.1/build.sh
@@ -0,0 +1,18 @@
+#!/usr/bin/env bash
+
+# get Python source
+source ../../python/3.9.4/build.sh
+
+# add regex and pwn modules
+bin/pip3 install regex pwn
+
+# make vyxal directory
+mkdir vyxal
+cd vyxal
+
+# Vyxal install
+curl -L "https://github.com/Vyxal/Vyxal/archive/refs/tags/v2.4.1.tar.gz" -o vyxal.tar.xz
+tar xf vyxal.tar.xz --strip-components=1
+rm vyxal.tar.xz
+
+cd ..
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/environment b/packages/vyxal/2.4.1/environment
new file mode 100644
index 0000000..f0008c8
--- /dev/null
+++ b/packages/vyxal/2.4.1/environment
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+# Python and Vyxal path
+export PATH=$PWD/bin:$PATH
+export VYXAL_PATH=$PWD/vyxal
+
+# export term to fix curses warning
+export TERM=xterm
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/metadata.json b/packages/vyxal/2.4.1/metadata.json
new file mode 100644
index 0000000..e5427fb
--- /dev/null
+++ b/packages/vyxal/2.4.1/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "vyxal",
+ "version": "2.4.1",
+ "aliases": []
+}
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/run b/packages/vyxal/2.4.1/run
new file mode 100644
index 0000000..c9b08a6
--- /dev/null
+++ b/packages/vyxal/2.4.1/run
@@ -0,0 +1,3 @@
+#!/usr/bin/env bash
+
+python3 "$VYXAL_PATH"/Vyxal.py "$1"
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/test.vyxal b/packages/vyxal/2.4.1/test.vyxal
new file mode 100644
index 0000000..6d0cb6e
--- /dev/null
+++ b/packages/vyxal/2.4.1/test.vyxal
@@ -0,0 +1 @@
+`OK
\ No newline at end of file
diff --git a/packages/zig/0.8.0/metadata.json b/packages/zig/0.8.0/metadata.json
index 7af8ed6..8c02d33 100644
--- a/packages/zig/0.8.0/metadata.json
+++ b/packages/zig/0.8.0/metadata.json
@@ -1,5 +1,8 @@
{
"language": "zig",
"version": "0.8.0",
- "aliases": ["zig"]
+ "aliases": ["zig"],
+ "limit_overrides": {
+ "compile_timeout": 15000
+ }
}
diff --git a/piston b/piston
index 67d2fa0..cacf23e 100755
--- a/piston
+++ b/piston
@@ -5,15 +5,16 @@ SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
CONTAINER_NAME="piston_api"
IMAGE_TAG="base-latest"
-IMAGE_NAME="ghcr.io/piston"
+IMAGE_NAME="ghcr.io/engineer-man/piston"
IMAGE_NAME_DEV="piston"
SUBCOMMAND="$1"
shift
-cmd_build(){
+build_base(){
CONTAINER_PATH="$(nix build ".#container" --no-link --json | jq '.[0].outputs.out' -r)"
docker load -i $CONTAINER_PATH
+ docker tag "$IMAGE_NAME_DEV:$IMAGE_TAG" "$IMAGE_NAME:$IMAGE_TAG"
}
case "$SUBCOMMAND" in
@@ -22,9 +23,10 @@ case "$SUBCOMMAND" in
restart) docker restart $CONTAINER_NAME ;;
start)
docker run \
+ -p 2000:2000 \
--rm \
--name $CONTAINER_NAME \
- -it "$IMAGE_NAME:$IMAGE_TAG"
+ -d "$IMAGE_NAME:$IMAGE_TAG"
;;
stop) docker stop $CONTAINER_NAME ;;
bash|shell) docker exec -it $CONTAINER_NAME bash ;;
@@ -35,22 +37,40 @@ case "$SUBCOMMAND" in
;;
# dev commands
+
+ scaffold)
+ pushd $SCRIPT_DIR/runtimes > /dev/null
+ ./scaffold.sh $1 $2
+ popd > /dev/null
+ ;;
- build) cmd_build ;;
+ build)
+ build_base
+ if [[ ! -z "$1" ]]; then
+ # $1 contains a variant to build
+ docker build \
+ --build-arg RUNTIMESET=$1 \
+ -f $SCRIPT_DIR/Dockerfile.withset \
+ -t "$IMAGE_NAME_DEV:$1-latest" \
+ .
+ fi
+ ;;
start-dev)
- cmd_build
+ build_base
docker run \
--rm \
+ -p 2000:2000 \
-it \
--name $CONTAINER_NAME \
+ -e PISTON_LOG_LEVEL=DEBUG \
-e PISTON_FLAKE_PATH=/piston/packages \
-v $PWD:/piston/packages \
- -it "$IMAGE_NAME_DEV:$IMAGE_TAG"
+ -d "$IMAGE_NAME_DEV:$IMAGE_TAG"
;;
test)
- cmd_build
+ build_base
docker run \
--rm \
-it \
@@ -82,7 +102,8 @@ case "$SUBCOMMAND" in
echo "See https://nixos.wiki/wiki/Nix_Installation_Guide#Stable_Nix"
echo
echo " start-dev Builds a container locally and starts piston"
- echo " build Builds and loads the API container"
+ echo " build [runtime-set] Builds and loads the API container optionally"
+ echo " including the runtime set within it"
echo " scaffold [runtime] Initializes a new runtime"
echo " test Runs unit tests on the given runtime"
echo " Optionally set runtime to --all to test all"
diff --git a/pre-commit b/pre-commit
new file mode 100755
index 0000000..288a5e3
--- /dev/null
+++ b/pre-commit
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+echo "Linting staged files..."
+npm install > /dev/null || exit 1
+
+FILES=$(git diff --cached --name-only --diff-filter=ACMR | sed 's| |\\ |g')
+[ -z "$FILES" ] && exit 0
+
+# Prettify all selected files
+echo "$FILES" | xargs npx prettier --ignore-unknown --write
+
+# Add back the modified/prettified files to staging
+echo "$FILES" | xargs git add
+
+exit 0
diff --git a/readme.md b/readme.md
index 163d6c9..2bd0c1f 100644
--- a/readme.md
+++ b/readme.md
@@ -41,20 +41,6 @@
-# Notes About Hacktoberfest
-
-While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs.
-If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests.
-
-We are accepting PRs for:
-* Packages - updating package versions, adding new packages
-* Documentation updates
-* CLI/API improvements - please discuss these with us in the Discord first
-
-Any queries or concerns, ping @HexF#0015 in the Discord.
-
-
-
# About
@@ -66,11 +52,11 @@ Any queries or concerns, ping @HexF#0015 in the Discord.
It's used in numerous places including:
-- [EMKC Challenges](https://emkc.org/challenges)
-- [EMKC Weekly Contests](https://emkc.org/contests)
-- [Engineer Man Discord Server](https://discord.gg/engineerman)
-- Web IDEs
-- 200+ direct integrations
+- [EMKC Challenges](https://emkc.org/challenges)
+- [EMKC Weekly Contests](https://emkc.org/contests)
+- [Engineer Man Discord Server](https://discord.gg/engineerman)
+- Web IDEs
+- 200+ direct integrations
@@ -78,18 +64,20 @@ It's used in numerous places including:
The following are approved and endorsed extensions/utilities to the core Piston offering.
-- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
-- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
-- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
-- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
-- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
+- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
+- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
+- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
+- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
+- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
+- [Go-Piston](https://github.com/milindmadhukar/go-piston), a Golang wrapper for accessing the Piston API.
+- [piston_rs](https://github.com/Jonxslays/piston_rs), a Rust wrapper for accessing the Piston API.
# Public API
-- Requires no installation and you can use it immediately.
-- Reference the Runtimes/Execute sections below to learn about the request and response formats.
+- Requires no installation and you can use it immediately.
+- Reference the Runtimes/Execute sections below to learn about the request and response formats.
@@ -114,9 +102,9 @@ POST https://emkc.org/api/v2/piston/execute
### Host System Package Dependencies
-- Docker
-- Docker Compose
-- Node JS (>= 13, preferably >= 15)
+- Docker
+- Docker Compose
+- Node JS (>= 13, preferably >= 15)
### After system dependencies are installed, clone this repository:
@@ -141,7 +129,7 @@ The API will now be online with no language runtimes installed. To install runti
### Host System Package Dependencies
-- Docker
+- Docker
### Installation
@@ -159,7 +147,7 @@ docker run \
### Host System Package Dependencies
-- Same as [All In One](#All-In-One)
+- Same as [All In One](#All-In-One)
### Installation
@@ -320,6 +308,8 @@ Content-Type: application/json
`awk`,
`bash`,
+`befunge93`,
+`brachylog`,
`brainfuck`,
`c`,
`c++`,
@@ -330,21 +320,25 @@ Content-Type: application/json
`cow`,
`crystal`,
`csharp`,
+`csharp.net`,
`d`,
`dart`,
`dash`,
-`dotnet`,
`dragon`,
`elixir`,
`emacs`,
`erlang`,
+`file`,
`forte`,
`fortran`,
`freebasic`,
+`fsharp.net`,
+`fsi`,
`go`,
`golfscript`,
`groovy`,
`haskell`,
+`husk`,
`iverilog`,
`japt`,
`java`,
@@ -353,6 +347,7 @@ Content-Type: application/json
`julia`,
`kotlin`,
`lisp`,
+`llvm_ir`,
`lolcode`,
`lua`,
`nasm`,
@@ -372,7 +367,9 @@ Content-Type: application/json
`pyth`,
`python`,
`python2`,
+`racket`,
`raku`,
+`retina`,
`rockstar`,
`rscript`,
`ruby`,
@@ -382,7 +379,9 @@ Content-Type: application/json
`swift`,
`typescript`,
`basic`,
+`basic.net`,
`vlang`,
+`vyxal`,
`yeethon`,
`zig`,
@@ -403,14 +402,14 @@ Docker provides a great deal of security out of the box in that it's separate fr
Piston takes additional steps to make it resistant to
various privilege escalation, denial-of-service, and resource saturation threats. These steps include:
-- Disabling outgoing network interaction
-- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
-- Capping max files at 2048 (resists various file based attacks)
-- Cleaning up all temp space after each execution (resists out of drive space attacks)
-- Running as a variety of unprivileged users
-- Capping runtime execution at 3 seconds
-- Capping stdout to 65536 characters (resists yes/no bombs and runaway output)
-- SIGKILLing misbehaving code
+- Disabling outgoing network interaction
+- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
+- Capping max files at 2048 (resists various file based attacks)
+- Cleaning up all temp space after each execution (resists out of drive space attacks)
+- Running as a variety of unprivileged users
+- Capping runtime execution at 3 seconds
+- Capping stdout to 65536 characters (resists yes/no bombs and runaway output)
+- SIGKILLing misbehaving code
diff --git a/runtimes/.scaffold.nix b/runtimes/.scaffold.nix
index 51b68aa..de7bc8f 100644
--- a/runtimes/.scaffold.nix
+++ b/runtimes/.scaffold.nix
@@ -49,6 +49,9 @@ in piston.mkRuntime {
# Specify a list of tests.
# These should output "OK" to STDOUT if everything looks good
+ #
+ # Run the following command to test the package:
+ # $ ./piston test %NAME%
tests = [
(piston.mkTest {
files = {
diff --git a/runtimes/crystal.nix b/runtimes/crystal.nix
new file mode 100644
index 0000000..9cbae9d
--- /dev/null
+++ b/runtimes/crystal.nix
@@ -0,0 +1,31 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.crystal;
+in piston.mkRuntime {
+ language = "crystal";
+ version = pkg.version;
+
+ aliases = [
+ "cr"
+ ];
+
+ compile = ''
+ ${pkg}/bin/crystal build "$@" -o out --no-color
+ chmod +x out
+ '';
+
+ run = ''
+ shift
+ ./out "$@"
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.cr" = ''
+ puts("OK")
+ '';
+ };
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/dart.nix b/runtimes/dart.nix
new file mode 100644
index 0000000..d5e0981
--- /dev/null
+++ b/runtimes/dart.nix
@@ -0,0 +1,25 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.dart;
+in piston.mkRuntime {
+ language = "dart";
+ version = pkg.version;
+
+ aliases = [];
+
+ run = ''
+ ${pkg}/bin/dart run "$@"
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.dart" = ''
+ void main() {
+ print('OK');
+ }
+ '';
+ };
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/dash.nix b/runtimes/dash.nix
new file mode 100644
index 0000000..6da4f4e
--- /dev/null
+++ b/runtimes/dash.nix
@@ -0,0 +1,22 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.dash;
+in piston.mkRuntime {
+ language = "dash";
+ version = pkg.version;
+
+ aliases = [];
+
+ run = ''
+ ${pkg}/bin/dash "$@"
+ '';
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.dash" = ''
+ echo "OK"
+ '';
+ };
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/default.nix b/runtimes/default.nix
index 2c3ddc4..0571829 100644
--- a/runtimes/default.nix
+++ b/runtimes/default.nix
@@ -5,4 +5,12 @@ args: {
"bash" = import ./bash.nix args;
"clojure" = import ./clojure.nix args;
"cobol-gnu-cobol" = import ./cobol-gnu-cobol.nix args;
+ "crystal" = import ./crystal.nix args;
+ "dart" = import ./dart.nix args;
+ "dash" = import ./dash.nix args;
+ "deno-javascript" = import ./deno-javascript.nix args;
+ "deno-typescript" = import ./deno-typescript.nix args;
+ "elixir" = import ./elixir.nix args;
+ "erlang" = import ./erlang.nix args;
+ "gawk-awk" = import ./gawk-awk.nix args;
}
diff --git a/runtimes/deno-javascript.nix b/runtimes/deno-javascript.nix
new file mode 100644
index 0000000..2e6970e
--- /dev/null
+++ b/runtimes/deno-javascript.nix
@@ -0,0 +1,27 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.deno;
+in piston.mkRuntime {
+ language = "javascript";
+ version = pkg.version;
+ runtime = "deno";
+
+ aliases = [
+ "js"
+ "deno-js"
+ ];
+
+ run = ''
+ DENO_DIR=$PWD ${pkg}/bin/deno run $@
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.js" = ''
+ console.log("OK");
+ '';
+ };
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/deno-typescript.nix b/runtimes/deno-typescript.nix
new file mode 100644
index 0000000..50d0fa6
--- /dev/null
+++ b/runtimes/deno-typescript.nix
@@ -0,0 +1,27 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.deno;
+in piston.mkRuntime {
+ language = "typescript";
+ version = pkg.version;
+ runtime = "deno";
+
+ aliases = [
+ "ts"
+ "deno-ts"
+ ];
+
+ run = ''
+ DENO_DIR=$PWD ${pkg}/bin/deno run $@
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.ts" = ''
+ console.log("OK");
+ '';
+ };
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/elixir.nix b/runtimes/elixir.nix
new file mode 100644
index 0000000..39cb962
--- /dev/null
+++ b/runtimes/elixir.nix
@@ -0,0 +1,30 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.elixir;
+in piston.mkRuntime {
+ language = "elixir";
+ version = pkg.version;
+
+ aliases = [
+ "exs"
+ ];
+
+ run = ''
+ export LC_ALL=en_US.UTF-8
+ ${pkg}/bin/elixir "$@"
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.exs" = ''
+ IO.puts("OK")
+ '';
+ };
+ })
+ ];
+
+ limitOverrides = {
+ "max_file_size" = 100000000; # 100MB
+ };
+}
\ No newline at end of file
diff --git a/runtimes/erlang.nix b/runtimes/erlang.nix
new file mode 100644
index 0000000..efac6a1
--- /dev/null
+++ b/runtimes/erlang.nix
@@ -0,0 +1,28 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.erlang;
+in piston.mkRuntime {
+ language = "erlang";
+ version = pkg.version;
+
+ aliases = [
+ "erl"
+ "escript"
+ ];
+
+ run = ''
+ ${pkg}/bin/escript "$@"
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.erl" = ''
+
+ main(_) ->
+ io:format("OK~n").
+ '';
+ };
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/gawk-awk.nix b/runtimes/gawk-awk.nix
new file mode 100644
index 0000000..c5c9675
--- /dev/null
+++ b/runtimes/gawk-awk.nix
@@ -0,0 +1,26 @@
+{pkgs, piston, ...}:
+let
+ pkg = pkgs.gawk;
+in piston.mkRuntime {
+ language = "awk";
+ version = pkg.version;
+ runtime = "gawk";
+
+ aliases = [];
+
+
+ run = ''
+ ${pkg}/bin/gawk -f "$@"
+ '';
+
+ tests = [
+ (piston.mkTest {
+ files = {
+ "test.awk" = ''
+ {print "OK"}
+ '';
+ };
+ stdin = "\n"; # awk needs some line input
+ })
+ ];
+}
\ No newline at end of file
diff --git a/runtimes/scaffold.sh b/runtimes/scaffold.sh
index fb3aafe..2a49ef2 100755
--- a/runtimes/scaffold.sh
+++ b/runtimes/scaffold.sh
@@ -15,7 +15,7 @@ NAME=$1
if [[ $# -eq 2 ]]; then
RUNTIME=$2
- NAME=$LANGUAGE-$RUNTIME
+ NAME=$RUNTIME-$LANGUAGE
fi
@@ -28,10 +28,13 @@ else
echo " \"$NAME\" = import ./$NAME.nix args;" >> default.nix
sed -e 's/%LANGUAGE%/'"$LANGUAGE"'/g' \
-e 's/%RUNTIME%/'"$RUNTIME"'/g' \
+ -e 's/%NAME%/'"$NAME"'/g' \
.scaffold.nix > $NAME.nix
- git add $NAME.nix
echo "}" >> default.nix
+ git add $NAME.nix default.nix
+
echo "Scaffolded $NAME"
echo "Edit $NAME.nix to get started"
+ echo "Once you are done, run ./piston test $NAME to test it"
fi
\ No newline at end of file
diff --git a/tests/readme.md b/tests/readme.md
index 01ae419..746d0b9 100644
--- a/tests/readme.md
+++ b/tests/readme.md
@@ -6,4 +6,4 @@ Write exploits in any language supported by piston.
Hopefully when running any files in this directory, piston will resist the attack.
-Leave a comment in the code describing how the exploit works.
\ No newline at end of file
+Leave a comment in the code describing how the exploit works.