diff --git a/.github/ISSUE_TEMPLATE/language-request.md b/.github/ISSUE_TEMPLATE/language-request.md
index 3f42d90..5ae2661 100644
--- a/.github/ISSUE_TEMPLATE/language-request.md
+++ b/.github/ISSUE_TEMPLATE/language-request.md
@@ -4,6 +4,7 @@ about: Template for requesting language support
title: Add [insert language name here]
labels: package
assignees: ''
+
---
Provide links to different compilers/interpreters that could be used to implement this language, and discuss pros/cons of each.
diff --git a/.github/PULL_REQUEST_TEMPLATE/package.md b/.github/PULL_REQUEST_TEMPLATE/package.md
index da59fe0..6cd3c98 100644
--- a/.github/PULL_REQUEST_TEMPLATE/package.md
+++ b/.github/PULL_REQUEST_TEMPLATE/package.md
@@ -1,11 +1,10 @@
Checklist:
-
-- [ ] The package builds locally with `./piston build-pkg [package] [version]`
-- [ ] The package installs with `./piston ppman install [package]=[version]`
-- [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
-- [ ] Package files are placed in the correct directory
-- [ ] No old package versions are removed
-- [ ] All source files are deleted in the `build.sh` script
-- [ ] `metadata.json`'s `language` and `version` fields match the directory path
-- [ ] Any extensions the language may use are set as aliases
-- [ ] Any alternative names the language is referred to are set as aliases.
+* [ ] The package builds locally with `./piston build-pkg [package] [version]`
+* [ ] The package installs with `./piston ppman install [package]=[version]`
+* [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
+* [ ] Package files are placed in the correct directory
+* [ ] No old package versions are removed
+* [ ] All source files are deleted in the `build.sh` script
+* [ ] `metadata.json`'s `language` and `version` fields match the directory path
+* [ ] Any extensions the language may use are set as aliases
+* [ ] Any alternative names the language is referred to by are set as aliases
diff --git a/.github/workflows/api-push.yaml b/.github/workflows/api-push.yaml
index dec3bce..bcf0472 100644
--- a/.github/workflows/api-push.yaml
+++ b/.github/workflows/api-push.yaml
@@ -1,38 +1,39 @@
name: Publish API image
on:
- push:
- branches:
- - master
- - v3
- paths:
- - api/**
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - api/**
+
jobs:
- push_to_registry:
- runs-on: ubuntu-latest
- name: Build and Push Docker image to Github Packages
- steps:
- - name: Check out repo
- uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
- - name: Login to ghcr.io
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: ghcr.io
+ push_to_registry:
+ runs-on: ubuntu-latest
+        name: Build and Push Docker image to GitHub Packages
+ steps:
+ - name: Check out repo
+ uses: actions/checkout@v2
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
+ - name: Login to ghcr.io
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: ghcr.io
- - name: Build and push API
- uses: docker/build-push-action@v2
- with:
- context: api
- push: true
- pull: true
- tags: |
- docker.pkg.github.com/engineer-man/piston/api
- ghcr.io/engineer-man/piston
+ - name: Build and push API
+ uses: docker/build-push-action@v2
+ with:
+ context: api
+ push: true
+ pull: true
+ tags: |
+ docker.pkg.github.com/engineer-man/piston/api
+ ghcr.io/engineer-man/piston
diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml
index d5bfe78..bb264a3 100644
--- a/.github/workflows/package-pr.yaml
+++ b/.github/workflows/package-pr.yaml
@@ -1,139 +1,140 @@
-name: 'Package Pull Requests'
+name: "Package Pull Requests"
on:
- pull_request:
- types:
- - opened
- - reopened
- - synchronize
- paths:
- - 'packages/**'
+ pull_request:
+ types:
+ - opened
+ - edited
+ - reopened
+ - synchronize
+ paths:
+ - "packages/**"
jobs:
- check-pkg:
- name: Validate README
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ check-pkg:
+ name: Validate README
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- - name: Ensure README was updated
- run: |
- MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /
/{flag=0} flag' readme.md | sort -u))
+ - name: Ensure README was updated
+ run: |
+ MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /
/{flag=0} flag' readme.md | sort -u))
- [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
+ [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
- echo "README has supported languages missing: "
- comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /
/{flag=0} flag' readme.md | sort -u)
- exit 1
+ echo "README has supported languages missing: "
+ comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /
/{flag=0} flag' readme.md | sort -u)
+ exit 1
- build-pkg:
- name: Check that package builds
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
+ build-pkg:
+ name: Check that package builds
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- - name: Build Packages
- run: |
- PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
- echo "Packages: $PACKAGES"
- docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
- docker build -t repo-builder repo
- docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
- ls -la packages
+ - name: Build Packages
+ run: |
+ PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
+ echo "Packages: $PACKAGES"
+ docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+ docker build -t repo-builder repo
+ docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
+ ls -la packages
- - name: Upload package as artifact
- uses: actions/upload-artifact@v2
- with:
- name: packages
- path: packages/*.pkg.tar.gz
+ - name: Upload package as artifact
+ uses: actions/upload-artifact@v2
+ with:
+ name: packages
+ path: packages/*.pkg.tar.gz
- test-pkg:
- name: Test package
- runs-on: ubuntu-latest
- needs: build-pkg
- steps:
- - uses: actions/checkout@v2
+ test-pkg:
+ name: Test package
+ runs-on: ubuntu-latest
+ needs: build-pkg
+ steps:
+ - uses: actions/checkout@v2
- - uses: actions/download-artifact@v2
- with:
- name: packages
+ - uses: actions/download-artifact@v2
+ with:
+ name: packages
- - name: Relocate downloaded packages
- run: mv *.pkg.tar.gz packages/
+ - name: Relocate downloaded packages
+ run: mv *.pkg.tar.gz packages/
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Run tests
- run: |
- ls -la
- docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
- docker pull docker.pkg.github.com/engineer-man/piston/api
- docker build -t piston-api api
- docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
- echo Waiting for API to start..
- docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
+ - name: Run tests
+ run: |
+ ls -la
+ docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
+ docker pull docker.pkg.github.com/engineer-man/piston/api
+ docker build -t piston-api api
+ docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
+ echo Waiting for API to start..
+ docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
- echo Waiting for Index to start..
- docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
+ echo Waiting for Index to start..
+ docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
- echo Adjusting index
- sed -i 's/repo/localhost/g' repo/index
+ echo Adjusting index
+ sed -i 's/repo/localhost/g' repo/index
- echo Listing Packages
- PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
- echo $PACKAGES_JSON
+ echo Listing Packages
+ PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
+ echo $PACKAGES_JSON
- echo Getting CLI ready
- docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
+ echo Getting CLI ready
+ docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
- for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
- do
- echo "Testing $package"
- PKG_PATH=$(sed 's|-|/|' <<< $package)
- PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
- PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
+ for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
+ do
+ echo "Testing $package"
+ PKG_PATH=$(sed 's|-|/|' <<< $package)
+ PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
+ PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
- echo "Installing..."
- docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
+ echo "Installing..."
+ docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
- TEST_SCRIPTS=packages/$PKG_PATH/test.*
- echo "Tests: $TEST_SCRIPTS"
+ TEST_SCRIPTS=packages/$PKG_PATH/test.*
+ echo "Tests: $TEST_SCRIPTS"
- for tscript in $TEST_SCRIPTS
- do
- TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
- echo Running $tscript with runtime=$TEST_RUNTIME
- docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
- cat test_output
- grep "OK" test_output
- done
- done
+ for tscript in $TEST_SCRIPTS
+ do
+ TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
+ echo Running $tscript with runtime=$TEST_RUNTIME
+ docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
+ cat test_output
+ grep "OK" test_output
+ done
+ done
- - name: Dump logs
- if: ${{ always() }}
- run: |
- docker logs api
- docker logs repo
+ - name: Dump logs
+ if: ${{ always() }}
+ run: |
+ docker logs api
+ docker logs repo
diff --git a/.github/workflows/package-push.yaml b/.github/workflows/package-push.yaml
index 9de6051..bbb44af 100644
--- a/.github/workflows/package-push.yaml
+++ b/.github/workflows/package-push.yaml
@@ -1,77 +1,78 @@
name: 'Package Pushed'
on:
- push:
- branches:
- - master
- - v3
- paths:
- - packages/**
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - packages/**
+
jobs:
- build-pkg:
- name: Build package
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
+ build-pkg:
+ name: Build package
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
+
+ - name: Build Packages
+ run: |
+ PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
+ echo "Packages: $PACKAGES"
+ docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+ docker build -t repo-builder repo
+ docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
+ ls -la packages
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ - name: Upload Packages
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: packages/*.pkg.tar.gz
+ tag: pkgs
+ overwrite: true
+ file_glob: true
+ create-index:
+ name: Create Index
+ runs-on: ubuntu-latest
+ needs: build-pkg
+ steps:
+ - name: "Download all release assets"
+ run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
+ - name: "Generate index file"
+ run: |
+ echo "" > index
+ BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
+ for pkg in *.pkg.tar.gz
+ do
+ PKGFILE=$(basename $pkg)
+ PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
- - name: Build Packages
- run: |
- PACKAGES=$(jq '.[]' -r ${HOME}/files*.json | awk -F/ '$1~/packages/ && $2 && $3{ print $2 "-" $3 }' | sort -u)
- echo "Packages: $PACKAGES"
- docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
- docker build -t repo-builder repo
- docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
- ls -la packages
-
- - name: Upload Packages
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: packages/*.pkg.tar.gz
- tag: pkgs
- overwrite: true
- file_glob: true
- create-index:
- name: Create Index
- runs-on: ubuntu-latest
- needs: build-pkg
- steps:
- - name: 'Download all release assets'
- run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
- - name: 'Generate index file'
- run: |
- echo "" > index
- BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
- for pkg in *.pkg.tar.gz
- do
- PKGFILE=$(basename $pkg)
- PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
-
- PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
- PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
- PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
- echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
- echo "Adding package $PKGNAME-$PKGVERSION"
- done
- - name: Upload index
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: index
- tag: pkgs
- overwrite: true
- file_glob: true
+ PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
+ PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
+ PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
+ echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
+ echo "Adding package $PKGNAME-$PKGVERSION"
+ done
+ - name: Upload index
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: index
+ tag: pkgs
+ overwrite: true
+ file_glob: true
diff --git a/.github/workflows/repo-push.yaml b/.github/workflows/repo-push.yaml
index c887b01..b5a603c 100644
--- a/.github/workflows/repo-push.yaml
+++ b/.github/workflows/repo-push.yaml
@@ -1,31 +1,31 @@
name: Publish Repo image
on:
- push:
- branches:
- - master
- - v3
- paths:
- - repo/**
-
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - repo/**
+
jobs:
- push_to_registry:
- runs-on: ubuntu-latest
- name: Build and Push Docker image to Github Packages
- steps:
- - name: Check out repo
- uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ push_to_registry:
+ runs-on: ubuntu-latest
+        name: Build and Push Docker image to GitHub Packages
+ steps:
+ - name: Check out repo
+ uses: actions/checkout@v2
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Build and push repo
- uses: docker/build-push-action@v2
- with:
- context: repo
- pull: true
- push: true
- tags: |
- docker.pkg.github.com/engineer-man/piston/repo-builder
+ - name: Build and push repo
+ uses: docker/build-push-action@v2
+ with:
+ context: repo
+ pull: true
+ push: true
+ tags: |
+ docker.pkg.github.com/engineer-man/piston/repo-builder
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 6d3d2cb..b706dd9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,3 @@
data/
.piston_env
-node_modules
-result
+result
\ No newline at end of file
diff --git a/.prettierignore b/.prettierignore
deleted file mode 100644
index bb310ab..0000000
--- a/.prettierignore
+++ /dev/null
@@ -1,12 +0,0 @@
-node_modules
-data/
-api/_piston
-repo/build
-packages/*/*/*
-packages/*.pkg.tar.gz
-!packages/*/*/metadata.json
-!packages/*/*/build.sh
-!packages/*/*/environment
-!packages/*/*/run
-!packages/*/*/compile
-!packages/*/*/test.*
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index 70f2b5e..e651ad5 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,8 +1,8 @@
version: 2
mkdocs:
- configuration: mkdocs.yml
+ configuration: mkdocs.yml
python:
- version: 3.7
- install:
- - requirements: docs/requirements.txt
+ version: 3.7
+ install:
+ - requirements: docs/requirements.txt
diff --git a/Dockerfile.withset b/Dockerfile.withset
deleted file mode 100644
index 4fd9a0b..0000000
--- a/Dockerfile.withset
+++ /dev/null
@@ -1,12 +0,0 @@
-# This "FROM" image is previously emitted by nix
-FROM ghcr.io/engineer-man/piston:base-latest
-
-ENV PISTON_FLAKE_PATH=/piston/packages
-COPY runtimes/ /piston/packages/runtimes
-COPY flake.nix flake.lock /piston/packages/
-
-
-ARG RUNTIMESET=all
-ENV PISTON_RUNTIME_SET=$RUNTIMESET
-
-RUN piston-install
\ No newline at end of file
diff --git a/api/.gitignore b/api/.gitignore
index 4b5a9b8..adbd330 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -1 +1,2 @@
-_piston
+node_modules
+_piston
\ No newline at end of file
diff --git a/api/.prettierignore b/api/.prettierignore
new file mode 100644
index 0000000..3c3629e
--- /dev/null
+++ b/api/.prettierignore
@@ -0,0 +1 @@
+node_modules
diff --git a/.prettierrc.yaml b/api/.prettierrc.yaml
similarity index 100%
rename from .prettierrc.yaml
rename to api/.prettierrc.yaml
diff --git a/api/default.nix b/api/default.nix
index a4d4868..c194587 100644
--- a/api/default.nix
+++ b/api/default.nix
@@ -51,8 +51,6 @@ with pkgs; rec {
do
echo "nixbld$i:x:$(( $i + 30000 )):30000:Nix build user $i:/var/empty:/run/current-system/sw/bin/nologin" >> etc/passwd
done
-
- chmod 1777 {,var/}tmp/
'';
config = {
@@ -63,21 +61,6 @@ with pkgs; rec {
"SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
"GIT_SSL_CAINFO=/etc/ssl/certs/ca-bundle.crt"
"NIX_SSL_CERT_FILE=/etc/ssl/certs/ca-bundle.crt"
- "PATH=${lib.concatStringsSep ":" [
- "/usr/local/sbin"
- "/usr/local/bin"
- "/usr/sbin"
- "/usr/bin"
- "/sbin"
- "/bin"
- "/root/.nix-profile/bin"
- "/nix/var/nix/profiles/default/bin"
- "/nix/var/nix/profiles/default/sbin"
- ]}"
- "MANPATH=${lib.concatStringsSep ":" [
- "/root/.nix-profile/share/man"
- "/nix/var/nix/profiles/default/share/man"
- ]}"
];
ExposedPorts = {
diff --git a/api/src/api/v2.js b/api/src/api/v2.js
index 86294b4..13d31bf 100644
--- a/api/src/api/v2.js
+++ b/api/src/api/v2.js
@@ -3,54 +3,16 @@ const router = express.Router();
const events = require('events');
+const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');
-const SIGNALS = [
- 'SIGABRT',
- 'SIGALRM',
- 'SIGBUS',
- 'SIGCHLD',
- 'SIGCLD',
- 'SIGCONT',
- 'SIGEMT',
- 'SIGFPE',
- 'SIGHUP',
- 'SIGILL',
- 'SIGINFO',
- 'SIGINT',
- 'SIGIO',
- 'SIGIOT',
- 'SIGKILL',
- 'SIGLOST',
- 'SIGPIPE',
- 'SIGPOLL',
- 'SIGPROF',
- 'SIGPWR',
- 'SIGQUIT',
- 'SIGSEGV',
- 'SIGSTKFLT',
- 'SIGSTOP',
- 'SIGTSTP',
- 'SIGSYS',
- 'SIGTERM',
- 'SIGTRAP',
- 'SIGTTIN',
- 'SIGTTOU',
- 'SIGUNUSED',
- 'SIGURG',
- 'SIGUSR1',
- 'SIGUSR2',
- 'SIGVTALRM',
- 'SIGXCPU',
- 'SIGXFSZ',
- 'SIGWINCH',
-];
+const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
// ref: https://man7.org/linux/man-pages/man7/signal.7.html
-function get_job(body) {
- let {
+function get_job(body){
+ const {
language,
args,
stdin,
@@ -58,7 +20,7 @@ function get_job(body) {
compile_memory_limit,
run_memory_limit,
run_timeout,
- compile_timeout,
+ compile_timeout
} = body;
return new Promise((resolve, reject) => {
@@ -73,6 +35,7 @@ function get_job(body) {
message: 'files is required as an array',
});
}
+
for (const [i, file] of files.entries()) {
if (typeof file.content !== 'string') {
return reject({
@@ -131,65 +94,23 @@ function get_job(body) {
});
}
- if (
- rt.language !== 'file' &&
- !files.some(file => !file.encoding || file.encoding === 'utf8')
- ) {
- return reject({
- message: 'files must include at least one utf8 encoded file',
- });
- }
-
- for (const constraint of ['memory_limit', 'timeout']) {
- for (const type of ['compile', 'run']) {
- const constraint_name = `${type}_${constraint}`;
- const constraint_value = body[constraint_name];
- const configured_limit = rt[`${constraint}s`][type];
- if (!constraint_value) {
- continue;
- }
- if (typeof constraint_value !== 'number') {
- return reject({
- message: `If specified, ${constraint_name} must be a number`,
- });
- }
- if (configured_limit <= 0) {
- continue;
- }
- if (constraint_value > configured_limit) {
- return reject({
- message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`,
- });
- }
- if (constraint_value < 0) {
- return reject({
- message: `${constraint_name} must be non-negative`,
- });
- }
+ resolve(new Job({
+ runtime: rt,
+ alias: language,
+ args: args || [],
+ stdin: stdin || "",
+ files,
+ timeouts: {
+ run: run_timeout || 3000,
+ compile: compile_timeout || 10000,
+ },
+ memory_limits: {
+ run: run_memory_limit || config.run_memory_limit,
+ compile: compile_memory_limit || config.compile_memory_limit,
}
- }
+ }));
+ })
- compile_timeout = compile_timeout || rt.timeouts.compile;
- run_timeout = run_timeout || rt.timeouts.run;
- compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
- run_memory_limit = run_memory_limit || rt.memory_limits.run;
- resolve(
- new Job({
- runtime: rt,
- args: args || [],
- stdin: stdin || '',
- files,
- timeouts: {
- run: run_timeout,
- compile: compile_timeout,
- },
- memory_limits: {
- run: run_memory_limit,
- compile: compile_memory_limit,
- },
- })
- );
- });
}
router.use((req, res, next) => {
@@ -207,104 +128,88 @@ router.use((req, res, next) => {
});
router.ws('/connect', async (ws, req) => {
+
let job = null;
let eventBus = new events.EventEmitter();
- eventBus.on('stdout', data =>
- ws.send(
- JSON.stringify({
- type: 'data',
- stream: 'stdout',
- data: data.toString(),
- })
- )
- );
- eventBus.on('stderr', data =>
- ws.send(
- JSON.stringify({
- type: 'data',
- stream: 'stderr',
- data: data.toString(),
- })
- )
- );
- eventBus.on('stage', stage =>
- ws.send(JSON.stringify({ type: 'stage', stage }))
- );
- eventBus.on('exit', (stage, status) =>
- ws.send(JSON.stringify({ type: 'exit', stage, ...status }))
- );
+ eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
+ eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
+ eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
+ eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
- ws.on('message', async data => {
- try {
+ ws.on("message", async (data) => {
+
+ try{
const msg = JSON.parse(data);
- switch (msg.type) {
- case 'init':
- if (job === null) {
+ switch(msg.type){
+ case "init":
+ if(job === null){
job = await get_job(msg);
await job.prime();
- ws.send(
- JSON.stringify({
- type: 'runtime',
- language: job.runtime.language,
- version: job.runtime.version.raw,
- })
- );
+ ws.send(JSON.stringify({
+ type: "runtime",
+ language: job.runtime.language,
+ version: job.runtime.version.raw
+ }))
await job.execute_interactive(eventBus);
- ws.close(4999, 'Job Completed');
- } else {
- ws.close(4000, 'Already Initialized');
+ ws.close(4999, "Job Completed");
+
+ }else{
+ ws.close(4000, "Already Initialized");
}
break;
- case 'data':
- if (job !== null) {
- if (msg.stream === 'stdin') {
- eventBus.emit('stdin', msg.data);
- } else {
- ws.close(4004, 'Can only write to stdin');
- }
- } else {
- ws.close(4003, 'Not yet initialized');
+ case "data":
+ if(job !== null){
+ if(msg.stream === "stdin"){
+ eventBus.emit("stdin", msg.data)
+ }else{
+ ws.close(4004, "Can only write to stdin")
}
- break;
- case 'signal':
- if (job !== null) {
- if (SIGNALS.includes(msg.signal)) {
- eventBus.emit('signal', msg.signal);
- } else {
- ws.close(4005, 'Invalid signal');
- }
- } else {
- ws.close(4003, 'Not yet initialized');
+ }else{
+ ws.close(4003, "Not yet initialized")
+ }
+ break;
+ case "signal":
+ if(job !== null){
+ if(SIGNALS.includes(msg.signal)){
+ eventBus.emit("signal", msg.signal)
+ }else{
+ ws.close(4005, "Invalid signal")
}
- break;
+ }else{
+ ws.close(4003, "Not yet initialized")
+ }
+ break;
}
- } catch (error) {
- ws.send(JSON.stringify({ type: 'error', message: error.message }));
- ws.close(4002, 'Notified Error');
+
+ }catch(error){
+ ws.send(JSON.stringify({type: "error", message: error.message}))
+ ws.close(4002, "Notified Error")
// ws.close message is limited to 123 characters, so we notify over WS then close.
}
- });
+ })
- ws.on('close', async () => {
- if (job !== null) {
- await job.cleanup();
+ ws.on("close", async ()=>{
+ if(job !== null){
+ await job.cleanup()
}
- });
+ })
- setTimeout(() => {
+ setTimeout(()=>{
//Terminate the socket after 1 second, if not initialized.
- if (job === null) ws.close(4001, 'Initialization Timeout');
- }, 1000);
-});
+ if(job === null)
+ ws.close(4001, "Initialization Timeout");
+ }, 1000)
+})
router.post('/execute', async (req, res) => {
- try {
+
+ try{
const job = await get_job(req.body);
await job.prime();
@@ -314,7 +219,7 @@ router.post('/execute', async (req, res) => {
await job.cleanup();
return res.status(200).send(result);
- } catch (error) {
+ }catch(error){
return res.status(400).json(error);
}
});
diff --git a/api/src/bin/pistond.js b/api/src/bin/pistond.js
index c56cc99..9016eee 100755
--- a/api/src/bin/pistond.js
+++ b/api/src/bin/pistond.js
@@ -16,6 +16,8 @@ const logger = Logger.create('pistond');
const app = express();
expressWs(app);
+
+
(async () => {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
diff --git a/api/src/bin/test.js b/api/src/bin/test.js
index 16de55d..134ce2a 100755
--- a/api/src/bin/test.js
+++ b/api/src/bin/test.js
@@ -5,105 +5,108 @@ const config = require('../config');
const Logger = require('logplease');
const logger = Logger.create('test');
const cp = require('child_process');
-const runtime = require('../runtime');
+const runtime = require("../runtime");
const { Job } = require('../job');
-(async function () {
+(async function(){
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
+
+
let runtimes_to_test;
let failed = false;
- if (process.argv[2] === '--all') {
+ if(process.argv[2] === "--all"){
// load all
runtimes_to_test = JSON.parse(
- cp.execSync(
- `nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`
- )
+ cp.execSync(`nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`)
);
- } else {
+ }else{
runtimes_to_test = [process.argv[2]];
}
+
+
for (const runtime_name of runtimes_to_test) {
+
+
const runtime_path = `${config.flake_path}#pistonRuntimes.${runtime_name}`;
logger.info(`Testing runtime ${runtime_path}`);
logger.debug(`Loading runtime metadata`);
- const metadata = JSON.parse(
- cp.execSync(`nix eval --json ${runtime_path}.metadata --json`)
- );
+ const metadata = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.metadata --json`));
logger.debug(`Loading runtime tests`);
- const tests = JSON.parse(
- cp.execSync(`nix eval --json ${runtime_path}.tests --json`)
- );
+ const tests = JSON.parse(cp.execSync(`nix eval --json ${runtime_path}.tests --json`));
logger.debug(`Loading runtime`);
const testable_runtime = new runtime.Runtime({
...metadata,
- ...runtime.Runtime.compute_all_limits(
- metadata.language,
- metadata.limitOverrides
- ),
- flake_path: runtime_path,
+ flake_path: runtime_path
});
testable_runtime.ensure_built();
+
logger.info(`Running tests`);
for (const test of tests) {
+
const files = [];
for (const file_name of Object.keys(test.files)) {
const file_content = test.files[file_name];
const this_file = {
name: file_name,
- content: file_content,
+ content: file_content
};
- if (file_name == test.main) files.unshift(this_file);
- else files.push(this_file);
+ if(file_name == test.main)
+ files.unshift(this_file);
+ else
+ files.push(this_file);
+
}
+
const job = new Job({
runtime: testable_runtime,
args: test.args || [],
- stdin: test.stdin || '',
+ stdin: test.stdin || "",
files,
timeouts: {
run: 3000,
- compile: 10000,
+ compile: 10000
},
memory_limits: {
run: config.run_memory_limit,
- compile: config.compile_memory_limit,
- },
+ compile: config.compile_memory_limit
+ }
});
- await job.prime();
- const result = await job.execute();
- await job.cleanup();
-
- if (result.run.stdout.trim() !== 'OK') {
+ await job.prime()
+ const result = await job.execute()
+ await job.cleanup()
+
+ if(result.run.stdout.trim() !== "OK"){
failed = true;
- logger.error('Test Failed:');
- console.log(job, result);
- } else {
- logger.info('Test Passed');
+ logger.error("Test Failed:")
+ console.log(job, result)
+ }else{
+ logger.info("Test Passed")
}
}
}
- if (failed) {
- logger.error('One or more tests failed');
+ if(failed) {
+ logger.error("One or more tests failed")
process.exit(1);
- } else {
- logger.info('All tests passed');
+ }
+ else {
+ logger.info("All tests passed")
process.exit(0);
}
-})();
+})()
\ No newline at end of file
diff --git a/api/src/config.js b/api/src/config.js
index 897a938..fb83228 100644
--- a/api/src/config.js
+++ b/api/src/config.js
@@ -2,57 +2,6 @@ const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');
-function parse_overrides(overrides) {
- try {
- return JSON.parse(overrides);
- } catch (e) {
- return null;
- }
-}
-
-function validate_overrides(overrides, options) {
- for (const language in overrides) {
- for (const key in overrides[language]) {
- if (
- ![
- 'max_process_count',
- 'max_open_files',
- 'max_file_size',
- 'compile_memory_limit',
- 'run_memory_limit',
- 'compile_timeout',
- 'run_timeout',
- 'output_max_size',
- ].includes(key)
- ) {
- logger.error(`Invalid overridden option: ${key}`);
- return false;
- }
- const option = options.find(o => o.key === key);
- const parser = option.parser;
- const raw = overrides[language][key];
- const value = parser(raw);
- const validators = option.validators;
- for (const validator of validators) {
- const response = validator(value, raw);
- if (response !== true) {
- logger.error(
- `Failed to validate overridden option: ${key}`,
- response
- );
- return false;
- }
- }
- overrides[language][key] = value;
- }
- // Modifies the reference
- options[
- options.index_of(options.find(o => o.key === 'limit_overrides'))
- ] = overrides;
- }
- return true;
-}
-
const options = [
{
key: 'log_level',
@@ -68,7 +17,7 @@ const options = [
{
key: 'bind_address',
desc: 'Address to bind REST API on',
- default: `0.0.0.0:${process.env["PORT"] || 2000}`,
+ default: '0.0.0.0:2000',
validators: [],
},
{
@@ -142,30 +91,18 @@ const options = [
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
- {
- key: 'compile_timeout',
- desc: 'Max time allowed for compile stage in milliseconds',
- default: 10000, // 10 seconds
- parser: parse_int,
- validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
- },
- {
- key: 'run_timeout',
- desc: 'Max time allowed for run stage in milliseconds',
- default: 3000, // 3 seconds
- parser: parse_int,
- validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
- },
{
key: 'compile_memory_limit',
- desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
+ desc:
+ 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_memory_limit',
- desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
+ desc:
+ 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
@@ -187,22 +124,8 @@ const options = [
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
- validators: [x => x > 0 || `${x} cannot be negative`],
- },
- {
- key: 'limit_overrides',
- desc: 'Per-language exceptions in JSON format for each of:\
- max_process_count, max_open_files, max_file_size, compile_memory_limit,\
- run_memory_limit, compile_timeout, run_timeout, output_max_size',
- default: {},
- parser: parse_overrides,
- validators: [
- x => !!x || `Invalid JSON format for the overrides\n${x}`,
- (overrides, _, options) =>
- validate_overrides(overrides, options) ||
- `Failed to validate the overrides`,
- ],
- },
+ validators: [(x) => x > 0 || `${x} cannot be negative`]
+ }
];
logger.info(`Loading Configuration from environment`);
@@ -220,12 +143,12 @@ options.forEach(option => {
const parsed_val = parser(env_val);
- const value = env_val === undefined ? option.default : parsed_val;
+ const value = env_val || option.default;
option.validators.for_each(validator => {
let response = null;
- if (env_val) response = validator(parsed_val, env_val, options);
- else response = validator(value, value, options);
+ if (env_val) response = validator(parsed_val, env_val);
+ else response = validator(value, value);
if (response !== true) {
errored = true;
diff --git a/api/src/job.js b/api/src/job.js
index c562693..bffd0ea 100644
--- a/api/src/job.js
+++ b/api/src/job.js
@@ -1,12 +1,10 @@
-const logplease = require('logplease');
-const logger = logplease.create('job');
+const logger = require('logplease').create('job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('./config');
const globals = require('./globals');
const fs = require('fs/promises');
-const fss = require('fs');
const wait_pid = require('waitpid');
const job_states = {
@@ -18,34 +16,30 @@ const job_states = {
let uid = 0;
let gid = 0;
-let remaining_job_spaces = config.max_concurrent_jobs;
+let remainingJobSpaces = config.max_concurrent_jobs;
let jobQueue = [];
-setInterval(() => {
+
+setInterval(()=>{
// Every 10ms try resolve a new job, if there is an available slot
- if (jobQueue.length > 0 && remaining_job_spaces > 0) {
- jobQueue.shift()();
+ if(jobQueue.length > 0 && remainingJobSpaces > 0){
+ jobQueue.shift()()
}
-}, 10);
+}, 10)
+
+
class Job {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
this.uuid = uuidv4();
-
- this.logger = logplease.create(`job/${this.uuid}`);
-
this.runtime = runtime;
this.files = files.map((file, i) => ({
name: file.name || `file${i}.code`,
content: file.content,
- encoding: ['base64', 'hex', 'utf8'].includes(file.encoding)
- ? file.encoding
- : 'utf8',
}));
this.args = args;
this.stdin = stdin;
-
this.timeouts = timeouts;
this.memory_limits = memory_limits;
@@ -58,8 +52,6 @@ class Job {
uid %= config.runner_uid_max - config.runner_uid_min + 1;
gid %= config.runner_gid_max - config.runner_gid_min + 1;
- this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`);
-
this.state = job_states.READY;
this.dir = path.join(
config.data_directory,
@@ -69,45 +61,39 @@ class Job {
}
async prime() {
- if (remaining_job_spaces < 1) {
- this.logger.info(`Awaiting job slot`);
- await new Promise(resolve => {
- jobQueue.push(resolve);
- });
+ if(remainingJobSpaces < 1){
+ logger.info(`Awaiting job slot uuid=${this.uuid}`)
+ await new Promise((resolve)=>{
+ jobQueue.push(resolve)
+ })
}
- this.logger.info(`Priming job`);
- remaining_job_spaces--;
- this.logger.debug('Writing files to job cache');
+ logger.info(`Priming job uuid=${this.uuid}`);
+ remainingJobSpaces--;
+ logger.debug('Writing files to job cache');
- this.logger.debug(`Transfering ownership`);
+ logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);
await fs.mkdir(this.dir, { mode: 0o700 });
await fs.chown(this.dir, this.uid, this.gid);
for (const file of this.files) {
- const file_path = path.join(this.dir, file.name);
+ let file_path = path.join(this.dir, file.name);
const rel = path.relative(this.dir, file_path);
- const file_content = Buffer.from(file.content, file.encoding);
- if (rel.startsWith('..'))
- throw Error(
- `File path "${file.name}" tries to escape parent directory: ${rel}`
- );
+ if(rel.startsWith(".."))
+ throw Error(`File path "${file.name}" tries to escape parent directory: ${rel}`)
- await fs.mkdir(path.dirname(file_path), {
- recursive: true,
- mode: 0o700,
- });
+ await fs.mkdir(path.dirname(file_path), {recursive: true, mode: 0o700})
await fs.chown(path.dirname(file_path), this.uid, this.gid);
- await fs.write_file(file_path, file_content);
+ await fs.write_file(file_path, file.content);
await fs.chown(file_path, this.uid, this.gid);
}
this.state = job_states.PRIMED;
- this.logger.debug('Primed job');
+ logger.debug('Primed job');
}
async safe_call(file, args, timeout, memory_limit, eventBus = null) {
@@ -116,29 +102,26 @@ class Job {
const prlimit = [
'prlimit',
- '--nproc=' + this.runtime.max_process_count,
- '--nofile=' + this.runtime.max_open_files,
- '--fsize=' + this.runtime.max_file_size,
+ '--nproc=' + config.max_process_count,
+ '--nofile=' + config.max_open_files,
+ '--fsize=' + config.max_file_size,
];
if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit);
}
- const proc_call = [
- 'nice',
- ...prlimit,
- ...nonetwork,
- 'bash',
- file,
- ...args,
- ];
+ const proc_call = [...prlimit, ...nonetwork, 'bash', file, ...args];
var stdout = '';
var stderr = '';
var output = '';
const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
+ env: {
+ ...this.runtime.env_vars,
+ PISTON_LANGUAGE: this.runtime.language,
+ },
stdio: 'pipe',
cwd: this.dir,
uid: this.uid,
@@ -146,34 +129,36 @@ class Job {
detached: true, //give this process its own process group
});
- if (eventBus === null) {
+ if(eventBus === null){
proc.stdin.write(this.stdin);
proc.stdin.end();
proc.stdin.destroy();
- } else {
- eventBus.on('stdin', data => {
+ }else{
+ eventBus.on("stdin", (data) => {
proc.stdin.write(data);
- });
+ })
- eventBus.on('kill', signal => {
- proc.kill(signal);
- });
+ eventBus.on("kill", (signal) => {
+ proc.kill(signal)
+ })
}
+
+
- const kill_timeout =
- (timeout >= 0 &&
- set_timeout(async _ => {
- this.logger.info(`Timeout exceeded timeout=${timeout}`);
- process.kill(proc.pid, 'SIGKILL');
- }, timeout)) ||
- null;
+ const kill_timeout = set_timeout(
+ async _ => {
+ logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`)
+ process.kill(proc.pid, 'SIGKILL')
+ },
+ timeout
+ );
proc.stderr.on('data', async data => {
- if (eventBus !== null) {
- eventBus.emit('stderr', data);
- } else if (stderr.length > this.runtime.output_max_size) {
- this.logger.info(`stderr length exceeded`);
- process.kill(proc.pid, 'SIGKILL');
+ if(eventBus !== null) {
+ eventBus.emit("stderr", data);
+ } else if (stderr.length > config.output_max_size) {
+ logger.info(`stderr length exceeded uuid=${this.uuid}`)
+ process.kill(proc.pid, 'SIGKILL')
} else {
stderr += data;
output += data;
@@ -181,35 +166,35 @@ class Job {
});
proc.stdout.on('data', async data => {
- if (eventBus !== null) {
- eventBus.emit('stdout', data);
- } else if (stdout.length > this.runtime.output_max_size) {
- this.logger.info(`stdout length exceeded`);
- process.kill(proc.pid, 'SIGKILL');
+ if(eventBus !== null){
+ eventBus.emit("stdout", data);
+ } else if (stdout.length > config.output_max_size) {
+ logger.info(`stdout length exceeded uuid=${this.uuid}`)
+ process.kill(proc.pid, 'SIGKILL')
} else {
stdout += data;
output += data;
}
});
- const exit_cleanup = () => {
+ const exit_cleanup = async () => {
clear_timeout(kill_timeout);
proc.stderr.destroy();
proc.stdout.destroy();
- this.cleanup_processes();
- this.logger.debug(`Finished exit cleanup`);
+ await this.cleanup_processes()
+ logger.debug(`Finished exit cleanup uuid=${this.uuid}`)
};
- proc.on('exit', (code, signal) => {
- exit_cleanup();
+ proc.on('exit', async (code, signal) => {
+ await exit_cleanup();
- resolve({ stdout, stderr, code, signal, output });
+ resolve({stdout, stderr, code, signal, output });
});
- proc.on('error', err => {
- exit_cleanup();
+ proc.on('error', async err => {
+ await exit_cleanup();
reject({ error: err, stdout, stderr, output });
});
@@ -224,13 +209,13 @@ class Job {
);
}
- this.logger.info(`Executing job runtime=${this.runtime.toString()}`);
+ logger.info(
+ `Executing job uuid=${this.uuid} uid=${this.uid} gid=${
+ this.gid
+ } runtime=${this.runtime.toString()}`
+ );
- const code_files =
- (this.runtime.language === 'file' && this.files) ||
- this.files.filter(file => file.encoding == 'utf8');
-
- this.logger.debug('Compiling');
+ logger.debug('Compiling');
let compile;
@@ -243,11 +228,11 @@ class Job {
);
}
- this.logger.debug('Running');
+ logger.debug('Running');
const run = await this.safe_call(
this.runtime.run,
- [code_files[0].name, ...this.args],
+ [this.files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run
);
@@ -262,7 +247,7 @@ class Job {
};
}
- async execute_interactive(eventBus) {
+ async execute_interactive(eventBus){
if (this.state !== job_states.PRIMED) {
throw new Error(
'Job must be in primed state, current state: ' +
@@ -270,98 +255,84 @@ class Job {
);
}
- this.logger.info(
- `Interactively executing job runtime=${this.runtime.toString()}`
+ logger.info(
+ `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
+ this.gid
+ } runtime=${this.runtime.toString()}`
);
- const code_files =
- (this.runtime.language === 'file' && this.files) ||
- this.files.filter(file => file.encoding == 'utf8');
-
- if (this.runtime.compiled) {
- eventBus.emit('stage', 'compile');
- const { error, code, signal } = await this.safe_call(
- path.join(this.runtime.pkgdir, 'compile'),
- code_files.map(x => x.name),
+ if(this.runtime.compiled){
+ eventBus.emit("stage", "compile")
+ const {error, code, signal} = await this.safe_call(
+ this.runtime.compile,
+ this.files.map(x => x.name),
this.timeouts.compile,
this.memory_limits.compile,
eventBus
- );
+ )
- eventBus.emit('exit', 'compile', { error, code, signal });
+ eventBus.emit("exit", "compile", {error, code, signal})
}
- this.logger.debug('Running');
- eventBus.emit('stage', 'run');
- const { error, code, signal } = await this.safe_call(
- path.join(this.runtime.pkgdir, 'run'),
- [code_files[0].name, ...this.args],
+ logger.debug('Running');
+ eventBus.emit("stage", "run")
+ const {error, code, signal} = await this.safe_call(
+ this.runtime.run,
+ [this.files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run,
eventBus
);
- eventBus.emit('exit', 'run', { error, code, signal });
+ eventBus.emit("exit", "run", {error, code, signal})
+
this.state = job_states.EXECUTED;
}
- cleanup_processes(dont_wait = []) {
+ async cleanup_processes(dont_wait = []) {
let processes = [1];
- const to_wait = [];
- this.logger.debug(`Cleaning up processes`);
+ logger.debug(`Cleaning up processes uuid=${this.uuid}`)
while (processes.length > 0) {
- processes = [];
+ processes = []
- const proc_ids = fss.readdir_sync('/proc');
- processes = proc_ids.map(proc_id => {
- if (isNaN(proc_id)) return -1;
- try {
- const proc_status = fss.read_file_sync(
- path.join('/proc', proc_id, 'status')
- );
- const proc_lines = proc_status.to_string().split('\n');
- const state_line = proc_lines.find(line =>
- line.starts_with('State:')
- );
- const uid_line = proc_lines.find(line =>
- line.starts_with('Uid:')
- );
+ const proc_ids = await fs.readdir("/proc");
+
+
+ processes = await Promise.all(proc_ids.map(async (proc_id) => {
+ if(isNaN(proc_id)) return -1;
+ try{
+ const proc_status = await fs.read_file(path.join("/proc",proc_id,"status"));
+ const proc_lines = proc_status.to_string().split("\n")
+ const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
+
+
+ if(ruid == this.uid || euid == this.uid)
+ return parse_int(proc_id)
- const [_1, state, user_friendly] = state_line.split(/\s+/);
-
- if (state == 'Z')
- // Zombie process, just needs to be waited
- return -1;
- // We should kill in all other state (Sleep, Stopped & Running)
-
- if (ruid == this.uid || euid == this.uid)
- return parse_int(proc_id);
- } catch {
- return -1;
+ }catch{
+ return -1
}
- return -1;
- });
+ return -1
+ }))
+
+ processes = processes.filter(p => p > 0)
+
+ if(processes.length > 0)
+ logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`)
- processes = processes.filter(p => p > 0);
- if (processes.length > 0)
- this.logger.debug(`Got processes to kill: ${processes}`);
for (const proc of processes) {
// First stop the processes, but keep their resources allocated so they cant re-fork
try {
process.kill(proc, 'SIGSTOP');
- } catch (e) {
+ } catch {
// Could already be dead
- this.logger.debug(
- `Got error while SIGSTOPping process ${proc}:`,
- e
- );
}
}
@@ -371,27 +342,14 @@ class Job {
process.kill(proc, 'SIGKILL');
} catch {
// Could already be dead and just needs to be waited on
- this.logger.debug(
- `Got error while SIGKILLing process ${proc}:`,
- e
- );
}
- to_wait.push(proc);
+ if(!dont_wait.includes(proc))
+ wait_pid(proc);
}
}
- this.logger.debug(
- `Finished kill-loop, calling wait_pid to end any zombie processes`
- );
-
- for (const proc of to_wait) {
- if (dont_wait.includes(proc)) continue;
-
- wait_pid(proc);
- }
-
- this.logger.debug(`Cleaned up processes`);
+ logger.debug(`Cleaned up processes uuid=${this.uuid}`)
}
async cleanup_filesystem() {
@@ -412,7 +370,7 @@ class Job {
}
} catch (e) {
// File was somehow deleted in the time that we read the dir to when we checked the file
- this.logger.warn(`Error removing file ${file_path}: ${e}`);
+ logger.warn(`Error removing file ${file_path}: ${e}`);
}
}
}
@@ -421,15 +379,15 @@ class Job {
}
async cleanup() {
- this.logger.info(`Cleaning up job`);
+ logger.info(`Cleaning up job uuid=${this.uuid}`);
- this.cleanup_processes(); // Run process janitor, just incase there are any residual processes somehow
await this.cleanup_filesystem();
- remaining_job_spaces++;
+ remainingJobSpaces++;
}
}
+
module.exports = {
Job,
};
diff --git a/api/src/runtime.js b/api/src/runtime.js
index b239426..02c416b 100644
--- a/api/src/runtime.js
+++ b/api/src/runtime.js
@@ -7,36 +7,14 @@ const path = require('path');
const runtimes = [];
+
class Runtime {
- constructor({
- language,
- version,
- aliases,
- runtime,
- run,
- compile,
- packageSupport,
- flake_path,
- timeouts,
- memory_limits,
- max_process_count,
- max_open_files,
- max_file_size,
- output_max_size,
- }) {
+ constructor({ language, version, aliases, runtime, run, compile, packageSupport, flake_path }) {
this.language = language;
this.runtime = runtime;
-
- this.timeouts = timeouts;
- this.memory_limits = memory_limits;
- this.max_process_count = max_process_count;
- this.max_open_files = max_open_files;
- this.max_file_size = max_file_size;
- this.output_max_size = output_max_size;
-
this.aliases = aliases;
- this.version = version;
-
+ this.version = version;
+
this.run = run;
this.compile = compile;
@@ -44,120 +22,58 @@ class Runtime {
this.package_support = packageSupport;
}
- static compute_single_limit(
- language_name,
- limit_name,
- language_limit_overrides
- ) {
- return (
- (config.limit_overrides[language_name] &&
- config.limit_overrides[language_name][limit_name]) ||
- (language_limit_overrides &&
- language_limit_overrides[limit_name]) ||
- config[limit_name]
- );
- }
-
- static compute_all_limits(language_name, language_limit_overrides) {
- return {
- timeouts: {
- compile: this.compute_single_limit(
- language_name,
- 'compile_timeout',
- language_limit_overrides
- ),
- run: this.compute_single_limit(
- language_name,
- 'run_timeout',
- language_limit_overrides
- ),
- },
- memory_limits: {
- compile: this.compute_single_limit(
- language_name,
- 'compile_memory_limit',
- language_limit_overrides
- ),
- run: this.compute_single_limit(
- language_name,
- 'run_memory_limit',
- language_limit_overrides
- ),
- },
- max_process_count: this.compute_single_limit(
- language_name,
- 'max_process_count',
- language_limit_overrides
- ),
- max_open_files: this.compute_single_limit(
- language_name,
- 'max_open_files',
- language_limit_overrides
- ),
- max_file_size: this.compute_single_limit(
- language_name,
- 'max_file_size',
- language_limit_overrides
- ),
- output_max_size: this.compute_single_limit(
- language_name,
- 'output_max_size',
- language_limit_overrides
- ),
- };
- }
-
- ensure_built() {
+ ensure_built(){
logger.info(`Ensuring ${this} is built`);
const flake_path = this.flake_path;
- function _ensure_built(key) {
+ function _ensure_built(key){
const command = `nix build ${flake_path}.metadata.${key} --no-link`;
- cp.execSync(command, { stdio: 'pipe' });
+ cp.execSync(command, {stdio: "pipe"})
}
- _ensure_built('run');
- if (this.compiled) _ensure_built('compile');
+ _ensure_built("run");
+ if(this.compiled) _ensure_built("compile");
+
+ logger.debug(`Finished ensuring ${this} is installed`)
- logger.debug(`Finished ensuring ${this} is installed`);
}
- static load_runtime(flake_key) {
- logger.info(`Loading ${flake_key}`);
+ static load_runtime(flake_key){
+ logger.info(`Loading ${flake_key}`)
const flake_path = `${config.flake_path}#pistonRuntimeSets.${config.runtime_set}.${flake_key}`;
const metadata_command = `nix eval --json ${flake_path}.metadata`;
const metadata = JSON.parse(cp.execSync(metadata_command));
-
+
const this_runtime = new Runtime({
...metadata,
- ...Runtime.compute_all_limits(
- metadata.language,
- metadata.limitOverrides
- ),
- flake_path,
+ flake_path
});
this_runtime.ensure_built();
runtimes.push(this_runtime);
-
+
+
logger.debug(`Package ${flake_key} was loaded`);
+
}
get compiled() {
return this.compile !== null;
}
- get id() {
+ get id(){
return runtimes.indexOf(this);
}
toString() {
return `${this.language}-${this.version}`;
}
+
}
module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.load_runtime = Runtime.load_runtime;
+
diff --git a/cli/.gitignore b/cli/.gitignore
new file mode 100644
index 0000000..b512c09
--- /dev/null
+++ b/cli/.gitignore
@@ -0,0 +1 @@
+node_modules
\ No newline at end of file
diff --git a/cli/commands/execute.js b/cli/commands/execute.js
index 0d906bc..abb1f63 100644
--- a/cli/commands/execute.js
+++ b/cli/commands/execute.js
@@ -3,44 +3,8 @@ const path = require('path');
const chalk = require('chalk');
const WebSocket = require('ws');
-const SIGNALS = [
- 'SIGABRT',
- 'SIGALRM',
- 'SIGBUS',
- 'SIGCHLD',
- 'SIGCLD',
- 'SIGCONT',
- 'SIGEMT',
- 'SIGFPE',
- 'SIGHUP',
- 'SIGILL',
- 'SIGINFO',
- 'SIGINT',
- 'SIGIO',
- 'SIGIOT',
- 'SIGLOST',
- 'SIGPIPE',
- 'SIGPOLL',
- 'SIGPROF',
- 'SIGPWR',
- 'SIGQUIT',
- 'SIGSEGV',
- 'SIGSTKFLT',
- 'SIGTSTP',
- 'SIGSYS',
- 'SIGTERM',
- 'SIGTRAP',
- 'SIGTTIN',
- 'SIGTTOU',
- 'SIGUNUSED',
- 'SIGURG',
- 'SIGUSR1',
- 'SIGUSR2',
- 'SIGVTALRM',
- 'SIGXCPU',
- 'SIGXFSZ',
- 'SIGWINCH',
-];
+const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
+
exports.command = ['execute [args..]'];
exports.aliases = ['run'];
@@ -51,18 +15,18 @@ exports.builder = {
string: true,
desc: 'Set the version of the language to use',
alias: ['l'],
- default: '*',
+ default: '*'
},
stdin: {
boolean: true,
desc: 'Read input from stdin and pass to executor',
- alias: ['i'],
+ alias: ['i']
},
run_timeout: {
alias: ['rt', 'r'],
number: true,
desc: 'Milliseconds before killing run process',
- default: 3000,
+ default: 3000
},
compile_timeout: {
alias: ['ct', 'c'],
@@ -78,126 +42,117 @@ exports.builder = {
interactive: {
boolean: true,
alias: ['t'],
- desc: 'Run interactively using WebSocket transport',
+ desc: 'Run interactively using WebSocket transport'
},
status: {
boolean: true,
alias: ['s'],
- desc: 'Output additional status to stderr',
- },
+ desc: 'Output additional status to stderr'
+ }
};
-async function handle_interactive(files, argv) {
- const ws = new WebSocket(
- argv.pistonUrl.replace('http', 'ws') + '/api/v2/connect'
- );
+async function handle_interactive(files, argv){
+ const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
- const log_message =
- process.stderr.isTTY && argv.status ? console.error : () => {};
+ const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
- process.on('exit', () => {
+ process.on("exit", ()=>{
ws.close();
process.stdin.end();
process.stdin.destroy();
- process.exit();
- });
+ process.exit();
+ })
- for (const signal of SIGNALS) {
- process.on(signal, () => {
- ws.send(JSON.stringify({ type: 'signal', signal }));
- });
+ for(const signal of SIGNALS){
+ process.on(signal, ()=>{
+ ws.send(JSON.stringify({type: 'signal', signal}))
+ })
}
- ws.on('open', () => {
+
+
+ ws.on('open', ()=>{
const request = {
- type: 'init',
+ type: "init",
language: argv.language,
version: argv['language_version'],
files: files,
args: argv.args,
compile_timeout: argv.ct,
- run_timeout: argv.rt,
- };
+ run_timeout: argv.rt
+ }
- ws.send(JSON.stringify(request));
- log_message(chalk.white.bold('Connected'));
+ ws.send(JSON.stringify(request))
+ log_message(chalk.white.bold("Connected"))
process.stdin.resume();
- process.stdin.on('data', data => {
- ws.send(
- JSON.stringify({
- type: 'data',
- stream: 'stdin',
- data: data.toString(),
- })
- );
- });
- });
+ process.stdin.on("data", (data) => {
+ ws.send(JSON.stringify({
+ type: "data",
+ stream: "stdin",
+ data: data.toString()
+ }))
+ })
+ })
- ws.on('close', (code, reason) => {
+ ws.on("close", (code, reason)=>{
log_message(
- chalk.white.bold('Disconnected: '),
- chalk.white.bold('Reason: '),
+ chalk.white.bold("Disconnected: "),
+ chalk.white.bold("Reason: "),
chalk.yellow(`"${reason}"`),
- chalk.white.bold('Code: '),
- chalk.yellow(`"${code}"`)
- );
- process.stdin.pause();
- });
+ chalk.white.bold("Code: "),
+ chalk.yellow(`"${code}"`),
+ )
+ process.stdin.pause()
+ })
- ws.on('message', function (data) {
+ ws.on('message', function(data){
const msg = JSON.parse(data);
-
- switch (msg.type) {
- case 'runtime':
- log_message(
- chalk.bold.white('Runtime:'),
- chalk.yellow(`${msg.language} ${msg.version}`)
- );
+
+ switch(msg.type){
+ case "runtime":
+ log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
break;
- case 'stage':
- log_message(
- chalk.bold.white('Stage:'),
- chalk.yellow(msg.stage)
- );
+ case "stage":
+ log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
break;
- case 'data':
- if (msg.stream == 'stdout') process.stdout.write(msg.data);
- else if (msg.stream == 'stderr') process.stderr.write(msg.data);
- else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data);
+ case "data":
+ if(msg.stream == "stdout") process.stdout.write(msg.data)
+ else if(msg.stream == "stderr") process.stderr.write(msg.data)
+ else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
break;
- case 'exit':
- if (msg.signal === null)
+ case "exit":
+ if(msg.signal === null)
log_message(
- chalk.white.bold('Stage'),
+ chalk.white.bold("Stage"),
chalk.yellow(msg.stage),
- chalk.white.bold('exited with code'),
+ chalk.white.bold("exited with code"),
chalk.yellow(msg.code)
- );
+ )
else
log_message(
- chalk.white.bold('Stage'),
+ chalk.white.bold("Stage"),
chalk.yellow(msg.stage),
- chalk.white.bold('exited with signal'),
+ chalk.white.bold("exited with signal"),
chalk.yellow(msg.signal)
- );
- break;
+ )
+ break;
default:
- log_message(chalk.red.bold('Unknown message:'), msg);
+ log_message(chalk.red.bold("Unknown message:"), msg)
}
- });
+ })
+
}
async function run_non_interactively(files, argv) {
- const stdin =
- (argv.stdin &&
- (await new Promise((resolve, _) => {
- let data = '';
- process.stdin.on('data', d => (data += d));
- process.stdin.on('end', _ => resolve(data));
- }))) ||
- '';
+
+
+ const stdin = (argv.stdin && await new Promise((resolve, _) => {
+ let data = '';
+ process.stdin.on('data', d => data += d);
+ process.stdin.on('end', _ => resolve(data));
+ })) || '';
const request = {
language: argv.language,
@@ -206,7 +161,7 @@ async function run_non_interactively(files, argv) {
args: argv.args,
stdin,
compile_timeout: argv.ct,
- run_timeout: argv.rt,
+ run_timeout: argv.rt
};
let { data: response } = await argv.axios.post('/api/v2/execute', request);
@@ -215,13 +170,13 @@ async function run_non_interactively(files, argv) {
console.log(chalk.bold(`== ${name} ==`));
if (ctx.stdout) {
- console.log(chalk.bold(`STDOUT`));
- console.log(ctx.stdout.replace(/\n/g, '\n '));
+ console.log(chalk.bold(`STDOUT`))
+ console.log(ctx.stdout.replace(/\n/g,'\n '))
}
if (ctx.stderr) {
- console.log(chalk.bold(`STDERR`));
- console.log(ctx.stderr.replace(/\n/g, '\n '));
+ console.log(chalk.bold(`STDERR`))
+ console.log(ctx.stderr.replace(/\n/g,'\n '))
}
if (ctx.code) {
@@ -232,9 +187,12 @@ async function run_non_interactively(files, argv) {
}
if (ctx.signal) {
- console.log(chalk.bold(`Signal:`), chalk.bold.yellow(ctx.signal));
+ console.log(
+ chalk.bold(`Signal:`),
+ chalk.bold.yellow(ctx.signal)
+ );
}
- };
+ }
if (response.compile) {
step('Compile', response.compile);
@@ -243,23 +201,17 @@ async function run_non_interactively(files, argv) {
step('Run', response.run);
}
-exports.handler = async argv => {
- const files = [...(argv.files || []), argv.file].map(file_path => {
- const buffer = fs.readFileSync(file_path);
- const encoding =
- (buffer
- .toString()
- .split('')
- .some(x => x.charCodeAt(0) >= 128) &&
- 'base64') ||
- 'utf8';
- return {
- name: path.basename(file_path),
- content: buffer.toString(encoding),
- encoding,
- };
- });
+exports.handler = async (argv) => {
+ const files = [...(argv.files || []),argv.file]
+ .map(file_path => {
+ return {
+ name: path.basename(file_path),
+ content: fs.readFileSync(file_path).toString()
+ };
+ });
- if (argv.interactive) await handle_interactive(files, argv);
+ if(argv.interactive) await handle_interactive(files, argv);
else await run_non_interactively(files, argv);
-};
+}
+
+
diff --git a/cli/index.js b/cli/index.js
index 340cdab..c0c25ee 100755
--- a/cli/index.js
+++ b/cli/index.js
@@ -6,8 +6,8 @@ const axios_instance = argv => {
argv.axios = axios.create({
baseURL: argv['piston-url'],
headers: {
- 'Content-Type': 'application/json',
- },
+ 'Content-Type': 'application/json'
+ }
});
return argv;
@@ -18,11 +18,12 @@ require('yargs')(process.argv.slice(2))
alias: ['u'],
default: 'http://127.0.0.1:2000',
desc: 'Piston API URL',
- string: true,
+ string: true
})
.middleware(axios_instance)
.scriptName('piston')
.commandDir('commands')
.demandCommand()
.help()
- .wrap(72).argv;
+ .wrap(72)
+ .argv;
diff --git a/docs/api-v2.md b/docs/api-v2.md
index b25e142..111b514 100644
--- a/docs/api-v2.md
+++ b/docs/api-v2.md
@@ -17,10 +17,10 @@ Returns a list of available languages, including the version, runtime and aliase
#### Response
-- `[].language`: Name of the language
-- `[].version`: Version of the runtime
-- `[].aliases`: List of alternative names that can be used for the language
-- `[].runtime` (_optional_): Name of the runtime used to run the langage, only provided if alternative runtimes exist for the language
+- `[].language`: Name of the language
+- `[].version`: Version of the runtime
+- `[].aliases`: List of alternative names that can be used for the language
+- `[].runtime` (_optional_): Name of the runtime used to run the language, only provided if alternative runtimes exist for the language
#### Example
@@ -55,35 +55,34 @@ Runs the given code, using the given runtime and arguments, returning the result
#### Request
-- `language`: Name or alias of a language listed in [runtimes](#runtimes)
-- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
-- `files`: An array of files which should be uploaded into the job context
-- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
-- `files[].content`: Content of file to be written
-- `files[].encoding` (_optional_): The encoding scheme used for the file content. One of `base64`, `hex` or `utf8`. Defaults to `utf8`.
-- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
-- `args` (_optional_): Arguments to pass to the program. Defaults to none
-- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
-- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
-- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
-- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
+- `language`: Name or alias of a language listed in [runtimes](#runtimes)
+- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
+- `files`: An array of files which should be uploaded into the job context
+- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
+- `files[].content`: Content of file to be written
+- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
+- `args` (_optional_): Arguments to pass to the program. Defaults to none
+- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
+- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
+- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
+- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
#### Response
-- `language`: Name (not alias) of the runtime used
-- `version`: Version of the used runtime
-- `run`: Results from the run stage
-- `run.stdout`: stdout from run stage process
-- `run.stderr`: stderr from run stage process
-- `run.output`: stdout and stderr combined in order of data from run stage process
-- `run.code`: Exit code from run process, or null if signal is not null
-- `run.signal`: Signal from run process, or null if code is not null
-- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
-- `compile.stdout`: stdout from compile stage process
-- `compile.stderr`: stderr from compile stage process
-- `compile.output`: stdout and stderr combined in order of data from compile stage process
-- `compile.code`: Exit code from compile process, or null if signal is not null
-- `compile.signal`: Signal from compile process, or null if code is not null
+- `language`: Name (not alias) of the runtime used
+- `version`: Version of the used runtime
+- `run`: Results from the run stage
+- `run.stdout`: stdout from run stage process
+- `run.stderr`: stderr from run stage process
+- `run.output`: stdout and stderr combined in order of data from run stage process
+- `run.code`: Exit code from run process, or null if signal is not null
+- `run.signal`: Signal from run process, or null if code is not null
+- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
+- `compile.stdout`: stdout from compile stage process
+- `compile.stderr`: stderr from compile stage process
+- `compile.output`: stdout and stderr combined in order of data from compile stage process
+- `compile.code`: Exit code from compile process, or null if signal is not null
+- `compile.signal`: Signal from compile process, or null if code is not null
#### Example
@@ -134,9 +133,9 @@ Returns a list of all possible packages, and whether their installation status.
#### Response
-- `[].language`: Name of the contained runtime
-- `[].language_version`: Version of the contained runtime
-- `[].installed`: Status on the package being installed
+- `[].language`: Name of the contained runtime
+- `[].language_version`: Version of the contained runtime
+- `[].installed`: Status on the package being installed
#### Example
@@ -168,13 +167,13 @@ Install the given package.
#### Request
-- `language`: Name of package from [package list](#get-apiv2packages)
-- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
+- `language`: Name of package from [package list](#get-apiv2packages)
+- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
-- `language`: Name of package installed
-- `version`: Version of package installed
+- `language`: Name of package installed
+- `version`: Version of package installed
#### Example
@@ -204,13 +203,13 @@ Uninstall the given package.
#### Request
-- `language`: Name of package from [package list](#get-apiv2packages)
-- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
+- `language`: Name of package from [package list](#get-apiv2packages)
+- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
-- `language`: Name of package uninstalled
-- `version`: Version of package uninstalled
+- `language`: Name of package uninstalled
+- `version`: Version of package uninstalled
#### Example
diff --git a/docs/configuration.md b/docs/configuration.md
index 1a6f5bd..16a5df0 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -50,15 +50,15 @@ Absolute path to piston related data, including packages and job contexts.
```yaml
key:
- - PISTON_RUNNER_UID_MIN
- - PISTON_RUNNER_UID_MAX
- - PISTON_RUNNER_GID_MIN
- - PISTON_RUNNER_GID_MAX
+ - PISTON_RUNNER_UID_MIN
+ - PISTON_RUNNER_UID_MAX
+ - PISTON_RUNNER_GID_MIN
+ - PISTON_RUNNER_GID_MAX
default:
- - 1001
- - 1500
- - 1001
- - 1500
+ - 1001
+ - 1500
+ - 1001
+ - 1500
```
UID and GID ranges to use when executing jobs.
@@ -86,11 +86,11 @@ key: PISTON_MAX_PROCESS_COUNT
default: 64
```
-Maximum number of processes allowed to to have open for a job.
+Maximum number of processess allowed to to have open for a job.
Resists against exhausting the process table, causing a full system lockup.
-## Output Max Size
+## Output Max Side
```yaml
key: PISTON_OUTPUT_MAX_SIZE
@@ -123,27 +123,12 @@ Maximum size for a singular file written to disk.
Resists against large file writes to exhaust disk space.
-## Compile/Run timeouts
-
-```yaml
-key:
- - PISTON_COMPILE_TIMEOUT
-default: 10000
-
-key:
- - PISTON_RUN_TIMEOUT
-default: 3000
-```
-
-The maximum time that is allowed to be taken by a stage in milliseconds.
-Use -1 for unlimited time.
-
## Compile/Run memory limits
```yaml
key:
- - PISTON_COMPILE_MEMORY_LIMIT
- - PISTON_RUN_MEMORY_LIMIT
+ - PISTON_COMPILE_MEMORY_LIMIT
+ - PISTON_RUN_MEMORY_LIMIT
default: -1
```
@@ -169,19 +154,3 @@ default: 64
```
Maximum number of jobs to run concurrently.
-
-## Limit overrides
-
-```yaml
-key: PISTON_LIMIT_OVERRIDES
-default: {}
-```
-
-Per-language overrides/exceptions for the each of `max_process_count`, `max_open_files`, `max_file_size`,
-`compile_memory_limit`, `run_memory_limit`, `compile_timeout`, `run_timeout`, `output_max_size`. Defined as follows:
-
-```
-PISTON_LIMIT_OVERRIDES={"c++":{"max_process_count":128}}
-```
-
-This will give `c++` a max_process_count of 128 regardless of the configuration.
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 33ce51e..53dbf05 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1 +1 @@
-mkdocs==1.2.3
\ No newline at end of file
+mkdocs==1.1.2
\ No newline at end of file
diff --git a/flake.nix b/flake.nix
index fde9cee..78b2ee1 100644
--- a/flake.nix
+++ b/flake.nix
@@ -21,7 +21,6 @@
compile? null,
packages? null,
aliases? [],
- limitOverrides? {},
tests
}: let
compileFile = if compile != null then
@@ -29,7 +28,7 @@
else null;
runFile = pkgs.writeShellScript "run" run;
metadata = {
- inherit language version runtime aliases limitOverrides;
+ inherit language version runtime aliases;
run = runFile;
compile = compileFile;
packageSupport = packages != null;
diff --git a/mkdocs.yml b/mkdocs.yml
index a6ef999..148ba91 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,15 +1,15 @@
site_name: Piston
nav:
- - Home: index.md
- - Configuration: configuration.md
- - API: api-v2.md
+ - Home: index.md
+ - Configuration: configuration.md
+ - API: api-v2.md
theme:
- name: readthedocs
- highlightjs: true
- hljs_languages:
- - yaml
- - json
+ name: readthedocs
+ highlightjs: true
+ hljs_languages:
+ - yaml
+ - json
markdown_extensions:
- - admonition
+ - admonition
diff --git a/package-lock.json b/package-lock.json
deleted file mode 100644
index 5c51a1d..0000000
--- a/package-lock.json
+++ /dev/null
@@ -1,32 +0,0 @@
-{
- "name": "piston",
- "lockfileVersion": 2,
- "requires": true,
- "packages": {
- "": {
- "devDependencies": {
- "prettier": "2.4.1"
- }
- },
- "node_modules/prettier": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
- "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
- "dev": true,
- "bin": {
- "prettier": "bin-prettier.js"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- }
- },
- "dependencies": {
- "prettier": {
- "version": "2.4.1",
- "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
- "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
- "dev": true
- }
- }
-}
diff --git a/package.json b/package.json
deleted file mode 100644
index 8f07606..0000000
--- a/package.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "devDependencies": {
- "prettier": "2.4.1"
- }
-}
diff --git a/packages/befunge93/0.2.0/build.sh b/packages/befunge93/0.2.0/build.sh
deleted file mode 100644
index de6bdbf..0000000
--- a/packages/befunge93/0.2.0/build.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/usr/bin/env bash
-
-# source python 2.7
-source ../../python/2.7.18/build.sh
-
-# clone befunge repo
-git clone -q 'https://github.com/programble/befungee' befunge93
-
-# go inside befunge93 so we can checkout
-cd befunge93
-
-# checkout the version 0.2.0
-git checkout tag/v0.2.0
-
-cd ..
\ No newline at end of file
diff --git a/packages/befunge93/0.2.0/metadata.json b/packages/befunge93/0.2.0/metadata.json
deleted file mode 100644
index 16c2643..0000000
--- a/packages/befunge93/0.2.0/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "befunge93",
- "version": "0.2.0",
- "aliases": ["b93"]
-}
diff --git a/packages/befunge93/0.2.0/run b/packages/befunge93/0.2.0/run
deleted file mode 100644
index 3d95114..0000000
--- a/packages/befunge93/0.2.0/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-# run the befunge program with the file name
-python2.7 "$BEFUNGE93_PATH"/befungee.py "$1"
\ No newline at end of file
diff --git a/packages/befunge93/0.2.0/test.b93 b/packages/befunge93/0.2.0/test.b93
deleted file mode 100644
index 4cb5175..0000000
--- a/packages/befunge93/0.2.0/test.b93
+++ /dev/null
@@ -1 +0,0 @@
-64+"KO">:#,_@
\ No newline at end of file
diff --git a/packages/brachylog/1.0.0/build.sh b/packages/brachylog/1.0.0/build.sh
deleted file mode 100644
index 8f35ef2..0000000
--- a/packages/brachylog/1.0.0/build.sh
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env bash
-
-# build prolog 8.2.4 as dependency
-source ../../prolog/8.2.4/build.sh
-
-# curl brachylog 1.0.0
-curl -L "https://github.com/JCumin/Brachylog/archive/refs/tags/v1.0-ascii.tar.gz" -o brachylog.tar.gz
-tar xzf brachylog.tar.gz --strip-components=1
-rm brachylog.tar.gz
-
-# move swi prolog to working directory
-cp bin/swipl swipl
-
-# give execution permission to swipl
-chmod +x swipl
-
-# add some code the branchylog.pl so we don't have to escape backslashes while using the interactive mode
-echo '
-
-:-feature(argv, [Code, Stdin]), run_from_atom(Code, Stdin, _), halt.' >> prolog_parser/brachylog.pl
\ No newline at end of file
diff --git a/packages/brachylog/1.0.0/metadata.json b/packages/brachylog/1.0.0/metadata.json
deleted file mode 100644
index d5df839..0000000
--- a/packages/brachylog/1.0.0/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "brachylog",
- "version": "1.0.0",
- "aliases": []
-}
diff --git a/packages/brachylog/1.0.0/run b/packages/brachylog/1.0.0/run
deleted file mode 100644
index 16ce3cd..0000000
--- a/packages/brachylog/1.0.0/run
+++ /dev/null
@@ -1,19 +0,0 @@
-#!/usr/bin/env bash
-
-# save the file for later
-file="$1"
-
-# remove the file from $@
-shift
-
-# save stdin as $@ joined by newlines
-stdin=`printf "%s\n" "$@"`
-
-# save code as the contents of $file
-code=`cat "$file"`
-
-# go to the directory where brachylog.pl is so the imports work
-cd "$BRACHYLOG_PATH"/prolog_parser
-
-# run swi prolog with code and stdin
-swipl -f brachylog.pl "$code" "$stdin"
\ No newline at end of file
diff --git a/packages/brachylog/1.0.0/test.brachylog b/packages/brachylog/1.0.0/test.brachylog
deleted file mode 100644
index d90d668..0000000
--- a/packages/brachylog/1.0.0/test.brachylog
+++ /dev/null
@@ -1 +0,0 @@
-"OK"w
\ No newline at end of file
diff --git a/packages/cjam/0.6.5/metadata.json b/packages/cjam/0.6.5/metadata.json
index bd25bde..af510fd 100644
--- a/packages/cjam/0.6.5/metadata.json
+++ b/packages/cjam/0.6.5/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "cjam",
- "version": "0.6.5",
- "aliases": []
+ "language": "cjam",
+ "version": "0.6.5",
+ "aliases": []
}
diff --git a/packages/crystal/0.36.1/build.sh b/packages/crystal/0.36.1/build.sh
new file mode 100755
index 0000000..ba10f3f
--- /dev/null
+++ b/packages/crystal/0.36.1/build.sh
@@ -0,0 +1,7 @@
+#!/bin/bash
+
+PREFIX=$(realpath $(dirname $0))
+
+curl -L "https://github.com/crystal-lang/crystal/releases/download/0.36.1/crystal-0.36.1-1-linux-x86_64.tar.gz" -o crystal.tar.gz
+tar xzf crystal.tar.gz --strip-components=1
+rm crystal.tar.gz
diff --git a/packages/crystal/0.36.1/compile b/packages/crystal/0.36.1/compile
new file mode 100644
index 0000000..afbad6c
--- /dev/null
+++ b/packages/crystal/0.36.1/compile
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+# Compile crystal files into out file
+crystal build "$@" -o out --no-color && \
+chmod +x out
diff --git a/packages/crystal/0.36.1/environment b/packages/crystal/0.36.1/environment
new file mode 100644
index 0000000..bd0ff98
--- /dev/null
+++ b/packages/crystal/0.36.1/environment
@@ -0,0 +1 @@
+export PATH=$PWD/bin:$PATH
\ No newline at end of file
diff --git a/packages/crystal/0.36.1/metadata.json b/packages/crystal/0.36.1/metadata.json
new file mode 100644
index 0000000..ee995eb
--- /dev/null
+++ b/packages/crystal/0.36.1/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "crystal",
+ "version": "0.36.1",
+ "aliases": ["crystal", "cr"]
+}
diff --git a/packages/crystal/0.36.1/run b/packages/crystal/0.36.1/run
new file mode 100644
index 0000000..6955ba9
--- /dev/null
+++ b/packages/crystal/0.36.1/run
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+shift # Filename is only used to compile
+./out "$@"
diff --git a/packages/crystal/0.36.1/test.cr b/packages/crystal/0.36.1/test.cr
new file mode 100644
index 0000000..3fca0dd
--- /dev/null
+++ b/packages/crystal/0.36.1/test.cr
@@ -0,0 +1 @@
+puts("OK")
\ No newline at end of file
diff --git a/packages/dart/2.12.1/build.sh b/packages/dart/2.12.1/build.sh
new file mode 100755
index 0000000..d202d7f
--- /dev/null
+++ b/packages/dart/2.12.1/build.sh
@@ -0,0 +1,11 @@
+#!/usr/bin/env bash
+
+curl -L "https://storage.googleapis.com/dart-archive/channels/stable/release/2.12.1/sdk/dartsdk-linux-x64-release.zip" -o dart.zip
+
+unzip dart.zip
+rm dart.zip
+
+cp -r dart-sdk/* .
+rm -rf dart-sdk
+
+chmod -R +rx bin
diff --git a/packages/brachylog/1.0.0/environment b/packages/dart/2.12.1/environment
similarity index 80%
rename from packages/brachylog/1.0.0/environment
rename to packages/dart/2.12.1/environment
index 7012c44..780b668 100644
--- a/packages/brachylog/1.0.0/environment
+++ b/packages/dart/2.12.1/environment
@@ -2,4 +2,3 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
-export BRACHYLOG_PATH=$PWD
\ No newline at end of file
diff --git a/packages/dart/2.12.1/metadata.json b/packages/dart/2.12.1/metadata.json
new file mode 100644
index 0000000..cec3d77
--- /dev/null
+++ b/packages/dart/2.12.1/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "dart",
+ "version": "2.12.1",
+ "aliases": []
+}
diff --git a/packages/dart/2.12.1/run b/packages/dart/2.12.1/run
new file mode 100644
index 0000000..aae792a
--- /dev/null
+++ b/packages/dart/2.12.1/run
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+# Put instructions to run the runtime
+dart run "$@"
diff --git a/packages/dart/2.12.1/test.dart b/packages/dart/2.12.1/test.dart
new file mode 100644
index 0000000..27e87b2
--- /dev/null
+++ b/packages/dart/2.12.1/test.dart
@@ -0,0 +1,3 @@
+void main() {
+ print('OK');
+}
\ No newline at end of file
diff --git a/packages/dash/0.5.11/build.sh b/packages/dash/0.5.11/build.sh
new file mode 100755
index 0000000..202d5aa
--- /dev/null
+++ b/packages/dash/0.5.11/build.sh
@@ -0,0 +1,19 @@
+#!/usr/bin/env bash
+
+# Put instructions to build your package in here
+PREFIX=$(realpath $(dirname $0))
+
+mkdir -p build
+
+cd build
+
+curl "http://gondor.apana.org.au/~herbert/dash/files/dash-0.5.11.tar.gz" -o dash.tar.gz
+tar xzf dash.tar.gz --strip-components=1
+
+./configure --prefix "$PREFIX" &&
+make -j$(nproc) &&
+make install -j$(nproc)
+
+cd ../
+
+rm -rf build
diff --git a/packages/befunge93/0.2.0/environment b/packages/dash/0.5.11/environment
similarity index 74%
rename from packages/befunge93/0.2.0/environment
rename to packages/dash/0.5.11/environment
index 43f2db1..780b668 100644
--- a/packages/befunge93/0.2.0/environment
+++ b/packages/dash/0.5.11/environment
@@ -2,4 +2,3 @@
# Put 'export' statements here for environment variables
export PATH=$PWD/bin:$PATH
-export BEFUNGE93_PATH=$PWD/befunge93
\ No newline at end of file
diff --git a/packages/dash/0.5.11/metadata.json b/packages/dash/0.5.11/metadata.json
new file mode 100644
index 0000000..a2e5be5
--- /dev/null
+++ b/packages/dash/0.5.11/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "dash",
+ "version": "0.5.11",
+ "aliases": ["dash"]
+}
diff --git a/packages/dash/0.5.11/run b/packages/dash/0.5.11/run
new file mode 100644
index 0000000..6a8a1da
--- /dev/null
+++ b/packages/dash/0.5.11/run
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+# Put instructions to run the runtime
+dash "$@"
diff --git a/packages/dash/0.5.11/test.dash b/packages/dash/0.5.11/test.dash
new file mode 100644
index 0000000..727518f
--- /dev/null
+++ b/packages/dash/0.5.11/test.dash
@@ -0,0 +1 @@
+echo "OK"
\ No newline at end of file
diff --git a/packages/deno/1.7.5/build.sh b/packages/deno/1.7.5/build.sh
new file mode 100755
index 0000000..165d3b6
--- /dev/null
+++ b/packages/deno/1.7.5/build.sh
@@ -0,0 +1,5 @@
+curl -L https://github.com/denoland/deno/releases/download/v1.7.5/deno-x86_64-unknown-linux-gnu.zip --output deno.zip
+unzip -o deno.zip
+rm deno.zip
+
+chmod +x deno
diff --git a/packages/deno/1.7.5/environment b/packages/deno/1.7.5/environment
new file mode 100644
index 0000000..98fd770
--- /dev/null
+++ b/packages/deno/1.7.5/environment
@@ -0,0 +1 @@
+export PATH=$PWD:$PATH
\ No newline at end of file
diff --git a/packages/deno/1.7.5/metadata.json b/packages/deno/1.7.5/metadata.json
new file mode 100644
index 0000000..d30608b
--- /dev/null
+++ b/packages/deno/1.7.5/metadata.json
@@ -0,0 +1,14 @@
+{
+ "language": "deno",
+ "version": "1.7.5",
+ "provides": [
+ {
+ "language": "typescript",
+ "aliases": ["deno-ts","deno"]
+ },
+ {
+ "language": "javascript",
+ "aliases": ["deno-js"]
+ }
+ ]
+}
diff --git a/packages/deno/1.7.5/run b/packages/deno/1.7.5/run
new file mode 100644
index 0000000..d1b196f
--- /dev/null
+++ b/packages/deno/1.7.5/run
@@ -0,0 +1,2 @@
+#!/bin/bash
+DENO_DIR=$PWD deno run "$@"
\ No newline at end of file
diff --git a/packages/deno/1.7.5/test.deno.ts b/packages/deno/1.7.5/test.deno.ts
new file mode 100644
index 0000000..56ed4a0
--- /dev/null
+++ b/packages/deno/1.7.5/test.deno.ts
@@ -0,0 +1 @@
+console.log("OK")
\ No newline at end of file
diff --git a/packages/dotnet/5.0.201/build.sh b/packages/dotnet/5.0.201/build.sh
old mode 100755
new mode 100644
index 6318b07..c685668
--- a/packages/dotnet/5.0.201/build.sh
+++ b/packages/dotnet/5.0.201/build.sh
@@ -7,10 +7,8 @@ rm dotnet.tar.gz
# Cache nuget packages
export DOTNET_CLI_HOME=$PWD
./dotnet new console -o cache_application
-./dotnet new console -lang F# -o fs_cache_application
-./dotnet new console -lang VB -o vb_cache_application
# This calls a restore on the global-packages index ($DOTNET_CLI_HOME/.nuget/packages)
# If we want to allow more packages, we could add them to this cache_application
-rm -rf cache_application fs_cache_application vb_cache_application
-# Get rid of it, we don't actually need the application - just the restore
+rm -rf cache_application
+# Get rid of it, we don't actually need the application - just the restore
\ No newline at end of file
diff --git a/packages/dotnet/5.0.201/compile b/packages/dotnet/5.0.201/compile
index 1c34213..8bfcc27 100644
--- a/packages/dotnet/5.0.201/compile
+++ b/packages/dotnet/5.0.201/compile
@@ -1,36 +1,15 @@
#!/usr/bin/env bash
-[ "${PISTON_LANGUAGE}" == "fsi" ] && exit 0
-
export DOTNET_CLI_HOME=$PWD
export HOME=$PWD
+rename 's/$/\.cs/' "$@" # Add .cs extension
+
dotnet build --help > /dev/null # Shut the thing up
-case "${PISTON_LANGUAGE}" in
- basic.net)
- rename 's/$/\.vb/' "$@" # Add .vb extension
- dotnet new console -lang VB -o . --no-restore
- rm Program.vb
- ;;
- fsharp.net)
- first_file=$1
- shift
- rename 's/$/\.fs/' "$@" # Add .fs extension
- dotnet new console -lang F# -o . --no-restore
- mv $first_file Program.fs # For some reason F#.net doesn't work unless the file name is Program.fs
- ;;
- csharp.net)
- rename 's/$/\.cs/' "$@" # Add .cs extension
- dotnet new console -o . --no-restore
- rm Program.cs
- ;;
- *)
- echo "How did you get here? (${PISTON_LANGUAGE})"
- exit 1
- ;;
+dotnet new console -o . --no-restore
+rm Program.cs
-esac
dotnet restore --source $DOTNET_ROOT/.nuget/packages
-dotnet build --no-restore
+dotnet build --no-restore
\ No newline at end of file
diff --git a/packages/dotnet/5.0.201/environment b/packages/dotnet/5.0.201/environment
index 468463d..596d56e 100644
--- a/packages/dotnet/5.0.201/environment
+++ b/packages/dotnet/5.0.201/environment
@@ -2,5 +2,4 @@
# Put 'export' statements here for environment variables
export DOTNET_ROOT=$PWD
-export PATH=$DOTNET_ROOT:$PATH
-export FSI_PATH=$(find $(pwd) -name fsi.dll)
+export PATH=$DOTNET_ROOT:$PATH
\ No newline at end of file
diff --git a/packages/dotnet/5.0.201/metadata.json b/packages/dotnet/5.0.201/metadata.json
index 7c73c58..619265d 100644
--- a/packages/dotnet/5.0.201/metadata.json
+++ b/packages/dotnet/5.0.201/metadata.json
@@ -1,66 +1,5 @@
{
"language": "dotnet",
"version": "5.0.201",
- "provides": [
- {
- "language": "basic.net",
- "aliases": [
- "basic",
- "visual-basic",
- "visual-basic.net",
- "vb",
- "vb.net",
- "vb-dotnet",
- "dotnet-vb",
- "basic-dotnet",
- "dotnet-basic"
- ],
- "limit_overrides": { "max_process_count": 128 }
- },
- {
- "language": "fsharp.net",
- "aliases": [
- "fsharp",
- "fs",
- "f#",
- "fs.net",
- "f#.net",
- "fsharp-dotnet",
- "fs-dotnet",
- "f#-dotnet",
- "dotnet-fsharp",
- "dotnet-fs",
- "dotnet-fs"
- ],
- "limit_overrides": { "max_process_count": 128 }
- },
- {
- "language": "csharp.net",
- "aliases": [
- "csharp",
- "c#",
- "cs",
- "c#.net",
- "cs.net",
- "c#-dotnet",
- "cs-dotnet",
- "csharp-dotnet",
- "dotnet-c#",
- "dotnet-cs",
- "dotnet-csharp"
- ],
- "limit_overrides": { "max_process_count": 128 }
- },
- {
- "language": "fsi",
- "aliases": [
- "fsx",
- "fsharp-interactive",
- "f#-interactive",
- "dotnet-fsi",
- "fsi-dotnet",
- "fsi.net"
- ]
- }
- ]
+ "aliases": ["cs", "csharp"]
}
diff --git a/packages/dotnet/5.0.201/run b/packages/dotnet/5.0.201/run
index 6b5c995..774a08a 100644
--- a/packages/dotnet/5.0.201/run
+++ b/packages/dotnet/5.0.201/run
@@ -3,23 +3,5 @@
# Put instructions to run the runtime
export DOTNET_CLI_HOME=$PWD
-case "${PISTON_LANGUAGE}" in
- basic.net)
- ;&
- fsharp.net)
- ;&
- csharp.net)
- shift
- dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
- ;;
- fsi)
- FILENAME=$1
- rename 's/$/\.fsx/' $FILENAME # Add .fsx extension
- shift
- dotnet $FSI_PATH $FILENAME.fsx "$@"
- ;;
- *)
- echo "How did you get here? (${PISTON_LANGUAGE})"
- exit 1
- ;;
-esac
+shift
+dotnet bin/Debug/net5.0/$(basename $(realpath .)).dll "$@"
\ No newline at end of file
diff --git a/packages/dotnet/5.0.201/test.fs b/packages/dotnet/5.0.201/test.fs
deleted file mode 100644
index 006ac10..0000000
--- a/packages/dotnet/5.0.201/test.fs
+++ /dev/null
@@ -1,6 +0,0 @@
-open System
-
-[]
-let main argv =
- printfn "OK"
- 0
diff --git a/packages/dotnet/5.0.201/test.fsx b/packages/dotnet/5.0.201/test.fsx
deleted file mode 100644
index 33d166f..0000000
--- a/packages/dotnet/5.0.201/test.fsx
+++ /dev/null
@@ -1 +0,0 @@
-printfn "OK"
diff --git a/packages/dotnet/5.0.201/test.vb b/packages/dotnet/5.0.201/test.vb
deleted file mode 100644
index 291042e..0000000
--- a/packages/dotnet/5.0.201/test.vb
+++ /dev/null
@@ -1,9 +0,0 @@
-Imports System
-
-Module Module1
-
- Sub Main()
- Console.WriteLine("OK")
- End Sub
-
-End Module
diff --git a/packages/dragon/1.9.8/metadata.json b/packages/dragon/1.9.8/metadata.json
index 3fbc015..86cfc4c 100644
--- a/packages/dragon/1.9.8/metadata.json
+++ b/packages/dragon/1.9.8/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "dragon",
- "version": "1.9.8",
- "aliases": []
+ "language": "dragon",
+ "version": "1.9.8",
+ "aliases": []
}
diff --git a/packages/elixir/1.11.3/build.sh b/packages/elixir/1.11.3/build.sh
new file mode 100755
index 0000000..9328d8c
--- /dev/null
+++ b/packages/elixir/1.11.3/build.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+source ../../erlang/23.0.0/build.sh
+
+export PATH=$PWD/bin:$PATH
+
+PREFIX=$(realpath $(dirname $0))
+
+mkdir -p build
+
+cd build
+
+curl -L "https://github.com/elixir-lang/elixir/archive/v1.11.3.tar.gz" -o elixir.tar.gz
+tar xzf elixir.tar.gz --strip-components=1
+rm elixir.tar.gz
+
+./configure --prefix "$PREFIX"
+make -j$(nproc)
+
+cd ..
+
+cp -r build/bin .
+cp -r build/lib .
+
+rm -rf build
diff --git a/packages/elixir/1.11.3/environment b/packages/elixir/1.11.3/environment
new file mode 100644
index 0000000..ea24603
--- /dev/null
+++ b/packages/elixir/1.11.3/environment
@@ -0,0 +1,5 @@
+#!/usr/bin/env bash
+
+# Put 'export' statements here for environment variables
+export LC_ALL=en_US.UTF-8
+export PATH=$PWD/bin:$PATH
diff --git a/packages/elixir/1.11.3/metadata.json b/packages/elixir/1.11.3/metadata.json
new file mode 100644
index 0000000..9c1a2fc
--- /dev/null
+++ b/packages/elixir/1.11.3/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "elixir",
+ "version": "1.11.3",
+ "aliases": ["elixir", "exs"]
+}
diff --git a/packages/elixir/1.11.3/run b/packages/elixir/1.11.3/run
new file mode 100644
index 0000000..9b3ba9c
--- /dev/null
+++ b/packages/elixir/1.11.3/run
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# Put instructions to run the runtime
+elixir "$@"
\ No newline at end of file
diff --git a/packages/elixir/1.11.3/test.exs b/packages/elixir/1.11.3/test.exs
new file mode 100644
index 0000000..bc837c6
--- /dev/null
+++ b/packages/elixir/1.11.3/test.exs
@@ -0,0 +1 @@
+IO.puts("OK")
\ No newline at end of file
diff --git a/packages/erlang/23.0.0/build.sh b/packages/erlang/23.0.0/build.sh
new file mode 100755
index 0000000..f282bc2
--- /dev/null
+++ b/packages/erlang/23.0.0/build.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+PREFIX=$(realpath $(dirname $0))
+
+mkdir -p build
+
+cd build
+
+curl "http://erlang.org/download/otp_src_23.0.tar.gz" -o erlang.tar.gz
+tar xzf erlang.tar.gz --strip-components=1
+rm erlang.tar.gz
+
+export ERL_TOP=$(pwd)
+./configure --prefix "$PREFIX"
+make -j$(nproc)
+make install -j$(nproc)
+
+cd ..
+
+rm -rf build
+
diff --git a/packages/erlang/23.0.0/environment b/packages/erlang/23.0.0/environment
new file mode 100644
index 0000000..780b668
--- /dev/null
+++ b/packages/erlang/23.0.0/environment
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+# Put 'export' statements here for environment variables
+export PATH=$PWD/bin:$PATH
diff --git a/packages/erlang/23.0.0/metadata.json b/packages/erlang/23.0.0/metadata.json
new file mode 100644
index 0000000..e82b4b3
--- /dev/null
+++ b/packages/erlang/23.0.0/metadata.json
@@ -0,0 +1,5 @@
+{
+ "language": "erlang",
+ "version": "23.0.0",
+ "aliases": ["erlang", "erl", "escript"]
+}
diff --git a/packages/erlang/23.0.0/run b/packages/erlang/23.0.0/run
new file mode 100644
index 0000000..135d9f4
--- /dev/null
+++ b/packages/erlang/23.0.0/run
@@ -0,0 +1,4 @@
+#!/bin/bash
+
+# Put instructions to run the runtime
+escript "$@"
\ No newline at end of file
diff --git a/packages/erlang/23.0.0/test.erl b/packages/erlang/23.0.0/test.erl
new file mode 100644
index 0000000..d898d2c
--- /dev/null
+++ b/packages/erlang/23.0.0/test.erl
@@ -0,0 +1,3 @@
+
+main(_) ->
+ io:format("OK~n").
\ No newline at end of file
diff --git a/packages/forte/1.0.0/metadata.json b/packages/forte/1.0.0/metadata.json
index f7f4137..fd4ec12 100644
--- a/packages/forte/1.0.0/metadata.json
+++ b/packages/forte/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "forte",
- "version": "1.0.0",
- "aliases": ["forter"]
+ "language": "forte",
+ "version": "1.0.0",
+ "aliases": ["forter"]
}
diff --git a/packages/gawk/5.1.0/build.sh b/packages/gawk/5.1.0/build.sh
new file mode 100644
index 0000000..81fb998
--- /dev/null
+++ b/packages/gawk/5.1.0/build.sh
@@ -0,0 +1,21 @@
+#!/usr/bin/env bash
+
+# Put instructions to build your package in here
+PREFIX=$(realpath $(dirname $0))
+
+mkdir -p build
+
+cd build
+
+curl "https://ftp.gnu.org/gnu/gawk/gawk-5.1.0.tar.gz" -o gawk.tar.gz
+
+tar xzf gawk.tar.gz --strip-components=1
+
+# === autoconf based ===
+./configure --prefix "$PREFIX"
+
+make -j$(nproc)
+make install -j$(nproc)
+cd ../
+rm -rf build
+
diff --git a/packages/gawk/5.1.0/environment b/packages/gawk/5.1.0/environment
new file mode 100644
index 0000000..780b668
--- /dev/null
+++ b/packages/gawk/5.1.0/environment
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+# Put 'export' statements here for environment variables
+export PATH=$PWD/bin:$PATH
diff --git a/packages/gawk/5.1.0/metadata.json b/packages/gawk/5.1.0/metadata.json
new file mode 100644
index 0000000..1ae8c16
--- /dev/null
+++ b/packages/gawk/5.1.0/metadata.json
@@ -0,0 +1,10 @@
+{
+ "language": "gawk",
+ "version": "5.1.0",
+ "provides": [
+ {
+ "language": "awk",
+ "aliases": ["gawk"]
+ }
+ ]
+}
diff --git a/packages/gawk/5.1.0/run b/packages/gawk/5.1.0/run
new file mode 100644
index 0000000..5134ddf
--- /dev/null
+++ b/packages/gawk/5.1.0/run
@@ -0,0 +1,4 @@
+#!/usr/bin/env bash
+
+# Put instructions to run the runtime
+gawk-5.1.0 -f "$@"
diff --git a/packages/gawk/5.1.0/test.awk b/packages/gawk/5.1.0/test.awk
new file mode 100644
index 0000000..25e1bd3
--- /dev/null
+++ b/packages/gawk/5.1.0/test.awk
@@ -0,0 +1 @@
+{print "OK"}
\ No newline at end of file
diff --git a/packages/gcc/10.2.0/metadata.json b/packages/gcc/10.2.0/metadata.json
index 367de7c..f969bf5 100644
--- a/packages/gcc/10.2.0/metadata.json
+++ b/packages/gcc/10.2.0/metadata.json
@@ -3,7 +3,7 @@
"version": "10.2.0",
"provides": [
{
- "language": "c",
+ "language":"c",
"aliases": ["gcc"]
},
{
diff --git a/packages/golfscript/1.0.0/metadata.json b/packages/golfscript/1.0.0/metadata.json
index cb4f356..4ef3a62 100644
--- a/packages/golfscript/1.0.0/metadata.json
+++ b/packages/golfscript/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "golfscript",
- "version": "1.0.0",
- "aliases": ["golfscript"]
+ "language": "golfscript",
+ "version": "1.0.0",
+ "aliases": ["golfscript"]
}
diff --git a/packages/groovy/3.0.7/metadata.json b/packages/groovy/3.0.7/metadata.json
index 34ab93d..b790007 100644
--- a/packages/groovy/3.0.7/metadata.json
+++ b/packages/groovy/3.0.7/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "groovy",
- "version": "3.0.7",
- "aliases": ["groovy", "gvy"]
+ "language": "groovy",
+ "version": "3.0.7",
+ "aliases": ["groovy", "gvy"]
}
diff --git a/packages/husk/1.0.0/build.sh b/packages/husk/1.0.0/build.sh
deleted file mode 100644
index e2ee19c..0000000
--- a/packages/husk/1.0.0/build.sh
+++ /dev/null
@@ -1,14 +0,0 @@
-#!/usr/bin/env bash
-
-cp ../../haskell/9.0.1/build.sh ./haskell-build.sh
-sed -Ei 's/9\.0\.1/8\.10\.7/g' ./haskell-build.sh
-source ./haskell-build.sh
-
-# compile Husk from source
-git clone -q "https://github.com/barbuz/husk.git"
-cd husk
-../bin/ghc -O2 Husk
-
-# cleanup
-cd ..
-rm -f haskell-build.sh
\ No newline at end of file
diff --git a/packages/husk/1.0.0/environment b/packages/husk/1.0.0/environment
deleted file mode 100644
index 8b8421d..0000000
--- a/packages/husk/1.0.0/environment
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-# haskell and husk path
-export PATH=$PWD/bin:$PATH
-export HUSK_PATH=$PWD/husk
-export LANG=en_US.UTF8
diff --git a/packages/husk/1.0.0/metadata.json b/packages/husk/1.0.0/metadata.json
deleted file mode 100644
index 69c5b7e..0000000
--- a/packages/husk/1.0.0/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "husk",
- "version": "1.0.0",
- "aliases": []
-}
diff --git a/packages/husk/1.0.0/run b/packages/husk/1.0.0/run
deleted file mode 100644
index 1fa65a0..0000000
--- a/packages/husk/1.0.0/run
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-# Store the current path because we'll need it to run the program file
-PROGRAM_PATH=$PWD
-
-# For now, Husk can only be run within the folder that has the imported modules
-cd $HUSK_PATH
-
-# Run Husk from file in unicode format with the given args
-./Husk -uf "${PROGRAM_PATH}/${@}"
\ No newline at end of file
diff --git a/packages/husk/1.0.0/test.husk b/packages/husk/1.0.0/test.husk
deleted file mode 100644
index d096585..0000000
--- a/packages/husk/1.0.0/test.husk
+++ /dev/null
@@ -1 +0,0 @@
-"OK
\ No newline at end of file
diff --git a/packages/japt/2.0.0/metadata.json b/packages/japt/2.0.0/metadata.json
index ef0ff8d..7a3e5aa 100644
--- a/packages/japt/2.0.0/metadata.json
+++ b/packages/japt/2.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "japt",
- "version": "2.0.0",
- "aliases": ["japt"]
-}
+ "language": "japt",
+ "version": "2.0.0",
+ "aliases": ["japt"]
+}
\ No newline at end of file
diff --git a/packages/llvm_ir/12.0.1/build.sh b/packages/llvm_ir/12.0.1/build.sh
deleted file mode 100755
index 7afd41a..0000000
--- a/packages/llvm_ir/12.0.1/build.sh
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-curl -L "https://github.com/llvm/llvm-project/releases/download/llvmorg-12.0.1/clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-16.04.tar.xz" -o llvm-ir.tar.xz
-
-tar xf llvm-ir.tar.xz clang+llvm-12.0.1-x86_64-linux-gnu-ubuntu-/bin --strip-components=1
-
-rm llvm-ir.tar.xz
diff --git a/packages/llvm_ir/12.0.1/compile b/packages/llvm_ir/12.0.1/compile
deleted file mode 100755
index 082fb5c..0000000
--- a/packages/llvm_ir/12.0.1/compile
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-llc "$@" -o binary.s
-clang binary.s -o binary
diff --git a/packages/llvm_ir/12.0.1/environment b/packages/llvm_ir/12.0.1/environment
deleted file mode 100644
index 85a25b6..0000000
--- a/packages/llvm_ir/12.0.1/environment
+++ /dev/null
@@ -1,2 +0,0 @@
-#!/usr/bin/env bash
-export PATH=$PWD/bin:$PATH
diff --git a/packages/llvm_ir/12.0.1/metadata.json b/packages/llvm_ir/12.0.1/metadata.json
deleted file mode 100644
index 50dfbbc..0000000
--- a/packages/llvm_ir/12.0.1/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "llvm_ir",
- "version": "12.0.1",
- "aliases": ["llvm", "llvm-ir", "ll"]
-}
diff --git a/packages/llvm_ir/12.0.1/run b/packages/llvm_ir/12.0.1/run
deleted file mode 100644
index 94b3b0b..0000000
--- a/packages/llvm_ir/12.0.1/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-shift
-binary "$@"
diff --git a/packages/llvm_ir/12.0.1/test.ll b/packages/llvm_ir/12.0.1/test.ll
deleted file mode 100644
index be7d5d3..0000000
--- a/packages/llvm_ir/12.0.1/test.ll
+++ /dev/null
@@ -1,10 +0,0 @@
-@.str = private unnamed_addr constant [2 x i8] c"OK"
-
-declare i32 @puts(i8* nocapture) nounwind
-
-define i32 @main() {
- %cast210 = getelementptr [2 x i8],[2 x i8]* @.str, i64 0, i64 0
-
- call i32 @puts(i8* %cast210)
- ret i32 0
-}
diff --git a/packages/mono/6.12.0/compile b/packages/mono/6.12.0/compile
index 5246bc2..e3ae230 100644
--- a/packages/mono/6.12.0/compile
+++ b/packages/mono/6.12.0/compile
@@ -1,13 +1,20 @@
#!/bin/bash
+check_errors () {
+ grep -q 'error [A-Z]\+[0-9]\+:' check.txt && cat check.txt 1>&2 || cat check.txt
+ rm check.txt
+}
+
case "${PISTON_LANGUAGE}" in
csharp)
rename 's/$/\.cs/' "$@" # Add .cs extension
- csc -out:out *.cs
+ csc -out:out *.cs > check.txt
+ check_errors
;;
basic)
rename 's/$/\.vb/' "$@" # Add .vb extension
- vbnc -out:out *.vb
+ vbnc -out:out *.vb > check.txt
+ check_errors
;;
*)
echo "How did you get here? (${PISTON_LANGUAGE})"
diff --git a/packages/mono/6.12.0/metadata.json b/packages/mono/6.12.0/metadata.json
index 3f483a4..4d09ae7 100644
--- a/packages/mono/6.12.0/metadata.json
+++ b/packages/mono/6.12.0/metadata.json
@@ -8,13 +8,7 @@
},
{
"language": "basic",
- "aliases": [
- "vb",
- "mono-vb",
- "mono-basic",
- "visual-basic",
- "visual basic"
- ]
+ "aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"]
}
]
}
diff --git a/packages/ocaml/4.12.0/metadata.json b/packages/ocaml/4.12.0/metadata.json
index 6c2f733..ddbfb89 100644
--- a/packages/ocaml/4.12.0/metadata.json
+++ b/packages/ocaml/4.12.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "ocaml",
- "version": "4.12.0",
- "aliases": ["ocaml", "ml"]
+ "language": "ocaml",
+ "version": "4.12.0",
+ "aliases": ["ocaml", "ml"]
}
diff --git a/packages/octave/6.2.0/metadata.json b/packages/octave/6.2.0/metadata.json
index 0b209ce..ab9dbb1 100644
--- a/packages/octave/6.2.0/metadata.json
+++ b/packages/octave/6.2.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "octave",
- "version": "6.2.0",
- "aliases": ["matlab", "m"]
+ "language": "octave",
+ "version": "6.2.0",
+ "aliases": ["matlab", "m"]
}
diff --git a/packages/pyth/1.0.0/metadata.json b/packages/pyth/1.0.0/metadata.json
index e9bbfe9..bcddb7a 100644
--- a/packages/pyth/1.0.0/metadata.json
+++ b/packages/pyth/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "pyth",
- "version": "1.0.0",
- "aliases": ["pyth"]
+ "language": "pyth",
+ "version": "1.0.0",
+ "aliases": ["pyth"]
}
diff --git a/packages/racket/8.3.0/build.sh b/packages/racket/8.3.0/build.sh
deleted file mode 100644
index 187aef4..0000000
--- a/packages/racket/8.3.0/build.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/env bash
-
-# curl racket 8.3 linux installation shell file
-curl -L 'https://download.racket-lang.org/installers/8.3/racket-8.3-x86_64-linux-cs.sh' -o racket.sh
-
-# provide settings "no" "4" and "" to racket.sh
-echo "no
-4
-" | sh racket.sh
-
diff --git a/packages/racket/8.3.0/environment b/packages/racket/8.3.0/environment
deleted file mode 100644
index 1a30eef..0000000
--- a/packages/racket/8.3.0/environment
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/bin/bash
-
-# Path to racket binary
-export PATH=$PWD/bin:$PATH
-export RACKET_PATH=$PWD/racket
\ No newline at end of file
diff --git a/packages/racket/8.3.0/metadata.json b/packages/racket/8.3.0/metadata.json
deleted file mode 100644
index 14b902c..0000000
--- a/packages/racket/8.3.0/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "racket",
- "version": "8.3.0",
- "aliases": ["rkt"]
-}
diff --git a/packages/racket/8.3.0/run b/packages/racket/8.3.0/run
deleted file mode 100644
index 227bfaa..0000000
--- a/packages/racket/8.3.0/run
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-"$RACKET_PATH"/bin/racket "$@"
\ No newline at end of file
diff --git a/packages/racket/8.3.0/test.rkt b/packages/racket/8.3.0/test.rkt
deleted file mode 100644
index 7e0825c..0000000
--- a/packages/racket/8.3.0/test.rkt
+++ /dev/null
@@ -1,3 +0,0 @@
-#lang racket
-
-(display "OK")
\ No newline at end of file
diff --git a/packages/raku/6.100.0/metadata.json b/packages/raku/6.100.0/metadata.json
index e1fbad8..7cda1ed 100644
--- a/packages/raku/6.100.0/metadata.json
+++ b/packages/raku/6.100.0/metadata.json
@@ -2,4 +2,4 @@
"language": "raku",
"version": "6.100.0",
"aliases": ["raku", "rakudo", "perl6", "p6", "pl6"]
-}
+}
\ No newline at end of file
diff --git a/packages/retina/1.2.0/build.sh b/packages/retina/1.2.0/build.sh
deleted file mode 100644
index 9712458..0000000
--- a/packages/retina/1.2.0/build.sh
+++ /dev/null
@@ -1,22 +0,0 @@
-#!/usr/bin/env bash
-
-# get dotnet 2.2.8 as a dependency for retina
-curl "https://download.visualstudio.microsoft.com/download/pr/022d9abf-35f0-4fd5-8d1c-86056df76e89/477f1ebb70f314054129a9f51e9ec8ec/dotnet-sdk-2.2.207-linux-x64.tar.gz" -Lo dotnet.tar.gz
-tar xzf dotnet.tar.gz --strip-components=1
-rm dotnet.tar.gz
-
-export DOTNET_CLI_HOME=$PWD
-./dotnet new console -o cache_application
-
-rm -rf cache_application
-
-# curl retina version 1.2.0
-curl -L "https://github.com/m-ender/retina/releases/download/v1.2.0/retina-linux-x64.tar.gz" -o retina.tar.xz
-tar xf retina.tar.xz --strip-components=1
-rm retina.tar.xz
-
-# move the libhostfxr.so file to the current directory so we don't have to set DOTNET_ROOT
-mv host/fxr/2.2.8/libhostfxr.so libhostfxr.so
-
-# give execute permissions to retina
-chmod +x Retina
\ No newline at end of file
diff --git a/packages/retina/1.2.0/environment b/packages/retina/1.2.0/environment
deleted file mode 100644
index d9b5a4d..0000000
--- a/packages/retina/1.2.0/environment
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-export PATH=$PWD/bin:$PATH
-export RETINA_PATH=$PWD
\ No newline at end of file
diff --git a/packages/retina/1.2.0/metadata.json b/packages/retina/1.2.0/metadata.json
deleted file mode 100644
index f632d6b..0000000
--- a/packages/retina/1.2.0/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "retina",
- "version": "1.2.0",
- "aliases": ["ret"]
-}
diff --git a/packages/retina/1.2.0/run b/packages/retina/1.2.0/run
deleted file mode 100644
index bb64b05..0000000
--- a/packages/retina/1.2.0/run
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-# retina doesn't take interactive stdin and doesn't take args either
-
-# save the file for later
-file="$1"
-
-# remove file from $@
-shift
-
-# join arguments on newline
-stdin=`printf "%s\n" "$@"`
-
-# pass stdin into Retina command with file as argument
-echo "$stdin" | "$RETINA_PATH"/Retina "$file"
\ No newline at end of file
diff --git a/packages/retina/1.2.0/test.ret b/packages/retina/1.2.0/test.ret
deleted file mode 100644
index c4b21da..0000000
--- a/packages/retina/1.2.0/test.ret
+++ /dev/null
@@ -1 +0,0 @@
-K`OK
\ No newline at end of file
diff --git a/packages/rust/1.56.1/build.sh b/packages/rust/1.56.1/build.sh
deleted file mode 100644
index 6be556f..0000000
--- a/packages/rust/1.56.1/build.sh
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-curl -OL "https://static.rust-lang.org/dist/rust-1.56.1-x86_64-unknown-linux-gnu.tar.gz"
-tar xzvf rust-1.56.1-x86_64-unknown-linux-gnu.tar.gz
-rm rust-1.56.1-x86_64-unknown-linux-gnu.tar.gz
diff --git a/packages/rust/1.56.1/compile b/packages/rust/1.56.1/compile
deleted file mode 100644
index 201318a..0000000
--- a/packages/rust/1.56.1/compile
+++ /dev/null
@@ -1,6 +0,0 @@
-#!/usr/bin/env bash
-
-# https://stackoverflow.com/questions/38041331/rust-compiler-cant-find-crate-for-std
-# Rust compiler needs to find the stdlib to link against
-rustc -o binary -L ${RUST_INSTALL_LOC}/rustc/lib -L ${RUST_INSTALL_LOC}/rust-std-x86_64-unknown-linux-gnu/lib/rustlib/x86_64-unknown-linux-gnu/lib "$@"
-chmod +x binary
diff --git a/packages/rust/1.56.1/environment b/packages/rust/1.56.1/environment
deleted file mode 100644
index c28b1b7..0000000
--- a/packages/rust/1.56.1/environment
+++ /dev/null
@@ -1,5 +0,0 @@
-#!/usr/bin/env bash
-
-# Put 'export' statements here for environment variables
-export PATH=$PWD/rust-1.56.1-x86_64-unknown-linux-gnu/rustc/bin/:$PATH
-export RUST_INSTALL_LOC=$PWD/rust-1.56.1-x86_64-unknown-linux-gnu
diff --git a/packages/rust/1.56.1/metadata.json b/packages/rust/1.56.1/metadata.json
deleted file mode 100644
index 1f7319a..0000000
--- a/packages/rust/1.56.1/metadata.json
+++ /dev/null
@@ -1,7 +0,0 @@
-{
- "language": "rust",
- "version": "1.56.1",
- "aliases": [
- "rs"
- ]
-}
diff --git a/packages/rust/1.56.1/run b/packages/rust/1.56.1/run
deleted file mode 100644
index d377dd9..0000000
--- a/packages/rust/1.56.1/run
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/usr/bin/env bash
-
-shift
-./binary "$@"
diff --git a/packages/rust/1.56.1/test.rs b/packages/rust/1.56.1/test.rs
deleted file mode 100644
index 9561664..0000000
--- a/packages/rust/1.56.1/test.rs
+++ /dev/null
@@ -1,3 +0,0 @@
-fn main() {
- println!("OK");
-}
diff --git a/packages/typescript/4.2.3/run b/packages/typescript/4.2.3/run
index 5a8c60e..1d26f3f 100644
--- a/packages/typescript/4.2.3/run
+++ b/packages/typescript/4.2.3/run
@@ -2,7 +2,7 @@
# Put instructions to run the runtime
-CODE=$1.js
+CODE=$(sed 's/ts$/js/' <<<"$1")
shift
node $CODE "$@"
diff --git a/packages/typescript/4.2.3/test.ts b/packages/typescript/4.2.3/test.ts
index e106678..56ed4a0 100644
--- a/packages/typescript/4.2.3/test.ts
+++ b/packages/typescript/4.2.3/test.ts
@@ -1 +1 @@
-console.log('OK');
+console.log("OK")
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/build.sh b/packages/vyxal/2.4.1/build.sh
deleted file mode 100644
index e7ce729..0000000
--- a/packages/vyxal/2.4.1/build.sh
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env bash
-
-# get Python source
-source ../../python/3.9.4/build.sh
-
-# add regex and pwn modules
-bin/pip3 install regex pwn
-
-# make vyxal directory
-mkdir vyxal
-cd vyxal
-
-# Vyxal install
-curl -L "https://github.com/Vyxal/Vyxal/archive/refs/tags/v2.4.1.tar.gz" -o vyxal.tar.xz
-tar xf vyxal.tar.xz --strip-components=1
-rm vyxal.tar.xz
-
-cd ..
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/environment b/packages/vyxal/2.4.1/environment
deleted file mode 100644
index f0008c8..0000000
--- a/packages/vyxal/2.4.1/environment
+++ /dev/null
@@ -1,8 +0,0 @@
-#!/usr/bin/env bash
-
-# Python and Vyxal path
-export PATH=$PWD/bin:$PATH
-export VYXAL_PATH=$PWD/vyxal
-
-# export term to fix curses warning
-export TERM=xterm
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/metadata.json b/packages/vyxal/2.4.1/metadata.json
deleted file mode 100644
index e5427fb..0000000
--- a/packages/vyxal/2.4.1/metadata.json
+++ /dev/null
@@ -1,5 +0,0 @@
-{
- "language": "vyxal",
- "version": "2.4.1",
- "aliases": []
-}
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/run b/packages/vyxal/2.4.1/run
deleted file mode 100644
index c9b08a6..0000000
--- a/packages/vyxal/2.4.1/run
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env bash
-
-python3 "$VYXAL_PATH"/Vyxal.py "$1"
\ No newline at end of file
diff --git a/packages/vyxal/2.4.1/test.vyxal b/packages/vyxal/2.4.1/test.vyxal
deleted file mode 100644
index 6d0cb6e..0000000
--- a/packages/vyxal/2.4.1/test.vyxal
+++ /dev/null
@@ -1 +0,0 @@
-`OK
\ No newline at end of file
diff --git a/packages/zig/0.8.0/metadata.json b/packages/zig/0.8.0/metadata.json
index 8c02d33..7af8ed6 100644
--- a/packages/zig/0.8.0/metadata.json
+++ b/packages/zig/0.8.0/metadata.json
@@ -1,8 +1,5 @@
{
"language": "zig",
"version": "0.8.0",
- "aliases": ["zig"],
- "limit_overrides": {
- "compile_timeout": 15000
- }
+ "aliases": ["zig"]
}
diff --git a/piston b/piston
index cacf23e..67d2fa0 100755
--- a/piston
+++ b/piston
@@ -5,16 +5,15 @@ SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
CONTAINER_NAME="piston_api"
IMAGE_TAG="base-latest"
-IMAGE_NAME="ghcr.io/engineer-man/piston"
+IMAGE_NAME="ghcr.io/piston"
IMAGE_NAME_DEV="piston"
SUBCOMMAND="$1"
shift
-build_base(){
+cmd_build(){
CONTAINER_PATH="$(nix build ".#container" --no-link --json | jq '.[0].outputs.out' -r)"
docker load -i $CONTAINER_PATH
- docker tag "$IMAGE_NAME_DEV:$IMAGE_TAG" "$IMAGE_NAME:$IMAGE_TAG"
}
case "$SUBCOMMAND" in
@@ -23,10 +22,9 @@ case "$SUBCOMMAND" in
restart) docker restart $CONTAINER_NAME ;;
start)
docker run \
- -p 2000:2000 \
--rm \
--name $CONTAINER_NAME \
- -d "$IMAGE_NAME:$IMAGE_TAG"
+ -it "$IMAGE_NAME:$IMAGE_TAG"
;;
stop) docker stop $CONTAINER_NAME ;;
bash|shell) docker exec -it $CONTAINER_NAME bash ;;
@@ -37,40 +35,22 @@ case "$SUBCOMMAND" in
;;
# dev commands
-
- scaffold)
- pushd $SCRIPT_DIR/runtimes > /dev/null
- ./scaffold.sh $1 $2
- popd > /dev/null
- ;;
- build)
- build_base
- if [[ ! -z "$1" ]]; then
- # $1 contains a variant to build
- docker build \
- --build-arg RUNTIMESET=$1 \
- -f $SCRIPT_DIR/Dockerfile.withset \
- -t "$IMAGE_NAME_DEV:$1-latest" \
- .
- fi
- ;;
+ build) cmd_build ;;
start-dev)
- build_base
+ cmd_build
docker run \
--rm \
- -p 2000:2000 \
-it \
--name $CONTAINER_NAME \
- -e PISTON_LOG_LEVEL=DEBUG \
-e PISTON_FLAKE_PATH=/piston/packages \
-v $PWD:/piston/packages \
- -d "$IMAGE_NAME_DEV:$IMAGE_TAG"
+ -it "$IMAGE_NAME_DEV:$IMAGE_TAG"
;;
test)
- build_base
+ cmd_build
docker run \
--rm \
-it \
@@ -102,8 +82,7 @@ case "$SUBCOMMAND" in
echo "See https://nixos.wiki/wiki/Nix_Installation_Guide#Stable_Nix"
echo
echo " start-dev Builds a container locally and starts piston"
- echo " build [runtime-set] Builds and loads the API container optionally"
- echo " including the runtime set within it"
+ echo " build Builds and loads the API container"
echo " scaffold [runtime] Initializes a new runtime"
echo " test Runs unit tests on the given runtime"
echo " Optionally set runtime to --all to test all"
diff --git a/pre-commit b/pre-commit
deleted file mode 100755
index 288a5e3..0000000
--- a/pre-commit
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/sh
-
-echo "Linting staged files..."
-npm install > /dev/null || exit 1
-
-FILES=$(git diff --cached --name-only --diff-filter=ACMR | sed 's| |\\ |g')
-[ -z "$FILES" ] && exit 0
-
-# Prettify all selected files
-echo "$FILES" | xargs npx prettier --ignore-unknown --write
-
-# Add back the modified/prettified files to staging
-echo "$FILES" | xargs git add
-
-exit 0
diff --git a/readme.md b/readme.md
index 2bd0c1f..163d6c9 100644
--- a/readme.md
+++ b/readme.md
@@ -41,6 +41,20 @@
+# Notes About Hacktoberfest
+
+While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs.
+If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests.
+
+We are accepting PRs for:
+* Packages - updating package versions, adding new packages
+* Documentation updates
+* CLI/API improvements - please discuss these with us in the Discord first
+
+Any queries or concerns, ping @HexF#0015 in the Discord.
+
+
+
# About
@@ -52,11 +66,11 @@
It's used in numerous places including:
-- [EMKC Challenges](https://emkc.org/challenges)
-- [EMKC Weekly Contests](https://emkc.org/contests)
-- [Engineer Man Discord Server](https://discord.gg/engineerman)
-- Web IDEs
-- 200+ direct integrations
+- [EMKC Challenges](https://emkc.org/challenges)
+- [EMKC Weekly Contests](https://emkc.org/contests)
+- [Engineer Man Discord Server](https://discord.gg/engineerman)
+- Web IDEs
+- 200+ direct integrations
@@ -64,20 +78,18 @@ It's used in numerous places including:
The following are approved and endorsed extensions/utilities to the core Piston offering.
-- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
-- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
-- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
-- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
-- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
-- [Go-Piston](https://github.com/milindmadhukar/go-piston), a Golang wrapper for accessing the Piston API.
-- [piston_rs](https://github.com/Jonxslays/piston_rs), a Rust wrapper for accessing the Piston API.
+- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
+- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
+- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
+- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
+- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
# Public API
-- Requires no installation and you can use it immediately.
-- Reference the Runtimes/Execute sections below to learn about the request and response formats.
+- Requires no installation and you can use it immediately.
+- Reference the Runtimes/Execute sections below to learn about the request and response formats.
@@ -102,9 +114,9 @@ POST https://emkc.org/api/v2/piston/execute
### Host System Package Dependencies
-- Docker
-- Docker Compose
-- Node JS (>= 13, preferably >= 15)
+- Docker
+- Docker Compose
+- Node JS (>= 13, preferably >= 15)
### After system dependencies are installed, clone this repository:
@@ -129,7 +141,7 @@ The API will now be online with no language runtimes installed. To install runti
### Host System Package Dependencies
-- Docker
+- Docker
### Installation
@@ -147,7 +159,7 @@ docker run \
### Host System Package Dependencies
-- Same as [All In One](#All-In-One)
+- Same as [All In One](#All-In-One)
### Installation
@@ -308,8 +320,6 @@ Content-Type: application/json
`awk`,
`bash`,
-`befunge93`,
-`brachylog`,
`brainfuck`,
`c`,
`c++`,
@@ -320,25 +330,21 @@ Content-Type: application/json
`cow`,
`crystal`,
`csharp`,
-`csharp.net`,
`d`,
`dart`,
`dash`,
+`dotnet`,
`dragon`,
`elixir`,
`emacs`,
`erlang`,
-`file`,
`forte`,
`fortran`,
`freebasic`,
-`fsharp.net`,
-`fsi`,
`go`,
`golfscript`,
`groovy`,
`haskell`,
-`husk`,
`iverilog`,
`japt`,
`java`,
@@ -347,7 +353,6 @@ Content-Type: application/json
`julia`,
`kotlin`,
`lisp`,
-`llvm_ir`,
`lolcode`,
`lua`,
`nasm`,
@@ -367,9 +372,7 @@ Content-Type: application/json
`pyth`,
`python`,
`python2`,
-`racket`,
`raku`,
-`retina`,
`rockstar`,
`rscript`,
`ruby`,
@@ -379,9 +382,7 @@ Content-Type: application/json
`swift`,
`typescript`,
`basic`,
-`basic.net`,
`vlang`,
-`vyxal`,
`yeethon`,
`zig`,
@@ -402,14 +403,14 @@ Docker provides a great deal of security out of the box in that it's separate fr
Piston takes additional steps to make it resistant to
various privilege escalation, denial-of-service, and resource saturation threats. These steps include:
-- Disabling outgoing network interaction
-- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
-- Capping max files at 2048 (resists various file based attacks)
-- Cleaning up all temp space after each execution (resists out of drive space attacks)
-- Running as a variety of unprivileged users
-- Capping runtime execution at 3 seconds
-- Capping stdout to 65536 characters (resists yes/no bombs and runaway output)
-- SIGKILLing misbehaving code
+- Disabling outgoing network interaction
+- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
+- Capping max files at 2048 (resists various file based attacks)
+- Cleaning up all temp space after each execution (resists out of drive space attacks)
+- Running as a variety of unprivileged users
+- Capping runtime execution at 3 seconds
+- Capping stdout to 65536 characters (resists yes/no bombs and runaway output)
+- SIGKILLing misbehaving code
diff --git a/runtimes/.scaffold.nix b/runtimes/.scaffold.nix
index de7bc8f..51b68aa 100644
--- a/runtimes/.scaffold.nix
+++ b/runtimes/.scaffold.nix
@@ -49,9 +49,6 @@ in piston.mkRuntime {
# Specify a list of tests.
# These should output "OK" to STDOUT if everything looks good
- #
- # Run the following command to test the package:
- # $ ./piston test %NAME%
tests = [
(piston.mkTest {
files = {
diff --git a/runtimes/crystal.nix b/runtimes/crystal.nix
deleted file mode 100644
index 9cbae9d..0000000
--- a/runtimes/crystal.nix
+++ /dev/null
@@ -1,31 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.crystal;
-in piston.mkRuntime {
- language = "crystal";
- version = pkg.version;
-
- aliases = [
- "cr"
- ];
-
- compile = ''
- ${pkg}/bin/crystal build "$@" -o out --no-color
- chmod +x out
- '';
-
- run = ''
- shift
- ./out "$@"
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.cr" = ''
- puts("OK")
- '';
- };
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/dart.nix b/runtimes/dart.nix
deleted file mode 100644
index d5e0981..0000000
--- a/runtimes/dart.nix
+++ /dev/null
@@ -1,25 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.dart;
-in piston.mkRuntime {
- language = "dart";
- version = pkg.version;
-
- aliases = [];
-
- run = ''
- ${pkg}/bin/dart run "$@"
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.dart" = ''
- void main() {
- print('OK');
- }
- '';
- };
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/dash.nix b/runtimes/dash.nix
deleted file mode 100644
index 6da4f4e..0000000
--- a/runtimes/dash.nix
+++ /dev/null
@@ -1,22 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.dash;
-in piston.mkRuntime {
- language = "dash";
- version = pkg.version;
-
- aliases = [];
-
- run = ''
- ${pkg}/bin/dash "$@"
- '';
- tests = [
- (piston.mkTest {
- files = {
- "test.dash" = ''
- echo "OK"
- '';
- };
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/default.nix b/runtimes/default.nix
index 0571829..2c3ddc4 100644
--- a/runtimes/default.nix
+++ b/runtimes/default.nix
@@ -5,12 +5,4 @@ args: {
"bash" = import ./bash.nix args;
"clojure" = import ./clojure.nix args;
"cobol-gnu-cobol" = import ./cobol-gnu-cobol.nix args;
- "crystal" = import ./crystal.nix args;
- "dart" = import ./dart.nix args;
- "dash" = import ./dash.nix args;
- "deno-javascript" = import ./deno-javascript.nix args;
- "deno-typescript" = import ./deno-typescript.nix args;
- "elixir" = import ./elixir.nix args;
- "erlang" = import ./erlang.nix args;
- "gawk-awk" = import ./gawk-awk.nix args;
}
diff --git a/runtimes/deno-javascript.nix b/runtimes/deno-javascript.nix
deleted file mode 100644
index 2e6970e..0000000
--- a/runtimes/deno-javascript.nix
+++ /dev/null
@@ -1,27 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.deno;
-in piston.mkRuntime {
- language = "javascript";
- version = pkg.version;
- runtime = "deno";
-
- aliases = [
- "js"
- "deno-js"
- ];
-
- run = ''
- DENO_DIR=$PWD ${pkg}/bin/deno run $@
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.js" = ''
- console.log("OK");
- '';
- };
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/deno-typescript.nix b/runtimes/deno-typescript.nix
deleted file mode 100644
index 50d0fa6..0000000
--- a/runtimes/deno-typescript.nix
+++ /dev/null
@@ -1,27 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.deno;
-in piston.mkRuntime {
- language = "typescript";
- version = pkg.version;
- runtime = "deno";
-
- aliases = [
- "ts"
- "deno-ts"
- ];
-
- run = ''
- DENO_DIR=$PWD ${pkg}/bin/deno run $@
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.ts" = ''
- console.log("OK");
- '';
- };
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/elixir.nix b/runtimes/elixir.nix
deleted file mode 100644
index 39cb962..0000000
--- a/runtimes/elixir.nix
+++ /dev/null
@@ -1,30 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.elixir;
-in piston.mkRuntime {
- language = "elixir";
- version = pkg.version;
-
- aliases = [
- "exs"
- ];
-
- run = ''
- export LC_ALL=en_US.UTF-8
- ${pkg}/bin/elixir "$@"
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.exs" = ''
- IO.puts("OK")
- '';
- };
- })
- ];
-
- limitOverrides = {
- "max_file_size" = 100000000; # 100MB
- };
-}
\ No newline at end of file
diff --git a/runtimes/erlang.nix b/runtimes/erlang.nix
deleted file mode 100644
index efac6a1..0000000
--- a/runtimes/erlang.nix
+++ /dev/null
@@ -1,28 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.erlang;
-in piston.mkRuntime {
- language = "erlang";
- version = pkg.version;
-
- aliases = [
- "erl"
- "escript"
- ];
-
- run = ''
- ${pkg}/bin/escript "$@"
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.erl" = ''
-
- main(_) ->
- io:format("OK~n").
- '';
- };
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/gawk-awk.nix b/runtimes/gawk-awk.nix
deleted file mode 100644
index c5c9675..0000000
--- a/runtimes/gawk-awk.nix
+++ /dev/null
@@ -1,26 +0,0 @@
-{pkgs, piston, ...}:
-let
- pkg = pkgs.gawk;
-in piston.mkRuntime {
- language = "awk";
- version = pkg.version;
- runtime = "gawk";
-
- aliases = [];
-
-
- run = ''
- ${pkg}/bin/gawk -f "$@"
- '';
-
- tests = [
- (piston.mkTest {
- files = {
- "test.awk" = ''
- {print "OK"}
- '';
- };
- stdin = "\n"; # awk needs some line input
- })
- ];
-}
\ No newline at end of file
diff --git a/runtimes/scaffold.sh b/runtimes/scaffold.sh
index 2a49ef2..fb3aafe 100755
--- a/runtimes/scaffold.sh
+++ b/runtimes/scaffold.sh
@@ -15,7 +15,7 @@ NAME=$1
if [[ $# -eq 2 ]]; then
RUNTIME=$2
- NAME=$RUNTIME-$LANGUAGE
+ NAME=$LANGUAGE-$RUNTIME
fi
@@ -28,13 +28,10 @@ else
echo " \"$NAME\" = import ./$NAME.nix args;" >> default.nix
sed -e 's/%LANGUAGE%/'"$LANGUAGE"'/g' \
-e 's/%RUNTIME%/'"$RUNTIME"'/g' \
- -e 's/%NAME%/'"$NAME"'/g' \
.scaffold.nix > $NAME.nix
+ git add $NAME.nix
echo "}" >> default.nix
- git add $NAME.nix default.nix
-
echo "Scaffolded $NAME"
echo "Edit $NAME.nix to get started"
- echo "Once you are done, run ./piston test $NAME to test it"
fi
\ No newline at end of file
diff --git a/tests/readme.md b/tests/readme.md
index 746d0b9..01ae419 100644
--- a/tests/readme.md
+++ b/tests/readme.md
@@ -6,4 +6,4 @@ Write exploits in any language supported by piston.
Hopefully when running any files in this directory, piston will resist the attack.
-Leave a comment in the code describing how the exploit works.
+Leave a comment in the code describing how the exploit works.
\ No newline at end of file