diff --git a/.github/ISSUE_TEMPLATE/language-request.md b/.github/ISSUE_TEMPLATE/language-request.md
index 5ae2661..3f42d90 100644
--- a/.github/ISSUE_TEMPLATE/language-request.md
+++ b/.github/ISSUE_TEMPLATE/language-request.md
@@ -4,7 +4,6 @@ about: Template for requesting language support
title: Add [insert language name here]
labels: package
assignees: ''
-
---
Provide links to different compilers/interpreters that could be used to implement this language, and discuss pros/cons of each.
diff --git a/.github/PULL_REQUEST_TEMPLATE/package.md b/.github/PULL_REQUEST_TEMPLATE/package.md
index 6cd3c98..da59fe0 100644
--- a/.github/PULL_REQUEST_TEMPLATE/package.md
+++ b/.github/PULL_REQUEST_TEMPLATE/package.md
@@ -1,10 +1,11 @@
Checklist:
-* [ ] The package builds locally with `./piston build-pkg [package] [version]`
-* [ ] The package installs with `./piston ppman install [package]=[version]`
-* [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
-* [ ] Package files are placed in the correct directory
-* [ ] No old package versions are removed
-* [ ] All source files are deleted in the `build.sh` script
-* [ ] `metadata.json`'s `language` and `version` fields match the directory path
-* [ ] Any extensions the language may use are set as aliases
-* [ ] Any alternative names the language is referred to are set as aliases.
+
+- [ ] The package builds locally with `./piston build-pkg [package] [version]`
+- [ ] The package installs with `./piston ppman install [package]=[version]`
+- [ ] The package runs the test code with `./piston run [package] -l [version] packages/[package]/[version]/test.*`
+- [ ] Package files are placed in the correct directory
+- [ ] No old package versions are removed
+- [ ] All source files are deleted in the `build.sh` script
+- [ ] `metadata.json`'s `language` and `version` fields match the directory path
+- [ ] Any extensions the language may use are set as aliases
+- [ ] Any alternative names the language is referred to are set as aliases.
diff --git a/.github/workflows/api-push.yaml b/.github/workflows/api-push.yaml
index bcf0472..dec3bce 100644
--- a/.github/workflows/api-push.yaml
+++ b/.github/workflows/api-push.yaml
@@ -1,39 +1,38 @@
name: Publish API image
on:
- push:
- branches:
- - master
- - v3
- paths:
- - api/**
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - api/**
-
jobs:
- push_to_registry:
- runs-on: ubuntu-latest
- name: Build and Push Docker image to Github Packages
- steps:
- - name: Check out repo
- uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
- - name: Login to ghcr.io
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: ghcr.io
+ push_to_registry:
+ runs-on: ubuntu-latest
+ name: Build and Push Docker image to Github Packages
+ steps:
+ - name: Check out repo
+ uses: actions/checkout@v2
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
+ - name: Login to ghcr.io
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: ghcr.io
- - name: Build and push API
- uses: docker/build-push-action@v2
- with:
- context: api
- push: true
- pull: true
- tags: |
- docker.pkg.github.com/engineer-man/piston/api
- ghcr.io/engineer-man/piston
+ - name: Build and push API
+ uses: docker/build-push-action@v2
+ with:
+ context: api
+ push: true
+ pull: true
+ tags: |
+ docker.pkg.github.com/engineer-man/piston/api
+ ghcr.io/engineer-man/piston
diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml
index cb646d3..78101b0 100644
--- a/.github/workflows/package-pr.yaml
+++ b/.github/workflows/package-pr.yaml
@@ -1,140 +1,140 @@
-name: "Package Pull Requests"
+name: 'Package Pull Requests'
on:
- pull_request:
- types:
- - opened
- - edited
- - reopened
- - synchronize
- paths:
- - "packages/**"
+ pull_request:
+ types:
+ - opened
+ - edited
+ - reopened
+ - synchronize
+ paths:
+ - 'packages/**'
jobs:
- check-pkg:
- name: Validate README
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ check-pkg:
+ name: Validate README
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- - name: Ensure README was updated
- run: |
-              MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))
+ - name: Ensure README was updated
+ run: |
+              MISSING_LINES=$(comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u))
- [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
+ [[ $(echo $MISSING_LINES | wc -c) = "1" ]] && exit 0
- echo "README has supported languages missing: "
-              comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
- exit 1
+ echo "README has supported languages missing: "
+              comm -23 <(jq 'if .provides then .provides[].language else .language end' -r $(find packages -name "metadata.json" ) | sed -e 's/^/`/g' -e 's/$/`,/g' | sort -u) <(awk '/# Supported Languages/{flag=1; next} /<br>/{flag=0} flag' readme.md | sort -u)
+ exit 1
- build-pkg:
- name: Check that package builds
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
+ build-pkg:
+ name: Check that package builds
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- - name: Build Packages
- run: |
- PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '$2 && $3{ print $2 "-" $3 }' | sort -u)
- echo "Packages: $PACKAGES"
- docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
- docker build -t repo-builder repo
- docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
- ls -la packages
+ - name: Build Packages
+ run: |
+ PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '$2 && $3{ print $2 "-" $3 }' | sort -u)
+ echo "Packages: $PACKAGES"
+ docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+ docker build -t repo-builder repo
+ docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
+ ls -la packages
- - name: Upload package as artifact
- uses: actions/upload-artifact@v2
- with:
- name: packages
- path: packages/*.pkg.tar.gz
+ - name: Upload package as artifact
+ uses: actions/upload-artifact@v2
+ with:
+ name: packages
+ path: packages/*.pkg.tar.gz
- test-pkg:
- name: Test package
- runs-on: ubuntu-latest
- needs: build-pkg
- steps:
- - uses: actions/checkout@v2
+ test-pkg:
+ name: Test package
+ runs-on: ubuntu-latest
+ needs: build-pkg
+ steps:
+ - uses: actions/checkout@v2
- - uses: actions/download-artifact@v2
- with:
- name: packages
+ - uses: actions/download-artifact@v2
+ with:
+ name: packages
- - name: Relocate downloaded packages
- run: mv *.pkg.tar.gz packages/
+ - name: Relocate downloaded packages
+ run: mv *.pkg.tar.gz packages/
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Run tests
- run: |
- ls -la
- docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
- docker pull docker.pkg.github.com/engineer-man/piston/api
- docker build -t piston-api api
- docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
- echo Waiting for API to start..
- docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
+ - name: Run tests
+ run: |
+ ls -la
+ docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
+ docker pull docker.pkg.github.com/engineer-man/piston/api
+ docker build -t piston-api api
+ docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
+ echo Waiting for API to start..
+ docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes
- echo Waiting for Index to start..
- docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
+ echo Waiting for Index to start..
+ docker run --network container:repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index
- echo Adjusting index
- sed -i 's/repo/localhost/g' repo/index
+ echo Adjusting index
+ sed -i 's/repo/localhost/g' repo/index
- echo Listing Packages
- PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
- echo $PACKAGES_JSON
+ echo Listing Packages
+ PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/api/v2/packages)
+ echo $PACKAGES_JSON
- echo Getting CLI ready
- docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
+ echo Getting CLI ready
+ docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'
- for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
- do
- echo "Testing $package"
- PKG_PATH=$(sed 's|-|/|' <<< $package)
- PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
- PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
+ for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
+ do
+ echo "Testing $package"
+ PKG_PATH=$(sed 's|-|/|' <<< $package)
+ PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
+ PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)
- echo "Installing..."
- docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
+ echo "Installing..."
+ docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/api/v2/packages -H "Content-Type: application/json" -d "{\"language\":\"$PKG_NAME\",\"version\":\"$PKG_VERSION\"}"
- TEST_SCRIPTS=packages/$PKG_PATH/test.*
- echo "Tests: $TEST_SCRIPTS"
+ TEST_SCRIPTS=packages/$PKG_PATH/test.*
+ echo "Tests: $TEST_SCRIPTS"
- for tscript in $TEST_SCRIPTS
- do
- TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
- echo Running $tscript with runtime=$TEST_RUNTIME
- docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
- cat test_output
- grep "OK" test_output
- done
- done
+ for tscript in $TEST_SCRIPTS
+ do
+ TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
+ echo Running $tscript with runtime=$TEST_RUNTIME
+ docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME -l $PKG_VERSION /pkg/$(basename $tscript) > test_output
+ cat test_output
+ grep "OK" test_output
+ done
+ done
- - name: Dump logs
- if: ${{ always() }}
- run: |
- docker logs api
- docker logs repo
+ - name: Dump logs
+ if: ${{ always() }}
+ run: |
+ docker logs api
+ docker logs repo
diff --git a/.github/workflows/package-push.yaml b/.github/workflows/package-push.yaml
index bbb44af..ee49487 100644
--- a/.github/workflows/package-push.yaml
+++ b/.github/workflows/package-push.yaml
@@ -1,78 +1,77 @@
name: 'Package Pushed'
on:
- push:
- branches:
- - master
- - v3
- paths:
- - packages/**
-
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - packages/**
jobs:
- build-pkg:
- name: Build package
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@v2
-
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ build-pkg:
+ name: Build package
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout
+ uses: actions/checkout@v2
- - name: Get list of changed files
- uses: lots0logs/gh-action-get-changed-files@2.1.4
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Build Packages
- run: |
- PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
- echo "Packages: $PACKAGES"
- docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
- docker build -t repo-builder repo
- docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
- ls -la packages
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
- - name: Upload Packages
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: packages/*.pkg.tar.gz
- tag: pkgs
- overwrite: true
- file_glob: true
- create-index:
- name: Create Index
- runs-on: ubuntu-latest
- needs: build-pkg
- steps:
- - name: "Download all release assets"
- run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
- - name: "Generate index file"
- run: |
- echo "" > index
- BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
- for pkg in *.pkg.tar.gz
- do
- PKGFILE=$(basename $pkg)
- PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
+ - name: Get list of changed files
+ uses: lots0logs/gh-action-get-changed-files@2.1.4
+ with:
+ token: ${{ secrets.GITHUB_TOKEN }}
- PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
- PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
- PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
- echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
- echo "Adding package $PKGNAME-$PKGVERSION"
- done
- - name: Upload index
- uses: svenstaro/upload-release-action@v2
- with:
- repo_token: ${{ secrets.GITHUB_TOKEN }}
- file: index
- tag: pkgs
- overwrite: true
- file_glob: true
+ - name: Build Packages
+ run: |
+ PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
+ echo "Packages: $PACKAGES"
+ docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+ docker build -t repo-builder repo
+ docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
+ ls -la packages
+
+ - name: Upload Packages
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: packages/*.pkg.tar.gz
+ tag: pkgs
+ overwrite: true
+ file_glob: true
+ create-index:
+ name: Create Index
+ runs-on: ubuntu-latest
+ needs: build-pkg
+ steps:
+ - name: 'Download all release assets'
+ run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
+ - name: 'Generate index file'
+ run: |
+ echo "" > index
+ BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
+ for pkg in *.pkg.tar.gz
+ do
+ PKGFILE=$(basename $pkg)
+ PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')
+
+ PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
+ PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
+ PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
+ echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
+ echo "Adding package $PKGNAME-$PKGVERSION"
+ done
+ - name: Upload index
+ uses: svenstaro/upload-release-action@v2
+ with:
+ repo_token: ${{ secrets.GITHUB_TOKEN }}
+ file: index
+ tag: pkgs
+ overwrite: true
+ file_glob: true
diff --git a/.github/workflows/repo-push.yaml b/.github/workflows/repo-push.yaml
index b5a603c..c887b01 100644
--- a/.github/workflows/repo-push.yaml
+++ b/.github/workflows/repo-push.yaml
@@ -1,31 +1,31 @@
name: Publish Repo image
on:
- push:
- branches:
- - master
- - v3
- paths:
- - repo/**
-
-jobs:
- push_to_registry:
- runs-on: ubuntu-latest
- name: Build and Push Docker image to Github Packages
- steps:
- - name: Check out repo
- uses: actions/checkout@v2
- - name: Login to GitHub registry
- uses: docker/login-action@v1
- with:
- username: ${{ github.actor }}
- password: ${{ secrets.GITHUB_TOKEN }}
- registry: docker.pkg.github.com
+ push:
+ branches:
+ - master
+ - v3
+ paths:
+ - repo/**
- - name: Build and push repo
- uses: docker/build-push-action@v2
- with:
- context: repo
- pull: true
- push: true
- tags: |
- docker.pkg.github.com/engineer-man/piston/repo-builder
\ No newline at end of file
+jobs:
+ push_to_registry:
+ runs-on: ubuntu-latest
+ name: Build and Push Docker image to Github Packages
+ steps:
+ - name: Check out repo
+ uses: actions/checkout@v2
+ - name: Login to GitHub registry
+ uses: docker/login-action@v1
+ with:
+ username: ${{ github.actor }}
+ password: ${{ secrets.GITHUB_TOKEN }}
+ registry: docker.pkg.github.com
+
+ - name: Build and push repo
+ uses: docker/build-push-action@v2
+ with:
+ context: repo
+ pull: true
+ push: true
+ tags: |
+ docker.pkg.github.com/engineer-man/piston/repo-builder
diff --git a/.gitignore b/.gitignore
index 222be8c..eb53d81 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
data/
-.piston_env
\ No newline at end of file
+.piston_env
+node_modules
diff --git a/.prettierignore b/.prettierignore
new file mode 100644
index 0000000..bb310ab
--- /dev/null
+++ b/.prettierignore
@@ -0,0 +1,12 @@
+node_modules
+data/
+api/_piston
+repo/build
+packages/*/*/*
+packages/*.pkg.tar.gz
+!packages/*/*/metadata.json
+!packages/*/*/build.sh
+!packages/*/*/environment
+!packages/*/*/run
+!packages/*/*/compile
+!packages/*/*/test.*
diff --git a/api/.prettierrc.yaml b/.prettierrc.yaml
similarity index 100%
rename from api/.prettierrc.yaml
rename to .prettierrc.yaml
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index e651ad5..70f2b5e 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -1,8 +1,8 @@
version: 2
mkdocs:
- configuration: mkdocs.yml
+ configuration: mkdocs.yml
python:
- version: 3.7
- install:
- - requirements: docs/requirements.txt
+ version: 3.7
+ install:
+ - requirements: docs/requirements.txt
diff --git a/api/.gitignore b/api/.gitignore
index adbd330..4b5a9b8 100644
--- a/api/.gitignore
+++ b/api/.gitignore
@@ -1,2 +1 @@
-node_modules
-_piston
\ No newline at end of file
+_piston
diff --git a/api/.prettierignore b/api/.prettierignore
deleted file mode 100644
index 3c3629e..0000000
--- a/api/.prettierignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
diff --git a/api/package-lock.json b/api/package-lock.json
index 83df240..2b34fc4 100644
--- a/api/package-lock.json
+++ b/api/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "piston-api",
- "version": "3.0.0",
+ "version": "3.1.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "piston-api",
- "version": "3.0.0",
+ "version": "3.1.0",
"license": "MIT",
"dependencies": {
"body-parser": "^1.19.0",
@@ -20,9 +20,6 @@
"semver": "^7.3.4",
"uuid": "^8.3.2",
"waitpid": "git+https://github.com/HexF/node-waitpid.git"
- },
- "devDependencies": {
- "prettier": "2.2.1"
}
},
"node_modules/accepts": {
@@ -409,18 +406,6 @@
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
},
- "node_modules/prettier": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
- "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
- "dev": true,
- "bin": {
- "prettier": "bin-prettier.js"
- },
- "engines": {
- "node": ">=10.13.0"
- }
- },
"node_modules/proxy-addr": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz",
@@ -595,7 +580,8 @@
}
},
"node_modules/waitpid": {
- "resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa"
+ "resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa",
+ "hasInstallScript": true
},
"node_modules/ws": {
"version": "7.5.3",
@@ -913,12 +899,6 @@
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha1-32BBeABfUi8V60SQ5yR6G/qmf4w="
},
- "prettier": {
- "version": "2.2.1",
- "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.2.1.tgz",
- "integrity": "sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==",
- "dev": true
- },
"proxy-addr": {
"version": "2.0.6",
"resolved": "https://registry.npmjs.org/proxy-addr/-/proxy-addr-2.0.6.tgz",
diff --git a/api/package.json b/api/package.json
index e8e5b5d..fb107a5 100644
--- a/api/package.json
+++ b/api/package.json
@@ -16,11 +16,5 @@
"uuid": "^8.3.2",
"waitpid": "git+https://github.com/HexF/node-waitpid.git"
},
- "license": "MIT",
- "scripts": {
- "lint": "prettier . --write"
- },
- "devDependencies": {
- "prettier": "2.2.1"
- }
+ "license": "MIT"
}
diff --git a/api/src/api/v2.js b/api/src/api/v2.js
index a3571e1..a8fa9f0 100644
--- a/api/src/api/v2.js
+++ b/api/src/api/v2.js
@@ -8,10 +8,49 @@ const { Job } = require('../job');
const package = require('../package');
const logger = require('logplease').create('api/v2');
-const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
+const SIGNALS = [
+ 'SIGABRT',
+ 'SIGALRM',
+ 'SIGBUS',
+ 'SIGCHLD',
+ 'SIGCLD',
+ 'SIGCONT',
+ 'SIGEMT',
+ 'SIGFPE',
+ 'SIGHUP',
+ 'SIGILL',
+ 'SIGINFO',
+ 'SIGINT',
+ 'SIGIO',
+ 'SIGIOT',
+ 'SIGKILL',
+ 'SIGLOST',
+ 'SIGPIPE',
+ 'SIGPOLL',
+ 'SIGPROF',
+ 'SIGPWR',
+ 'SIGQUIT',
+ 'SIGSEGV',
+ 'SIGSTKFLT',
+ 'SIGSTOP',
+ 'SIGTSTP',
+ 'SIGSYS',
+ 'SIGTERM',
+ 'SIGTRAP',
+ 'SIGTTIN',
+ 'SIGTTOU',
+ 'SIGUNUSED',
+ 'SIGURG',
+ 'SIGUSR1',
+ 'SIGUSR2',
+ 'SIGVTALRM',
+ 'SIGXCPU',
+ 'SIGXFSZ',
+ 'SIGWINCH',
+];
// ref: https://man7.org/linux/man-pages/man7/signal.7.html
-function get_job(body){
+function get_job(body) {
let {
language,
version,
@@ -21,7 +60,7 @@ function get_job(body){
compile_memory_limit,
run_memory_limit,
run_timeout,
- compile_timeout
+ compile_timeout,
} = body;
return new Promise((resolve, reject) => {
@@ -68,7 +107,7 @@ function get_job(body){
}
if (typeof constraint_value !== 'number') {
return reject({
- message: `If specified, ${constraint_name} must be a number`
+ message: `If specified, ${constraint_name} must be a number`,
});
}
if (configured_limit <= 0) {
@@ -76,12 +115,12 @@ function get_job(body){
}
if (constraint_value > configured_limit) {
return reject({
- message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`
+ message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`,
});
}
if (constraint_value < 0) {
return reject({
- message: `${constraint_name} must be non-negative`
+ message: `${constraint_name} must be non-negative`,
});
}
}
@@ -91,20 +130,22 @@ function get_job(body){
run_timeout = run_timeout || rt.timeouts.run;
compile_memory_limit = compile_memory_limit || rt.memory_limits.compile;
run_timeout = run_timeout || rt.timeouts.run;
- resolve(new Job({
- runtime: rt,
- args: args || [],
- stdin: stdin || "",
- files,
- timeouts: {
- run: run_timeout,
- compile: compile_timeout,
- },
- memory_limits: {
- run: run_memory_limit,
- compile: compile_memory_limit,
- }
- }));
+ resolve(
+ new Job({
+ runtime: rt,
+ args: args || [],
+ stdin: stdin || '',
+ files,
+ timeouts: {
+ run: run_timeout,
+ compile: compile_timeout,
+ },
+ memory_limits: {
+ run: run_memory_limit,
+ compile: compile_memory_limit,
+ },
+ })
+ );
});
}
@@ -123,88 +164,104 @@ router.use((req, res, next) => {
});
router.ws('/connect', async (ws, req) => {
-
let job = null;
let eventBus = new events.EventEmitter();
- eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
- eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
- eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
- eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
+ eventBus.on('stdout', data =>
+ ws.send(
+ JSON.stringify({
+ type: 'data',
+ stream: 'stdout',
+ data: data.toString(),
+ })
+ )
+ );
+ eventBus.on('stderr', data =>
+ ws.send(
+ JSON.stringify({
+ type: 'data',
+ stream: 'stderr',
+ data: data.toString(),
+ })
+ )
+ );
+ eventBus.on('stage', stage =>
+ ws.send(JSON.stringify({ type: 'stage', stage }))
+ );
+ eventBus.on('exit', (stage, status) =>
+ ws.send(JSON.stringify({ type: 'exit', stage, ...status }))
+ );
- ws.on("message", async (data) => {
-
- try{
+ ws.on('message', async data => {
+ try {
const msg = JSON.parse(data);
- switch(msg.type){
- case "init":
- if(job === null){
+ switch (msg.type) {
+ case 'init':
+ if (job === null) {
job = await get_job(msg);
await job.prime();
- ws.send(JSON.stringify({
- type: "runtime",
- language: job.runtime.language,
- version: job.runtime.version.raw
- }))
+ ws.send(
+ JSON.stringify({
+ type: 'runtime',
+ language: job.runtime.language,
+ version: job.runtime.version.raw,
+ })
+ );
await job.execute_interactive(eventBus);
- ws.close(4999, "Job Completed");
-
- }else{
- ws.close(4000, "Already Initialized");
+ ws.close(4999, 'Job Completed');
+ } else {
+ ws.close(4000, 'Already Initialized');
}
break;
- case "data":
- if(job !== null){
- if(msg.stream === "stdin"){
- eventBus.emit("stdin", msg.data)
- }else{
- ws.close(4004, "Can only write to stdin")
+ case 'data':
+ if (job !== null) {
+ if (msg.stream === 'stdin') {
+ eventBus.emit('stdin', msg.data);
+ } else {
+ ws.close(4004, 'Can only write to stdin');
+ }
+ } else {
+ ws.close(4003, 'Not yet initialized');
}
- }else{
- ws.close(4003, "Not yet initialized")
- }
- break;
- case "signal":
- if(job !== null){
- if(SIGNALS.includes(msg.signal)){
- eventBus.emit("signal", msg.signal)
- }else{
- ws.close(4005, "Invalid signal")
+ break;
+ case 'signal':
+ if (job !== null) {
+ if (SIGNALS.includes(msg.signal)) {
+ eventBus.emit('signal', msg.signal);
+ } else {
+ ws.close(4005, 'Invalid signal');
+ }
+ } else {
+ ws.close(4003, 'Not yet initialized');
}
- }else{
- ws.close(4003, "Not yet initialized")
- }
- break;
+ break;
}
-
- }catch(error){
- ws.send(JSON.stringify({type: "error", message: error.message}))
- ws.close(4002, "Notified Error")
+ } catch (error) {
+ ws.send(JSON.stringify({ type: 'error', message: error.message }));
+ ws.close(4002, 'Notified Error');
// ws.close message is limited to 123 characters, so we notify over WS then close.
}
- })
+ });
- ws.on("close", async ()=>{
- if(job !== null){
- await job.cleanup()
+ ws.on('close', async () => {
+ if (job !== null) {
+ await job.cleanup();
}
- })
+ });
- setTimeout(()=>{
+ setTimeout(() => {
//Terminate the socket after 1 second, if not initialized.
- if(job === null)
- ws.close(4001, "Initialization Timeout");
- }, 1000)
-})
+ if (job === null) ws.close(4001, 'Initialization Timeout');
+ }, 1000);
+});
router.post('/execute', async (req, res) => {
-
- try{
+ try {
const job = await get_job(req.body);
await job.prime();
@@ -214,7 +271,7 @@ router.post('/execute', async (req, res) => {
await job.cleanup();
return res.status(200).send(result);
- }catch(error){
+ } catch (error) {
return res.status(400).json(error);
}
});
diff --git a/api/src/config.js b/api/src/config.js
index c191644..1a9eb3d 100644
--- a/api/src/config.js
+++ b/api/src/config.js
@@ -5,8 +5,7 @@ const logger = Logger.create('config');
function parse_overrides(overrides) {
try {
return JSON.parse(overrides);
- }
- catch (e) {
+ } catch (e) {
return null;
}
}
@@ -16,15 +15,20 @@ function validate_overrides(overrides, options) {
for (let key in overrides[language]) {
if (
![
- 'max_process_count', 'max_open_files', 'max_file_size',
- 'compile_memory_limit', 'run_memory_limit', 'compile_timeout',
- 'run_timeout', 'output_max_size'
+ 'max_process_count',
+ 'max_open_files',
+ 'max_file_size',
+ 'compile_memory_limit',
+ 'run_memory_limit',
+ 'compile_timeout',
+ 'run_timeout',
+ 'output_max_size',
].includes(key)
) {
logger.error(`Invalid overridden option: ${key}`);
return false;
}
- let option = options.find((o) => o.key === key);
+ let option = options.find(o => o.key === key);
let parser = option.parser;
let raw = overrides[language][key];
let value = parser(raw);
@@ -32,14 +36,19 @@ function validate_overrides(overrides, options) {
for (let validator of validators) {
let response = validator(value, raw);
if (response !== true) {
- logger.error(`Failed to validate overridden option: ${key}`, response);
+ logger.error(
+ `Failed to validate overridden option: ${key}`,
+ response
+ );
return false;
}
}
overrides[language][key] = value;
}
// Modifies the reference
- options[options.index_of(options.find((o) => o.key === 'limit_overrides'))] = overrides;
+ options[
+ options.index_of(options.find(o => o.key === 'limit_overrides'))
+ ] = overrides;
}
return true;
}
@@ -135,32 +144,28 @@ const options = [
},
{
key: 'compile_timeout',
- desc:
- 'Max time allowed for compile stage in milliseconds',
+ desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_timeout',
- desc:
- 'Max time allowed for run stage in milliseconds',
+ desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_memory_limit',
- desc:
- 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
+ desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_memory_limit',
- desc:
- 'Max memory usage for run stage in bytes (set to -1 for no limit)',
+ desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
@@ -177,7 +182,7 @@ const options = [
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
- validators: [(x) => x > 0 || `${x} cannot be negative`]
+ validators: [x => x > 0 || `${x} cannot be negative`],
},
{
key: 'limit_overrides',
@@ -187,10 +192,12 @@ const options = [
default: {},
parser: parse_overrides,
validators: [
- (x) => !!x || `Invalid JSON format for the overrides\n${x}`,
- (overrides, _, options) => validate_overrides(overrides, options) || `Failed to validate the overrides`
- ]
- }
+ x => !!x || `Invalid JSON format for the overrides\n${x}`,
+ (overrides, _, options) =>
+ validate_overrides(overrides, options) ||
+ `Failed to validate the overrides`,
+ ],
+ },
];
logger.info(`Loading Configuration from environment`);
diff --git a/api/src/index.js b/api/src/index.js
index afd4d15..025f5ae 100644
--- a/api/src/index.js
+++ b/api/src/index.js
@@ -15,8 +15,6 @@ const logger = Logger.create('index');
const app = express();
expressWs(app);
-
-
(async () => {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
diff --git a/api/src/job.js b/api/src/job.js
index 552463a..c2e8bc8 100644
--- a/api/src/job.js
+++ b/api/src/job.js
@@ -19,15 +19,12 @@ let gid = 0;
let remainingJobSpaces = config.max_concurrent_jobs;
let jobQueue = [];
-
-setInterval(()=>{
+setInterval(() => {
// Every 10ms try resolve a new job, if there is an available slot
- if(jobQueue.length > 0 && remainingJobSpaces > 0){
- jobQueue.shift()()
+ if (jobQueue.length > 0 && remainingJobSpaces > 0) {
+ jobQueue.shift()();
}
-}, 10)
-
-
+}, 10);
class Job {
constructor({ runtime, files, args, stdin }) {
@@ -59,11 +56,11 @@ class Job {
}
async prime() {
- if(remainingJobSpaces < 1){
- logger.info(`Awaiting job slot uuid=${this.uuid}`)
- await new Promise((resolve)=>{
- jobQueue.push(resolve)
- })
+ if (remainingJobSpaces < 1) {
+ logger.info(`Awaiting job slot uuid=${this.uuid}`);
+ await new Promise(resolve => {
+ jobQueue.push(resolve);
+ });
}
logger.info(`Priming job uuid=${this.uuid}`);
@@ -79,10 +76,15 @@ class Job {
let file_path = path.join(this.dir, file.name);
const rel = path.relative(this.dir, file_path);
- if(rel.startsWith(".."))
- throw Error(`File path "${file.name}" tries to escape parent directory: ${rel}`)
+ if (rel.startsWith('..'))
+ throw Error(
+ `File path "${file.name}" tries to escape parent directory: ${rel}`
+ );
- await fs.mkdir(path.dirname(file_path), {recursive: true, mode: 0o700})
+ await fs.mkdir(path.dirname(file_path), {
+ recursive: true,
+ mode: 0o700,
+ });
await fs.chown(path.dirname(file_path), this.uid, this.gid);
await fs.write_file(file_path, file.content);
@@ -127,34 +129,33 @@ class Job {
detached: true, //give this process its own process group
});
- if(eventBus === null){
+ if (eventBus === null) {
proc.stdin.write(this.stdin);
proc.stdin.end();
proc.stdin.destroy();
- }else{
- eventBus.on("stdin", (data) => {
+ } else {
+ eventBus.on('stdin', data => {
proc.stdin.write(data);
- })
+ });
- eventBus.on("kill", (signal) => {
- proc.kill(signal)
- })
+ eventBus.on('kill', signal => {
+ proc.kill(signal);
+ });
}
- const kill_timeout = set_timeout(
- async _ => {
- logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`)
- process.kill(proc.pid, 'SIGKILL')
- },
- timeout
- );
+ const kill_timeout = set_timeout(async _ => {
+ logger.info(
+ `Timeout exceeded timeout=${timeout} uuid=${this.uuid}`
+ );
+ process.kill(proc.pid, 'SIGKILL');
+ }, timeout);
proc.stderr.on('data', async data => {
- if(eventBus !== null) {
- eventBus.emit("stderr", data);
+ if (eventBus !== null) {
+ eventBus.emit('stderr', data);
} else if (stderr.length > this.runtime.output_max_size) {
- logger.info(`stderr length exceeded uuid=${this.uuid}`)
- process.kill(proc.pid, 'SIGKILL')
+ logger.info(`stderr length exceeded uuid=${this.uuid}`);
+ process.kill(proc.pid, 'SIGKILL');
} else {
stderr += data;
output += data;
@@ -162,11 +163,11 @@ class Job {
});
proc.stdout.on('data', async data => {
- if(eventBus !== null){
- eventBus.emit("stdout", data);
+ if (eventBus !== null) {
+ eventBus.emit('stdout', data);
} else if (stdout.length > this.runtime.output_max_size) {
- logger.info(`stdout length exceeded uuid=${this.uuid}`)
- process.kill(proc.pid, 'SIGKILL')
+ logger.info(`stdout length exceeded uuid=${this.uuid}`);
+ process.kill(proc.pid, 'SIGKILL');
} else {
stdout += data;
output += data;
@@ -179,14 +180,14 @@ class Job {
proc.stderr.destroy();
proc.stdout.destroy();
- await this.cleanup_processes()
- logger.debug(`Finished exit cleanup uuid=${this.uuid}`)
+ await this.cleanup_processes();
+ logger.debug(`Finished exit cleanup uuid=${this.uuid}`);
};
proc.on('exit', async (code, signal) => {
await exit_cleanup();
- resolve({stdout, stderr, code, signal, output });
+ resolve({ stdout, stderr, code, signal, output });
});
proc.on('error', async err => {
@@ -243,7 +244,7 @@ class Job {
};
}
- async execute_interactive(eventBus){
+ async execute_interactive(eventBus) {
if (this.state !== job_states.PRIMED) {
throw new Error(
'Job must be in primed state, current state: ' +
@@ -252,27 +253,27 @@ class Job {
}
logger.info(
- `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
- this.gid
- } runtime=${this.runtime.toString()}`
+ `Interactively executing job uuid=${this.uuid} uid=${
+ this.uid
+ } gid=${this.gid} runtime=${this.runtime.toString()}`
);
- if(this.runtime.compiled){
- eventBus.emit("stage", "compile")
- const {error, code, signal} = await this.safe_call(
+ if (this.runtime.compiled) {
+ eventBus.emit('stage', 'compile');
+ const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'),
this.files.map(x => x.name),
this.runtime.timeouts.compile,
this.runtime.memory_limits.compile,
eventBus
- )
+ );
- eventBus.emit("exit", "compile", {error, code, signal})
+ eventBus.emit('exit', 'compile', { error, code, signal });
}
logger.debug('Running');
- eventBus.emit("stage", "run")
- const {error, code, signal} = await this.safe_call(
+ eventBus.emit('stage', 'run');
+ const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'),
[this.files[0].name, ...this.args],
this.runtime.timeouts.run,
@@ -280,47 +281,50 @@ class Job {
eventBus
);
- eventBus.emit("exit", "run", {error, code, signal})
-
+ eventBus.emit('exit', 'run', { error, code, signal });
this.state = job_states.EXECUTED;
}
async cleanup_processes(dont_wait = []) {
let processes = [1];
- logger.debug(`Cleaning up processes uuid=${this.uuid}`)
+ logger.debug(`Cleaning up processes uuid=${this.uuid}`);
while (processes.length > 0) {
- processes = []
+ processes = [];
+ const proc_ids = await fs.readdir('/proc');
- const proc_ids = await fs.readdir("/proc");
+ processes = await Promise.all(
+ proc_ids.map(async proc_id => {
+ if (isNaN(proc_id)) return -1;
+ try {
+ const proc_status = await fs.read_file(
+ path.join('/proc', proc_id, 'status')
+ );
+ const proc_lines = proc_status.to_string().split('\n');
+ const uid_line = proc_lines.find(line =>
+ line.starts_with('Uid:')
+ );
+ const [_, ruid, euid, suid, fuid] =
+ uid_line.split(/\s+/);
+ if (ruid == this.uid || euid == this.uid)
+ return parse_int(proc_id);
+ } catch {
+ return -1;
+ }
- processes = await Promise.all(proc_ids.map(async (proc_id) => {
- if(isNaN(proc_id)) return -1;
- try{
- const proc_status = await fs.read_file(path.join("/proc",proc_id,"status"));
- const proc_lines = proc_status.to_string().split("\n")
- const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
- const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
-
- if(ruid == this.uid || euid == this.uid)
- return parse_int(proc_id)
-
- }catch{
- return -1
- }
-
- return -1
- }))
-
- processes = processes.filter(p => p > 0)
-
- if(processes.length > 0)
- logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`)
+ return -1;
+ })
+ );
+ processes = processes.filter(p => p > 0);
+ if (processes.length > 0)
+ logger.debug(
+ `Got processes to kill: ${processes} uuid=${this.uuid}`
+ );
for (const proc of processes) {
// First stop the processes, but keep their resources allocated so they cant re-fork
@@ -339,12 +343,11 @@ class Job {
// Could already be dead and just needs to be waited on
}
- if(!dont_wait.includes(proc))
- wait_pid(proc);
+ if (!dont_wait.includes(proc)) wait_pid(proc);
}
}
- logger.debug(`Cleaned up processes uuid=${this.uuid}`)
+ logger.debug(`Cleaned up processes uuid=${this.uuid}`);
}
async cleanup_filesystem() {
@@ -382,7 +385,6 @@ class Job {
}
}
-
module.exports = {
Job,
};
diff --git a/api/src/package.js b/api/src/package.js
index 1300c8c..4e4630f 100644
--- a/api/src/package.js
+++ b/api/src/package.js
@@ -74,9 +74,8 @@ class Package {
await new Promise((resolve, reject) => {
read_stream.on('data', chunk => hash.update(chunk));
read_stream.on('end', () => resolve());
- read_stream.on('error', error => reject(error))
+ read_stream.on('error', error => reject(error));
});
-
const cs = hash.digest('hex');
diff --git a/api/src/runtime.js b/api/src/runtime.js
index 60d3c23..6c6f10e 100644
--- a/api/src/runtime.js
+++ b/api/src/runtime.js
@@ -9,8 +9,17 @@ const runtimes = [];
class Runtime {
constructor({
- language, version, aliases, pkgdir, runtime, timeouts, memory_limits, max_process_count,
- max_open_files, max_file_size, output_max_size
+ language,
+ version,
+ aliases,
+ pkgdir,
+ runtime,
+ timeouts,
+ memory_limits,
+ max_process_count,
+ max_open_files,
+ max_file_size,
+ output_max_size,
}) {
this.language = language;
this.version = version;
@@ -25,37 +34,67 @@ class Runtime {
this.output_max_size = output_max_size;
}
- static compute_single_limit(language_name, limit_name, language_limit_overrides) {
+ static compute_single_limit(
+ language_name,
+ limit_name,
+ language_limit_overrides
+ ) {
return (
- config.limit_overrides[language_name] && config.limit_overrides[language_name][limit_name]
- || language_limit_overrides && language_limit_overrides[limit_name]
- || config[limit_name]
+ (config.limit_overrides[language_name] &&
+ config.limit_overrides[language_name][limit_name]) ||
+ (language_limit_overrides &&
+ language_limit_overrides[limit_name]) ||
+ config[limit_name]
);
}
static compute_all_limits(language_name, language_limit_overrides) {
return {
timeouts: {
- compile:
- this.compute_single_limit(language_name, 'compile_timeout', language_limit_overrides),
- run:
- this.compute_single_limit(language_name, 'run_timeout', language_limit_overrides)
+ compile: this.compute_single_limit(
+ language_name,
+ 'compile_timeout',
+ language_limit_overrides
+ ),
+ run: this.compute_single_limit(
+ language_name,
+ 'run_timeout',
+ language_limit_overrides
+ ),
},
memory_limits: {
- compile:
- this.compute_single_limit(language_name, 'compile_memory_limit', language_limit_overrides),
- run:
- this.compute_single_limit(language_name, 'run_memory_limit', language_limit_overrides)
+ compile: this.compute_single_limit(
+ language_name,
+ 'compile_memory_limit',
+ language_limit_overrides
+ ),
+ run: this.compute_single_limit(
+ language_name,
+ 'run_memory_limit',
+ language_limit_overrides
+ ),
},
- max_process_count:
- this.compute_single_limit(language_name, 'max_process_count', language_limit_overrides),
- max_open_files:
- this.compute_single_limit(language_name, 'max_open_files', language_limit_overrides),
- max_file_size:
- this.compute_single_limit(language_name, 'max_file_size', language_limit_overrides),
- output_max_size:
- this.compute_single_limit(language_name, 'output_max_size', language_limit_overrides),
- }
+ max_process_count: this.compute_single_limit(
+ language_name,
+ 'max_process_count',
+ language_limit_overrides
+ ),
+ max_open_files: this.compute_single_limit(
+ language_name,
+ 'max_open_files',
+ language_limit_overrides
+ ),
+ max_file_size: this.compute_single_limit(
+ language_name,
+ 'max_file_size',
+ language_limit_overrides
+ ),
+ output_max_size: this.compute_single_limit(
+ language_name,
+ 'output_max_size',
+ language_limit_overrides
+ ),
+ };
}
static load_package(package_dir) {
@@ -63,7 +102,14 @@ class Runtime {
fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
);
- let { language, version, build_platform, aliases, provides, limit_overrides } = info;
+ let {
+ language,
+ version,
+ build_platform,
+ aliases,
+ provides,
+ limit_overrides,
+ } = info;
version = semver.parse(version);
if (build_platform !== globals.platform) {
@@ -83,7 +129,10 @@ class Runtime {
version,
pkgdir: package_dir,
runtime: language,
- ...Runtime.compute_all_limits(lang.language, lang.limit_overrides)
+ ...Runtime.compute_all_limits(
+ lang.language,
+ lang.limit_overrides
+ ),
})
);
});
@@ -94,7 +143,7 @@ class Runtime {
version,
aliases,
pkgdir: package_dir,
- ...Runtime.compute_all_limits(language, limit_overrides)
+ ...Runtime.compute_all_limits(language, limit_overrides),
})
);
}
diff --git a/cli/.gitignore b/cli/.gitignore
deleted file mode 100644
index b512c09..0000000
--- a/cli/.gitignore
+++ /dev/null
@@ -1 +0,0 @@
-node_modules
\ No newline at end of file
diff --git a/cli/commands/execute.js b/cli/commands/execute.js
index abb1f63..c69d3a0 100644
--- a/cli/commands/execute.js
+++ b/cli/commands/execute.js
@@ -3,8 +3,44 @@ const path = require('path');
const chalk = require('chalk');
const WebSocket = require('ws');
-const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
-
+const SIGNALS = [
+ 'SIGABRT',
+ 'SIGALRM',
+ 'SIGBUS',
+ 'SIGCHLD',
+ 'SIGCLD',
+ 'SIGCONT',
+ 'SIGEMT',
+ 'SIGFPE',
+ 'SIGHUP',
+ 'SIGILL',
+ 'SIGINFO',
+ 'SIGINT',
+ 'SIGIO',
+ 'SIGIOT',
+ 'SIGLOST',
+ 'SIGPIPE',
+ 'SIGPOLL',
+ 'SIGPROF',
+ 'SIGPWR',
+ 'SIGQUIT',
+ 'SIGSEGV',
+ 'SIGSTKFLT',
+ 'SIGTSTP',
+ 'SIGSYS',
+ 'SIGTERM',
+ 'SIGTRAP',
+ 'SIGTTIN',
+ 'SIGTTOU',
+ 'SIGUNUSED',
+ 'SIGURG',
+ 'SIGUSR1',
+ 'SIGUSR2',
+ 'SIGVTALRM',
+ 'SIGXCPU',
+ 'SIGXFSZ',
+ 'SIGWINCH',
+];
exports.command = ['execute [args..]'];
exports.aliases = ['run'];
@@ -15,18 +51,18 @@ exports.builder = {
string: true,
desc: 'Set the version of the language to use',
alias: ['l'],
- default: '*'
+ default: '*',
},
stdin: {
boolean: true,
desc: 'Read input from stdin and pass to executor',
- alias: ['i']
+ alias: ['i'],
},
run_timeout: {
alias: ['rt', 'r'],
number: true,
desc: 'Milliseconds before killing run process',
- default: 3000
+ default: 3000,
},
compile_timeout: {
alias: ['ct', 'c'],
@@ -42,117 +78,126 @@ exports.builder = {
interactive: {
boolean: true,
alias: ['t'],
- desc: 'Run interactively using WebSocket transport'
+ desc: 'Run interactively using WebSocket transport',
},
status: {
boolean: true,
alias: ['s'],
- desc: 'Output additional status to stderr'
- }
+ desc: 'Output additional status to stderr',
+ },
};
-async function handle_interactive(files, argv){
- const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
+async function handle_interactive(files, argv) {
+ const ws = new WebSocket(
+ argv.pistonUrl.replace('http', 'ws') + '/api/v2/connect'
+ );
- const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
+ const log_message =
+ process.stderr.isTTY && argv.status ? console.error : () => {};
- process.on("exit", ()=>{
+ process.on('exit', () => {
ws.close();
process.stdin.end();
process.stdin.destroy();
- process.exit();
- })
+ process.exit();
+ });
- for(const signal of SIGNALS){
- process.on(signal, ()=>{
- ws.send(JSON.stringify({type: 'signal', signal}))
- })
+ for (const signal of SIGNALS) {
+ process.on(signal, () => {
+ ws.send(JSON.stringify({ type: 'signal', signal }));
+ });
}
-
-
- ws.on('open', ()=>{
+ ws.on('open', () => {
const request = {
- type: "init",
+ type: 'init',
language: argv.language,
version: argv['language_version'],
files: files,
args: argv.args,
compile_timeout: argv.ct,
- run_timeout: argv.rt
- }
+ run_timeout: argv.rt,
+ };
- ws.send(JSON.stringify(request))
- log_message(chalk.white.bold("Connected"))
+ ws.send(JSON.stringify(request));
+ log_message(chalk.white.bold('Connected'));
process.stdin.resume();
- process.stdin.on("data", (data) => {
- ws.send(JSON.stringify({
- type: "data",
- stream: "stdin",
- data: data.toString()
- }))
- })
- })
+ process.stdin.on('data', data => {
+ ws.send(
+ JSON.stringify({
+ type: 'data',
+ stream: 'stdin',
+ data: data.toString(),
+ })
+ );
+ });
+ });
- ws.on("close", (code, reason)=>{
+ ws.on('close', (code, reason) => {
log_message(
- chalk.white.bold("Disconnected: "),
- chalk.white.bold("Reason: "),
+ chalk.white.bold('Disconnected: '),
+ chalk.white.bold('Reason: '),
chalk.yellow(`"${reason}"`),
- chalk.white.bold("Code: "),
- chalk.yellow(`"${code}"`),
- )
- process.stdin.pause()
- })
+ chalk.white.bold('Code: '),
+ chalk.yellow(`"${code}"`)
+ );
+ process.stdin.pause();
+ });
- ws.on('message', function(data){
+ ws.on('message', function (data) {
const msg = JSON.parse(data);
-
- switch(msg.type){
- case "runtime":
- log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
+
+ switch (msg.type) {
+ case 'runtime':
+ log_message(
+ chalk.bold.white('Runtime:'),
+ chalk.yellow(`${msg.language} ${msg.version}`)
+ );
break;
- case "stage":
- log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
+ case 'stage':
+ log_message(
+ chalk.bold.white('Stage:'),
+ chalk.yellow(msg.stage)
+ );
break;
- case "data":
- if(msg.stream == "stdout") process.stdout.write(msg.data)
- else if(msg.stream == "stderr") process.stderr.write(msg.data)
- else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
+ case 'data':
+ if (msg.stream == 'stdout') process.stdout.write(msg.data);
+ else if (msg.stream == 'stderr') process.stderr.write(msg.data);
+ else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data);
break;
- case "exit":
- if(msg.signal === null)
+ case 'exit':
+ if (msg.signal === null)
log_message(
- chalk.white.bold("Stage"),
+ chalk.white.bold('Stage'),
chalk.yellow(msg.stage),
- chalk.white.bold("exited with code"),
+ chalk.white.bold('exited with code'),
chalk.yellow(msg.code)
- )
+ );
else
log_message(
- chalk.white.bold("Stage"),
+ chalk.white.bold('Stage'),
chalk.yellow(msg.stage),
- chalk.white.bold("exited with signal"),
+ chalk.white.bold('exited with signal'),
chalk.yellow(msg.signal)
- )
- break;
+ );
+ break;
default:
- log_message(chalk.red.bold("Unknown message:"), msg)
+ log_message(chalk.red.bold('Unknown message:'), msg);
}
- })
-
+ });
}
async function run_non_interactively(files, argv) {
-
-
- const stdin = (argv.stdin && await new Promise((resolve, _) => {
- let data = '';
- process.stdin.on('data', d => data += d);
- process.stdin.on('end', _ => resolve(data));
- })) || '';
+ const stdin =
+ (argv.stdin &&
+ (await new Promise((resolve, _) => {
+ let data = '';
+ process.stdin.on('data', d => (data += d));
+ process.stdin.on('end', _ => resolve(data));
+ }))) ||
+ '';
const request = {
language: argv.language,
@@ -161,7 +206,7 @@ async function run_non_interactively(files, argv) {
args: argv.args,
stdin,
compile_timeout: argv.ct,
- run_timeout: argv.rt
+ run_timeout: argv.rt,
};
let { data: response } = await argv.axios.post('/api/v2/execute', request);
@@ -170,13 +215,13 @@ async function run_non_interactively(files, argv) {
console.log(chalk.bold(`== ${name} ==`));
if (ctx.stdout) {
- console.log(chalk.bold(`STDOUT`))
- console.log(ctx.stdout.replace(/\n/g,'\n '))
+ console.log(chalk.bold(`STDOUT`));
+ console.log(ctx.stdout.replace(/\n/g, '\n '));
}
if (ctx.stderr) {
- console.log(chalk.bold(`STDERR`))
- console.log(ctx.stderr.replace(/\n/g,'\n '))
+ console.log(chalk.bold(`STDERR`));
+ console.log(ctx.stderr.replace(/\n/g, '\n '));
}
if (ctx.code) {
@@ -187,12 +232,9 @@ async function run_non_interactively(files, argv) {
}
if (ctx.signal) {
- console.log(
- chalk.bold(`Signal:`),
- chalk.bold.yellow(ctx.signal)
- );
+ console.log(chalk.bold(`Signal:`), chalk.bold.yellow(ctx.signal));
}
- }
+ };
if (response.compile) {
step('Compile', response.compile);
@@ -201,17 +243,14 @@ async function run_non_interactively(files, argv) {
step('Run', response.run);
}
-exports.handler = async (argv) => {
- const files = [...(argv.files || []),argv.file]
- .map(file_path => {
- return {
- name: path.basename(file_path),
- content: fs.readFileSync(file_path).toString()
- };
- });
+exports.handler = async argv => {
+ const files = [...(argv.files || []), argv.file].map(file_path => {
+ return {
+ name: path.basename(file_path),
+ content: fs.readFileSync(file_path).toString(),
+ };
+ });
- if(argv.interactive) await handle_interactive(files, argv);
+ if (argv.interactive) await handle_interactive(files, argv);
else await run_non_interactively(files, argv);
-}
-
-
+};
diff --git a/cli/commands/ppman.js b/cli/commands/ppman.js
index 8d1cb34..ad2c879 100644
--- a/cli/commands/ppman.js
+++ b/cli/commands/ppman.js
@@ -2,6 +2,4 @@ exports.command = 'ppman';
exports.aliases = ['pkg'];
exports.describe = 'Package Manager';
-exports.builder = yargs => yargs
- .commandDir('ppman_commands')
- .demandCommand();
+exports.builder = yargs => yargs.commandDir('ppman_commands').demandCommand();
diff --git a/cli/commands/ppman_commands/install.js b/cli/commands/ppman_commands/install.js
index 8b2baf9..a47665d 100644
--- a/cli/commands/ppman_commands/install.js
+++ b/cli/commands/ppman_commands/install.js
@@ -4,30 +4,31 @@ exports.command = ['install '];
exports.aliases = ['i'];
exports.describe = 'Installs the named package';
-
-
//Splits the package into it's language and version
function split_package(package) {
- [language, language_version] = package.split("=")
+ [language, language_version] = package.split('=');
res = {
language: language,
- version: language_version || "*"
+ version: language_version || '*',
};
- return res
+ return res;
}
const msg_format = {
- color: p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
- monochrome: p => `Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
- json: JSON.stringify
+ color: p =>
+ `${
+ p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')
+ } Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+ monochrome: p =>
+ `Installation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+ json: JSON.stringify,
};
exports.handler = async ({ axios, packages }) => {
const requests = packages.map(package => split_package(package));
for (request of requests) {
try {
-
const install = await axios.post(`/api/v2/packages`, request);
console.log(msg_format.color(install.data));
@@ -35,5 +36,4 @@ exports.handler = async ({ axios, packages }) => {
console.error(response.data.message);
}
}
-
-}
+};
diff --git a/cli/commands/ppman_commands/list.js b/cli/commands/ppman_commands/list.js
index a45030c..0b09667 100644
--- a/cli/commands/ppman_commands/list.js
+++ b/cli/commands/ppman_commands/list.js
@@ -5,17 +5,21 @@ exports.aliases = ['l'];
exports.describe = 'Lists all available packages';
const msg_format = {
- color: p => `${chalk[p.installed ? 'green':'red']('•')} ${p.language} ${p.language_version}`,
- monochrome: p => `${p.language} ${p.language_version} ${p.installed ? '(INSTALLED)': ''}`,
- json: JSON.stringify
+ color: p =>
+ `${chalk[p.installed ? 'green' : 'red']('•')} ${p.language} ${
+ p.language_version
+ }`,
+ monochrome: p =>
+ `${p.language} ${p.language_version} ${
+ p.installed ? '(INSTALLED)' : ''
+ }`,
+ json: JSON.stringify,
};
exports.handler = async ({ axios }) => {
const packages = await axios.get('/api/v2/packages');
- const pkg_msg = packages.data
- .map(msg_format.color)
- .join('\n');
+ const pkg_msg = packages.data.map(msg_format.color).join('\n');
console.log(pkg_msg);
-}
+};
diff --git a/cli/commands/ppman_commands/spec.js b/cli/commands/ppman_commands/spec.js
index d558810..9c04857 100644
--- a/cli/commands/ppman_commands/spec.js
+++ b/cli/commands/ppman_commands/spec.js
@@ -1,49 +1,53 @@
const chalk = require('chalk');
const fs = require('fs/promises');
-const minimatch = require("minimatch");
+const minimatch = require('minimatch');
const semver = require('semver');
exports.command = ['spec '];
exports.aliases = ['s'];
-exports.describe = 'Install the packages described in the spec file, uninstalling packages which aren\'t in the list'
+exports.describe =
+ "Install the packages described in the spec file, uninstalling packages which aren't in the list";
-function does_match(package, rule){
+function does_match(package, rule) {
const nameMatch = minimatch(package.language, rule.package_selector);
- const versionMatch = semver.satisfies(package.language_version, rule.version_selector)
+ const versionMatch = semver.satisfies(
+ package.language_version,
+ rule.version_selector
+ );
return nameMatch && versionMatch;
}
-exports.handler = async ({axios, specfile}) => {
+exports.handler = async ({ axios, specfile }) => {
const spec_contents = await fs.readFile(specfile);
- const spec_lines = spec_contents.toString().split("\n");
+ const spec_lines = spec_contents.toString().split('\n');
const rules = [];
- for(const line of spec_lines){
+ for (const line of spec_lines) {
const rule = {
_raw: line.trim(),
comment: false,
package_selector: null,
version_selector: null,
- negate: false
+ negate: false,
};
- if(line.starts_with("#")){
+ if (line.starts_with('#')) {
rule.comment = true;
- }else {
+ } else {
let l = line.trim();
- if(line.starts_with("!")){
+ if (line.starts_with('!')) {
rule.negate = true;
l = line.slice(1).trim();
}
- const [pkg, ver] = l.split(" ", 2);
+ const [pkg, ver] = l.split(' ', 2);
rule.package_selector = pkg;
rule.version_selector = ver;
}
- if(rule._raw.length != 0) rules.push(rule);
+ if (rule._raw.length != 0) rules.push(rule);
}
const packages_req = await axios.get('/api/v2/packages');
@@ -53,108 +57,127 @@ exports.handler = async ({axios, specfile}) => {
let ensure_packages = [];
- for(const rule of rules){
- if(rule.comment) continue;
+ for (const rule of rules) {
+ if (rule.comment) continue;
const matches = [];
- if(!rule.negate){
- for(const package of packages){
- if(does_match(package, rule))
- matches.push(package)
+ if (!rule.negate) {
+ for (const package of packages) {
+ if (does_match(package, rule)) matches.push(package);
}
- const latest_matches = matches.filter(
- pkg => {
- const versions = matches
- .filter(x=>x.language == pkg.language)
- .map(x=>x.language_version).sort(semver.rcompare);
- return versions[0] == pkg.language_version
- }
- );
+ const latest_matches = matches.filter(pkg => {
+ const versions = matches
+ .filter(x => x.language == pkg.language)
+ .map(x => x.language_version)
+ .sort(semver.rcompare);
+ return versions[0] == pkg.language_version;
+ });
- for(const match of latest_matches){
- if(!ensure_packages.find(pkg => pkg.language == match.language && pkg.language_version == match.language_version))
- ensure_packages.push(match)
+ for (const match of latest_matches) {
+ if (
+ !ensure_packages.find(
+ pkg =>
+ pkg.language == match.language &&
+ pkg.language_version == match.language_version
+ )
+ )
+ ensure_packages.push(match);
}
- }else{
+ } else {
ensure_packages = ensure_packages.filter(
pkg => !does_match(pkg, rule)
- )
+ );
}
-
-
}
const operations = [];
- for(const package of ensure_packages){
- if(!package.installed)
+ for (const package of ensure_packages) {
+ if (!package.installed)
operations.push({
- type: "install",
+ type: 'install',
package: package.language,
- version: package.language_version
+ version: package.language_version,
});
}
- for(const installed_package of installed){
- if(!ensure_packages.find(
- pkg => pkg.language == installed_package.language &&
- pkg.language_version == installed_package.language_version
- ))
+ for (const installed_package of installed) {
+ if (
+ !ensure_packages.find(
+ pkg =>
+ pkg.language == installed_package.language &&
+ pkg.language_version == installed_package.language_version
+ )
+ )
operations.push({
- type: "uninstall",
+ type: 'uninstall',
package: installed_package.language,
- version: installed_package.language_version
- })
+ version: installed_package.language_version,
+ });
}
- console.log(chalk.bold.yellow("Actions"))
- for(const op of operations){
- console.log((op.type == "install" ? chalk.green("Install") : chalk.red("Uninstall")) + ` ${op.package} ${op.version}`)
+ console.log(chalk.bold.yellow('Actions'));
+ for (const op of operations) {
+ console.log(
+ (op.type == 'install'
+ ? chalk.green('Install')
+ : chalk.red('Uninstall')) + ` ${op.package} ${op.version}`
+ );
}
- if(operations.length == 0){
- console.log(chalk.gray("None"))
+ if (operations.length == 0) {
+ console.log(chalk.gray('None'));
}
- for(const op of operations){
- if(op.type == "install"){
- try{
+ for (const op of operations) {
+ if (op.type == 'install') {
+ try {
const install = await axios.post(`/api/v2/packages`, {
language: op.package,
- version: op.version
+ version: op.version,
});
- if(!install.data.language)
+ if (!install.data.language)
throw new Error(install.data.message); // Go to exception handler
- console.log(chalk.bold.green("Installed"), op.package, op.version)
-
- }catch(e){
- console.log(chalk.bold.red("Failed to install") + ` ${op.package} ${op.version}:`, e.message)
+ console.log(
+ chalk.bold.green('Installed'),
+ op.package,
+ op.version
+ );
+ } catch (e) {
+ console.log(
+ chalk.bold.red('Failed to install') +
+ ` ${op.package} ${op.version}:`,
+ e.message
+ );
}
- }
- else if(op.type == "uninstall"){
- try{
+ } else if (op.type == 'uninstall') {
+ try {
const install = await axios.delete(`/api/v2/packages`, {
data: {
language: op.package,
- version: op.version
- }
+ version: op.version,
+ },
});
- if(!install.data.language)
+ if (!install.data.language)
throw new Error(install.data.message); // Go to exception handler
- console.log(chalk.bold.green("Uninstalled"), op.package, op.version)
-
- }catch(e){
- console.log(chalk.bold.red("Failed to uninstall") + ` ${op.package} ${op.version}:`, e.message)
+ console.log(
+ chalk.bold.green('Uninstalled'),
+ op.package,
+ op.version
+ );
+ } catch (e) {
+ console.log(
+ chalk.bold.red('Failed to uninstall') +
+ ` ${op.package} ${op.version}:`,
+ e.message
+ );
}
}
}
-
-
-
-}
\ No newline at end of file
+};
diff --git a/cli/commands/ppman_commands/uninstall.js b/cli/commands/ppman_commands/uninstall.js
index f174fdb..21d2198 100644
--- a/cli/commands/ppman_commands/uninstall.js
+++ b/cli/commands/ppman_commands/uninstall.js
@@ -6,31 +6,36 @@ exports.describe = 'Uninstalls the named package';
//Splits the package into it's language and version
function split_package(package) {
- [language, language_version] = package.split("=")
+ [language, language_version] = package.split('=');
res = {
language: language,
- version: language_version || "*"
+ version: language_version || '*',
};
- return res
+ return res;
}
const msg_format = {
- color: p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
- monochrome: p => `Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
- json: JSON.stringify
+ color: p =>
+ `${
+ p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')
+ } Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+ monochrome: p =>
+ `Uninstallation ${p.language ? 'succeeded' : 'failed: ' + p.message}`,
+ json: JSON.stringify,
};
exports.handler = async ({ axios, packages }) => {
const requests = packages.map(package => split_package(package));
for (request of requests) {
try {
-
- const uninstall = await axios.delete(`/api/v2/packages`, { data: request });
+ const uninstall = await axios.delete(`/api/v2/packages`, {
+ data: request,
+ });
console.log(msg_format.color(uninstall.data));
} catch ({ response }) {
- console.error(response.data.message)
+ console.error(response.data.message);
}
}
-}
\ No newline at end of file
+};
diff --git a/cli/index.js b/cli/index.js
index c0c25ee..340cdab 100755
--- a/cli/index.js
+++ b/cli/index.js
@@ -6,8 +6,8 @@ const axios_instance = argv => {
argv.axios = axios.create({
baseURL: argv['piston-url'],
headers: {
- 'Content-Type': 'application/json'
- }
+ 'Content-Type': 'application/json',
+ },
});
return argv;
@@ -18,12 +18,11 @@ require('yargs')(process.argv.slice(2))
alias: ['u'],
default: 'http://127.0.0.1:2000',
desc: 'Piston API URL',
- string: true
+ string: true,
})
.middleware(axios_instance)
.scriptName('piston')
.commandDir('commands')
.demandCommand()
.help()
- .wrap(72)
- .argv;
+ .wrap(72).argv;
diff --git a/docker-compose.dev.yaml b/docker-compose.dev.yaml
index 5c3b61e..25d8fe6 100644
--- a/docker-compose.dev.yaml
+++ b/docker-compose.dev.yaml
@@ -1,24 +1,24 @@
-version: "3.2"
+version: '3.2'
services:
- api:
- build: api
- container_name: piston_api
- cap_add:
- - CAP_SYS_ADMIN
- restart: always
- ports:
- - 2000:2000
- volumes:
- - ./data/piston:/piston
- environment:
- - PISTON_REPO_URL=http://repo:8000/index
- tmpfs:
- - /piston/jobs:exec
+ api:
+ build: api
+ container_name: piston_api
+ cap_add:
+ - CAP_SYS_ADMIN
+ restart: always
+ ports:
+ - 2000:2000
+ volumes:
+ - ./data/piston:/piston
+ environment:
+ - PISTON_REPO_URL=http://repo:8000/index
+ tmpfs:
+ - /piston/jobs:exec
- repo: # Local testing of packages
- build: repo
- container_name: piston_repo
- command: ["--no-build"] # Don't build anything
- volumes:
- - .:/piston
+ repo: # Local testing of packages
+ build: repo
+ container_name: piston_repo
+ command: ['--no-build'] # Don't build anything
+ volumes:
+ - .:/piston
diff --git a/docs/api-v2.md b/docs/api-v2.md
index 111b514..d5cb486 100644
--- a/docs/api-v2.md
+++ b/docs/api-v2.md
@@ -17,10 +17,10 @@ Returns a list of available languages, including the version, runtime and aliase
#### Response
-- `[].language`: Name of the language
-- `[].version`: Version of the runtime
-- `[].aliases`: List of alternative names that can be used for the language
-- `[].runtime` (_optional_): Name of the runtime used to run the langage, only provided if alternative runtimes exist for the language
+- `[].language`: Name of the language
+- `[].version`: Version of the runtime
+- `[].aliases`: List of alternative names that can be used for the language
+- `[].runtime` (_optional_): Name of the runtime used to run the language, only provided if alternative runtimes exist for the language
#### Example
@@ -55,34 +55,34 @@ Runs the given code, using the given runtime and arguments, returning the result
#### Request
-- `language`: Name or alias of a language listed in [runtimes](#runtimes)
-- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
-- `files`: An array of files which should be uploaded into the job context
-- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
-- `files[].content`: Content of file to be written
-- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
-- `args` (_optional_): Arguments to pass to the program. Defaults to none
-- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
-- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
-- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
-- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
+- `language`: Name or alias of a language listed in [runtimes](#runtimes)
+- `version`: SemVer version selector of a language listed in [runtimes](#runtimes)
+- `files`: An array of files which should be uploaded into the job context
+- `files[].name` (_optional_): Name of file to be written, if none a random name is picked
+- `files[].content`: Content of file to be written
+- `stdin` (_optional_): Text to pass into stdin of the program. Defaults to blank string.
+- `args` (_optional_): Arguments to pass to the program. Defaults to none
+- `run_timeout` (_optional_): The maximum allowed time in milliseconds for the run stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout.
+- `compile_timeout` (_optional_): The maximum allowed time in milliseconds for the compile stage to finish before bailing out. Must be a number, less than or equal to the configured maximum timeout. Defaults to maximum.
+- `compile_memory_limit` (_optional_): The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
+- `run_memory_limit` (_optional_): The maximum amount of memory the run stage is allowed to use in bytes. Must be a number, less than or equal to the configured maximum. Defaults to maximum, or `-1` (no limit) if none is configured.
#### Response
-- `language`: Name (not alias) of the runtime used
-- `version`: Version of the used runtime
-- `run`: Results from the run stage
-- `run.stdout`: stdout from run stage process
-- `run.stderr`: stderr from run stage process
-- `run.output`: stdout and stderr combined in order of data from run stage process
-- `run.code`: Exit code from run process, or null if signal is not null
-- `run.signal`: Signal from run process, or null if code is not null
-- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
-- `compile.stdout`: stdout from compile stage process
-- `compile.stderr`: stderr from compile stage process
-- `compile.output`: stdout and stderr combined in order of data from compile stage process
-- `compile.code`: Exit code from compile process, or null if signal is not null
-- `compile.signal`: Signal from compile process, or null if code is not null
+- `language`: Name (not alias) of the runtime used
+- `version`: Version of the used runtime
+- `run`: Results from the run stage
+- `run.stdout`: stdout from run stage process
+- `run.stderr`: stderr from run stage process
+- `run.output`: stdout and stderr combined in order of data from run stage process
+- `run.code`: Exit code from run process, or null if signal is not null
+- `run.signal`: Signal from run process, or null if code is not null
+- `compile` (_optional_): Results from the compile stage, only provided if the runtime has a compile stage
+- `compile.stdout`: stdout from compile stage process
+- `compile.stderr`: stderr from compile stage process
+- `compile.output`: stdout and stderr combined in order of data from compile stage process
+- `compile.code`: Exit code from compile process, or null if signal is not null
+- `compile.signal`: Signal from compile process, or null if code is not null
#### Example
@@ -133,9 +133,9 @@ Returns a list of all possible packages, and whether their installation status.
#### Response
-- `[].language`: Name of the contained runtime
-- `[].language_version`: Version of the contained runtime
-- `[].installed`: Status on the package being installed
+- `[].language`: Name of the contained runtime
+- `[].language_version`: Version of the contained runtime
+- `[].installed`: Status on the package being installed
#### Example
@@ -167,13 +167,13 @@ Install the given package.
#### Request
-- `language`: Name of package from [package list](#get-apiv2packages)
-- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
+- `language`: Name of package from [package list](#get-apiv2packages)
+- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
-- `language`: Name of package installed
-- `version`: Version of package installed
+- `language`: Name of package installed
+- `version`: Version of package installed
#### Example
@@ -203,13 +203,13 @@ Uninstall the given package.
#### Request
-- `language`: Name of package from [package list](#get-apiv2packages)
-- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
+- `language`: Name of package from [package list](#get-apiv2packages)
+- `version`: SemVer version selector for package from [package list](#get-apiv2packages)
#### Response
-- `language`: Name of package uninstalled
-- `version`: Version of package uninstalled
+- `language`: Name of package uninstalled
+- `version`: Version of package uninstalled
#### Example
diff --git a/docs/configuration.md b/docs/configuration.md
index 4c6601a..1a6f5bd 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -50,15 +50,15 @@ Absolute path to piston related data, including packages and job contexts.
```yaml
key:
- - PISTON_RUNNER_UID_MIN
- - PISTON_RUNNER_UID_MAX
- - PISTON_RUNNER_GID_MIN
- - PISTON_RUNNER_GID_MAX
+ - PISTON_RUNNER_UID_MIN
+ - PISTON_RUNNER_UID_MAX
+ - PISTON_RUNNER_GID_MIN
+ - PISTON_RUNNER_GID_MAX
default:
- - 1001
- - 1500
- - 1001
- - 1500
+ - 1001
+ - 1500
+ - 1001
+ - 1500
```
UID and GID ranges to use when executing jobs.
@@ -124,6 +124,7 @@ Maximum size for a singular file written to disk.
Resists against large file writes to exhaust disk space.
## Compile/Run timeouts
+
```yaml
key:
- PISTON_COMPILE_TIMEOUT
@@ -133,6 +134,7 @@ key:
- PISTON_RUN_TIMEOUT
default: 3000
```
+
The maximum time that is allowed to be taken by a stage in milliseconds.
Use -1 for unlimited time.
@@ -140,8 +142,8 @@ Use -1 for unlimited time.
```yaml
key:
- - PISTON_COMPILE_MEMORY_LIMIT
- - PISTON_RUN_MEMORY_LIMIT
+ - PISTON_COMPILE_MEMORY_LIMIT
+ - PISTON_RUN_MEMORY_LIMIT
default: -1
```
@@ -177,7 +179,9 @@ default: {}
Per-language overrides/exceptions for the each of `max_process_count`, `max_open_files`, `max_file_size`,
`compile_memory_limit`, `run_memory_limit`, `compile_timeout`, `run_timeout`, `output_max_size`. Defined as follows:
+
```
PISTON_LIMIT_OVERRIDES={"c++":{"max_process_count":128}}
```
+
This will give `c++` a max_process_count of 128 regardless of the configuration.
diff --git a/mkdocs.yml b/mkdocs.yml
index 148ba91..a6ef999 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -1,15 +1,15 @@
site_name: Piston
nav:
- - Home: index.md
- - Configuration: configuration.md
- - API: api-v2.md
+ - Home: index.md
+ - Configuration: configuration.md
+ - API: api-v2.md
theme:
- name: readthedocs
- highlightjs: true
- hljs_languages:
- - yaml
- - json
+ name: readthedocs
+ highlightjs: true
+ hljs_languages:
+ - yaml
+ - json
markdown_extensions:
- - admonition
+ - admonition
diff --git a/package-lock.json b/package-lock.json
index ac734d4..5c51a1d 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,32 @@
{
- "name": "piston",
- "lockfileVersion": 2,
- "requires": true,
- "packages": {}
+ "name": "piston",
+ "lockfileVersion": 2,
+ "requires": true,
+ "packages": {
+ "": {
+ "devDependencies": {
+ "prettier": "2.4.1"
+ }
+ },
+ "node_modules/prettier": {
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
+ "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
+ "dev": true,
+ "bin": {
+ "prettier": "bin-prettier.js"
+ },
+ "engines": {
+ "node": ">=10.13.0"
+ }
+ }
+ },
+ "dependencies": {
+ "prettier": {
+ "version": "2.4.1",
+ "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.4.1.tgz",
+ "integrity": "sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA==",
+ "dev": true
+ }
+ }
}
diff --git a/package.json b/package.json
index 0967ef4..8f07606 100644
--- a/package.json
+++ b/package.json
@@ -1 +1,5 @@
-{}
+{
+ "devDependencies": {
+ "prettier": "2.4.1"
+ }
+}
diff --git a/packages/cjam/0.6.5/metadata.json b/packages/cjam/0.6.5/metadata.json
index af510fd..bd25bde 100644
--- a/packages/cjam/0.6.5/metadata.json
+++ b/packages/cjam/0.6.5/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "cjam",
- "version": "0.6.5",
- "aliases": []
+ "language": "cjam",
+ "version": "0.6.5",
+ "aliases": []
}
diff --git a/packages/cobol/3.1.2/metadata.json b/packages/cobol/3.1.2/metadata.json
index cf3e7e1..0a80d3c 100644
--- a/packages/cobol/3.1.2/metadata.json
+++ b/packages/cobol/3.1.2/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "cobol",
- "version": "3.1.2",
- "aliases": ["cob"]
+ "language": "cobol",
+ "version": "3.1.2",
+ "aliases": ["cob"]
}
diff --git a/packages/deno/1.7.5/metadata.json b/packages/deno/1.7.5/metadata.json
index d30608b..217a7c6 100644
--- a/packages/deno/1.7.5/metadata.json
+++ b/packages/deno/1.7.5/metadata.json
@@ -4,7 +4,7 @@
"provides": [
{
"language": "typescript",
- "aliases": ["deno-ts","deno"]
+ "aliases": ["deno-ts", "deno"]
},
{
"language": "javascript",
diff --git a/packages/deno/1.7.5/test.deno.ts b/packages/deno/1.7.5/test.deno.ts
index 56ed4a0..e106678 100644
--- a/packages/deno/1.7.5/test.deno.ts
+++ b/packages/deno/1.7.5/test.deno.ts
@@ -1 +1 @@
-console.log("OK")
\ No newline at end of file
+console.log('OK');
diff --git a/packages/dragon/1.9.8/metadata.json b/packages/dragon/1.9.8/metadata.json
index 86cfc4c..3fbc015 100644
--- a/packages/dragon/1.9.8/metadata.json
+++ b/packages/dragon/1.9.8/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "dragon",
- "version": "1.9.8",
- "aliases": []
+ "language": "dragon",
+ "version": "1.9.8",
+ "aliases": []
}
diff --git a/packages/forte/1.0.0/metadata.json b/packages/forte/1.0.0/metadata.json
index fd4ec12..f7f4137 100644
--- a/packages/forte/1.0.0/metadata.json
+++ b/packages/forte/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "forte",
- "version": "1.0.0",
- "aliases": ["forter"]
+ "language": "forte",
+ "version": "1.0.0",
+ "aliases": ["forter"]
}
diff --git a/packages/gcc/10.2.0/metadata.json b/packages/gcc/10.2.0/metadata.json
index f969bf5..367de7c 100644
--- a/packages/gcc/10.2.0/metadata.json
+++ b/packages/gcc/10.2.0/metadata.json
@@ -3,7 +3,7 @@
"version": "10.2.0",
"provides": [
{
- "language":"c",
+ "language": "c",
"aliases": ["gcc"]
},
{
diff --git a/packages/golfscript/1.0.0/metadata.json b/packages/golfscript/1.0.0/metadata.json
index 4ef3a62..cb4f356 100644
--- a/packages/golfscript/1.0.0/metadata.json
+++ b/packages/golfscript/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "golfscript",
- "version": "1.0.0",
- "aliases": ["golfscript"]
+ "language": "golfscript",
+ "version": "1.0.0",
+ "aliases": ["golfscript"]
}
diff --git a/packages/groovy/3.0.7/metadata.json b/packages/groovy/3.0.7/metadata.json
index b790007..34ab93d 100644
--- a/packages/groovy/3.0.7/metadata.json
+++ b/packages/groovy/3.0.7/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "groovy",
- "version": "3.0.7",
- "aliases": ["groovy", "gvy"]
+ "language": "groovy",
+ "version": "3.0.7",
+ "aliases": ["groovy", "gvy"]
}
diff --git a/packages/japt/2.0.0/metadata.json b/packages/japt/2.0.0/metadata.json
index 7a3e5aa..ef0ff8d 100644
--- a/packages/japt/2.0.0/metadata.json
+++ b/packages/japt/2.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "japt",
- "version": "2.0.0",
- "aliases": ["japt"]
-}
\ No newline at end of file
+ "language": "japt",
+ "version": "2.0.0",
+ "aliases": ["japt"]
+}
diff --git a/packages/llvm_ir/12.0.1/metadata.json b/packages/llvm_ir/12.0.1/metadata.json
index 4c92048..50dfbbc 100644
--- a/packages/llvm_ir/12.0.1/metadata.json
+++ b/packages/llvm_ir/12.0.1/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "llvm_ir",
- "version": "12.0.1",
- "aliases": ["llvm", "llvm-ir", "ll"]
+ "language": "llvm_ir",
+ "version": "12.0.1",
+ "aliases": ["llvm", "llvm-ir", "ll"]
}
diff --git a/packages/mono/6.12.0/metadata.json b/packages/mono/6.12.0/metadata.json
index 4d09ae7..3f483a4 100644
--- a/packages/mono/6.12.0/metadata.json
+++ b/packages/mono/6.12.0/metadata.json
@@ -8,7 +8,13 @@
},
{
"language": "basic",
- "aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"]
+ "aliases": [
+ "vb",
+ "mono-vb",
+ "mono-basic",
+ "visual-basic",
+ "visual basic"
+ ]
}
]
}
diff --git a/packages/node/15.10.0/test.js b/packages/node/15.10.0/test.js
index 56ed4a0..e106678 100644
--- a/packages/node/15.10.0/test.js
+++ b/packages/node/15.10.0/test.js
@@ -1 +1 @@
-console.log("OK")
\ No newline at end of file
+console.log('OK');
diff --git a/packages/node/16.3.0/test.js b/packages/node/16.3.0/test.js
index 56ed4a0..e106678 100644
--- a/packages/node/16.3.0/test.js
+++ b/packages/node/16.3.0/test.js
@@ -1 +1 @@
-console.log("OK")
\ No newline at end of file
+console.log('OK');
diff --git a/packages/ocaml/4.12.0/metadata.json b/packages/ocaml/4.12.0/metadata.json
index ddbfb89..6c2f733 100644
--- a/packages/ocaml/4.12.0/metadata.json
+++ b/packages/ocaml/4.12.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "ocaml",
- "version": "4.12.0",
- "aliases": ["ocaml", "ml"]
+ "language": "ocaml",
+ "version": "4.12.0",
+ "aliases": ["ocaml", "ml"]
}
diff --git a/packages/octave/6.2.0/metadata.json b/packages/octave/6.2.0/metadata.json
index ab9dbb1..0b209ce 100644
--- a/packages/octave/6.2.0/metadata.json
+++ b/packages/octave/6.2.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "octave",
- "version": "6.2.0",
- "aliases": ["matlab", "m"]
+ "language": "octave",
+ "version": "6.2.0",
+ "aliases": ["matlab", "m"]
}
diff --git a/packages/pyth/1.0.0/metadata.json b/packages/pyth/1.0.0/metadata.json
index bcddb7a..e9bbfe9 100644
--- a/packages/pyth/1.0.0/metadata.json
+++ b/packages/pyth/1.0.0/metadata.json
@@ -1,5 +1,5 @@
{
- "language": "pyth",
- "version": "1.0.0",
- "aliases": ["pyth"]
+ "language": "pyth",
+ "version": "1.0.0",
+ "aliases": ["pyth"]
}
diff --git a/packages/raku/6.100.0/metadata.json b/packages/raku/6.100.0/metadata.json
index 7cda1ed..e1fbad8 100644
--- a/packages/raku/6.100.0/metadata.json
+++ b/packages/raku/6.100.0/metadata.json
@@ -2,4 +2,4 @@
"language": "raku",
"version": "6.100.0",
"aliases": ["raku", "rakudo", "perl6", "p6", "pl6"]
-}
\ No newline at end of file
+}
diff --git a/packages/typescript/4.2.3/run b/packages/typescript/4.2.3/run
index 1d26f3f..5a8c60e 100644
--- a/packages/typescript/4.2.3/run
+++ b/packages/typescript/4.2.3/run
@@ -2,7 +2,7 @@
# Put instructions to run the runtime
-CODE=$(sed 's/ts$/js/' <<<"$1")
+CODE=$1.js
shift
node $CODE "$@"
diff --git a/packages/typescript/4.2.3/test.ts b/packages/typescript/4.2.3/test.ts
index 56ed4a0..e106678 100644
--- a/packages/typescript/4.2.3/test.ts
+++ b/packages/typescript/4.2.3/test.ts
@@ -1 +1 @@
-console.log("OK")
\ No newline at end of file
+console.log('OK');
diff --git a/piston b/piston
index 1653eae..90d47ed 100755
--- a/piston
+++ b/piston
@@ -38,6 +38,7 @@ case $1 in
echo " clean-repo Remove all packages from local repo"
echo " build-pkg Build a package"
echo " rebuild Build and restart the docker container"
+ echo " lint Lint the codebase using prettier"
else
@@ -53,7 +54,11 @@ case $1 in
logs) docker_compose logs -f ;;
restart) docker_compose restart ;;
- start) docker_compose up -d ;;
+ start)
+ rm -f .git/hooks/pre-commit
+ ln -s $(realpath $(dirname "$0"))/pre-commit .git/hooks/pre-commit
+ docker_compose up -d
+ ;;
stop) docker_compose down ;;
bash) docker_compose exec api /bin/bash ;;
@@ -75,6 +80,11 @@ case $1 in
docker build repo -t piston-repo-builder
docker run --rm -v "$(realpath $(dirname "$0")):/piston" piston-repo-builder --no-server $PKGSLUG
;;
+
+ lint)
+ npm install
+ npx prettier --ignore-unknown --write .
+ ;;
*)
cd cli
npm i > /dev/null
diff --git a/pre-commit b/pre-commit
new file mode 100755
index 0000000..f592c88
--- /dev/null
+++ b/pre-commit
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+echo "Linting staged files..."
+npm install
+
+FILES=$(git diff --cached --name-only --diff-filter=ACMR | sed 's| |\\ |g')
+[ -z "$FILES" ] && exit 0
+
+# Prettify all selected files
+echo "$FILES" | xargs npx prettier --ignore-unknown --write
+
+# Add back the modified/prettified files to staging
+echo "$FILES" | xargs git add
+
+exit 0
diff --git a/readme.md b/readme.md
index 998d07e..a2f3afa 100644
--- a/readme.md
+++ b/readme.md
@@ -47,9 +47,10 @@ While we are accepting pull requests for Hacktoberfest, we will reject any low-q
If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests.
We are accepting PRs for:
-* Packages - updating package versions, adding new packages
-* Documentation updates
-* CLI/API improvements - please discuss these with us in the Discord first
+
+- Packages - updating package versions, adding new packages
+- Documentation updates
+- CLI/API improvements - please discuss these with us in the Discord first
Any queries or concerns, ping @HexF#0015 in the Discord.
@@ -66,11 +67,11 @@ Any queries or concerns, ping @HexF#0015 in the Discord.
It's used in numerous places including:
-- [EMKC Challenges](https://emkc.org/challenges)
-- [EMKC Weekly Contests](https://emkc.org/contests)
-- [Engineer Man Discord Server](https://discord.gg/engineerman)
-- Web IDEs
-- 200+ direct integrations
+- [EMKC Challenges](https://emkc.org/challenges)
+- [EMKC Weekly Contests](https://emkc.org/contests)
+- [Engineer Man Discord Server](https://discord.gg/engineerman)
+- Web IDEs
+- 200+ direct integrations
@@ -78,19 +79,19 @@ It's used in numerous places including:
The following are approved and endorsed extensions/utilities to the core Piston offering.
-- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
-- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
-- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
-- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
-- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
-- [Go-Piston](https://github.com/milindmadhukar/go-piston), a Golang wrapper for accessing the Piston API.
+- [I Run Code](https://github.com/engineer-man/piston-bot), a Discord bot used in 4100+ servers to handle arbitrary code evaluation in Discord. To get this bot in your own server, go here: https://emkc.org/run.
+- [Piston CLI](https://github.com/Shivansh-007/piston-cli), a universal shell supporting code highlighting, files, and interpretation without the need to download a language.
+- [Node Piston Client](https://github.com/dthree/node-piston), a Node.js wrapper for accessing the Piston API.
+- [Piston4J](https://github.com/the-codeboy/Piston4J), a Java wrapper for accessing the Piston API.
+- [Pyston](https://github.com/ffaanngg/pyston), a Python wrapper for accessing the Piston API.
+- [Go-Piston](https://github.com/milindmadhukar/go-piston), a Golang wrapper for accessing the Piston API.
# Public API
-- Requires no installation and you can use it immediately.
-- Reference the Runtimes/Execute sections below to learn about the request and response formats.
+- Requires no installation and you can use it immediately.
+- Reference the Runtimes/Execute sections below to learn about the request and response formats.
@@ -115,9 +116,9 @@ POST https://emkc.org/api/v2/piston/execute
### Host System Package Dependencies
-- Docker
-- Docker Compose
-- Node JS (>= 13, preferably >= 15)
+- Docker
+- Docker Compose
+- Node JS (>= 13, preferably >= 15)
### After system dependencies are installed, clone this repository:
@@ -142,7 +143,7 @@ The API will now be online with no language runtimes installed. To install runti
### Host System Package Dependencies
-- Docker
+- Docker
### Installation
@@ -160,7 +161,7 @@ docker run \
### Host System Package Dependencies
-- Same as [All In One](#All-In-One)
+- Same as [All In One](#All-In-One)
### Installation
@@ -250,34 +251,34 @@ Content-Type: application/json
`POST /api/v2/execute`
This endpoint requests execution of some arbitrary code.
-- `language` (**required**) The language to use for execution, must be a string and must be installed.
-- `version` (**required**) The version of the language to use for execution, must be a string containing a SemVer selector for the version or the specific version number to use.
-- `files` (**required**) An array of files containing code or other data that should be used for execution. The first file in this array is considered the main file.
-- `files[].name` (_optional_) The name of the file to upload, must be a string containing no path or left out.
-- `files[].content` (**required**) The content of the files to upload, must be a string containing text to write.
-- `stdin` (_optional_) The text to pass as stdin to the program. Must be a string or left out. Defaults to blank string.
-- `args` (_optional_) The arguments to pass to the program. Must be an array or left out. Defaults to `[]`.
-- `compile_timeout` (_optional_) The maximum time allowed for the compile stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `10000` (10 seconds).
-- `run_timeout` (_optional_) The maximum time allowed for the run stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `3000` (3 seconds).
-- `compile_memory_limit` (_optional_) The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit)
-- `run_memory_limit` (_optional_) The maximum amount of memory the run stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit)
+- `language` (**required**) The language to use for execution, must be a string and must be installed.
+- `version` (**required**) The version of the language to use for execution, must be a string containing a SemVer selector for the version or the specific version number to use.
+- `files` (**required**) An array of files containing code or other data that should be used for execution. The first file in this array is considered the main file.
+- `files[].name` (_optional_) The name of the file to upload, must be a string containing no path or left out.
+- `files[].content` (**required**) The content of the files to upload, must be a string containing text to write.
+- `stdin` (_optional_) The text to pass as stdin to the program. Must be a string or left out. Defaults to blank string.
+- `args` (_optional_) The arguments to pass to the program. Must be an array or left out. Defaults to `[]`.
+- `compile_timeout` (_optional_) The maximum time allowed for the compile stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `10000` (10 seconds).
+- `run_timeout` (_optional_) The maximum time allowed for the run stage to finish before bailing out in milliseconds. Must be a number or left out. Defaults to `3000` (3 seconds).
+- `compile_memory_limit` (_optional_) The maximum amount of memory the compile stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit)
+- `run_memory_limit` (_optional_) The maximum amount of memory the run stage is allowed to use in bytes. Must be a number or left out. Defaults to `-1` (no limit)
```json
{
- "language": "js",
- "version": "15.10.0",
- "files": [
- {
- "name": "my_cool_code.js",
- "content": "console.log(process.argv)"
- }
- ],
- "stdin": "",
- "args": ["1", "2", "3"],
- "compile_timeout": 10000,
- "run_timeout": 3000,
- "compile_memory_limit": -1,
- "run_memory_limit": -1
+ "language": "js",
+ "version": "15.10.0",
+ "files": [
+ {
+ "name": "my_cool_code.js",
+ "content": "console.log(process.argv)"
+ }
+ ],
+ "stdin": "",
+ "args": ["1", "2", "3"],
+ "compile_timeout": 10000,
+ "run_timeout": 3000,
+ "compile_memory_limit": -1,
+ "run_memory_limit": -1
}
```
@@ -410,14 +411,14 @@ Docker provides a great deal of security out of the box in that it's separate fr
Piston takes additional steps to make it resistant to
various privilege escalation, denial-of-service, and resource saturation threats. These steps include:
-- Disabling outgoing network interaction
-- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
-- Capping max files at 2048 (resists various file based attacks)
-- Cleaning up all temp space after each execution (resists out of drive space attacks)
-- Running as a variety of unprivileged users
-- Capping runtime execution at 3 seconds
-- Capping stdout to 65536 characters (resists yes/no bombs and runaway output)
-- SIGKILLing misbehaving code
+- Disabling outgoing network interaction
+- Capping max processes at 256 by default (resists `:(){ :|: &}:;`, `while True: os.fork()`, etc.)
+- Capping max files at 2048 (resists various file based attacks)
+- Cleaning up all temp space after each execution (resists out of drive space attacks)
+- Running as a variety of unprivileged users
+- Capping runtime execution at 3 seconds
+- Capping stdout to 65536 characters (resists yes/no bombs and runaway output)
+- SIGKILLing misbehaving code
diff --git a/tests/readme.md b/tests/readme.md
index 01ae419..746d0b9 100644
--- a/tests/readme.md
+++ b/tests/readme.md
@@ -6,4 +6,4 @@ Write exploits in any language supported by piston.
Hopefully when running any files in this directory, piston will resist the attack.
-Leave a comment in the code describing how the exploit works.
\ No newline at end of file
+Leave a comment in the code describing how the exploit works.