commit 8bbf364581

@@ -0,0 +1,39 @@
name: Publish API image
on:
  push:
    branches:
      - master
      - v3
    paths:
      - api/**

jobs:
  push_to_registry:
    runs-on: ubuntu-latest
    name: Build and Push Docker image to Github Packages
    steps:
      - name: Check out repo
        uses: actions/checkout@v2
      - name: Login to GitHub registry
        uses: docker/login-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          registry: docker.pkg.github.com
      - name: Login to ghcr.io
        uses: docker/login-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          registry: ghcr.io

      - name: Build and push API
        uses: docker/build-push-action@v2
        with:
          context: api
          push: true
          pull: true
          tags: |
            docker.pkg.github.com/engineer-man/piston/api
            ghcr.io/engineer-man/piston
@@ -0,0 +1,145 @@
name: 'Package Pull Requests'

on:
  pull_request:
    types:
      - opened
      - edited
      - reopened
      - synchronize
    paths:
      - 'packages/**'

jobs:
  build-pkg:
    name: Check that package builds
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Login to GitHub registry
        uses: docker/login-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          registry: docker.pkg.github.com

      - name: Get list of changed files
        uses: lots0logs/gh-action-get-changed-files@2.1.4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Packages
        run: |
          PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
          echo "Packages: $PACKAGES"
          docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES
          ls -la packages

      - name: Upload package as artifact
        uses: actions/upload-artifact@v2
        with:
          name: packages
          path: packages/*.pkg.tar.gz

  test-pkg:
    name: Test package
    runs-on: ubuntu-latest
    needs: build-pkg
    steps:
      - uses: actions/checkout@v2

      - uses: actions/download-artifact@v2
        with:
          name: packages

      - name: Relocate downloaded packages
        run: mv *.pkg.tar.gz packages/

      - name: Write test config file
        uses: DamianReeves/write-file-action@v1.0
        with:
          path: data/config.yaml
          contents: |
            log_level: DEBUG
            bind_address: 0.0.0.0:2000
            data_directory: /piston
            runner_uid_min: 1100
            runner_uid_max: 1500
            runner_gid_min: 1100
            runner_gid_max: 1500
            disable_networking: false
            output_max_size: 1024
            max_process_count: 64
            max_open_files: 2048
            repo_url: http://localhost:8000/index
          write-mode: overwrite

      - name: Login to GitHub registry
        uses: docker/login-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          registry: docker.pkg.github.com

      - name: Run tests
        run: |
          ls -la
          docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name piston_fs_repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
          docker run --network container:piston_fs_repo -v $(pwd)'/data:/piston' -d --name api docker.pkg.github.com/engineer-man/piston/api
          echo Waiting for API to start..
          docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/runtimes

          echo Waiting for Index to start..
          docker run --network container:piston_fs_repo appropriate/curl -s --retry 999 --retry-max-time 0 --retry-connrefused http://localhost:8000/index

          echo Adjusting index
          sed -i 's/piston_fs_repo/localhost/g' repo/index

          echo Listing Packages
          PACKAGES_JSON=$(docker run --network container:api appropriate/curl -s http://localhost:2000/packages)
          echo $PACKAGES_JSON

          echo Getting CLI ready
          docker run -v "$PWD/cli:/app" --entrypoint /bin/bash node:15 -c 'cd /app; npm i'

          for package in $(jq -r '.[] | "\(.language)-\(.language_version)"' <<< "$PACKAGES_JSON")
          do
            echo "Testing $package"
            PKG_PATH=$(sed 's|-|/|' <<< $package)
            PKG_NAME=$(awk -F- '{ print $1 }' <<< $package)
            PKG_VERSION=$(awk -F- '{ print $2 }' <<< $package)

            echo "Installing..."
            docker run --network container:api appropriate/curl -sXPOST http://localhost:2000/packages/$PKG_PATH

            TEST_SCRIPTS=packages/$PKG_PATH/test.*
            echo "Tests: $TEST_SCRIPTS"

            for tscript in $TEST_SCRIPTS
            do
              TEST_RUNTIME=$(awk -F. '{print $2}' <<< $(basename $tscript))
              echo Running $tscript with runtime=$TEST_RUNTIME
              docker run --network container:api -v "$PWD/cli:/app" -v "$PWD/$(dirname $tscript):/pkg" node:15 /app/index.js run $TEST_RUNTIME $PKG_VERSION /pkg/$(basename $tscript) > test_output
              cat test_output
              grep "OK" test_output
            done
          done

      - name: Dump logs
        if: ${{ always() }}
        run: |
          docker logs api
          docker logs piston_fs_repo
@@ -0,0 +1,76 @@
name: 'Package Pushed'

on:
  push:
    branches:
      - master
      - v3
    paths:
      - packages/**

jobs:
  build-pkg:
    name: Build package
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v2

      - name: Login to GitHub registry
        uses: docker/login-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          registry: docker.pkg.github.com

      - name: Get list of changed files
        uses: lots0logs/gh-action-get-changed-files@2.1.4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Build Packages
        run: |
          PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
          echo "Packages: $PACKAGES"
          docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES
          ls -la packages

      - name: Upload Packages
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: packages/*.pkg.tar.gz
          tag: pkgs
          overwrite: true
          file_glob: true
  create-index:
    name: Create Index
    runs-on: ubuntu-latest
    needs: build-pkg
    steps:
      - name: "Download all release assets"
        run: curl -s https://api.github.com/repos/engineer-man/piston/releases/latest | jq '.assets[].browser_download_url' -r | xargs -L 1 curl -sLO
      - name: "Generate index file"
        run: |
          echo "" > index
          BASEURL=https://github.com/engineer-man/piston/releases/download/pkgs/
          for pkg in *.pkg.tar.gz
          do
            PKGFILE=$(basename $pkg)
            PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g')

            PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)')
            PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+')
            PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}')
            echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index
            echo "Adding package $PKGNAME-$PKGVERSION"
          done
      - name: Upload index
        uses: svenstaro/upload-release-action@v2
        with:
          repo_token: ${{ secrets.GITHUB_TOKEN }}
          file: index
          tag: pkgs
          overwrite: true
          file_glob: true
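Note: each line the "Generate index file" step appends is a CSV record of the form language,version,sha256,download_url; a hypothetical entry (placeholder checksum) would look like:

    python,3.9.1,<sha256 of python-3.9.1.pkg.tar.gz>,https://github.com/engineer-man/piston/releases/download/pkgs/python-3.9.1.pkg.tar.gz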
@@ -0,0 +1,31 @@
name: Publish Repo image
on:
  push:
    branches:
      - master
      - v3
    paths:
      - repo/**

jobs:
  push_to_registry:
    runs-on: ubuntu-latest
    name: Build and Push Docker image to Github Packages
    steps:
      - name: Check out repo
        uses: actions/checkout@v2
      - name: Login to GitHub registry
        uses: docker/login-action@v1
        with:
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
          registry: docker.pkg.github.com

      - name: Build and push repo
        uses: docker/build-push-action@v2
        with:
          context: repo
          pull: true
          push: true
          tags: |
            docker.pkg.github.com/engineer-man/piston/repo-builder
@@ -1,6 +1 @@
-api/api
-api/package-lock.json
-lxc/i
-lxc/lockfile
-container/build.yaml
-container/*.tar.xz
+data/
@@ -0,0 +1,2 @@
node_modules/
_piston/
@@ -1 +1,2 @@
 node_modules
+_piston
@@ -0,0 +1,29 @@
FROM node:15.8.0-buster-slim

ENV DEBIAN_FRONTEND=noninteractive

RUN dpkg-reconfigure -p critical dash
RUN for i in $(seq 1001 1500); do \
        groupadd -g $i runner$i && \
        useradd -M runner$i -g $i -u $i ; \
    done
RUN apt-get update && \
    apt-get install -y libxml2 gnupg tar coreutils util-linux libc6-dev \
    binutils build-essential locales libpcre3-dev libevent-dev libgmp3-dev \
    libncurses6 libncurses5 libedit-dev libseccomp-dev && \
    rm -rf /var/lib/apt/lists/*

RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen

ENV NODE_ENV=production

WORKDIR /piston_api
COPY ["package.json", "package-lock.json", "./"]
RUN npm i
COPY ./src ./src

RUN make -C ./src/nosocket/ all && make -C ./src/nosocket/ install

CMD [ "node", "src", "-m", "-c", "/piston/config.yaml"]
EXPOSE 2000/tcp
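A minimal sketch of building and running this image locally (the image tag and host paths are illustrative; the -m flag in CMD lets the API write a default /piston/config.yaml on first start):

    docker build -t piston-api api/
    docker run -d -p 2000:2000 -v "$PWD/data:/piston" piston-api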
(File diff suppressed because it is too large)
@@ -1,16 +1,20 @@
 {
-    "name": "api",
-    "version": "1.0.0",
-    "description": "",
-    "main": "index.js",
-    "scripts": {
-        "test": "echo \"Error: no test specified\" && exit 1"
-    },
-    "keywords": [],
-    "author": "",
-    "license": "ISC",
-    "dependencies": {
-        "express": "^4.17.1",
-        "express-validator": "^6.9.2"
-    }
+    "name": "piston-api",
+    "version": "3.0.0",
+    "description": "API for piston - a high performance code execution engine",
+    "main": "src/index.js",
+    "dependencies": {
+        "body-parser": "^1.19.0",
+        "express": "^4.17.1",
+        "express-validator": "^6.10.0",
+        "is-docker": "^2.1.1",
+        "js-yaml": "^4.0.0",
+        "logplease": "^1.2.15",
+        "nocamel": "HexF/nocamel#patch-1",
+        "node-fetch": "^2.6.1",
+        "semver": "^7.3.4",
+        "uuid": "^8.3.2",
+        "yargs": "^16.2.0"
+    },
+    "license": "MIT"
 }
@@ -0,0 +1,191 @@
const fss = require('fs');
const yargs = require('yargs');
const hide_bin = require('yargs/helpers').hideBin;
const Logger = require('logplease');
const logger = Logger.create('config');
const yaml = require('js-yaml');

const header = `#
#      ____  _     _
#     |  _ \\(_)___| |_ ___  _ __
#     | |_) | / __| __/ _ \\| '_ \\
#     |  __/| \\__ \\ || (_) | | | |
#     |_|   |_|___/\\__\\___/|_| |_|
#
# A High performance code execution engine
# github.com/engineer-man/piston
#

`;
const argv = yargs(hide_bin(process.argv))
    .usage('Usage: $0 -c [config]')
    .demandOption('c')
    .option('config', {
        alias: 'c',
        describe: 'config file to load from',
        default: '/piston/config.yaml'
    })
    .option('make-config', {
        alias: 'm',
        type: 'boolean',
        describe: 'create config file and populate defaults if it does not already exist'
    })
    .argv;

const options = [
    {
        key: 'log_level',
        desc: 'Level of data to log',
        default: 'INFO',
        options: Object.values(Logger.LogLevels),
        validators: [
            x => Object.values(Logger.LogLevels).includes(x) || `Log level ${x} does not exist`
        ]
    },
    {
        key: 'bind_address',
        desc: 'Address to bind REST API on\nThank @Bones for the number',
        default: '0.0.0.0:2000',
        validators: []
    },
    {
        key: 'data_directory',
        desc: 'Absolute path to store all piston related data at',
        default: '/piston',
        validators: [x => fss.exists_sync(x) || `Directory ${x} does not exist`]
    },
    {
        key: 'runner_uid_min',
        desc: 'Minimum uid to use for runner',
        default: 1001,
        validators: []
    },
    {
        key: 'runner_uid_max',
        desc: 'Maximum uid to use for runner',
        default: 1500,
        validators: []
    },
    {
        key: 'runner_gid_min',
        desc: 'Minimum gid to use for runner',
        default: 1001,
        validators: []
    },
    {
        key: 'runner_gid_max',
        desc: 'Maximum gid to use for runner',
        default: 1500,
        validators: []
    },
    {
        key: 'disable_networking',
        desc: 'Set to true to disable networking',
        default: true,
        validators: []
    },
    {
        key: 'output_max_size',
        desc: 'Max size of each stdio buffer',
        default: 1024,
        validators: []
    },
    {
        key: 'max_process_count',
        desc: 'Max number of processes per job',
        default: 64,
        validators: []
    },
    {
        key: 'max_open_files',
        desc: 'Max number of open files per job',
        default: 2048,
        validators: []
    },
    {
        key: 'repo_url',
        desc: 'URL of repo index',
        default: 'https://github.com/engineer-man/piston/releases/download/pkgs/index',
        validators: []
    }
];

const make_default_config = () => {
    let content = header.split('\n');

    options.forEach(option => {
        content = content.concat(option.desc.split('\n').map(x => `# ${x}`));

        if (option.options) {
            content.push('# Options: ' + option.options.join(', '));
        }

        content.push(`${option.key}: ${option.default}`);

        content.push(''); // New line between
    });

    return content.join('\n');
};

logger.info(`Loading Configuration from ${argv.config}`);

if (argv['make-config']) {
    logger.debug('Make configuration flag is set');
}

if (!!argv['make-config'] && !fss.exists_sync(argv.config)) {
    logger.info('Writing default configuration...');
    try {
        fss.write_file_sync(argv.config, make_default_config());
    } catch (e) {
        logger.error('Error writing default configuration:', e.message);
        process.exit(1);
    }
}

let config = {};

logger.debug('Reading config file');

try {
    const cfg_content = fss.read_file_sync(argv.config);
    config = yaml.load(cfg_content);
} catch (err) {
    logger.error('Error reading configuration file:', err.message);
    process.exit(1);
}

logger.debug('Validating config entries');

let errored = false;

options.for_each(option => {
    logger.debug('Checking option', option.key);

    let cfg_val = config[option.key];

    if (cfg_val === undefined) {
        errored = true;
        logger.error(`Config key ${option.key} does not exist on currently loaded configuration`);
        return;
    }

    option.validators.for_each(validator => {
        let response = validator(cfg_val);

        // A failing validator returns an error string, so compare against true
        if (response !== true) {
            errored = true;
            logger.error(`Config option ${option.key} failed validation:`, response);
            return;
        }
    });
});

if (errored) {
    process.exit(1);
}

logger.info('Configuration successfully loaded');

module.exports = config;
@@ -0,0 +1,193 @@
const logger = require('logplease').create('executor/job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('../config');
const globals = require('../globals');
const fs = require('fs/promises');

const job_states = {
    READY: Symbol('Ready to be primed'),
    PRIMED: Symbol('Primed and ready for execution'),
    EXECUTED: Symbol('Executed and ready for cleanup')
};

let uid = 0;
let gid = 0;

class Job {

    constructor({ runtime, files, args, stdin, timeouts, alias }) {
        this.uuid = uuidv4();
        this.runtime = runtime;
        this.files = files.map((file, i) => ({
            name: file.name || `file${i}`,
            content: file.content
        }));

        this.args = args;
        this.stdin = stdin;
        this.timeouts = timeouts;
        this.alias = alias;

        this.uid = config.runner_uid_min + uid;
        this.gid = config.runner_gid_min + gid;

        uid++;
        gid++;

        uid %= (config.runner_uid_max - config.runner_uid_min) + 1;
        gid %= (config.runner_gid_max - config.runner_gid_min) + 1;

        this.state = job_states.READY;
        this.dir = path.join(config.data_directory, globals.data_directories.jobs, this.uuid);
    }

    async prime() {
        logger.info(`Priming job uuid=${this.uuid}`);

        logger.debug('Writing files to job cache');

        logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);

        await fs.mkdir(this.dir, { mode: 0o700 });
        await fs.chown(this.dir, this.uid, this.gid);

        for (const file of this.files) {
            let file_path = path.join(this.dir, file.name);

            await fs.write_file(file_path, file.content);
            await fs.chown(file_path, this.uid, this.gid);
        }

        this.state = job_states.PRIMED;

        logger.debug('Primed job');
    }

    async safe_call(file, args, timeout) {
        return new Promise((resolve, reject) => {
            const nonetwork = config.disable_networking ? ['nosocket'] : [];

            const prlimit = [
                'prlimit',
                '--nproc=' + config.max_process_count,
                '--nofile=' + config.max_open_files
            ];

            const proc_call = [
                ...prlimit,
                ...nonetwork,
                'bash', file,
                ...args
            ];

            var stdout = '';
            var stderr = '';

            const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
                env: {
                    ...this.runtime.env_vars,
                    PISTON_ALIAS: this.alias
                },
                stdio: 'pipe',
                cwd: this.dir,
                uid: this.uid,
                gid: this.gid,
                detached: true //give this process its own process group
            });

            proc.stdin.write(this.stdin);
            proc.stdin.end();

            const kill_timeout = set_timeout(_ => proc.kill('SIGKILL'), timeout);

            proc.stderr.on('data', data => {
                if (stderr.length > config.output_max_size) {
                    proc.kill('SIGKILL');
                } else {
                    stderr += data;
                }
            });

            proc.stdout.on('data', data => {
                if (stdout.length > config.output_max_size) {
                    proc.kill('SIGKILL');
                } else {
                    stdout += data;
                }
            });

            const exit_cleanup = () => {
                clear_timeout(kill_timeout);

                proc.stderr.destroy();
                proc.stdout.destroy();

                try {
                    process.kill(-proc.pid, 'SIGKILL');
                } catch {
                    // Process will be dead already, so nothing to kill.
                }
            };

            proc.on('exit', (code, signal) => {
                exit_cleanup();

                resolve({ stdout, stderr, code, signal });
            });

            proc.on('error', (err) => {
                exit_cleanup();

                reject({ error: err, stdout, stderr });
            });
        });
    }

    async execute() {
        if (this.state !== job_states.PRIMED) {
            throw new Error('Job must be in primed state, current state: ' + this.state.toString());
        }

        logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`);

        logger.debug('Compiling');

        let compile;

        if (this.runtime.compiled) {
            compile = await this.safe_call(
                path.join(this.runtime.pkgdir, 'compile'),
                this.files.map(x => x.name),
                this.timeouts.compile
            );
        }

        logger.debug('Running');

        const run = await this.safe_call(
            path.join(this.runtime.pkgdir, 'run'),
            [this.files[0].name, ...this.args],
            this.timeouts.run
        );

        this.state = job_states.EXECUTED;

        return {
            compile,
            run
        };
    }

    async cleanup() {
        logger.info(`Cleaning up job uuid=${this.uuid}`);
        await fs.rm(this.dir, { recursive: true, force: true });
    }

}

module.exports = {
    Job
};
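For intuition, with the default config values above (max_process_count 64, max_open_files 2048, disable_networking true), the spawn in safe_call composes a command of roughly this shape (the package path is illustrative):

    prlimit --nproc=64 --nofile=2048 nosocket bash /piston/packages/python/3.9.1/run code.py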
@@ -0,0 +1,57 @@
// {"language":"python","version":"3.9.1","files":{"code.py":"print('hello world')"},"args":[],"stdin":"","compile_timeout":10, "run_timeout":3}
// {"success":true, "run":{"stdout":"hello world", "stderr":"", "error_code":0},"compile":{"stdout":"","stderr":"","error_code":0}}

const { get_latest_runtime_matching_language_version } = require('../runtime');
const { Job } = require('./job');
const { body } = require('express-validator');

module.exports = {

    run_job_validators: [
        body('language')
            .isString(),
        body('version')
            .isString(),
        // isSemVer requires it to be a version, not a selector
        body('files')
            .isArray(),
        body('files.*.content')
            .isString(),
    ],

    // POST /jobs
    async run_job(req, res) {
        const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version);

        if (runtime === undefined) {
            return res
                .status(400)
                .send({
                    message: `${req.body.language}-${req.body.version} runtime is unknown`
                });
        }

        const job = new Job({
            runtime,
            alias: req.body.language,
            files: req.body.files,
            args: req.body.args || [],
            stdin: req.body.stdin || "",
            timeouts: {
                run: req.body.run_timeout || 3000,
                compile: req.body.compile_timeout || 10000
            }
        });

        await job.prime();

        const result = await job.execute();

        await job.cleanup();

        return res
            .status(200)
            .send(result);
    }

};
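A minimal sketch of exercising this handler as mounted in index.js (host and port assume the default bind_address; per the validators, files is an array of objects with string content):

    curl -s -XPOST http://localhost:2000/api/v1/execute \
        -H "Content-Type: application/json" \
        -d '{"language":"python","version":"3.9.1","files":[{"name":"code.py","content":"print(\"hello world\")"}]}'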
@@ -0,0 +1,20 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require('is-docker');
const fss = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${
    fss.read_file_sync('/etc/os-release')
        .toString()
        .split('\n')
        .find(x => x.startsWith('ID'))
        .replace('ID=', '')
}`;

module.exports = {
    data_directories: {
        packages: 'packages',
        jobs: 'jobs'
    },
    version: require('../package.json').version,
    platform,
    pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed
};
api/src/index.js
@@ -1,79 +1,122 @@
 #!/usr/bin/env node
+require('nocamel');
+const Logger = require('logplease');
 const express = require('express');
-const { execute } = require('../../lxc/execute.js');
-const { languages } = require('./languages');
-const { checkSchema, validationResult } = require('express-validator');
-
-const PORT = 2000;
+const globals = require('./globals');
+const config = require('./config');
+const path = require('path');
+const fs = require('fs/promises');
+const fss = require('fs');
+const body_parser = require('body-parser');
+const runtime = require('./runtime');
+const { validationResult } = require('express-validator');

+const logger = Logger.create('index');
 const app = express();
-app.use(express.json());

-app.post(
-    '/execute',
-    checkSchema({
-        language: {
-            in: 'body',
-            notEmpty: {
-                errorMessage: 'No language supplied',
-            },
-            isString: {
-                errorMessage: 'Supplied language is not a string',
-            },
-            custom: {
-                options: value => value && languages.find(language => language.aliases.includes(value.toLowerCase())),
-                errorMessage: 'Supplied language is not supported by Piston',
-            },
-        },
-        source: {
-            in: 'body',
-            notEmpty: {
-                errorMessage: 'No source supplied',
-            },
-            isString: {
-                errorMessage: 'Supplied source is not a string',
-            },
-        },
-        args: {
-            in: 'body',
-            optional: true,
-            isArray: {
-                errorMessage: 'Supplied args is not an array',
-            },
-        },
-        stdin: {
-            in: 'body',
-            optional: true,
-            isString: {
-                errorMessage: 'Supplied stdin is not a string',
-            },
-        },
-    }),
-    async (req, res) => {
-        const errors = validationResult(req).array();
-
-        if (errors.length === 0) {
-            const language = languages.find(language =>
-                language.aliases.includes(req.body.language.toLowerCase())
-            );
-
-            const { stdout, stderr, output, ran } = await execute(language, req.body.source, req.body.stdin, req.body.args);
-
-            res.status(200).json({
-                ran,
-                language: language.name,
-                version: language.version,
-                stdout,
-                stderr,
-                output,
-            });
-        } else {
-            res.status(400).json({
-                message: errors[0].msg,
-            });
-        }
-    },
-);
-
-app.get('/versions', (_, res) => res.json(languages));
-
-app.listen(PORT, () => console.log(`Listening on port ${PORT}`));
+(async () => {
+    logger.info('Setting loglevel to', config.log_level);
+    Logger.setLogLevel(config.log_level);
+    logger.debug('Ensuring data directories exist');
+
+    Object.values(globals.data_directories).for_each(dir => {
+        let data_path = path.join(config.data_directory, dir);
+
+        logger.debug(`Ensuring ${data_path} exists`);
+
+        if (!fss.exists_sync(data_path)) {
+            logger.info(`${data_path} does not exist.. Creating..`);
+
+            try {
+                fss.mkdir_sync(data_path);
+            } catch(e) {
+                logger.error(`Failed to create ${data_path}: `, e.message);
+            }
+        }
+    });
+
+    logger.info('Loading packages');
+    const pkgdir = path.join(config.data_directory, globals.data_directories.packages);
+
+    const pkglist = await fs.readdir(pkgdir);
+
+    const languages = await Promise.all(
+        pkglist.map(lang =>
+            fs.readdir(path.join(pkgdir, lang))
+                .then(x => x.map(y => path.join(pkgdir, lang, y)))
+        ));
+
+    const installed_languages = languages
+        .flat()
+        .filter(pkg => fss.exists_sync(path.join(pkg, globals.pkg_installed_file)));
+
+    installed_languages.forEach(pkg => new runtime.Runtime(pkg));
+
+    logger.info('Starting API Server');
+    logger.debug('Constructing Express App');
+    logger.debug('Registering middleware');
+
+    app.use(body_parser.urlencoded({ extended: true }));
+    app.use(body_parser.json());
+
+    app.use(function (err, req, res, next) {
+        return res
+            .status(400)
+            .send({
+                stack: err.stack
+            })
+    })
+
+    const validate = (req, res, next) => {
+        const errors = validationResult(req);
+
+        if (!errors.isEmpty()) {
+            return res
+                .status(400)
+                .send({
+                    message: errors.array()
+                });
+        }
+
+        next();
+    };
+
+    logger.debug('Registering Routes');
+
+    const ppman_routes = require('./ppman/routes');
+    const executor_routes = require('./executor/routes');
+
+    app.get('/api/v1/packages', ppman_routes.package_list);
+    app.post('/api/v1/packages/:language/:version', ppman_routes.package_install);
+    app.delete('/api/v1/packages/:language/:version', ppman_routes.package_uninstall);
+    app.post('/api/v1/execute',
+        executor_routes.run_job_validators,
+        validate,
+        executor_routes.run_job
+    );
+    app.get('/api/v1/runtimes', (req, res) => {
+        const runtimes = runtime
+            .map(rt => {
+                return {
+                    language: rt.language,
+                    version: rt.version.raw,
+                    aliases: rt.aliases
+                };
+            });
+
+        return res
+            .status(200)
+            .send(runtimes);
+    });
+
+    app.use(function (req, res, next) {
+        return res.status(404).send({ message: 'Not Found' });
+    });
+
+    logger.debug('Calling app.listen');
+    const [ address, port ] = config.bind_address.split(':');
+
+    app.listen(port, address, () => {
+        logger.info('API server started on', config.bind_address);
+    });
+})();
@@ -1,41 +0,0 @@
const { spawn } = require('child_process');
const languages = require('../../config/languages.json');

{
    const process = spawn(__dirname + '/../../lxc/util/versions');

    let output = '';
    process.stderr.on('data', chunk => output += chunk);
    process.stdout.on('data', chunk => output += chunk);

    process.on('exit', () => {
        const sections = output.toLowerCase().split('---');
        const versions = {};

        for (const section of sections) {
            const lines = section.trim().split('\n');

            if (lines.length >= 2) {
                const language = lines[0];

                if (language === 'java') {
                    versions[language] = /\d+/.exec(lines[1])?.[0];
                } else if (language === 'emacs') {
                    versions[language] = /\d+\.\d+/.exec(lines[1])?.[0];
                } else if (language === 'clojure') {
                    versions[language] = /\d+\.\d+\.\d+\.\d+/.exec(lines[1])?.[0];
                } else {
                    versions[language] = /\d+\.\d+\.\d+/.exec(section)?.[0];
                }
            }
        }

        for (const language of languages) {
            language.version = versions[language.name];
        }
    });
}

module.exports = {
    languages,
};
@@ -0,0 +1,19 @@
CC = gcc
CFLAGS = -O2 -Wall -lseccomp
TARGET = nosocket
BUILD_PATH = ./
INSTALL_PATH = /usr/local/bin/
SOURCE = nosocket.c

all: $(TARGET)

$(TARGET): $(SOURCE)
	$(CC) $(BUILD_PATH)$(SOURCE) $(CFLAGS) -o $(TARGET)

install:
	mv $(TARGET) $(INSTALL_PATH)

clean:
	$(RM) $(TARGET)
	$(RM) $(INSTALL_PATH)$(TARGET)
@@ -0,0 +1,46 @@
/*
nosocket.c

Disables access to the `socket` syscall and runs a program provided as the first
commandline argument.
*/
#include <stdio.h>
#include <errno.h>
#include <unistd.h>
#include <sys/prctl.h>
#include <seccomp.h>

int main(int argc, char *argv[])
{
    // Disallow any new capabilities from being added
    prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);

    // SCMP_ACT_ALLOW lets the filter have no effect on syscalls not matching a
    // configured filter rule (allow all by default)
    scmp_filter_ctx ctx = seccomp_init(SCMP_ACT_ALLOW);
    if (!ctx)
    {
        fprintf(stderr, "Unable to initialize seccomp filter context\n");
        return 1;
    }

    // Add a seccomp rule to the syscall blacklist - blacklist the socket syscall
    if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EACCES), SCMP_SYS(socket), 0) < 0)
    {
        fprintf(stderr, "Unable to add seccomp rule to context\n");
        return 1;
    }

#ifdef DEBUG
    seccomp_export_pfc(ctx, 0);
#endif

    if (argc < 2)
    {
        fprintf(stderr, "Usage %s: %s <program name> <arguments>\n", argv[0], argv[0]);
        return 1;
    }
    seccomp_load(ctx);
    execvp(argv[1], argv + 1);
    return 1;
}
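A quick way to see the filter in action, assuming a Linux host with libseccomp-dev installed (commands illustrative, run from this directory per the Makefile above):

    make all && sudo make install
    nosocket curl -s http://example.com    # socket() now fails with EACCES
    nosocket echo hello                    # non-networking programs run normally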
@@ -0,0 +1,165 @@
const logger = require('logplease').create('ppman/package');
const semver = require('semver');
const config = require('../config');
const globals = require('../globals');
const fetch = require('node-fetch');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const cp = require('child_process');
const crypto = require('crypto');
const runtime = require('../runtime');

class Package {

    constructor({ language, version, download, checksum }) {
        this.language = language;
        this.version = semver.parse(version);
        this.checksum = checksum;
        this.download = download;
    }

    get installed() {
        return fss.exists_sync(path.join(this.install_path, globals.pkg_installed_file));
    }

    get install_path() {
        return path.join(
            config.data_directory,
            globals.data_directories.packages,
            this.language,
            this.version.raw
        );
    }

    async install() {
        if (this.installed) {
            throw new Error('Already installed');
        }

        logger.info(`Installing ${this.language}-${this.version.raw}`);

        if (fss.exists_sync(this.install_path)) {
            logger.warn(`${this.language}-${this.version.raw} has residual files. Removing them.`);
            await fs.rm(this.install_path, { recursive: true, force: true });
        }

        logger.debug(`Making directory ${this.install_path}`);
        await fs.mkdir(this.install_path, { recursive: true });

        logger.debug(`Downloading package from ${this.download} in to ${this.install_path}`);
        const pkgpath = path.join(this.install_path, 'pkg.tar.gz');
        const download = await fetch(this.download);

        const file_stream = fss.create_write_stream(pkgpath);
        await new Promise((resolve, reject) => {
            download.body.pipe(file_stream);
            download.body.on('error', reject);

            file_stream.on('finish', resolve);
        });

        logger.debug('Validating checksums');
        logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`);
        const cs = crypto.create_hash("sha256")
            .update(fss.readFileSync(pkgpath))
            .digest('hex');

        if (cs !== this.checksum) {
            throw new Error(`Checksum miss-match want: ${this.checksum} got: ${cs}`);
        }

        logger.debug(`Extracting package files from archive ${pkgpath} in to ${this.install_path}`);

        await new Promise((resolve, reject) => {
            const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`);

            proc.once('exit', (code, _) => {
                code === 0 ? resolve() : reject();
            });

            proc.stdout.pipe(process.stdout);
            proc.stderr.pipe(process.stderr);

            proc.once('error', reject);
        });

        logger.debug('Registering runtime');
        new runtime.Runtime(this.install_path);

        logger.debug('Caching environment');
        const get_env_command = `cd ${this.install_path}; source environment; env`;

        const envout = await new Promise((resolve, reject) => {
            let stdout = '';

            const proc = cp
                .spawn(
                    'env',
                    ['-i', 'bash', '-c', `${get_env_command}`],
                    {
                        stdio: ['ignore', 'pipe', 'pipe']
                    }
                );

            proc.once('exit', (code, _) => {
                code === 0 ? resolve(stdout) : reject();
            });

            proc.stdout.on('data', data => {
                stdout += data;
            });

            proc.once('error', reject);
        });

        const filtered_env = envout
            .split('\n')
            .filter(l => !['PWD', 'OLDPWD', '_', 'SHLVL'].includes(l.split('=', 2)[0]))
            .join('\n');

        await fs.write_file(path.join(this.install_path, '.env'), filtered_env);

        logger.debug('Writing installed state to disk');
        await fs.write_file(path.join(this.install_path, globals.pkg_installed_file), Date.now().toString());

        logger.info(`Installed ${this.language}-${this.version.raw}`);

        return {
            language: this.language,
            version: this.version.raw
        };
    }

    async uninstall() {
        logger.info(`Uninstalling ${this.language}-${this.version.raw}`);

        logger.debug('Finding runtime');
        const found_runtime = runtime.get_latest_runtime_matching_language_version(this.language, this.version.raw);

        if (!found_runtime) {
            logger.error(`Uninstalling ${this.language}-${this.version.raw} failed: Not installed`);
            throw new Error(`${this.language}-${this.version.raw} is not installed`);
        }

        logger.debug('Unregistering runtime');
        found_runtime.unregister();

        logger.debug('Cleaning files from disk');
        await fs.rmdir(this.install_path, { recursive: true });

        logger.info(`Uninstalled ${this.language}-${this.version.raw}`);

        return {
            language: this.language,
            version: this.version.raw
        };
    }

}

module.exports = {
    Package
};
@@ -0,0 +1,123 @@
const logger = require('logplease').create('ppman/routes');
const semver = require('semver');
const fetch = require('node-fetch');
const config = require('../config');
const { Package } = require('./package');

const get_package_list = async () => {
    const repo_content = await fetch(config.repo_url).then(x => x.text());

    const entries = repo_content
        .split('\n')
        .filter(x => x.length > 0);

    return entries.map(line => {
        const [ language, version, checksum, download ] = line.split(',', 4);

        return new Package({
            language,
            version,
            checksum,
            download
        });
    });
};

const get_package = async (lang, version) => {
    const packages = await get_package_list();

    const candidates = packages
        .filter(pkg => {
            return pkg.language == lang && semver.satisfies(pkg.version, version);
        });

    candidates.sort((a, b) => semver.rcompare(a.version, b.version));

    return candidates[0] || null;
};

module.exports = {

    // GET /packages
    async package_list(req, res) {
        logger.debug('Request to list packages');

        let packages = await get_package_list();

        packages = packages
            .map(pkg => {
                return {
                    language: pkg.language,
                    language_version: pkg.version.raw,
                    installed: pkg.installed
                };
            });

        return res
            .status(200)
            .send(packages);
    },

    // POST /packages/:language/:version
    async package_install(req, res) {
        logger.debug('Request to install package');

        const pkg = await get_package(req.params.language, req.params.version);

        if (pkg == null) {
            return res
                .status(404)
                .send({
                    message: `Requested package ${req.params.language}-${req.params.version} does not exist`
                });
        }

        try {
            const response = await pkg.install();

            return res
                .status(200)
                .send(response);
        } catch(e) {
            logger.error(`Error while installing package ${pkg.language}-${pkg.version}:`, e.message);

            return res
                .status(500)
                .send({
                    message: e.message
                });
        }
    },

    // DELETE /packages/:language/:version
    async package_uninstall(req, res) {
        logger.debug('Request to uninstall package');

        const pkg = await get_package(req.params.language, req.params.version);

        if (pkg == null) {
            return res
                .status(404)
                .send({
                    message: `Requested package ${req.params.language}-${req.params.version} does not exist`
                });
        }

        try {
            const response = await pkg.uninstall();

            return res
                .status(200)
                .send(response);
        } catch(e) {
            logger.error(`Error while uninstalling package ${pkg.language}-${pkg.version}:`, e.message);

            return res
                .status(500)
                .send({
                    message: e.message
                });
        }
    }

};
@@ -0,0 +1,81 @@
const logger = require('logplease').create('runtime');
const semver = require('semver');
const config = require('./config');
const globals = require('./globals');
const fss = require('fs');
const path = require('path');

const runtimes = [];

class Runtime {

    constructor(package_dir) {
        let info = JSON.parse(
            fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
        );

        const { language, version, build_platform, aliases } = info;

        this.pkgdir = package_dir;
        this.language = language;
        this.version = semver.parse(version);
        this.aliases = aliases;

        if (build_platform !== globals.platform) {
            logger.warn(
                `Package ${language}-${version} was built for platform ${build_platform}, ` +
                `but our platform is ${globals.platform}`
            );
        }

        logger.debug(`Package ${language}-${version} was loaded`);

        runtimes.push(this);
    }

    get compiled() {
        if (this._compiled === undefined) {
            this._compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
        }

        return this._compiled;
    }

    get env_vars() {
        if (!this._env_vars) {
            const env_file = path.join(this.pkgdir, '.env');
            const env_content = fss.read_file_sync(env_file).toString();

            this._env_vars = {};

            env_content
                .trim()
                .split('\n')
                .map(line => line.split('=', 2))
                .forEach(([key, val]) => {
                    this._env_vars[key.trim()] = val.trim();
                });
        }

        return this._env_vars;
    }

    toString() {
        return `${this.language}-${this.version.raw}`;
    }

    unregister() {
        const index = runtimes.indexOf(this);
        runtimes.splice(index, 1); //Remove from runtimes list
    }
}

module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.get_runtimes_matching_language_version = function(lang, ver) {
    return runtimes.filter(rt => (rt.language == lang || rt.aliases.includes(lang)) && semver.satisfies(rt.version, ver));
};
module.exports.get_latest_runtime_matching_language_version = function(lang, ver) {
    return module.exports.get_runtimes_matching_language_version(lang, ver)
        .sort((a, b) => semver.rcompare(a.version, b.version))[0];
};
@@ -0,0 +1 @@
node_modules
@@ -0,0 +1,98 @@
//const fetch = require('node-fetch');
const fs = require('fs');
const path = require('path');
const chalk = require('chalk');

exports.command = ['execute <language> <file> [args..]'];
exports.aliases = ['run'];
exports.describe = 'Executes file with the specified runner';

exports.builder = {
    languageVersion: {
        string: true,
        desc: 'Set the version of the language to use',
        alias: ['l'],
        default: '*'
    },
    stdin: {
        boolean: true,
        desc: 'Read input from stdin and pass to executor',
        alias: ['i']
    },
    run_timeout: {
        alias: ['rt', 'r'],
        number: true,
        desc: 'Milliseconds before killing run process',
        default: 3000
    },
    compile_timeout: {
        alias: ['ct', 'c'],
        number: true,
        desc: 'Milliseconds before killing compile process',
        default: 10000,
    },
    files: {
        alias: ['f'],
        array: true,
        desc: 'Additional files to add',
    }
};

exports.handler = async function(argv) {

    const files = [...(argv.files || []), argv.file]
        .map(file_path => ({
            name: path.basename(file_path),
            content: fs.readFileSync(file_path).toString()
        }));

    const stdin = (argv.stdin && await new Promise((resolve, _) => {
        var data = "";
        process.stdin.on('data', d => data += d);
        process.stdin.on('end', _ => resolve(data));
    })) || "";

    const request = {
        language: argv.language,
        version: argv['language-version'],
        files: files,
        args: argv.args,
        stdin,
        compile_timeout: argv.ct,
        run_timeout: argv.rt
    };

    let response = await argv.axios.post('/jobs', request);
    response = response.data;

    function step(name, ctx) {
        console.log(chalk.bold(`== ${name} ==`));
        if (ctx.stdout) {
            console.log(" ", chalk.bold(`STDOUT`));
            console.log(" ", ctx.stdout.replace(/\n/g, '\n    '));
        }
        if (ctx.stderr) {
            console.log(chalk.bold(`STDERR`));
            console.log(" ", ctx.stderr.replace(/\n/g, '\n    '));
        }

        if (ctx.code)
            console.log(
                chalk.bold(`Exit Code:`),
                chalk.bold[ctx.code > 0 ? 'red' : 'green'](ctx.code)
            );
        if (ctx.signal)
            console.log(
                chalk.bold(`Signal:`),
                chalk.bold.yellow(ctx.signal)
            );
    }

    if (response.compile) step('Compile', response.compile);
    step('Run', response.run);

};
@@ -0,0 +1,7 @@
exports.command = 'ppman';
exports.aliases = ['pkg'];
exports.describe = 'Package Manager';

exports.builder = yargs => yargs
    .commandDir('ppman_commands')
    .demandCommand();
@@ -0,0 +1,23 @@
const chalk = require('chalk');

exports.command = ['install <language> [language-version]'];
exports.aliases = ['i'];
exports.describe = 'Installs the named package';


const msg_format = {
    'color': p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Installation ${p.language ? "succeeded" : "failed: " + p.message}`,
    'monochrome': p => `Installation ${p.language ? "succeeded" : "failed: " + p.message}`,
    'json': JSON.stringify
};

exports.handler = async function({ axios, language, languageVersion }) {
    try {
        const install = await axios.post(`/api/v1/packages/${language}/${languageVersion || '*'}`);

        console.log(msg_format.color(install.data));
    } catch({ response }) {
        console.error(response.data.message);
    }
};
@@ -0,0 +1,26 @@
//const fetch = require('node-fetch');
const chalk = require('chalk');

exports.command = ['list'];
exports.aliases = ['l'];
exports.describe = 'Lists all available packages';


const msg_format = {
    'color': p => `${chalk[p.installed ? "green" : "red"]("•")} ${p.language} ${p.language_version}`,
    'monochrome': p => `${p.language} ${p.language_version} ${p.installed ? "(INSTALLED)" : ""}`,
    'json': JSON.stringify
};

exports.handler = async function({ axios }) {

    const packages = await axios.get('/api/v1/packages');

    const pkg_msg = packages.data
        .map(msg_format.color)
        .join('\n');

    console.log(pkg_msg);
};
@@ -0,0 +1,23 @@
const chalk = require('chalk');

exports.command = ['uninstall <language> [language-version]'];
exports.aliases = ['u'];
exports.describe = 'Uninstalls the named package';


const msg_format = {
    'color': p => `${p.language ? chalk.green.bold('✓') : chalk.red.bold('❌')} Uninstallation ${p.language ? "succeeded" : "failed: " + p.message}`,
    'monochrome': p => `Uninstallation ${p.language ? "succeeded" : "failed: " + p.message}`,
    'json': JSON.stringify
};

exports.handler = async function({ axios, language, languageVersion }) {
    try {
        const uninstall = await axios.delete(`/api/v1/packages/${language}/${languageVersion || '*'}`);

        console.log(msg_format.color(uninstall.data));
    } catch({ response }) {
        console.error(response.data.message);
    }
};
cli/execute
@@ -1,32 +0,0 @@
#!/usr/bin/env node

const { execute } = require('../lxc/execute.js');
const { readFileSync } = require('fs');
const languages = require('../config/languages.json');

const [languageName, sourceFile, ...args] = process.argv.slice(2);

(async () => {
    if (!languageName) {
        console.error('Provide a language name');
        return;
    }

    if (!sourceFile) {
        console.error('Provide a source file');
        return;
    }

    const source = readFileSync(sourceFile).toString();

    const language = languages.find(language => language.aliases.includes(languageName.toLowerCase()));

    if (!language) {
        console.error(`${languageName} is not supported by Piston`);
        return;
    }

    const { output } = await execute(language, source, '', args);

    console.log(output);
})();
@@ -0,0 +1,26 @@
#!/usr/bin/env node

const axios = require('axios').default;

const axios_instance = function(argv) {
    argv.axios = axios.create({
        baseURL: argv['piston-url']
    });

    return argv;
};

require('yargs')(process.argv.slice(2))
    .option('piston-url', {
        alias: ['u'],
        default: 'http://127.0.0.1:2000',
        desc: 'Piston API URL',
        string: true
    })
    .middleware(axios_instance)
    .scriptName("piston")
    .commandDir('commands')
    .demandCommand()
    .help()
    .wrap(72)
    .argv;
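A plausible local session with this CLI, assuming the API is already up at the default piston-url (test.py is a placeholder file):

    cd cli && npm i
    node index.js ppman list
    node index.js ppman install python
    node index.js run python test.py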
@@ -0,0 +1,156 @@
{
    "name": "piston-cli",
    "version": "1.0.0",
    "lockfileVersion": 1,
    "requires": true,
    "dependencies": {
        "ansi-regex": {
            "version": "5.0.0",
            "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
            "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg=="
        },
        "ansi-styles": {
            "version": "4.3.0",
            "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
            "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
            "requires": {
                "color-convert": "^2.0.1"
            }
        },
        "axios": {
            "version": "0.21.1",
            "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz",
            "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==",
            "requires": {
                "follow-redirects": "^1.10.0"
            }
        },
        "chalk": {
            "version": "4.1.0",
            "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.0.tgz",
            "integrity": "sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A==",
            "requires": {
                "ansi-styles": "^4.1.0",
                "supports-color": "^7.1.0"
            }
        },
        "cliui": {
            "version": "7.0.4",
            "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
            "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
            "requires": {
                "string-width": "^4.2.0",
                "strip-ansi": "^6.0.0",
                "wrap-ansi": "^7.0.0"
            }
        },
        "color-convert": {
            "version": "2.0.1",
            "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
            "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
            "requires": {
                "color-name": "~1.1.4"
            }
        },
        "color-name": {
            "version": "1.1.4",
            "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
            "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
        },
        "emoji-regex": {
            "version": "8.0.0",
            "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
            "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="
        },
        "escalade": {
            "version": "3.1.1",
            "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.1.tgz",
            "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="
        },
        "follow-redirects": {
            "version": "1.13.3",
            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz",
            "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA=="
        },
        "get-caller-file": {
            "version": "2.0.5",
            "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
            "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
        },
        "has-flag": {
            "version": "4.0.0",
            "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
            "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
        },
        "is-fullwidth-code-point": {
            "version": "3.0.0",
            "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
            "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
        },
        "require-directory": {
            "version": "2.1.1",
            "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
            "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I="
        },
        "string-width": {
            "version": "4.2.2",
            "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.2.tgz",
            "integrity": "sha512-XBJbT3N4JhVumXE0eoLU9DCjcaF92KLNqTmFCnG1pf8duUxFGwtP6AD6nkjw9a3IdiRtL3E2w3JDiE/xi3vOeA==",
            "requires": {
                "emoji-regex": "^8.0.0",
                "is-fullwidth-code-point": "^3.0.0",
                "strip-ansi": "^6.0.0"
            }
        },
        "strip-ansi": {
            "version": "6.0.0",
            "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
            "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
            "requires": {
                "ansi-regex": "^5.0.0"
            }
        },
        "supports-color": {
            "version": "7.2.0",
            "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
            "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
            "requires": {
                "has-flag": "^4.0.0"
            }
        },
        "wrap-ansi": {
            "version": "7.0.0",
            "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
            "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
            "requires": {
                "ansi-styles": "^4.0.0",
                "string-width": "^4.1.0",
                "strip-ansi": "^6.0.0"
            }
        },
        "y18n": {
            "version": "5.0.5",
            "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",
            "integrity": "sha512-hsRUr4FFrvhhRH12wOdfs38Gy7k2FFzB9qgN9v3aLykRq0dRcdcpz5C9FxdS2NuhOrI/628b/KSTJ3rwHysYSg=="
        },
        "yargs": {
            "version": "16.2.0",
            "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
            "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
            "requires": {
                "cliui": "^7.0.2",
                "escalade": "^3.1.1",
                "get-caller-file": "^2.0.5",
                "require-directory": "^2.1.1",
                "string-width": "^4.2.0",
                "y18n": "^5.0.5",
                "yargs-parser": "^20.2.2"
            }
        },
        "yargs-parser": {
            "version": "20.2.7",
            "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.7.tgz",
            "integrity": "sha512-FiNkvbeHzB/syOjIUxFDCnhSfzAL8R5vs40MgLFBorXACCOAEaWu0gRZl14vG8MR9AOJIZbmkjhusqBYZ3HTHw=="
        }
    }
}
@ -0,0 +1,12 @@
{
  "name": "piston-cli",
  "version": "1.0.0",
  "description": "Piston Execution Engine CLI tools",
  "main": "index.js",
  "license": "MIT",
  "dependencies": {
    "axios": "^0.21.1",
    "chalk": "^4.1.0",
    "yargs": "^16.2.0"
  }
}
@ -1,280 +0,0 @@
[
    {
        "name": "nasm",
        "aliases": ["asm", "nasm"]
    },
    {
        "name": "nasm64",
        "aliases": ["asm64", "nasm64"]
    },
    {
        "name": "awk",
        "aliases": ["awk"]
    },
    {
        "name": "bash",
        "aliases": ["bash", "sh"]
    },
    {
        "name": "brainfuck",
        "aliases": ["bf", "brainfuck"]
    },
    {
        "name": "c",
        "aliases": ["c"]
    },
    {
        "name": "clojure",
        "aliases": ["clojure", "clj"]
    },
    {
        "name": "crystal",
        "aliases": ["crystal", "cr"]
    },
    {
        "name": "lisp",
        "aliases": ["lisp", "commonlisp", "clisp", "cl"]
    },
    {
        "name": "csharp",
        "aliases": ["c#", "cs", "csharp"]
    },
    {
        "name": "cpp",
        "aliases": ["c++", "cpp", "cc", "cxx"]
    },
    {
        "name": "d",
        "aliases": ["dlang", "d"]
    },
    {
        "name": "deno",
        "aliases": ["deno", "denojs", "denots"]
    },
    {
        "name": "dash",
        "aliases": ["dash"]
    },
    {
        "name": "ruby",
        "aliases": ["duby", "rb", "ruby"]
    },
    {
        "name": "emacs",
        "aliases": ["el", "elisp", "emacs"]
    },
    {
        "name": "elixir",
        "aliases": ["elixir", "exs"]
    },
    {
        "name": "haskell",
        "aliases": ["haskell", "hs"]
    },
    {
        "name": "go",
        "aliases": ["go", "golang"]
    },
    {
        "name": "java",
        "aliases": ["java"]
    },
    {
        "name": "lolcode",
        "aliases": ["lolcode"]
    },
    {
        "name": "nim",
        "aliases": ["nim"]
    },
    {
        "name": "node",
        "aliases": ["javascript", "js", "node", "node.js"]
    },
    {
        "name": "jelly",
        "aliases": ["jelly"]
    },
    {
        "name": "julia",
        "aliases": ["jl", "julia"]
    },
    {
        "name": "kotlin",
        "aliases": ["kotlin", "kt"]
    },
    {
        "name": "lua",
        "aliases": ["lua"]
    },
    {
        "name": "paradoc",
        "aliases": ["paradoc"]
    },
    {
        "name": "perl",
        "aliases": ["perl", "pl"]
    },
    {
        "name": "php",
        "aliases": ["php", "php3", "php4", "php5"]
    },
    {
        "name": "prolog",
        "aliases": ["prolog", "plg"]
    },
    {
        "name": "python3",
        "aliases": ["py", "py3", "python", "python3"]
    },
    {
        "name": "python2",
        "aliases": ["python2", "py2"]
    },
    {
        "name": "rust",
        "aliases": ["rs", "rust"]
    },
    {
        "name": "scala",
        "aliases": ["scala", "sc"]
    },
    {
        "name": "swift",
        "aliases": ["swift"]
    },
    {
        "name": "typescript",
        "aliases": ["ts", "typescript"]
    },
    {
        "name": "zig",
        "aliases": ["zig"]
    },
    {
        "name": "osabie",
        "aliases": ["osabie", "05AB1E", "osable", "usable"]
    }
]
@ -1,6 +0,0 @@
#!/bin/bash

python3 -m pip install pyyaml
python3 configure.py
distrobuilder build-lxc build.yaml
@ -1,13 +0,0 @@
import yaml

with open('piston.yaml') as dbc:
    with open('install_script.sh') as install_script_file:
        with open('build.yaml', 'w+') as distrobuilder_config_file_new:
            distrobuilder_config = yaml.safe_load(dbc)
            distrobuilder_config['actions'].append({
                'trigger': 'post-packages',
                'action': install_script_file.read(),
            })
            yaml.dump(distrobuilder_config, distrobuilder_config_file_new)
@ -1,343 +0,0 @@
#!/bin/bash
#echo "Don't run this on your system!" && exit 0

# install all necessary piston dependencies
echo 'source /opt/.profile' >> /opt/.bashrc
echo 'export HOME=/opt' >> /opt/.profile
echo 'export TERM=linux' >> /opt/.profile
echo 'export PATH=$PATH:/opt/.local/bin' >> /opt/.profile
export HOME=/opt
export TERM=linux
sed -i 's/\/root/\/opt/' /etc/passwd
sed -i \
    's/http:\/\/archive.ubuntu.com\/ubuntu/http:\/\/mirror.math.princeton.edu\/pub\/ubuntu/' \
    /etc/apt/sources.list
apt-get update
apt-get install -y \
    nano wget build-essential pkg-config libxml2-dev \
    libsqlite3-dev mono-complete curl cmake libpython2.7-dev \
    ruby libtinfo-dev unzip git openssl libssl-dev sbcl libevent-dev \
    ninja-build maven

# install python2
# final binary: /opt/python2/Python-2.7.17/python
# get version: /opt/python2/Python-2.7.17/python -V
cd /opt && mkdir python2 && cd python2
wget https://www.python.org/ftp/python/2.7.17/Python-2.7.17.tar.xz
unxz Python-2.7.17.tar.xz
tar -xf Python-2.7.17.tar
cd Python-2.7.17
./configure
# open Modules/Setup and uncomment zlib line
make
echo 'export PATH=$PATH:/opt/python2/Python-2.7.17' >> /opt/.profile
source /opt/.profile

# install python3
# final binary: /opt/python3/Python-3.9.1/python
# get version: /opt/python3/Python-3.9.1/python -V
cd /opt && mkdir python3 && cd python3
wget https://www.python.org/ftp/python/3.9.1/Python-3.9.1.tar.xz
unxz Python-3.9.1.tar.xz
tar -xf Python-3.9.1.tar
cd Python-3.9.1
./configure
make
ln -s python python3.9
echo 'export PATH=$PATH:/opt/python3/Python-3.9.1' >> /opt/.profile
source /opt/.profile

# install paradoc
# this is not a binary, it is a python module
# therefore it cannot be run directly as it requires python3 to be installed
cd /opt && mkdir paradoc && cd paradoc
git clone https://github.com/betaveros/paradoc.git

# install node.js
# final binary: /opt/nodejs/node-v12.16.1-linux-x64/bin/node
# get version: /opt/nodejs/node-v12.16.1-linux-x64/bin/node -v
cd /opt && mkdir nodejs && cd nodejs
wget https://nodejs.org/dist/v12.16.1/node-v12.16.1-linux-x64.tar.xz
unxz node-v12.16.1-linux-x64.tar.xz
tar -xf node-v12.16.1-linux-x64.tar
echo 'export PATH=$PATH:/opt/nodejs/node-v12.16.1-linux-x64/bin' >> /opt/.profile
source /opt/.profile

# install typescript
# final binary: /opt/nodejs/node-v12.16.1-linux-x64/bin/tsc
# get version: /opt/nodejs/node-v12.16.1-linux-x64/bin/tsc -v
/opt/nodejs/node-v12.16.1-linux-x64/bin/npm i -g typescript

# install golang
# final binary: /opt/go/go/bin/go
# get version: /opt/go/go/bin/go version
cd /opt && mkdir go && cd go
wget https://dl.google.com/go/go1.14.1.linux-amd64.tar.gz
tar -xzf go1.14.1.linux-amd64.tar.gz
echo 'export PATH=$PATH:/opt/go/go/bin' >> /opt/.profile
echo 'export GOROOT=/opt/go/go' >> /opt/.profile
echo 'export GOCACHE=/tmp' >> /opt/.profile
source /opt/.profile

# install php
# final binary: /usr/local/bin/php
# get version: /usr/local/bin/php -v
cd /opt && mkdir php && cd php
wget https://www.php.net/distributions/php-8.0.0.tar.gz
tar -xzf php-8.0.0.tar.gz
cd php-8.0.0
./configure
make
make install

# install rust
# final binary: /usr/local/bin/rustc
# get version: /usr/local/bin/rustc --version
cd /opt && mkdir rust && cd rust
wget https://static.rust-lang.org/dist/rust-1.49.0-x86_64-unknown-linux-gnu.tar.gz
tar -xzf rust-1.49.0-x86_64-unknown-linux-gnu.tar.gz
cd rust-1.49.0-x86_64-unknown-linux-gnu
./install.sh

# install scala
# final binary: /opt/scala/scala3-3.0.0-M3/bin/scala
# get version: /opt/scala/scala3-3.0.0-M3/bin/scalac -version
cd /opt && mkdir scala && cd scala
wget https://github.com/lampepfl/dotty/releases/download/3.0.0-M3/scala3-3.0.0-M3.tar.gz
tar -xzf scala3-3.0.0-M3.tar.gz
echo 'export PATH=$PATH:/opt/scala/scala3-3.0.0-M3/bin' >> /opt/.profile
source /opt/.profile

# install swift
# final binary: /opt/swift/swift-5.1.5-RELEASE-ubuntu18.04/usr/bin/swift
# get version: /opt/swift/swift-5.1.5-RELEASE-ubuntu18.04/usr/bin/swift --version
cd /opt && mkdir swift && cd swift
wget https://swift.org/builds/swift-5.1.5-release/ubuntu1804/swift-5.1.5-RELEASE/swift-5.1.5-RELEASE-ubuntu18.04.tar.gz
tar -xzf swift-5.1.5-RELEASE-ubuntu18.04.tar.gz
echo 'export PATH=$PATH:/opt/swift/swift-5.1.5-RELEASE-ubuntu18.04/usr/bin' >> /opt/.profile
source /opt/.profile

# install nasm
# final binary: /opt/nasm/nasm-2.14.02/nasm
# get version: /opt/nasm/nasm-2.14.02/nasm -v
cd /opt && mkdir nasm && cd nasm
wget https://www.nasm.us/pub/nasm/releasebuilds/2.14.02/nasm-2.14.02.tar.gz
tar -xzf nasm-2.14.02.tar.gz
cd nasm-2.14.02
./configure
make
echo 'export PATH=$PATH:/opt/nasm/nasm-2.14.02' >> /opt/.profile
source /opt/.profile

# install java
# final binary: /opt/java/jdk-14/bin/java
# get version: /opt/java/jdk-14/bin/java -version
cd /opt && mkdir java && cd java
wget https://download.java.net/java/GA/jdk14/076bab302c7b4508975440c56f6cc26a/36/GPL/openjdk-14_linux-x64_bin.tar.gz
tar -xzf openjdk-14_linux-x64_bin.tar.gz
echo 'export PATH=$PATH:/opt/java/jdk-14/bin' >> /opt/.profile
# Scala will complain if JAVA_HOME isn't set
echo 'export JAVA_HOME=/opt/java/jdk-14' >> /opt/.profile
source /opt/.profile

# install jelly
cd /opt && mkdir jelly && cd jelly
wget https://github.com/DennisMitchell/jellylanguage/archive/master.zip
unzip master.zip
cd jellylanguage-master
python3.8 -m pip install .
sed -i 's/\/usr\/local\/bin\/python3.8/\/opt\/python3\/Python-3.8.2\/python3.8/' /usr/local/bin/jelly

# install julia
# final binary: /opt/julia/julia-1.5.0/bin/julia
# get version: /opt/julia/julia-1.5.0/bin/julia --version
cd /opt && mkdir julia && cd julia
wget https://julialang-s3.julialang.org/bin/linux/x64/1.5/julia-1.5.0-linux-x86_64.tar.gz
tar -xzf julia-1.5.0-linux-x86_64.tar.gz
echo 'export PATH=$PATH:/opt/julia/julia-1.5.0/bin' >> /opt/.profile
source /opt/.profile

# install kotlin
# final binary: /opt/kotlinc/bin/kotlinc
# get version: /opt/kotlinc/bin/kotlinc -version
cd /opt
wget https://github.com/JetBrains/kotlin/releases/download/v1.4.10/kotlin-compiler-1.4.10.zip
unzip kotlin-compiler-1.4.10.zip
rm kotlin-compiler-1.4.10.zip
echo 'export PATH=$PATH:/opt/kotlinc/bin' >> /opt/.profile
source /opt/.profile

# install elixir and erlang
# final binary: /opt/elixir/bin/elixir
# get version: /opt/elixir/bin/elixir --version
# erlang
cd /opt && mkdir erlang && cd erlang
wget http://erlang.org/download/otp_src_23.0.tar.gz
gunzip -c otp_src_23.0.tar.gz | tar xf -
cd otp_src_23.0 && ./configure
make
echo 'export PATH=$PATH:/opt/erlang/otp_src_23.0/bin' >> /opt/.profile
source /opt/.profile
# elixir
cd /opt && mkdir elixir && cd elixir
wget https://github.com/elixir-lang/elixir/releases/download/v1.10.3/Precompiled.zip
mkdir elixir-1.10.3 && unzip Precompiled.zip -d elixir-1.10.3/
echo 'export PATH=$PATH:/opt/elixir/elixir-1.10.3/bin' >> /opt/.profile
source /opt/.profile

# install emacs
# final binary: /opt/emacs/emacs-26.3/src/emacs
# get version: /opt/emacs/emacs-26.3/src/emacs --version
cd /opt && mkdir emacs && cd emacs
wget https://mirrors.ocf.berkeley.edu/gnu/emacs/emacs-26.3.tar.xz
tar -xf emacs-26.3.tar.xz
rm emacs-26.3.tar.xz
cd emacs-26.3
./configure --with-gnutls=no
make
echo 'export PATH=$PATH:/opt/emacs/emacs-26.3/src' >> /opt/.profile
source /opt/.profile

# install lua
# final binary: /opt/lua/lua54/src/lua
# get version: /opt/lua/lua54/src/lua -v
cd /opt && mkdir lua && cd lua
wget https://sourceforge.net/projects/luabinaries/files/5.4.0/Docs%20and%20Sources/lua-5.4.0_Sources.tar.gz/download
tar -xzf download
cd lua54
make
echo 'export PATH=$PATH:/opt/lua/lua54/src' >> /opt/.profile
source /opt/.profile

# install haskell
# final binary: /usr/bin/ghc
# get version: /usr/bin/ghc --version
apt install -y ghc

# install deno
# final binary: /opt/.deno/bin/deno
# get version: /opt/.deno/bin/deno --version
cd /opt && mkdir deno && cd deno
curl -fsSL https://deno.land/x/install/install.sh | sh
echo 'export DENO_INSTALL="/opt/.deno"' >> /opt/.profile
echo 'export PATH="$DENO_INSTALL/bin:$PATH"' >> /opt/.profile
source /opt/.profile

# install brainfuck
cd /opt && mkdir bf && cd bf
git clone https://github.com/texus/Brainfuck-interpreter
cd Brainfuck-interpreter
echo 'export PATH=$PATH:/opt/bf/Brainfuck-interpreter' >> /opt/.profile
source /opt/.profile

# install crystal
# final binary: /opt/crystal/crystal-0.35.1-1/bin/crystal
# get version: /opt/crystal/crystal-0.35.1-1/bin/crystal -v
cd /opt && mkdir crystal && cd crystal
wget https://github.com/crystal-lang/crystal/releases/download/0.35.1/crystal-0.35.1-1-linux-x86_64.tar.gz
tar -xzf crystal-0.35.1-1-linux-x86_64.tar.gz
echo 'export PATH="$PATH:/opt/crystal/crystal-0.35.1-1/bin:$PATH"' >> /opt/.profile
source /opt/.profile

# install d
# final binary: /opt/d/dmd2/linux/bin64/dmd
# get version: /opt/d/dmd2/linux/bin64/dmd --version
cd /opt && mkdir d && cd d
wget http://downloads.dlang.org/releases/2.x/2.095.0/dmd.2.095.0.linux.tar.xz
unxz dmd.2.095.0.linux.tar.xz
tar -xf dmd.2.095.0.linux.tar
echo 'export PATH=$PATH:/opt/d/dmd2/linux/bin64' >> /opt/.profile
source /opt/.profile

# install zig
# final binary: /opt/zig/zig
# get version: /opt/zig/zig version
cd /opt && mkdir zig && cd zig
wget https://ziglang.org/download/0.7.1/zig-linux-x86_64-0.7.1.tar.xz
tar -xf zig-linux-x86_64-0.7.1.tar.xz
mv zig-linux-x86_64-0.7.1 zig
rm zig-linux-x86_64-0.7.1.tar.xz
echo 'export PATH=$PATH:/opt/zig/zig' >> /opt/.profile
source /opt/.profile

# install nim
# final binary: /opt/nim/bin/nim
# get version: /opt/nim/bin/nim -v
cd /opt && mkdir nim && cd nim
wget https://nim-lang.org/download/nim-1.4.0-linux_x64.tar.xz
unxz nim-1.4.0-linux_x64.tar.xz
tar -xf nim-1.4.0-linux_x64.tar
cd nim-1.4.0
./install.sh /opt
echo 'export PATH=$PATH:/opt/nim/bin' >> /opt/.profile
source /opt/.profile

# install 05AB1E
# final binary: /opt/05AB1E/05AB1E/osabie
# requires Elixir to install
cd /opt && mkdir 05AB1E && cd 05AB1E
git clone https://github.com/Adriandmen/05AB1E.git
cd 05AB1E
mix local.hex --force
mix deps.get --force
MIX_ENV=prod mix escript.build --force
echo 'export PATH=$PATH:/opt/05AB1E/05AB1E' >> /opt/.profile
source /opt/.profile

# install prolog
# final binary: /opt/swipl/swipl-<version>/build/src/swipl
cd /opt && mkdir swipl && cd swipl
SUB_DIR=swipl-8.2.4
wget https://www.swi-prolog.org/download/stable/src/$SUB_DIR.tar.gz
tar -xf $SUB_DIR.tar.gz
rm $SUB_DIR.tar.gz
cd $SUB_DIR
mkdir build
cd build
cmake -DSWIPL_PACKAGES_JAVA=OFF -DSWIPL_PACKAGES_X=OFF -DMULTI_THREADED=OFF -DINSTALL_DOCUMENTATION=OFF -G Ninja ..
ninja
echo "export PATH=\$PATH:/opt/swipl/$SUB_DIR/build/src" >> /opt/.profile
source /opt/.profile

# install lolcode
# final binary: /opt/lolcode/bin/lci
cd /opt
git clone https://github.com/justinmeza/lci.git lolcode
cd lolcode
mkdir bin
cd bin
cmake ..
make
echo 'export PATH=$PATH:/opt/lolcode/bin' >> /opt/.profile
source /opt/.profile

# install clojure
# final binary: /opt/clojure/bin/clojure
# get version: /opt/clojure/bin/clojure -version
cd /opt && mkdir clojure && cd clojure
git clone https://github.com/clojure/clojure.git
cd clojure
mvn -Plocal -Dmaven.test.skip=true package

# create runner users and apply limits
for i in {1..150}; do
    useradd -M runner$i
    usermod -d /tmp runner$i
    echo "runner$i soft nproc 64" >> /etc/security/limits.conf
    echo "runner$i hard nproc 64" >> /etc/security/limits.conf
    echo "runner$i soft nofile 2048" >> /etc/security/limits.conf
    echo "runner$i hard nofile 2048" >> /etc/security/limits.conf
done

# remove any lingering write access to others
cd /opt
chown -R root: *
chmod -R o-w *

# cleanup
rm -rf /home/ubuntu
chmod 777 /tmp

# disable cron
systemctl stop cron
systemctl disable cron
@ -1,355 +0,0 @@
image:
  name: ubuntu-bionic-x86_64-piston
  distribution: ubuntu
  release: bionic
  description: |-
    Ubuntu {{ image.release }} preconfigured for Piston
  architecture: x86_64

source:
  downloader: debootstrap
  same_as: bionic
  url: http://archive.ubuntu.com/ubuntu
  keyserver: keyserver.ubuntu.com
  keys:
    - '0x790BC7277767219C42C86F933B4FE6ACC0B21F32'
    - '0xf6ecb3762474eda9d21b7022871920d1991bc93c'

targets:
  lxc:
    create-message: |-
      You just created an {{ image.description }} container.
      To enable SSH, run: apt install openssh-server
      No default root or user password are set by LXC.
    config:
      - type: all
        before: 5
        content: |-
          lxc.include = LXC_TEMPLATE_CONFIG/ubuntu.common.conf
      - type: user
        before: 5
        content: |-
          lxc.include = LXC_TEMPLATE_CONFIG/ubuntu.userns.conf
      - type: all
        after: 4
        content: |-
          lxc.include = LXC_TEMPLATE_CONFIG/common.conf
          # For Ubuntu 14.04
          lxc.mount.entry = /sys/kernel/debug sys/kernel/debug none bind,optional 0 0
          lxc.mount.entry = /sys/kernel/security sys/kernel/security none bind,optional 0 0
          lxc.mount.entry = /sys/fs/pstore sys/fs/pstore none bind,optional 0 0
          lxc.mount.entry = mqueue dev/mqueue mqueue rw,relatime,create=dir,optional 0 0
      - type: user
        after: 4
        content: |-
          lxc.include = LXC_TEMPLATE_CONFIG/userns.conf
          # For Ubuntu 14.04
          lxc.mount.entry = /sys/firmware/efi/efivars sys/firmware/efi/efivars none bind,optional 0 0
          lxc.mount.entry = /proc/sys/fs/binfmt_misc proc/sys/fs/binfmt_misc none bind,optional 0 0
      - type: all
        content: |-
          lxc.arch = {{ image.architecture_personality }}

files:
  - path: /etc/hostname
    generator: hostname

  - path: /etc/hosts
    generator: hosts

  - path: /etc/resolvconf/resolv.conf.d/original
    generator: remove

  - path: /etc/resolvconf/resolv.conf.d/tail
    generator: remove

  - path: /etc/machine-id
    generator: dump

  - path: /var/lib/dbus/machine-id
    generator: remove

  - path: /etc/netplan/10-lxc.yaml
    generator: dump
    content: |-
      network:
        version: 2
        ethernets:
          eth0:
            dhcp4: true
            dhcp-identifier: mac
    releases:
      - bionic
      - eoan
      - focal
      - groovy
    types:
      - container
    variants:
      - default

  - path: /etc/network/interfaces
    generator: dump
    content: |-
      # This file describes the network interfaces available on your system
      # and how to activate them. For more information, see interfaces(5).
      # The loopback network interface
      auto lo
      iface lo inet loopback
      auto eth0
      iface eth0 inet dhcp
      source /etc/network/interfaces.d/*.cfg
    releases:
      - trusty
      - xenial
    types:
      - container

  - path: /etc/netplan/10-lxc.yaml
    generator: dump
    content: |-
      network:
        version: 2
        ethernets:
          enp5s0:
            dhcp4: true
            dhcp-identifier: mac
    releases:
      - bionic
      - eoan
      - focal
      - groovy
    types:
      - vm
    variants:
      - default

  - path: /etc/network/interfaces
    generator: dump
    content: |-
      # This file describes the network interfaces available on your system
      # and how to activate them. For more information, see interfaces(5).
      # The loopback network interface
      auto lo
      iface lo inet loopback
      auto enp5s0
      iface enp5s0 inet dhcp
      source /etc/network/interfaces.d/*.cfg
    releases:
      - trusty
      - xenial
    types:
      - vm

  - path: /etc/init/lxc-tty.conf
    generator: upstart-tty
    releases:
      - trusty
    types:
      - container

  - name: meta-data
    generator: cloud-init
    variants:
      - cloud

  - name: network-config
    generator: cloud-init
    variants:
      - cloud

  - name: user-data
    generator: cloud-init
    variants:
      - cloud

  - name: vendor-data
    generator: cloud-init
    variants:
      - cloud

  - name: ext4
    generator: fstab
    types:
      - vm

  - name: lxd-agent
    generator: lxd-agent
    types:
      - vm

  - path: /etc/default/grub.d/50-lxd.cfg
    generator: dump
    content: |-
      GRUB_RECORDFAIL_TIMEOUT=0
      GRUB_TIMEOUT=0
      GRUB_CMDLINE_LINUX_DEFAULT="${GRUB_CMDLINE_LINUX_DEFAULT} console=tty1 console=ttyS0"
      GRUB_TERMINAL=console
    types:
      - vm

  - path: /etc/sudoers.d/90-lxd
    generator: dump
    mode: '0440'
    content: |-
      # User rules for ubuntu
      ubuntu ALL=(ALL) NOPASSWD:ALL
    variants:
      - default

packages:
  manager: apt
  update: true
  cleanup: true
  sets:
    - packages:
        - apt-transport-https
        - fuse
        - language-pack-en
        - openssh-client
        - vim
      action: install

    - packages:
        - cloud-init
      action: install
      variants:
        - cloud

    - packages:
        - acpid
      action: install
      architectures:
        - amd64
        - arm64
      types:
        - vm

    - packages:
        - grub-efi-amd64-signed
        - shim-signed
      action: install
      architectures:
        - amd64
      types:
        - vm

    - packages:
        - grub-efi-arm64-signed
      action: install
      architectures:
        - arm64
      types:
        - vm

    - packages:
        - shim-signed
      action: install
      architectures:
        - arm64
      releases:
        - disco
        - eoan
        - focal
        - groovy
      types:
        - vm

    - packages:
        - linux-virtual-hwe-16.04
      action: install
      releases:
        - xenial
      types:
        - vm

    - packages:
        - linux-virtual
      action: install
      releases:
        - bionic
        - eoan
        - focal
        - groovy
      types:
        - vm

    - packages:
        - os-prober
      action: remove
      types:
        - vm

repositories:
  - name: sources.list
    url: |-
      deb http://archive.ubuntu.com/ubuntu {{ image.release }} main restricted universe multiverse
      deb http://archive.ubuntu.com/ubuntu {{ image.release }}-updates main restricted universe multiverse
      deb http://security.ubuntu.com/ubuntu {{ image.release }}-security main restricted universe multiverse
    architectures:
      - amd64
      - i386

  - name: sources.list
    url: |-
      deb http://ports.ubuntu.com/ubuntu-ports {{ image.release }} main restricted universe multiverse
      deb http://ports.ubuntu.com/ubuntu-ports {{ image.release }}-updates main restricted universe multiverse
      deb http://ports.ubuntu.com/ubuntu-ports {{ image.release }}-security main restricted universe multiverse
    architectures:
      - armhf
      - arm64
      - powerpc
      - powerpc64
      - ppc64el

actions:
  - trigger: post-update
    action: |-
      #!/bin/sh
      set -eux

      # Create the ubuntu user account
      getent group sudo >/dev/null 2>&1 || groupadd --system sudo
      useradd --create-home -s /bin/bash -G sudo -U ubuntu
    variants:
      - default

  - trigger: post-packages
    action: |-
      #!/bin/sh
      set -eux

      # Enable systemd-networkd
      systemctl enable systemd-networkd
    releases:
      - bionic
      - eoan
      - focal
      - groovy

  - trigger: post-packages
    action: |-
      #!/bin/sh
      set -eux

      # Make sure the locale is built and functional
      locale-gen en_US.UTF-8
      update-locale LANG=en_US.UTF-8

      # Cleanup underlying /run
      mount -o bind / /mnt
      rm -rf /mnt/run/*
      umount /mnt

      # Cleanup temporary shadow paths
      rm /etc/*-

  - trigger: post-files
    action: |-
      #!/bin/sh
      set -eux

      TARGET="x86_64"
      [ "$(uname -m)" = "aarch64" ] && TARGET="arm64"

      update-grub
      grub-install --uefi-secure-boot --target="${TARGET}-efi" --no-nvram --removable
      update-grub
      sed -i "s#root=[^ ]*#root=/dev/sda2#g" /boot/grub/grub.cfg
    types:
      - vm

mappings:
  architecture_map: debian
@ -1,7 +0,0 @@
# LXC Container Build

Requires: `lxc`, `lxc-net`, `packer` (Hashicorp Packer)

To build: `packer build -var 'apt_mirror=[apt mirror]' -var 'make_threads=[-j flag]' piston.pkr.hcl`

After roughly 30 minutes (on an i7-4790k), you should have an image built.
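For example, a concrete invocation might look like the sketch below. The mirror URL and `-j` level are illustrative values, not prescribed by this repo (the Princeton mirror is the one the install script itself substitutes in):

```sh
# Illustrative only: substitute your preferred apt mirror and parallelism.
packer build \
    -var 'apt_mirror=http://mirror.math.princeton.edu/pub/ubuntu' \
    -var 'make_threads=-j4' \
    piston.pkr.hcl
```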
@ -0,0 +1,22 @@
version: '3.2'

services:
    api:
        build: api
        container_name: piston_api
        cap_add:
            - CAP_SYS_ADMIN
        restart: always
        ports:
            - 2000:2000
        volumes:
            - ./data/piston:/piston
        tmpfs:
            - /piston/jobs:exec

    repo: # Local testing of packages
        build: repo
        container_name: piston_repo
        command: ['dart-2.12.1'] # Only build dart
        volumes:
            - .:/piston
@ -0,0 +1,13 @@
version: '3.2'

services:
    api:
        image: ghcr.io/engineer-man/piston
        container_name: piston_api
        restart: always
        ports:
            - 2000:2000
        volumes:
            - ./data/piston:/piston
        tmpfs:
            - /piston/jobs:exec
license
@ -1,19 +0,0 @@
Copyright (c) 2018 Brian Seymour, EMKC Contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
lxc/execute
@ -1,55 +0,0 @@
#!/usr/bin/env bash

dir="$( cd "$( dirname "$0" )" && pwd )"

touch $dir/lockfile

if [ -z "$1" ] || [ -z "$2" ]; then
    echo "invalid args"
    exit
fi

language=$1
id=$2

basepath="/var/lib/lxc/piston/rootfs"

# process incrementor
exec 200>$dir/lockfile
flock 200

touch $dir/i
runner=$(cat $dir/i)
let 'runner = runner % 150 + 1'

echo $runner > $dir/i
exec 200>&-

# prevent users from spying on each other
lxc-attach --clear-env -n piston -- \
    /bin/bash -c "
        chown runner$runner: -R /tmp/$id
        chmod 700 /tmp/$id
    " > /dev/null 2>&1

# runner
timeout -s KILL 20 \
    lxc-attach --clear-env -n piston -- \
    /bin/bash -l -c "runuser runner$runner /exec/$language $id"

# process janitor
lxc-attach --clear-env -n piston -- \
    /bin/bash -c "
        while pgrep -u runner$runner > /dev/null
        do
            pkill -u runner$runner --signal SIGKILL
        done

        find /tmp -user runner$runner -delete
        find /var/tmp -user runner$runner -delete
        find /var/lock -user runner$runner -delete
        find /dev/shm -user runner$runner -delete
        find /run/lock -user runner$runner -delete
    " > /dev/null 2>&1 &

rm -rf $basepath/tmp/$id
@ -1,56 +0,0 @@
const { writeFileSync, unlinkSync, mkdirSync } = require('fs');
const { spawn } = require('child_process');

const OUTPUT_LIMIT = 65535;
const LXC_ROOT = '/var/lib/lxc/piston/rootfs';

function execute(language, source, stdin = '', args = []) {
    return new Promise(resolve => {
        const id = new Date().getTime() + '_' + Math.floor(Math.random() * 10000000);

        mkdirSync(`${LXC_ROOT}/tmp/${id}`);
        writeFileSync(`${LXC_ROOT}/tmp/${id}/code.code`, source);
        writeFileSync(`${LXC_ROOT}/tmp/${id}/stdin.stdin`, stdin);
        writeFileSync(`${LXC_ROOT}/tmp/${id}/args.args`, args.join('\n'));

        const process = spawn(__dirname + '/execute', [
            language.name,
            id,
        ]);

        let stdout = '';
        let stderr = '';
        let output = '';

        process.stderr.on('data', chunk => {
            if (stderr.length >= OUTPUT_LIMIT) return;

            stderr += chunk;
            output += chunk;
        });

        process.stdout.on('data', chunk => {
            if (stdout.length >= OUTPUT_LIMIT) return;

            stdout += chunk;
            output += chunk;
        });

        process.on('exit', code => {
            stderr = stderr.substring(0, OUTPUT_LIMIT);
            stdout = stdout.substring(0, OUTPUT_LIMIT);
            output = output.substring(0, OUTPUT_LIMIT);

            resolve({
                stdout,
                stderr,
                output,
                ran: code === 0,
            });
        });
    });
}

module.exports = {
    execute,
};
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' awk -f code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
xargs -a args.args -d '\n' timeout -s KILL 3 bash code.code < stdin.stdin
@ -1,36 +0,0 @@
#!/bin/bash

cd /tmp/$1
sedarg="\
s/+/P/g;\
s/-/M/g;\
s/>/++p;/g;\
s/</--p;/g;\
s/P/++*p;/g;\
s/M/--*p;/g;\
s/\\./putchar(*p);/g;\
s/,/*p=(c=getchar())==EOF?0:c;/g;\
s/\\[/while(*p){/g;\
s/]/}/g\
"

# compilation
MEMSIZE=15
cat <<EOF > code.c
#include <stdio.h>

char mem[1<<$MEMSIZE];
char *p = mem + (1<<$((MEMSIZE - 1)));
int c;

int main() {
    $(timeout -s KILL 3 sed 's/[^][<>.,+-]//g' code.code | timeout -s KILL 3 sed $sedarg)
}
EOF
timeout -s KILL 3 gcc -std=c11 -o binary code.c

# Merging args.args and stdin.stdin for emkc challenges
cat stdin.stdin >> args.args

# execution
timeout -s KILL 3 ./binary < args.args
@ -1,5 +0,0 @@
#!/usr/bin/bash

cd /tmp/$1
timeout -s KILL 10 gcc -std=c11 -o binary -x c code.code -lm
timeout -s KILL 3 xargs -a args.args -d '\n' ./binary < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 10 xargs -a args.args -d '\n' java -jar /opt/clojure/clojure/clojure.jar code.code < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 10 g++ -std=c++17 -o binary -x c++ code.code
timeout -s KILL 3 xargs -a args.args -d '\n' ./binary < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 10 crystal build code.code
timeout -s KILL 3 xargs -a args.args -d '\n' ./code < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
mcs $(echo code.code | sed 's/\///') -nowarn:0219 -out:binary
timeout -s KILL 3 xargs -a args.args -d '\n' mono binary < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code code.d
timeout -s KILL 10 dmd code.d
timeout -s KILL 3 xargs -a args.args -d '\n' ./code
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
xargs -a args.args -d '\n' timeout -s KILL 3 dash code.code < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
export NO_COLOR=true
timeout -s KILL 3 xargs -a args.args -d '\n' deno run code.code < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

export LC_ALL="en_US.UTF-8"

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' elixir code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' emacs -Q --script code.code < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code interim.go
go build interim.go
timeout -s KILL 3 xargs -a args.args -d '\n' ./interim < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code code.hs
ghc -dynamic -o binary code.hs > /dev/null 2>&1
timeout -s KILL 3 xargs -a args.args -d '\n' ./binary < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code interim.java
timeout -s KILL 10 xargs -a args.args -d '\n' java interim.java < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' jelly fu code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' julia code.code < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code code.kt
kotlinc code.kt -include-runtime -d code.jar
timeout -s KILL 3 xargs -a args.args -d '\n' java -jar code.jar < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' sbcl --script code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 lci code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' lua code.code < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
nasm -f elf32 -o binary.o code.code
ld -m elf_i386 binary.o -o binary
timeout -s KILL 3 xargs -a args.args -d '\n' ./binary < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
nasm -f elf64 -o binary.o code.code
ld -m elf_x86_64 binary.o -o binary
timeout -s KILL 3 xargs -a args.args -d '\n' ./binary < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 10 nim --hints:off c code.code
timeout -s KILL 3 xargs -a args.args -d '\n' ./code
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' node code.code < stdin.stdin
@ -1,8 +0,0 @@
#!/bin/bash

# osabie uses Elixir, which expects UTF-8 native encoding
export LC_ALL="en_US.UTF-8"

# osabie will break if you try using it with xargs
cd /tmp/$1
timeout -s KILL 3 osabie code.code < args.args
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
export PYTHONPATH=$PYTHONPATH:/opt/paradoc
timeout -s KILL 3 python3.8 -m paradoc code.code < args.args
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' perl code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' php code.code < stdin.stdin
@ -1,13 +0,0 @@
#!/bin/bash

cd /tmp/$1

sed 's/^.*$/:- forall((Goal = (\0), call(Goal)), (write(Goal), nl))./' stdin.stdin |
    cat code.code - > code.pl

if [ -s args.args ]
then
    echo ":- main($(jq --raw-input -c --slurp 'split("\n")' args.args))." >> code.pl
fi

timeout -s KILL 3 swipl -g true -t halt code.pl
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' python code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' python3.8 code.code < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' ruby code.code < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 10 rustc -o binary code.code
timeout -s KILL 3 xargs -a args.args -d '\n' ./binary < stdin.stdin
@ -1,5 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code interim.scala
timeout -s KILL 10 xargs -a args.args -d '\n' scala -color never interim.scala < stdin.stdin
@ -1,4 +0,0 @@
#!/bin/bash

cd /tmp/$1
timeout -s KILL 3 xargs -a args.args -d '\n' swift code.code < stdin.stdin
@ -1,8 +0,0 @@
#!/bin/bash

cd /tmp/$1
mv code.code interim.ts
tsc interim.ts
rm -f interim.ts
mv interim.js code.code
timeout -s KILL 3 xargs -a args.args -d '\n' node code.code < stdin.stdin
@ -1,6 +0,0 @@
#!/bin/bash

cd /tmp/$1
cp code.code main.zig
timeout -s KILL 10 zig build-exe main.zig && \
timeout -s KILL 3 xargs -a args.args -d '\n' ./main
@ -1,9 +0,0 @@
#!/usr/bin/env bash

mkdir -p /var/lib/lxc/piston/rootfs/exec
rm -f /var/lib/lxc/piston/rootfs/exec/*
cp -f executors/* /var/lib/lxc/piston/rootfs/exec
chmod 555 /var/lib/lxc/piston/rootfs/exec/*
chown -R root:root /var/lib/lxc/piston/rootfs/exec

lxc-start -n piston -d
@ -1,76 +0,0 @@
#!/usr/bin/env bash

echo -n 'testing awk = '
../../cli/execute awk awk.awk
echo -n 'testing bash = '
../../cli/execute bash bash.sh
echo -n 'testing c = '
../../cli/execute c c.c
echo -n 'testing clojure = '
../../cli/execute clojure clojure.clj
echo -n 'testing cpp = '
../../cli/execute cpp cpp.cpp
echo -n 'testing crystal = '
../../cli/execute crystal crystal.cr
echo -n 'testing csharp = '
../../cli/execute csharp csharp.cs
echo -n 'testing d = '
../../cli/execute d d.d
echo -n 'testing dash = '
../../cli/execute dash dash.sh
echo -n 'testing deno = '
../../cli/execute deno deno.ts
echo -n 'testing elixir = '
../../cli/execute elixir elixir.exs
echo -n 'testing emacs = '
../../cli/execute emacs emacs.el
echo -n 'testing go = '
../../cli/execute go go.go
echo -n 'testing haskell = '
../../cli/execute haskell haskell.hs
echo -n 'testing java = '
../../cli/execute java java.java
echo -n 'testing jelly = '
../../cli/execute jelly jelly.jelly good
echo -n 'testing julia = '
../../cli/execute julia julia.jl
echo -n 'testing kotlin = '
../../cli/execute kotlin kotlin.kt
echo -n 'testing lolcode = '
../../cli/execute lolcode lolcode.lol
echo -n 'testing lisp = '
../../cli/execute lisp lisp.cl
echo -n 'testing nasm 32 bit = '
../../cli/execute nasm nasm.nasm
echo -n 'testing nasm 64 bit = '
../../cli/execute nasm64 nasm64.nasm
echo -n 'testing nim = '
../../cli/execute nim nim.nim
echo -n 'testing node = '
../../cli/execute node node.js
echo -n 'testing osabie = '
../../cli/execute osabie osabie.abe
echo -n 'testing paradoc = '
../../cli/execute bash paradoc.sh
echo -n 'testing perl = '
../../cli/execute perl perl.pl
echo -n 'testing php = '
../../cli/execute php php.php
echo -n 'testing prolog = '
../../cli/execute prolog prolog.pl
echo -n 'testing python2 = '
../../cli/execute python2 python2.py
echo -n 'testing python3 = '
../../cli/execute python3 python3.py
echo -n 'testing ruby = '
../../cli/execute ruby ruby.rb
echo -n 'testing rust = '
../../cli/execute rust rust.rs
echo -n 'testing scala = '
../../cli/execute scala scala.scala
echo -n 'testing swift = '
../../cli/execute swift swift.swift
echo -n 'testing typescript = '
../../cli/execute typescript typescript.ts
echo -n 'testing zig = '
../../cli/execute zig zig.zig
@ -1 +0,0 @@
BEGIN{ print "good" }
@ -1 +0,0 @@
echo 'good'
@ -1,5 +0,0 @@
#include <stdio.h>

void main(void) {
    printf("good\n");
}
@ -1,5 +0,0 @@
(ns clojure.examples.hello
  (:gen-class))
(defn hello-world []
  (println "good"))
(hello-world)
@ -1 +0,0 @@
puts "good"
@ -1,9 +0,0 @@
using System;

namespace HelloWorld {
    class Hello {
        static void Main() {
            Console.WriteLine("good");
        }
    }
}
@ -1 +0,0 @@
echo 'good'
@ -1 +0,0 @@
console.log('good')
@ -1 +0,0 @@
IO.puts("good")
@ -1 +0,0 @@
(message "good")
@ -1 +0,0 @@
main = putStrLn "good"
@ -1 +0,0 @@
³
@ -1 +0,0 @@
println("good")
@ -1,3 +0,0 @@
fun main() {
    println("good")
}
@ -1 +0,0 @@
(write-line "good")