Merge branch 'master' into add-forte

Dan Vargas, 2021-10-01 12:11:12 -06:00, committed by GitHub
commit 977ec08311
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
46 changed files with 834 additions and 210 deletions


@@ -55,7 +55,9 @@ jobs:
       run: |
         PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
        echo "Packages: $PACKAGES"
-        docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES
+        docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+        docker build -t repo-builder repo
+        docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
         ls -la packages
     - name: Upload package as artifact

@@ -89,7 +91,9 @@ jobs:
       run: |
         ls -la
         docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build
-        docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api docker.pkg.github.com/engineer-man/piston/api
+        docker pull docker.pkg.github.com/engineer-man/piston/api
+        docker build -t piston-api api
+        docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api
         echo Waiting for API to start..
         docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes


@@ -33,7 +33,9 @@ jobs:
       run: |
         PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u)
         echo "Packages: $PACKAGES"
-        docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES
+        docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest
+        docker build -t repo-builder repo
+        docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES
         ls -la packages
     - name: Upload Packages


@@ -13,7 +13,7 @@ RUN apt-get update && \
     libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \
     libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \
    libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \
-    libsundials-dev && \
+    libsundials-dev libpcre2-dev && \
     rm -rf /var/lib/apt/lists/*

 RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen

api/package-lock.json (generated, 49 changes)

@@ -12,6 +12,7 @@
         "body-parser": "^1.19.0",
         "chownr": "^2.0.0",
         "express": "^4.17.1",
+        "express-ws": "^5.0.2",
         "is-docker": "^2.1.1",
         "logplease": "^1.2.15",
         "nocamel": "HexF/nocamel#patch-1",
@@ -196,6 +197,20 @@
                 "node": ">= 0.10.0"
             }
         },
+        "node_modules/express-ws": {
+            "version": "5.0.2",
+            "resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz",
+            "integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==",
+            "dependencies": {
+                "ws": "^7.4.6"
+            },
+            "engines": {
+                "node": ">=4.5.0"
+            },
+            "peerDependencies": {
+                "express": "^4.0.0 || ^5.0.0-alpha.1"
+            }
+        },
         "node_modules/finalhandler": {
             "version": "1.1.2",
             "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
@@ -582,6 +597,26 @@
         "node_modules/waitpid": {
             "resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa"
         },
+        "node_modules/ws": {
+            "version": "7.5.3",
+            "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
+            "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
+            "engines": {
+                "node": ">=8.3.0"
+            },
+            "peerDependencies": {
+                "bufferutil": "^4.0.1",
+                "utf-8-validate": "^5.0.2"
+            },
+            "peerDependenciesMeta": {
+                "bufferutil": {
+                    "optional": true
+                },
+                "utf-8-validate": {
+                    "optional": true
+                }
+            }
+        },
         "node_modules/yallist": {
             "version": "4.0.0",
             "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
@@ -728,6 +763,14 @@
             "vary": "~1.1.2"
         }
     },
+    "express-ws": {
+        "version": "5.0.2",
+        "resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz",
+        "integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==",
+        "requires": {
+            "ws": "^7.4.6"
+        }
+    },
     "finalhandler": {
         "version": "1.1.2",
         "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz",
@@ -1010,6 +1053,12 @@
         "version": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa",
         "from": "waitpid@git+https://github.com/HexF/node-waitpid.git"
     },
+    "ws": {
+        "version": "7.5.3",
+        "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
+        "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
+        "requires": {}
+    },
     "yallist": {
         "version": "4.0.0",
         "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",


@@ -1,12 +1,13 @@
 {
     "name": "piston-api",
-    "version": "3.0.0",
+    "version": "3.1.0",
     "description": "API for piston - a high performance code execution engine",
     "main": "src/index.js",
     "dependencies": {
         "body-parser": "^1.19.0",
         "chownr": "^2.0.0",
         "express": "^4.17.1",
+        "express-ws": "^5.0.2",
         "is-docker": "^2.1.1",
         "logplease": "^1.2.15",
         "nocamel": "HexF/nocamel#patch-1",


@@ -1,12 +1,126 @@
 const express = require('express');
 const router = express.Router();

+const events = require('events');
+
 const config = require('../config');
 const runtime = require('../runtime');
 const { Job } = require('../job');
 const package = require('../package');
 const logger = require('logplease').create('api/v2');

+const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
+// ref: https://man7.org/linux/man-pages/man7/signal.7.html
+
+function get_job(body){
+    const {
+        language,
+        version,
+        args,
+        stdin,
+        files,
+        compile_memory_limit,
+        run_memory_limit,
+        run_timeout,
+        compile_timeout
+    } = body;
+
+    return new Promise((resolve, reject) => {
+        if (!language || typeof language !== 'string') {
+            return reject({
+                message: 'language is required as a string',
+            });
+        }
+
+        if (!version || typeof version !== 'string') {
+            return reject({
+                message: 'version is required as a string',
+            });
+        }
+
+        if (!files || !Array.isArray(files)) {
+            return reject({
+                message: 'files is required as an array',
+            });
+        }
+
+        for (const [i, file] of files.entries()) {
+            if (typeof file.content !== 'string') {
+                return reject({
+                    message: `files[${i}].content is required as a string`,
+                });
+            }
+        }
+
+        if (compile_memory_limit) {
+            if (typeof compile_memory_limit !== 'number') {
+                return reject({
+                    message: 'if specified, compile_memory_limit must be a number',
+                });
+            }
+
+            if (
+                config.compile_memory_limit >= 0 &&
+                (compile_memory_limit > config.compile_memory_limit ||
+                    compile_memory_limit < 0)
+            ) {
+                return reject({
+                    message:
+                        'compile_memory_limit cannot exceed the configured limit of ' +
+                        config.compile_memory_limit,
+                });
+            }
+        }
+
+        if (run_memory_limit) {
+            if (typeof run_memory_limit !== 'number') {
+                return reject({
+                    message: 'if specified, run_memory_limit must be a number',
+                });
+            }
+
+            if (
+                config.run_memory_limit >= 0 &&
+                (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
+            ) {
+                return reject({
+                    message:
+                        'run_memory_limit cannot exceed the configured limit of ' +
+                        config.run_memory_limit,
+                });
+            }
+        }
+
+        const rt = runtime.get_latest_runtime_matching_language_version(
+            language,
+            version
+        );
+
+        if (rt === undefined) {
+            return reject({
+                message: `${language}-${version} runtime is unknown`,
+            });
+        }
+
+        resolve(new Job({
+            runtime: rt,
+            alias: language,
+            args: args || [],
+            stdin: stdin || "",
+            files,
+            timeouts: {
+                run: run_timeout || 3000,
+                compile: compile_timeout || 10000,
+            },
+            memory_limits: {
+                run: run_memory_limit || config.run_memory_limit,
+                compile: compile_memory_limit || config.compile_memory_limit,
+            }
+        }));
+    })
+}
+
 router.use((req, res, next) => {
     if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) {
         return next();
@@ -21,110 +135,90 @@ router.use((req, res, next) => {
     next();
 });

+router.ws('/connect', async (ws, req) => {
+    let job = null;
+    let eventBus = new events.EventEmitter();
+
+    eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
+    eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
+    eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
+    eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
+
+    ws.on("message", async (data) => {
+        try{
+            const msg = JSON.parse(data);
+
+            switch(msg.type){
+                case "init":
+                    if(job === null){
+                        job = await get_job(msg);
+
+                        await job.prime();
+
+                        ws.send(JSON.stringify({
+                            type: "runtime",
+                            language: job.runtime.language,
+                            version: job.runtime.version.raw
+                        }))
+
+                        await job.execute_interactive(eventBus);
+
+                        ws.close(4999, "Job Completed");
+                    }else{
+                        ws.close(4000, "Already Initialized");
+                    }
+                    break;
+                case "data":
+                    if(job !== null){
+                        if(msg.stream === "stdin"){
+                            eventBus.emit("stdin", msg.data)
+                        }else{
+                            ws.close(4004, "Can only write to stdin")
+                        }
+                    }else{
+                        ws.close(4003, "Not yet initialized")
+                    }
+                    break;
+                case "signal":
+                    if(job !== null){
+                        if(SIGNALS.includes(msg.signal)){
+                            eventBus.emit("signal", msg.signal)
+                        }else{
+                            ws.close(4005, "Invalid signal")
+                        }
+                    }else{
+                        ws.close(4003, "Not yet initialized")
+                    }
+                    break;
+            }
+
+        }catch(error){
+            ws.send(JSON.stringify({type: "error", message: error.message}))
+            ws.close(4002, "Notified Error")
+            // ws.close message is limited to 123 characters, so we notify over WS then close.
+        }
+    })
+
+    ws.on("close", async ()=>{
+        if(job !== null){
+            await job.cleanup()
+        }
+    })
+
+    setTimeout(()=>{
+        //Terminate the socket after 1 second, if not initialized.
+        if(job === null)
+            ws.close(4001, "Initialization Timeout");
+    }, 1000)
+})
+
 router.post('/execute', async (req, res) => {
-    const {
-        language,
-        version,
-        files,
-        stdin,
-        args,
-        run_timeout,
-        compile_timeout,
-        compile_memory_limit,
-        run_memory_limit,
-    } = req.body;
-
-    if (!language || typeof language !== 'string') {
-        return res.status(400).send({
-            message: 'language is required as a string',
-        });
-    }
-
-    if (!version || typeof version !== 'string') {
-        return res.status(400).send({
-            message: 'version is required as a string',
-        });
-    }
-
-    if (!files || !Array.isArray(files)) {
-        return res.status(400).send({
-            message: 'files is required as an array',
-        });
-    }
-
-    for (const [i, file] of files.entries()) {
-        if (typeof file.content !== 'string') {
-            return res.status(400).send({
-                message: `files[${i}].content is required as a string`,
-            });
-        }
-    }
-
-    if (compile_memory_limit) {
-        if (typeof compile_memory_limit !== 'number') {
-            return res.status(400).send({
-                message: 'if specified, compile_memory_limit must be a number',
-            });
-        }
-
-        if (
-            config.compile_memory_limit >= 0 &&
-            (compile_memory_limit > config.compile_memory_limit ||
-                compile_memory_limit < 0)
-        ) {
-            return res.status(400).send({
-                message:
-                    'compile_memory_limit cannot exceed the configured limit of ' +
-                    config.compile_memory_limit,
-            });
-        }
-    }
-
-    if (run_memory_limit) {
-        if (typeof run_memory_limit !== 'number') {
-            return res.status(400).send({
-                message: 'if specified, run_memory_limit must be a number',
-            });
-        }
-
-        if (
-            config.run_memory_limit >= 0 &&
-            (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
-        ) {
-            return res.status(400).send({
-                message:
-                    'run_memory_limit cannot exceed the configured limit of ' +
-                    config.run_memory_limit,
-            });
-        }
-    }
-
-    const rt = runtime.get_latest_runtime_matching_language_version(
-        language,
-        version
-    );
-
-    if (rt === undefined) {
-        return res.status(400).send({
-            message: `${language}-${version} runtime is unknown`,
-        });
-    }
-
-    const job = new Job({
-        runtime: rt,
-        alias: language,
-        files: files,
-        args: args || [],
-        stdin: stdin || '',
-        timeouts: {
-            run: run_timeout || 3000,
-            compile: compile_timeout || 10000,
-        },
-        memory_limits: {
-            run: run_memory_limit || config.run_memory_limit,
-            compile: compile_memory_limit || config.compile_memory_limit,
-        },
-    });
+    try{
+        const job = await get_job(req.body);

     await job.prime();
@@ -133,6 +227,9 @@ router.post('/execute', async (req, res) => {
     await job.cleanup();

     return res.status(200).send(result);
+    }catch(error){
+        return res.status(400).json(error);
+    }
 });

 router.get('/runtimes', (req, res) => {
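For reference, a minimal client sketch for the new `/connect` endpoint (not part of this commit; it assumes a local API on port 2000 with a python runtime installed, and uses the `ws` package; the message shapes mirror the handler above):

```js
// Minimal client sketch for the interactive /api/v2/connect endpoint.
// Assumptions: Piston API on localhost:2000, a python runtime installed.
const WebSocket = require('ws');

const ws = new WebSocket('ws://localhost:2000/api/v2/connect');

// "init" must be the first message, and must arrive within the
// 1-second initialization timeout enforced by the server above.
ws.on('open', () => {
    ws.send(JSON.stringify({
        type: 'init',
        language: 'python',
        version: '*',
        files: [{ content: 'print(input())' }],
    }));
});

ws.on('message', raw => {
    const msg = JSON.parse(raw);
    if (msg.type === 'runtime') {
        // The job is primed; stdin can now be streamed as "data" messages.
        ws.send(JSON.stringify({ type: 'data', stream: 'stdin', data: 'hello\n' }));
    } else if (msg.type === 'data') {
        process.stdout.write(`[${msg.stream}] ${msg.data}`);
    }
});

ws.on('close', (code, reason) => console.log(`closed ${code}: ${reason}`));
```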


@@ -114,6 +114,13 @@ const options = [
             'https://github.com/engineer-man/piston/releases/download/pkgs/index',
         validators: [],
     },
+    {
+        key: 'max_concurrent_jobs',
+        desc: 'Maximum number of concurrent jobs to run at one time',
+        default: 64,
+        parser: parse_int,
+        validators: [(x) => x > 0 || `${x} cannot be negative`]
+    }
 ];

 logger.info(`Loading Configuration from environment`);


@@ -2,6 +2,7 @@
 require('nocamel');
 const Logger = require('logplease');
 const express = require('express');
+const expressWs = require('express-ws');
 const globals = require('./globals');
 const config = require('./config');
 const path = require('path');
@@ -12,6 +13,9 @@ const runtime = require('./runtime');

 const logger = Logger.create('index');
 const app = express();
+expressWs(app);

 (async () => {
     logger.info('Setting loglevel to', config.log_level);


@@ -16,6 +16,19 @@ const job_states = {
 let uid = 0;
 let gid = 0;

+let remainingJobSpaces = config.max_concurrent_jobs;
+let jobQueue = [];
+
+setInterval(()=>{
+    // Every 10ms try resolve a new job, if there is an available slot
+    if(jobQueue.length > 0 && remainingJobSpaces > 0){
+        jobQueue.shift()()
+    }
+}, 10)
+
 class Job {
     constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
         this.uuid = uuidv4();
@@ -48,8 +61,15 @@ class Job {
     }

     async prime() {
-        logger.info(`Priming job uuid=${this.uuid}`);
+        if(remainingJobSpaces < 1){
+            logger.info(`Awaiting job slot uuid=${this.uuid}`)
+            await new Promise((resolve)=>{
+                jobQueue.push(resolve)
+            })
+        }
+
+        logger.info(`Priming job uuid=${this.uuid}`);
+        remainingJobSpaces--;

         logger.debug('Writing files to job cache');
         logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`);
@@ -76,7 +96,7 @@ class Job {
         logger.debug('Primed job');
     }

-    async safe_call(file, args, timeout, memory_limit) {
+    async safe_call(file, args, timeout, memory_limit, eventBus = null) {
         return new Promise((resolve, reject) => {
             const nonetwork = config.disable_networking ? ['nosocket'] : [];
@@ -109,48 +129,72 @@ class Job {
                 detached: true, //give this process its own process group
             });

-            proc.stdin.write(this.stdin);
-            proc.stdin.end();
-            proc.stdin.destroy();
+            if(eventBus === null){
+                proc.stdin.write(this.stdin);
+                proc.stdin.end();
+                proc.stdin.destroy();
+            }else{
+                eventBus.on("stdin", (data) => {
+                    proc.stdin.write(data);
+                })
+
+                eventBus.on("kill", (signal) => {
+                    proc.kill(signal)
+                })
+            }

             const kill_timeout = set_timeout(
-                _ => proc.kill('SIGKILL'),
+                async _ => {
+                    logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`)
+                    process.kill(proc.pid, 'SIGKILL')
+                },
                 timeout
             );

-            proc.stderr.on('data', data => {
-                if (stderr.length > config.output_max_size) {
-                    proc.kill('SIGKILL');
+            proc.stderr.on('data', async data => {
+                if(eventBus !== null) {
+                    eventBus.emit("stderr", data);
+                } else if (stderr.length > config.output_max_size) {
+                    logger.info(`stderr length exceeded uuid=${this.uuid}`)
+                    process.kill(proc.pid, 'SIGKILL')
                 } else {
                     stderr += data;
                     output += data;
                 }
             });

-            proc.stdout.on('data', data => {
-                if (stdout.length > config.output_max_size) {
-                    proc.kill('SIGKILL');
+            proc.stdout.on('data', async data => {
+                if(eventBus !== null){
+                    eventBus.emit("stdout", data);
+                } else if (stdout.length > config.output_max_size) {
+                    logger.info(`stdout length exceeded uuid=${this.uuid}`)
+                    process.kill(proc.pid, 'SIGKILL')
                 } else {
                     stdout += data;
                     output += data;
                 }
             });

-            const exit_cleanup = () => {
+            const exit_cleanup = async () => {
                 clear_timeout(kill_timeout);
                 proc.stderr.destroy();
                 proc.stdout.destroy();
+
+                await this.cleanup_processes()
+                logger.debug(`Finished exit cleanup uuid=${this.uuid}`)
             };

-            proc.on('exit', (code, signal) => {
-                exit_cleanup();
+            proc.on('exit', async (code, signal) => {
+                await exit_cleanup();

                 resolve({stdout, stderr, code, signal, output });
             });

-            proc.on('error', err => {
-                exit_cleanup();
+            proc.on('error', async err => {
+                await exit_cleanup();

                 reject({ error: err, stdout, stderr, output });
             });
@@ -203,36 +247,90 @@ class Job {
         };
     }

-    async cleanup_processes() {
-        let processes = [1];
-
-        while (processes.length > 0) {
-            processes = await new Promise((resolve, reject) =>
-                cp.execFile('ps', ['awwxo', 'pid,ruid'], (err, stdout) => {
-                    if (err === null) {
-                        const lines = stdout.split('\n').slice(1); //Remove header with slice
-                        const procs = lines.map(line => {
-                            const [pid, ruid] = line
-                                .trim()
-                                .split(/\s+/)
-                                .map(n => parseInt(n));
-
-                            return { pid, ruid };
-                        });
-
-                        resolve(procs);
-                    } else {
-                        reject(error);
-                    }
-                })
-            );
-
-            processes = processes.filter(proc => proc.ruid === this.uid);
+    async execute_interactive(eventBus){
+        if (this.state !== job_states.PRIMED) {
+            throw new Error(
+                'Job must be in primed state, current state: ' +
+                    this.state.toString()
+            );
+        }
+
+        logger.info(
+            `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${
+                this.gid
+            } runtime=${this.runtime.toString()}`
+        );
+
+        if(this.runtime.compiled){
+            eventBus.emit("stage", "compile")
+            const {error, code, signal} = await this.safe_call(
+                path.join(this.runtime.pkgdir, 'compile'),
+                this.files.map(x => x.name),
+                this.timeouts.compile,
+                this.memory_limits.compile,
+                eventBus
+            )
+
+            eventBus.emit("exit", "compile", {error, code, signal})
+        }
+
+        logger.debug('Running');
+        eventBus.emit("stage", "run")
+        const {error, code, signal} = await this.safe_call(
+            path.join(this.runtime.pkgdir, 'run'),
+            [this.files[0].name, ...this.args],
+            this.timeouts.run,
+            this.memory_limits.run,
+            eventBus
+        );
+
+        eventBus.emit("exit", "run", {error, code, signal})
+
+        this.state = job_states.EXECUTED;
+    }
+
+    async cleanup_processes(dont_wait = []) {
+        let processes = [1];
+        logger.debug(`Cleaning up processes uuid=${this.uuid}`)
+
+        while (processes.length > 0) {
+            processes = []
+
+            const proc_ids = await fs.readdir("/proc");
+
+            processes = await Promise.all(proc_ids.map(async (proc_id) => {
+                if(isNaN(proc_id)) return -1;
+                try{
+                    const proc_status = await fs.read_file(path.join("/proc",proc_id,"status"));
+                    const proc_lines = proc_status.to_string().split("\n")
+                    const uid_line = proc_lines.find(line=>line.starts_with("Uid:"))
+                    const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
+
+                    if(ruid == this.uid || euid == this.uid)
+                        return parse_int(proc_id)
+
+                }catch{
+                    return -1
+                }
+
+                return -1
+            }))
+
+            processes = processes.filter(p => p > 0)
+
+            if(processes.length > 0)
+                logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`)

             for (const proc of processes) {
                 // First stop the processes, but keep their resources allocated so they cant re-fork
                 try {
-                    process.kill(proc.pid, 'SIGSTOP');
+                    process.kill(proc, 'SIGSTOP');
                 } catch {
                     // Could already be dead
                 }
             }
@@ -241,14 +339,17 @@ class Job {
             for (const proc of processes) {
                 // Then clear them out of the process tree
                 try {
-                    process.kill(proc.pid, 'SIGKILL');
+                    process.kill(proc, 'SIGKILL');
                 } catch {
                     // Could already be dead and just needs to be waited on
                 }

-                wait_pid(proc.pid);
+                if(!dont_wait.includes(proc))
+                    wait_pid(proc);
             }
         }
+
+        logger.debug(`Cleaned up processes uuid=${this.uuid}`)
     }

     async cleanup_filesystem() {
@@ -280,13 +381,13 @@ class Job {
     async cleanup() {
         logger.info(`Cleaning up job uuid=${this.uuid}`);

-        await Promise.all([
-            this.cleanup_processes(),
-            this.cleanup_filesystem(),
-        ]);
+        await this.cleanup_filesystem();
+
+        remainingJobSpaces++;
     }
 }

 module.exports = {
     Job,
 };
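The queueing change above is effectively a polled counting semaphore: prime() parks a resolver in jobQueue when no slot is free, the 10 ms interval wakes one waiter per free slot, and cleanup() releases the slot. A standalone sketch of the same pattern (all names here are illustrative, not from this commit):

```js
// Polled counting semaphore, mirroring the remainingJobSpaces/jobQueue pattern.
let slots = 2;      // plays the role of remainingJobSpaces (config.max_concurrent_jobs)
const waiters = []; // plays the role of jobQueue

// Every 10ms, wake at most one waiter if a slot is free, like the setInterval above.
const timer = setInterval(() => {
    if (waiters.length > 0 && slots > 0) waiters.shift()();
}, 10);

async function acquire() {
    // Park until the interval hands us a turn, as prime() does.
    if (slots < 1) await new Promise(resolve => waiters.push(resolve));
    slots--;
}

function release() {
    slots++; // as cleanup() does
}

async function run_job(n) {
    await acquire();
    try {
        console.log(`job ${n} holds a slot`);
        await new Promise(r => setTimeout(r, 100)); // simulated work
    } finally {
        release();
    }
}

// Five jobs, at most two running at once; stop polling when all finish.
Promise.all([0, 1, 2, 3, 4].map(run_job)).then(() => clearInterval(timer));
```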


@@ -1,7 +1,10 @@
+//const fetch = require('node-fetch');
 const fs = require('fs');
 const path = require('path');
 const chalk = require('chalk');
+const WebSocket = require('ws');
+
+const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]

 exports.command = ['execute <language> <file> [args..]'];
 exports.aliases = ['run'];
@@ -35,17 +38,115 @@ exports.builder = {
         alias: ['f'],
         array: true,
         desc: 'Additional files to add',
+    },
+    interactive: {
+        boolean: true,
+        alias: ['t'],
+        desc: 'Run interactively using WebSocket transport'
+    },
+    status: {
+        boolean: true,
+        alias: ['s'],
+        desc: 'Output additional status to stderr'
     }
 };

-exports.handler = async (argv) => {
-    const files = [...(argv.files || []),argv.file]
-        .map(file_path => {
-            return {
-                name: path.basename(file_path),
-                content: fs.readFileSync(file_path).toString()
-            };
-        });
+async function handle_interactive(files, argv){
+    const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
+
+    const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
+
+    process.on("exit", ()=>{
+        ws.close();
+        process.stdin.end();
+        process.stdin.destroy();
+        process.exit();
+    })
+
+    for(const signal of SIGNALS){
+        process.on(signal, ()=>{
+            ws.send(JSON.stringify({type: 'signal', signal}))
+        })
+    }
+
+    ws.on('open', ()=>{
+        const request = {
+            type: "init",
+            language: argv.language,
+            version: argv['language_version'],
+            files: files,
+            args: argv.args,
+            compile_timeout: argv.ct,
+            run_timeout: argv.rt
+        }
+
+        ws.send(JSON.stringify(request))
+        log_message(chalk.white.bold("Connected"))
+
+        process.stdin.resume();
+
+        process.stdin.on("data", (data) => {
+            ws.send(JSON.stringify({
+                type: "data",
+                stream: "stdin",
+                data: data.toString()
+            }))
+        })
+    })
+
+    ws.on("close", (code, reason)=>{
+        log_message(
+            chalk.white.bold("Disconnected: "),
+            chalk.white.bold("Reason: "),
+            chalk.yellow(`"${reason}"`),
+            chalk.white.bold("Code: "),
+            chalk.yellow(`"${code}"`),
+        )
+        process.stdin.pause()
+    })
+
+    ws.on('message', function(data){
+        const msg = JSON.parse(data);
+
+        switch(msg.type){
+            case "runtime":
+                log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
+                break;
+            case "stage":
+                log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
+                break;
+            case "data":
+                if(msg.stream == "stdout") process.stdout.write(msg.data)
+                else if(msg.stream == "stderr") process.stderr.write(msg.data)
+                else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
+                break;
+            case "exit":
+                if(msg.signal === null)
+                    log_message(
+                        chalk.white.bold("Stage"),
+                        chalk.yellow(msg.stage),
+                        chalk.white.bold("exited with code"),
+                        chalk.yellow(msg.code)
+                    )
+                else
+                    log_message(
+                        chalk.white.bold("Stage"),
+                        chalk.yellow(msg.stage),
+                        chalk.white.bold("exited with signal"),
+                        chalk.yellow(msg.signal)
+                    )
+                break;
+            default:
+                log_message(chalk.red.bold("Unknown message:"), msg)
+        }
+    })
+}
+
+async function run_non_interactively(files, argv) {

     const stdin = (argv.stdin && await new Promise((resolve, _) => {
         let data = '';
@@ -99,3 +200,18 @@ exports.handler = async (argv) => {
     step('Run', response.run);
 }

+exports.handler = async (argv) => {
+    const files = [...(argv.files || []),argv.file]
+        .map(file_path => {
+            return {
+                name: path.basename(file_path),
+                content: fs.readFileSync(file_path).toString()
+            };
+        });
+
+    if(argv.interactive) await handle_interactive(files, argv);
+    else await run_non_interactively(files, argv);
+}
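A usage sketch for the new flags (the exact entrypoint varies by install; treat the invocation below as hypothetical, only the `-t`/`-s` flags come from the builder above):

```sh
# Interactive run over the WebSocket transport (-t), with status on stderr (-s)
node cli/index.js run python hello.py -t -s
```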

cli/package-lock.json (generated, 72 changes)

@@ -1,19 +1,20 @@
 {
     "name": "piston-cli",
-    "version": "1.0.0",
+    "version": "1.1.0",
     "lockfileVersion": 2,
     "requires": true,
     "packages": {
         "": {
             "name": "piston-cli",
-            "version": "1.0.0",
+            "version": "1.1.0",
             "license": "MIT",
             "dependencies": {
-                "axios": "^0.21.1",
+                "axios": "^0.21.2",
                 "chalk": "^4.1.0",
                 "minimatch": "^3.0.4",
                 "nocamel": "^1.0.2",
                 "semver": "^7.3.5",
+                "ws": "^7.5.3",
                 "yargs": "^16.2.0"
             }
         },
@@ -37,11 +38,11 @@
             }
         },
         "node_modules/axios": {
-            "version": "0.21.1",
-            "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz",
-            "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==",
+            "version": "0.21.2",
+            "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz",
+            "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==",
             "dependencies": {
-                "follow-redirects": "^1.10.0"
+                "follow-redirects": "^1.14.0"
             }
         },
         "node_modules/balanced-match": {
@@ -115,11 +116,22 @@
             }
         },
         "node_modules/follow-redirects": {
-            "version": "1.13.3",
-            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz",
-            "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==",
+            "version": "1.14.3",
+            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz",
+            "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==",
+            "funding": [
+                {
+                    "type": "individual",
+                    "url": "https://github.com/sponsors/RubenVerborgh"
+                }
+            ],
             "engines": {
                 "node": ">=4.0"
+            },
+            "peerDependenciesMeta": {
+                "debug": {
+                    "optional": true
+                }
             }
         },
         "node_modules/get-caller-file": {
@@ -243,6 +255,26 @@
                 "node": ">=10"
             }
         },
+        "node_modules/ws": {
+            "version": "7.5.3",
+            "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
+            "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
+            "engines": {
+                "node": ">=8.3.0"
+            },
+            "peerDependencies": {
+                "bufferutil": "^4.0.1",
+                "utf-8-validate": "^5.0.2"
+            },
+            "peerDependenciesMeta": {
+                "bufferutil": {
+                    "optional": true
+                },
+                "utf-8-validate": {
+                    "optional": true
+                }
+            }
+        },
         "node_modules/y18n": {
             "version": "5.0.5",
             "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",
@@ -297,11 +329,11 @@
             }
         },
         "axios": {
-            "version": "0.21.1",
-            "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz",
-            "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==",
+            "version": "0.21.2",
+            "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz",
+            "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==",
             "requires": {
-                "follow-redirects": "^1.10.0"
+                "follow-redirects": "^1.14.0"
             }
         },
         "balanced-match": {
@@ -366,9 +398,9 @@
             "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw=="
         },
         "follow-redirects": {
-            "version": "1.13.3",
-            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz",
-            "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA=="
+            "version": "1.14.3",
+            "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz",
+            "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw=="
         },
         "get-caller-file": {
             "version": "2.0.5",
@@ -455,6 +487,12 @@
                 "strip-ansi": "^6.0.0"
             }
         },
+        "ws": {
+            "version": "7.5.3",
+            "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
+            "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
+            "requires": {}
+        },
         "y18n": {
             "version": "5.0.5",
             "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",


@@ -1,15 +1,16 @@
 {
     "name": "piston-cli",
-    "version": "1.0.0",
+    "version": "1.1.0",
     "description": "Piston Execution Engine CLI tools",
     "main": "index.js",
     "license": "MIT",
     "dependencies": {
-        "axios": "^0.21.1",
+        "axios": "^0.21.2",
         "chalk": "^4.1.0",
         "minimatch": "^3.0.4",
         "nocamel": "^1.0.2",
         "semver": "^7.3.5",
+        "ws": "^7.5.3",
         "yargs": "^16.2.0"
     }
 }


@@ -145,3 +145,12 @@ default: https://github.com/engineer-man/piston/releases/download/pkgs/index
 ```

 URL for repository index, where packages will be downloaded from.
+
+## Maximum Concurrent Jobs
+
+```yaml
+key: PISTON_MAX_CONCURRENT_JOBS
+default: 64
+```
+
+Maximum number of jobs to run concurrently.
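As an illustration, the new limit can be overridden like any other `PISTON_*` variable; a sketch using the all-in-one container from the README (flags trimmed, adjust to your deployment):

```sh
# Cap the API at 16 simultaneous jobs instead of the default 64.
docker run -d \
    -p 2000:2000 \
    -e PISTON_MAX_CONCURRENT_JOBS=16 \
    ghcr.io/engineer-man/piston
```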


@@ -1,4 +1,4 @@
-Copyright (c) 2018-2021 Brian Seymour, EMKC Contributors
+Copyright (c) 2018-2021 Brian Seymour, Thomas Hobson, EMKC Contributors

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal


@@ -2,7 +2,7 @@
 ## Naming Languages

-Languages should be named after their interpreters, and the command line binaries you call.
+Languages should be named after their interpreters, and the command line binaries you call. The language version should use semantic versioning.

 For example, the full name of the standard python interpreter is `CPython`, however we would name it `python`, after the main binary which it provides.

 In the example of NodeJS, we would call this `node`, after the main binary.


@@ -1,7 +1,7 @@
 #!/usr/bin/env bash

 # Grab the latest cow source from github
-git clone -q https://github.com/BigZaphod/COW.git cow
+git clone -q https://github.com/Hydrazer/COW.git cow

 # Generate the cow binary into bin
 mkdir -p bin

packages/iverilog/11.0.0/build.sh (new executable file)

@@ -0,0 +1,17 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
mkdir -p build/iverilog
cd build/iverilog
curl -L https://github.com/steveicarus/iverilog/archive/refs/tags/v11_0.tar.gz -o iverilog.tar.gz
tar xzf iverilog.tar.gz --strip-components=1
chmod +x ./autoconf.sh
./autoconf.sh
./configure --prefix="$PREFIX"
make -j$(nproc)
make install -j$(nproc)
cd ../../
rm -rf build

packages/iverilog/11.0.0/compile (new file)

@@ -0,0 +1,4 @@
#!/bin/bash
rename 's/$/\.v/' "$@" # Add .v extension
iverilog *.v

packages/iverilog/11.0.0/environment (new file)

@@ -0,0 +1,2 @@
#!/bin/bash
export PATH=$PWD/bin:$PATH


@@ -0,0 +1,5 @@
{
"language": "iverilog",
"version": "11.0.0",
"aliases": ["verilog", "vvp"]
}

packages/iverilog/11.0.0/run (new file)

@@ -0,0 +1,4 @@
#!/bin/bash
shift
vvp a.out "$@"

packages/iverilog/11.0.0/test.verilog (new file)

@@ -0,0 +1,7 @@
module hello;
initial
begin
$display("OK");
$finish ;
end
endmodule


@@ -2,12 +2,15 @@
 PREFIX=$(realpath $(dirname $0))

-mkdir -p build/mono
+mkdir -p build/mono build/mono-basic

 cd build

 curl "https://download.mono-project.com/sources/mono/mono-6.12.0.122.tar.xz" -o mono.tar.xz
+curl -L "https://github.com/mono/mono-basic/archive/refs/tags/4.7.tar.gz" -o mono-basic.tar.gz

 tar xf mono.tar.xz --strip-components=1 -C mono
+tar xf mono-basic.tar.gz --strip-components=1 -C mono-basic

+# Compiling Mono
 cd mono

 ./configure --prefix "$PREFIX"
@@ -15,6 +18,15 @@ cd mono
 make -j$(nproc)
 make install -j$(nproc)

+export PATH="$PREFIX/bin:$PATH" # To be able to use mono commands
+
+# Compiling mono-basic
+cd ../mono-basic
+./configure --prefix="$PREFIX"
+make -j$(nproc) PLATFORM="linux" # Avoids conflict with the $PLATFORM variable we have
+make install -j$(nproc) PLATFORM="linux"
+
+# Remove redundant files
 cd ../../
 rm -rf build


@@ -1,4 +1,23 @@
 #!/bin/bash

+check_errors () {
+    grep -q 'error [A-Z]\+[0-9]\+:' check.txt && cat check.txt 1>&2 || cat check.txt
+    rm check.txt
+}
+
+case "${PISTON_LANGUAGE}" in
+    csharp)
         rename 's/$/\.cs/' "$@" # Add .cs extension
-        csc -out:out *.cs
+        csc -out:out *.cs > check.txt
+        check_errors
+        ;;
+    basic)
+        rename 's/$/\.vb/' "$@" # Add .vb extension
+        vbnc -out:out *.vb > check.txt
+        check_errors
+        ;;
+    *)
+        echo "How did you get here? (${PISTON_LANGUAGE})"
+        exit 1
+        ;;
+esac


@@ -5,6 +5,10 @@
         {
             "language": "csharp",
             "aliases": ["mono", "mono-csharp", "mono-c#", "mono-cs", "c#", "cs"]
+        },
+        {
+            "language": "basic",
+            "aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"]
         }
     ]
 }

packages/mono/6.12.0/test.vb (new file)

@@ -0,0 +1,9 @@
Imports System
Module Module1
Sub Main()
Console.WriteLine("OK")
End Sub
End Module

packages/pwsh/7.1.4/build.sh (new executable file)

@@ -0,0 +1,6 @@
#!/bin/bash
curl -L https://github.com/PowerShell/PowerShell/releases/download/v7.1.4/powershell-7.1.4-linux-x64.tar.gz -o powershell.tar.gz
tar zxf powershell.tar.gz
rm powershell.tar.gz
chmod +x pwsh

packages/pwsh/7.1.4/environment (new file)

@@ -0,0 +1 @@
export PATH=$PWD:$PATH

packages/pwsh/7.1.4/metadata.json (new file)

@@ -0,0 +1,10 @@
{
"language": "pwsh",
"version": "7.1.4",
"provides": [
{
"language": "powershell",
"aliases": ["ps", "pwsh", "ps1"]
}
]
}

packages/pwsh/7.1.4/run (new file)

@@ -0,0 +1,3 @@
#!/bin/bash
pwsh "$@"

packages/pwsh/7.1.4/test.ps1 (new file)

@@ -0,0 +1 @@
echo "OK"

packages/rscript/4.1.1/build.sh (new executable file)

@@ -0,0 +1,16 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
mkdir build
cd build
curl https://cloud.r-project.org/src/base/R-4/R-4.1.1.tar.gz -o R.tar.gz
tar xzf R.tar.gz --strip-components 1
./configure --prefix="$PREFIX" --with-x=no
make -j$(nproc)
make install -j$(nproc)
cd ../
rm -rf build

packages/rscript/4.1.1/environment (new file)

@@ -0,0 +1 @@
export PATH=$PWD/bin:$PATH

packages/rscript/4.1.1/metadata.json (new file)

@@ -0,0 +1,5 @@
{
"language": "rscript",
"version": "4.1.1",
"aliases": ["r"]
}

packages/rscript/4.1.1/run (new file)

@@ -0,0 +1,2 @@
#!/bin/bash
Rscript "$@"

packages/rscript/4.1.1/test.r (new file)

@@ -0,0 +1 @@
cat('OK')

packages/sqlite3/3.36.0/build.sh (new executable file)

@@ -0,0 +1,10 @@
#!/bin/bash
PREFIX=$(realpath $(dirname $0))
curl https://www.sqlite.org/2021/sqlite-amalgamation-3360000.zip -o sqlite.zip
unzip -q sqlite.zip
rm -rf sqlite.zip
gcc -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION sqlite-amalgamation-3360000/shell.c sqlite-amalgamation-3360000/sqlite3.c -o sqlite3
rm -rf sqlite-amalgamation-3360000

packages/sqlite3/3.36.0/environment (new file)

@@ -0,0 +1,2 @@
#!/bin/bash
export PATH=$PWD:$PATH

packages/sqlite3/3.36.0/metadata.json (new file)

@@ -0,0 +1,5 @@
{
"language": "sqlite3",
"version": "3.36.0",
"aliases": ["sqlite", "sql"]
}

packages/sqlite3/3.36.0/run (new file)

@@ -0,0 +1,3 @@
#!/bin/bash
sqlite3 < "$1"

packages/sqlite3/3.36.0/test.sql (new file)

@@ -0,0 +1 @@
SELECT 'OK';


@@ -6,8 +6,9 @@ export TMPDIR="$PWD"

 # Put instructions to run the runtime

-rename 's/$/\.v/' "$@" # Add .v extension
-filename=$1.v
+filename=$1
+rename 's/$/\.v/' $filename # Add .v extension
 shift
-v run $filename "$@"
+v run $filename.v "$@"

piston (9 changes)

@@ -19,10 +19,12 @@ case $1 in
         echo "Commands:"
         echo " select <environment>          Select the environment"
         echo " docker_compose <args...>      Interact directly with the docker-compose for the selected environment"
+        echo " logs                          Show docker-compose logs"
         echo
         echo " start                         Starts piston"
         echo " stop                          Stops piston"
         echo " restart                       Restarts piston"
+        echo " bash                          Opens a bash shell for the piston_api container"
         echo
         echo " update                        Fetches and applies latest updates"
         echo
@@ -35,11 +37,12 @@ case $1 in
         echo " clean-pkgs                    Clean any package build artifacts on disk"
         echo " clean-repo                    Remove all packages from local repo"
         echo " build-pkg <package> <version> Build a package"
+        echo " rebuild                       Build and restart the docker container"
     else
         echo " Switch to developement environment for more info"
-        echo " > piston switch dev"
+        echo " > piston select dev"
     fi
     ;;

@@ -47,10 +50,14 @@ case $1 in
     select) echo "$2" > .piston_env ;;
     docker_compose) shift; docker_compose "$@";;
+    logs) docker_compose logs -f ;;

     restart) docker_compose restart ;;
     start) docker_compose up -d ;;
     stop) docker_compose down ;;
+    bash) docker_compose exec api /bin/bash ;;
+    rebuild) docker_compose build && docker_compose up -d ;;

     update)
         git pull


@@ -41,6 +41,20 @@

 <br>

+# Notes About Hacktoberfest
+
+While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs.
+If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests.
+
+We are accepting PRs for:
+
+* Packages - updating package versions, adding new packages
+* Documentation updates
+* CLI/API improvements - please discuss these with us in the Discord first
+
+Any queries or concerns, ping @HexF#0015 in the Discord.
+
+<br>
+
 # About

 <h4>
@@ -88,7 +102,9 @@ POST https://emkc.org/api/v2/piston/execute

 > Important Note: The Piston API is rate limited to 5 requests per second. If you have a need for more requests than that
 > and it's for a good cause, please reach out to me (EngineerMan#0001) on [Discord](https://discord.gg/engineerman)
-> so we can discuss potentially getting you an unlimited key.
+> so we can discuss potentially getting you an unlimited key. What is and isn't a good cause is up to me, but, in general
+> if your project is a) open source, b) helping people at no cost to them, and c) not likely to use tons of resources
+> thereby impairing another's ability to enjoy Piston, you'll likely be granted a key.

 <br>

@@ -100,7 +116,7 @@ POST https://emkc.org/api/v2/piston/execute

 - Docker
 - Docker Compose
-- Node JS
+- Node JS (>= 13, preferably >= 15)

 ### After system dependencies are installed, clone this repository:

@@ -139,6 +155,22 @@ docker run \
     ghcr.io/engineer-man/piston
 ```

+## Piston for testing packages locally
+
+### Host System Package Dependencies
+
+- Same as [All In One](#All-In-One)
+
+### Installation
+
+```sh
+# Build the Docker containers
+./piston start
+
+# For more help
+./piston help
+```
+
 <br>

 # Usage

@@ -312,6 +344,7 @@ Content-Type: application/json
 `golfscript`,
 `groovy`,
 `haskell`,
+`iverilog`,
 `java`,
 `javascript`,
 `jelly`,
@@ -331,6 +364,7 @@ Content-Type: application/json
 `perl`,
 `php`,
 `ponylang`,
+`powershell`,
 `prolog`,
 `pure`,
 `pyth`,
@@ -338,11 +372,14 @@ Content-Type: application/json
 `python2`,
 `raku`,
 `rockstar`,
+`rscript`,
 `ruby`,
 `rust`,
 `scala`,
+`sqlite3`,
 `swift`,
 `typescript`,
+`basic`,
 `vlang`,
 `yeethon`,
 `zig`,


@@ -8,7 +8,8 @@ RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-d
     util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \
     libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev gfortran\
     libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev libfftw3-dev \
-    libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev && \
+    libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev \
+    libbz2-dev liblzma-dev libpcre2-dev gperf bison flex g++ && \
     ln -sf /bin/bash /bin/sh && \
     rm -rf /var/lib/apt/lists/* && \
     update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2
@@ -17,4 +18,3 @@ ADD entrypoint.sh mkindex.sh /

 ENTRYPOINT ["bash","/entrypoint.sh"]
 CMD ["--no-build"]