diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml index b3027ec..bb264a3 100644 --- a/.github/workflows/package-pr.yaml +++ b/.github/workflows/package-pr.yaml @@ -55,7 +55,9 @@ jobs: run: | PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u) echo "Packages: $PACKAGES" - docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES + docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest + docker build -t repo-builder repo + docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES ls -la packages - name: Upload package as artifact @@ -89,7 +91,9 @@ jobs: run: | ls -la docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build - docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api docker.pkg.github.com/engineer-man/piston/api + docker pull docker.pkg.github.com/engineer-man/piston/api + docker build -t piston-api api + docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api echo Waiting for API to start.. docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes diff --git a/.github/workflows/package-push.yaml b/.github/workflows/package-push.yaml index ad33f3e..bbb44af 100644 --- a/.github/workflows/package-push.yaml +++ b/.github/workflows/package-push.yaml @@ -33,7 +33,9 @@ jobs: run: | PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u) echo "Packages: $PACKAGES" - docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES + docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest + docker build -t repo-builder repo + docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES ls -la packages - name: Upload Packages @@ -73,4 +75,4 @@ jobs: file: index tag: pkgs overwrite: true - file_glob: true \ No newline at end of file + file_glob: true diff --git a/api/Dockerfile b/api/Dockerfile index 668c54a..ec0d2a8 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -13,7 +13,7 @@ RUN apt-get update && \ libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \ libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \ - libsundials-dev && \ + libsundials-dev libpcre2-dev && \ rm -rf /var/lib/apt/lists/* RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen diff --git a/api/package-lock.json b/api/package-lock.json index c46ae87..83df240 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -12,6 +12,7 @@ "body-parser": "^1.19.0", "chownr": "^2.0.0", "express": "^4.17.1", + "express-ws": "^5.0.2", "is-docker": "^2.1.1", "logplease": "^1.2.15", "nocamel": "HexF/nocamel#patch-1", @@ -196,6 +197,20 @@ "node": ">= 0.10.0" } }, + "node_modules/express-ws": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz", + "integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==", + "dependencies": { + 
"ws": "^7.4.6" + }, + "engines": { + "node": ">=4.5.0" + }, + "peerDependencies": { + "express": "^4.0.0 || ^5.0.0-alpha.1" + } + }, "node_modules/finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -582,6 +597,26 @@ "node_modules/waitpid": { "resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa" }, + "node_modules/ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -728,6 +763,14 @@ "vary": "~1.1.2" } }, + "express-ws": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz", + "integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==", + "requires": { + "ws": "^7.4.6" + } + }, "finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -1010,6 +1053,12 @@ "version": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa", "from": "waitpid@git+https://github.com/HexF/node-waitpid.git" }, + "ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "requires": {} + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/api/package.json b/api/package.json index ab34063..e8e5b5d 100644 --- a/api/package.json +++ b/api/package.json @@ -1,12 +1,13 @@ { "name": "piston-api", - "version": "3.0.0", + "version": "3.1.0", "description": "API for piston - a high performance code execution engine", "main": "src/index.js", "dependencies": { "body-parser": "^1.19.0", "chownr": "^2.0.0", "express": "^4.17.1", + "express-ws": "^5.0.2", "is-docker": "^2.1.1", "logplease": "^1.2.15", "nocamel": "HexF/nocamel#patch-1", diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 948dccf..e3e0522 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -1,12 +1,126 @@ const express = require('express'); const router = express.Router(); +const events = require('events'); + const config = require('../config'); const runtime = require('../runtime'); const { Job } = require('../job'); const package = require('../package'); const logger = require('logplease').create('api/v2'); +const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"] +// ref: https://man7.org/linux/man-pages/man7/signal.7.html + +function get_job(body){ + const { + language, + version, + args, + stdin, + files, + compile_memory_limit, + run_memory_limit, + run_timeout, + compile_timeout + } = body; + 
+ return new Promise((resolve, reject) => { + if (!language || typeof language !== 'string') { + return reject({ + message: 'language is required as a string', + }); + } + + if (!version || typeof version !== 'string') { + return reject({ + message: 'version is required as a string', + }); + } + + if (!files || !Array.isArray(files)) { + return reject({ + message: 'files is required as an array', + }); + } + + for (const [i, file] of files.entries()) { + if (typeof file.content !== 'string') { + return reject({ + message: `files[${i}].content is required as a string`, + }); + } + } + + if (compile_memory_limit) { + if (typeof compile_memory_limit !== 'number') { + return reject({ + message: 'if specified, compile_memory_limit must be a number', + }); + } + + if ( + config.compile_memory_limit >= 0 && + (compile_memory_limit > config.compile_memory_limit || + compile_memory_limit < 0) + ) { + return reject({ + message: + 'compile_memory_limit cannot exceed the configured limit of ' + + config.compile_memory_limit, + }); + } + } + + if (run_memory_limit) { + if (typeof run_memory_limit !== 'number') { + return reject({ + message: 'if specified, run_memory_limit must be a number', + }); + } + + if ( + config.run_memory_limit >= 0 && + (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) + ) { + return reject({ + message: + 'run_memory_limit cannot exceed the configured limit of ' + + config.run_memory_limit, + }); + } + } + + const rt = runtime.get_latest_runtime_matching_language_version( + language, + version + ); + + if (rt === undefined) { + return reject({ + message: `${language}-${version} runtime is unknown`, + }); + } + + resolve(new Job({ + runtime: rt, + alias: language, + args: args || [], + stdin: stdin || "", + files, + timeouts: { + run: run_timeout || 3000, + compile: compile_timeout || 10000, + }, + memory_limits: { + run: run_memory_limit || config.run_memory_limit, + compile: compile_memory_limit || config.compile_memory_limit, + } + })); + }) + +} + router.use((req, res, next) => { if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) { return next(); @@ -21,118 +135,101 @@ router.use((req, res, next) => { next(); }); +router.ws('/connect', async (ws, req) => { + + let job = null; + let eventBus = new events.EventEmitter(); + + eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()}))) + eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()}))) + eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage}))) + eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status}))) + + ws.on("message", async (data) => { + + try{ + const msg = JSON.parse(data); + + switch(msg.type){ + case "init": + if(job === null){ + job = await get_job(msg); + + await job.prime(); + + ws.send(JSON.stringify({ + type: "runtime", + language: job.runtime.language, + version: job.runtime.version.raw + })) + + await job.execute_interactive(eventBus); + + ws.close(4999, "Job Completed"); + + }else{ + ws.close(4000, "Already Initialized"); + } + break; + case "data": + if(job !== null){ + if(msg.stream === "stdin"){ + eventBus.emit("stdin", msg.data) + }else{ + ws.close(4004, "Can only write to stdin") + } + }else{ + ws.close(4003, "Not yet initialized") + } + break; + case "signal": + if(job !== null){ + if(SIGNALS.includes(msg.signal)){ + eventBus.emit("signal", msg.signal) + }else{ + ws.close(4005, "Invalid signal") + } + 
}else{ + ws.close(4003, "Not yet initialized") + } + break; + } + + }catch(error){ + ws.send(JSON.stringify({type: "error", message: error.message})) + ws.close(4002, "Notified Error") + // ws.close message is limited to 123 characters, so we notify over WS then close. + } + }) + + ws.on("close", async ()=>{ + if(job !== null){ + await job.cleanup() + } + }) + + setTimeout(()=>{ + //Terminate the socket after 1 second, if not initialized. + if(job === null) + ws.close(4001, "Initialization Timeout"); + }, 1000) +}) + router.post('/execute', async (req, res) => { - const { - language, - version, - files, - stdin, - args, - run_timeout, - compile_timeout, - compile_memory_limit, - run_memory_limit, - } = req.body; - if (!language || typeof language !== 'string') { - return res.status(400).send({ - message: 'language is required as a string', - }); + try{ + const job = await get_job(req.body); + + await job.prime(); + + const result = await job.execute(); + + await job.cleanup(); + + return res.status(200).send(result); + }catch(error){ + return res.status(400).json(error); } - - if (!version || typeof version !== 'string') { - return res.status(400).send({ - message: 'version is required as a string', - }); - } - - if (!files || !Array.isArray(files)) { - return res.status(400).send({ - message: 'files is required as an array', - }); - } - - for (const [i, file] of files.entries()) { - if (typeof file.content !== 'string') { - return res.status(400).send({ - message: `files[${i}].content is required as a string`, - }); - } - } - - if (compile_memory_limit) { - if (typeof compile_memory_limit !== 'number') { - return res.status(400).send({ - message: 'if specified, compile_memory_limit must be a number', - }); - } - - if ( - config.compile_memory_limit >= 0 && - (compile_memory_limit > config.compile_memory_limit || - compile_memory_limit < 0) - ) { - return res.status(400).send({ - message: - 'compile_memory_limit cannot exceed the configured limit of ' + - config.compile_memory_limit, - }); - } - } - - if (run_memory_limit) { - if (typeof run_memory_limit !== 'number') { - return res.status(400).send({ - message: 'if specified, run_memory_limit must be a number', - }); - } - - if ( - config.run_memory_limit >= 0 && - (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) - ) { - return res.status(400).send({ - message: - 'run_memory_limit cannot exceed the configured limit of ' + - config.run_memory_limit, - }); - } - } - - const rt = runtime.get_latest_runtime_matching_language_version( - language, - version - ); - - if (rt === undefined) { - return res.status(400).send({ - message: `${language}-${version} runtime is unknown`, - }); - } - - const job = new Job({ - runtime: rt, - alias: language, - files: files, - args: args || [], - stdin: stdin || '', - timeouts: { - run: run_timeout || 3000, - compile: compile_timeout || 10000, - }, - memory_limits: { - run: run_memory_limit || config.run_memory_limit, - compile: compile_memory_limit || config.compile_memory_limit, - }, - }); - - await job.prime(); - - const result = await job.execute(); - - await job.cleanup(); - - return res.status(200).send(result); }); router.get('/runtimes', (req, res) => { diff --git a/api/src/config.js b/api/src/config.js index 84270aa..bbd7ae9 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -114,6 +114,13 @@ const options = [ 'https://github.com/engineer-man/piston/releases/download/pkgs/index', validators: [], }, + { + key: 'max_concurrent_jobs', + desc: 'Maximum number of concurrent jobs 
to run at one time', + default: 64, + parser: parse_int, + validators: [(x) => x > 0 || `${x} must be greater than 0`] + } ]; logger.info(`Loading Configuration from environment`); diff --git a/api/src/index.js index ef16916..afd4d15 100644 --- a/api/src/index.js +++ b/api/src/index.js @@ -2,6 +2,7 @@ require('nocamel'); const Logger = require('logplease'); const express = require('express'); +const expressWs = require('express-ws'); const globals = require('./globals'); const config = require('./config'); const path = require('path'); @@ -12,6 +13,9 @@ const runtime = require('./runtime'); const logger = Logger.create('index'); const app = express(); +expressWs(app); + + (async () => { logger.info('Setting loglevel to', config.log_level); diff --git a/api/src/job.js index bb78448..ecc4ab3 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -16,6 +16,19 @@ const job_states = { let uid = 0; let gid = 0; +let remainingJobSpaces = config.max_concurrent_jobs; +let jobQueue = []; + + +setInterval(()=>{ + // Every 10ms, try to resolve a new job if there is an available slot + if(jobQueue.length > 0 && remainingJobSpaces > 0){ + jobQueue.shift()() + } +}, 10) + + + class Job { constructor({ runtime, files, args, stdin, timeouts, memory_limits }) { this.uuid = uuidv4(); @@ -48,8 +61,15 @@ } async prime() { - logger.info(`Priming job uuid=${this.uuid}`); + if(remainingJobSpaces < 1){ + logger.info(`Awaiting job slot uuid=${this.uuid}`) + await new Promise((resolve)=>{ + jobQueue.push(resolve) + }) + } + logger.info(`Priming job uuid=${this.uuid}`); + remainingJobSpaces--; logger.debug('Writing files to job cache'); logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`); @@ -76,7 +96,7 @@ logger.debug('Primed job'); } - async safe_call(file, args, timeout, memory_limit) { + async safe_call(file, args, timeout, memory_limit, eventBus = null) { return new Promise((resolve, reject) => { const nonetwork = config.disable_networking ?
['nosocket'] : []; @@ -109,48 +129,72 @@ detached: true, //give this process its own process group }); - proc.stdin.write(this.stdin); - proc.stdin.end(); - proc.stdin.destroy(); + if(eventBus === null){ + proc.stdin.write(this.stdin); + proc.stdin.end(); + proc.stdin.destroy(); + }else{ + eventBus.on("stdin", (data) => { + proc.stdin.write(data); + }) + + eventBus.on("signal", (signal) => { + // Listen on "signal" to match the event emitted by api/src/api/v2.js + proc.kill(signal) + }) + } + + const kill_timeout = set_timeout( - _ => proc.kill('SIGKILL'), + async _ => { + logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`) + process.kill(proc.pid, 'SIGKILL') + }, timeout ); - proc.stderr.on('data', data => { - if (stderr.length > config.output_max_size) { - proc.kill('SIGKILL'); + proc.stderr.on('data', async data => { + if(eventBus !== null) { + eventBus.emit("stderr", data); + } else if (stderr.length > config.output_max_size) { + logger.info(`stderr length exceeded uuid=${this.uuid}`) + process.kill(proc.pid, 'SIGKILL') } else { stderr += data; output += data; } }); - proc.stdout.on('data', data => { - if (stdout.length > config.output_max_size) { - proc.kill('SIGKILL'); + proc.stdout.on('data', async data => { + if(eventBus !== null){ + eventBus.emit("stdout", data); + } else if (stdout.length > config.output_max_size) { + logger.info(`stdout length exceeded uuid=${this.uuid}`) + process.kill(proc.pid, 'SIGKILL') } else { stdout += data; output += data; } }); - const exit_cleanup = () => { + const exit_cleanup = async () => { clear_timeout(kill_timeout); proc.stderr.destroy(); proc.stdout.destroy(); + + await this.cleanup_processes() + logger.debug(`Finished exit cleanup uuid=${this.uuid}`) }; - proc.on('exit', (code, signal) => { - exit_cleanup(); + proc.on('exit', async (code, signal) => { + await exit_cleanup(); resolve({stdout, stderr, code, signal, output }); }); - proc.on('error', err => { - exit_cleanup(); + proc.on('error', async err => { + await exit_cleanup(); reject({ error: err, stdout, stderr, output }); }); @@ -203,36 +247,90 @@ }; } - async cleanup_processes() { + async execute_interactive(eventBus){ + if (this.state !== job_states.PRIMED) { + throw new Error( + 'Job must be in primed state, current state: ' + + this.state.toString() + ); + } + + logger.info( + `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${ + this.gid + } runtime=${this.runtime.toString()}` + ); + + if(this.runtime.compiled){ + eventBus.emit("stage", "compile") + const {error, code, signal} = await this.safe_call( + path.join(this.runtime.pkgdir, 'compile'), + this.files.map(x => x.name), + this.timeouts.compile, + this.memory_limits.compile, + eventBus + ) + + eventBus.emit("exit", "compile", {error, code, signal}) + } + + logger.debug('Running'); + eventBus.emit("stage", "run") + const {error, code, signal} = await this.safe_call( + path.join(this.runtime.pkgdir, 'run'), + [this.files[0].name, ...this.args], + this.timeouts.run, + this.memory_limits.run, + eventBus + ); + + eventBus.emit("exit", "run", {error, code, signal}) + + + this.state = job_states.EXECUTED; + } + + async cleanup_processes(dont_wait = []) { let processes = [1]; + logger.debug(`Cleaning up processes uuid=${this.uuid}`) while (processes.length > 0) { - processes = await new Promise((resolve, reject) => - cp.execFile('ps', ['awwxo', 'pid,ruid'], (err, stdout) => { - if (err === null) { - const lines = stdout.split('\n').slice(1); //Remove header with slice - const procs = lines.map(line => { - const [pid, ruid] = line - .trim()
.split(/\s+/) - .map(n => parseInt(n)); + processes = [] - return { pid, ruid }; - }); - resolve(procs); - } else { - reject(error); - } - }) - ); + const proc_ids = await fs.readdir("/proc"); + + + processes = await Promise.all(proc_ids.map(async (proc_id) => { + if(isNaN(proc_id)) return -1; + try{ + const proc_status = await fs.read_file(path.join("/proc",proc_id,"status")); + const proc_lines = proc_status.to_string().split("\n") + const uid_line = proc_lines.find(line=>line.starts_with("Uid:")) + const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/); + + + if(ruid == this.uid || euid == this.uid) + return parse_int(proc_id) + + }catch{ + return -1 + } + + return -1 + })) + + processes = processes.filter(p => p > 0) + + if(processes.length > 0) + logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`) + - processes = processes.filter(proc => proc.ruid === this.uid); for (const proc of processes) { // First stop the processes, but keep their resources allocated so they cant re-fork try { - process.kill(proc.pid, 'SIGSTOP'); + process.kill(proc, 'SIGSTOP'); } catch { // Could already be dead } @@ -241,14 +339,17 @@ class Job { for (const proc of processes) { // Then clear them out of the process tree try { - process.kill(proc.pid, 'SIGKILL'); + process.kill(proc, 'SIGKILL'); } catch { // Could already be dead and just needs to be waited on } - wait_pid(proc.pid); + if(!dont_wait.includes(proc)) + wait_pid(proc); } } + + logger.debug(`Cleaned up processes uuid=${this.uuid}`) } async cleanup_filesystem() { @@ -280,13 +381,13 @@ class Job { async cleanup() { logger.info(`Cleaning up job uuid=${this.uuid}`); - await Promise.all([ - this.cleanup_processes(), - this.cleanup_filesystem(), - ]); + await this.cleanup_filesystem(); + + remainingJobSpaces++; } } + module.exports = { Job, }; diff --git a/cli/commands/execute.js b/cli/commands/execute.js index e273548..abb1f63 100644 --- a/cli/commands/execute.js +++ b/cli/commands/execute.js @@ -1,7 +1,10 @@ -//const fetch = require('node-fetch'); const fs = require('fs'); const path = require('path'); const chalk = require('chalk'); +const WebSocket = require('ws'); + +const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"] + exports.command = ['execute [args..]']; exports.aliases = ['run']; @@ -35,17 +38,115 @@ exports.builder = { alias: ['f'], array: true, desc: 'Additional files to add', + }, + interactive: { + boolean: true, + alias: ['t'], + desc: 'Run interactively using WebSocket transport' + }, + status: { + boolean: true, + alias: ['s'], + desc: 'Output additional status to stderr' } }; -exports.handler = async (argv) => { - const files = [...(argv.files || []),argv.file] - .map(file_path => { - return { - name: path.basename(file_path), - content: fs.readFileSync(file_path).toString() - }; - }); +async function handle_interactive(files, argv){ + const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect") + + const log_message = (process.stderr.isTTY && argv.status) ? 
console.error : ()=>{}; + + process.on("exit", ()=>{ + ws.close(); + process.stdin.end(); + process.stdin.destroy(); + process.exit(); + }) + + for(const signal of SIGNALS){ + process.on(signal, ()=>{ + ws.send(JSON.stringify({type: 'signal', signal})) + }) + } + + + + ws.on('open', ()=>{ + const request = { + type: "init", + language: argv.language, + version: argv['language_version'], + files: files, + args: argv.args, + compile_timeout: argv.ct, + run_timeout: argv.rt + } + + ws.send(JSON.stringify(request)) + log_message(chalk.white.bold("Connected")) + + process.stdin.resume(); + + process.stdin.on("data", (data) => { + ws.send(JSON.stringify({ + type: "data", + stream: "stdin", + data: data.toString() + })) + }) + }) + + ws.on("close", (code, reason)=>{ + log_message( + chalk.white.bold("Disconnected: "), + chalk.white.bold("Reason: "), + chalk.yellow(`"${reason}"`), + chalk.white.bold("Code: "), + chalk.yellow(`"${code}"`), + ) + process.stdin.pause() + }) + + ws.on('message', function(data){ + const msg = JSON.parse(data); + + switch(msg.type){ + case "runtime": + log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`)) + break; + case "stage": + log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage)) + break; + case "data": + if(msg.stream == "stdout") process.stdout.write(msg.data) + else if(msg.stream == "stderr") process.stderr.write(msg.data) + else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data) + break; + case "exit": + if(msg.signal === null) + log_message( + chalk.white.bold("Stage"), + chalk.yellow(msg.stage), + chalk.white.bold("exited with code"), + chalk.yellow(msg.code) + ) + else + log_message( + chalk.white.bold("Stage"), + chalk.yellow(msg.stage), + chalk.white.bold("exited with signal"), + chalk.yellow(msg.signal) + ) + break; + default: + log_message(chalk.red.bold("Unknown message:"), msg) + } + }) + +} + +async function run_non_interactively(files, argv) { + const stdin = (argv.stdin && await new Promise((resolve, _) => { let data = ''; @@ -99,3 +200,18 @@ exports.handler = async (argv) => { step('Run', response.run); } + +exports.handler = async (argv) => { + const files = [...(argv.files || []),argv.file] + .map(file_path => { + return { + name: path.basename(file_path), + content: fs.readFileSync(file_path).toString() + }; + }); + + if(argv.interactive) await handle_interactive(files, argv); + else await run_non_interactively(files, argv); +} + + diff --git a/cli/package-lock.json b/cli/package-lock.json index d564e5f..335ed21 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -1,19 +1,20 @@ { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "license": "MIT", "dependencies": { - "axios": "^0.21.1", + "axios": "^0.21.2", "chalk": "^4.1.0", "minimatch": "^3.0.4", "nocamel": "^1.0.2", "semver": "^7.3.5", + "ws": "^7.5.3", "yargs": "^16.2.0" } }, @@ -37,11 +38,11 @@ } }, "node_modules/axios": { - "version": "0.21.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", - "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", + "version": "0.21.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz", + "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==", "dependencies": { - "follow-redirects": "^1.10.0" + 
"follow-redirects": "^1.14.0" } }, "node_modules/balanced-match": { @@ -115,11 +116,22 @@ } }, "node_modules/follow-redirects": { - "version": "1.13.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", - "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==", + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz", + "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], "engines": { "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } } }, "node_modules/get-caller-file": { @@ -243,6 +255,26 @@ "node": ">=10" } }, + "node_modules/ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/y18n": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", @@ -297,11 +329,11 @@ } }, "axios": { - "version": "0.21.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", - "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", + "version": "0.21.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz", + "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==", "requires": { - "follow-redirects": "^1.10.0" + "follow-redirects": "^1.14.0" } }, "balanced-match": { @@ -366,9 +398,9 @@ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" }, "follow-redirects": { - "version": "1.13.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", - "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==" + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz", + "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==" }, "get-caller-file": { "version": "2.0.5", @@ -455,6 +487,12 @@ "strip-ansi": "^6.0.0" } }, + "ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "requires": {} + }, "y18n": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", diff --git a/cli/package.json b/cli/package.json index 6df989d..ef1103e 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,15 +1,16 @@ { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "description": "Piston Execution Engine CLI tools", "main": "index.js", "license": "MIT", "dependencies": { - "axios": "^0.21.1", + "axios": "^0.21.2", "chalk": "^4.1.0", "minimatch": "^3.0.4", "nocamel": "^1.0.2", "semver": "^7.3.5", + "ws": "^7.5.3", "yargs": "^16.2.0" } } diff --git a/docs/configuration.md 
b/docs/configuration.md index 1388e9d..16a5df0 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -145,3 +145,12 @@ default: https://github.com/engineer-man/piston/releases/download/pkgs/index ``` URL for repository index, where packages will be downloaded from. + +## Maximum Concurrent Jobs + +```yaml +key: PISTON_MAX_CONCURRENT_JOBS +default: 64 +``` + +Maximum number of jobs to run concurrently. diff --git a/license b/license index 4f45aea..fd203f8 100644 --- a/license +++ b/license @@ -1,4 +1,4 @@ -Copyright (c) 2018-2021 Brian Seymour, EMKC Contributors +Copyright (c) 2018-2021 Brian Seymour, Thomas Hobson, EMKC Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal diff --git a/packages/CONTRIBUTING.MD b/packages/CONTRIBUTING.MD index 813f71e..b1ed6d3 100644 --- a/packages/CONTRIBUTING.MD +++ b/packages/CONTRIBUTING.MD @@ -2,7 +2,7 @@ ## Naming Languages -Languages should be named after their interpreters, and the command line binaries you call. +Languages should be named after their interpreters, and the command line binaries you call. The language version should use semantic versioning. For example, the full name of the standard python interpreter is `CPython`, however we would name it `python`, after the main binary which it provides. In the example of NodeJS, we would call this `node`, after the main binary. diff --git a/packages/cow/1.0.0/build.sh b/packages/cow/1.0.0/build.sh index 3bf5938..4753cae 100755 --- a/packages/cow/1.0.0/build.sh +++ b/packages/cow/1.0.0/build.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # Grab the latest cow source from github -git clone -q https://github.com/BigZaphod/COW.git cow +git clone -q https://github.com/Hydrazer/COW.git cow # Generate the cow binary into bin mkdir -p bin diff --git a/packages/iverilog/11.0.0/build.sh b/packages/iverilog/11.0.0/build.sh new file mode 100755 index 0000000..befb2fa --- /dev/null +++ b/packages/iverilog/11.0.0/build.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +PREFIX=$(realpath $(dirname $0)) + +mkdir -p build/iverilog +cd build/iverilog +curl -L https://github.com/steveicarus/iverilog/archive/refs/tags/v11_0.tar.gz -o iverilog.tar.gz +tar xzf iverilog.tar.gz --strip-components=1 + +chmod +x ./autoconf.sh +./autoconf.sh +./configure --prefix="$PREFIX" +make -j$(nproc) +make install -j$(nproc) + +cd ../../ +rm -rf build diff --git a/packages/iverilog/11.0.0/compile b/packages/iverilog/11.0.0/compile new file mode 100644 index 0000000..56f4b4e --- /dev/null +++ b/packages/iverilog/11.0.0/compile @@ -0,0 +1,4 @@ +#!/bin/bash + +rename 's/$/\.v/' "$@" # Add .v extension +iverilog *.v diff --git a/packages/iverilog/11.0.0/environment b/packages/iverilog/11.0.0/environment new file mode 100644 index 0000000..b482830 --- /dev/null +++ b/packages/iverilog/11.0.0/environment @@ -0,0 +1,2 @@ +#!/bin/bash +export PATH=$PWD/bin:$PATH diff --git a/packages/iverilog/11.0.0/metadata.json b/packages/iverilog/11.0.0/metadata.json new file mode 100644 index 0000000..5a35bde --- /dev/null +++ b/packages/iverilog/11.0.0/metadata.json @@ -0,0 +1,5 @@ +{ + "language": "iverilog", + "version": "11.0.0", + "aliases": ["verilog", "vvp"] +} diff --git a/packages/iverilog/11.0.0/run b/packages/iverilog/11.0.0/run new file mode 100644 index 0000000..39e898c --- /dev/null +++ b/packages/iverilog/11.0.0/run @@ -0,0 +1,4 @@ +#!/bin/bash + +shift +vvp a.out "$@" diff --git a/packages/iverilog/11.0.0/test.verilog 
b/packages/iverilog/11.0.0/test.verilog new file mode 100644 index 0000000..88fcd7a --- /dev/null +++ b/packages/iverilog/11.0.0/test.verilog @@ -0,0 +1,7 @@ +module hello; + initial + begin + $display("OK"); + $finish ; + end +endmodule diff --git a/packages/mono/6.12.0/build.sh b/packages/mono/6.12.0/build.sh index 2cecc07..7bb4b63 100755 --- a/packages/mono/6.12.0/build.sh +++ b/packages/mono/6.12.0/build.sh @@ -2,19 +2,31 @@ PREFIX=$(realpath $(dirname $0)) -mkdir -p build/mono +mkdir -p build/mono build/mono-basic cd build curl "https://download.mono-project.com/sources/mono/mono-6.12.0.122.tar.xz" -o mono.tar.xz +curl -L "https://github.com/mono/mono-basic/archive/refs/tags/4.7.tar.gz" -o mono-basic.tar.gz tar xf mono.tar.xz --strip-components=1 -C mono +tar xf mono-basic.tar.gz --strip-components=1 -C mono-basic +# Compiling Mono cd mono ./configure --prefix "$PREFIX" -make -j$(nproc) +make -j$(nproc) make install -j$(nproc) +export PATH="$PREFIX/bin:$PATH" # To be able to use mono commands + +# Compiling mono-basic +cd ../mono-basic +./configure --prefix="$PREFIX" + +make -j$(nproc) PLATFORM="linux" # Avoids conflict with the $PLATFORM variable we have +make install -j$(nproc) PLATFORM="linux" + +# Remove redundant files cd ../../ rm -rf build - diff --git a/packages/mono/6.12.0/compile b/packages/mono/6.12.0/compile index 8728714..e3ae230 100644 --- a/packages/mono/6.12.0/compile +++ b/packages/mono/6.12.0/compile @@ -1,4 +1,23 @@ #!/bin/bash -rename 's/$/\.cs/' "$@" # Add .cs extension -csc -out:out *.cs +check_errors () { + grep -q 'error [A-Z]\+[0-9]\+:' check.txt && cat check.txt 1>&2 || cat check.txt + rm check.txt +} + +case "${PISTON_LANGUAGE}" in + csharp) + rename 's/$/\.cs/' "$@" # Add .cs extension + csc -out:out *.cs > check.txt + check_errors + ;; + basic) + rename 's/$/\.vb/' "$@" # Add .vb extension + vbnc -out:out *.vb > check.txt + check_errors + ;; + *) + echo "How did you get here? 
(${PISTON_LANGUAGE})" + exit 1 + ;; +esac diff --git a/packages/mono/6.12.0/environment b/packages/mono/6.12.0/environment index bd0ff98..977a5e8 100644 --- a/packages/mono/6.12.0/environment +++ b/packages/mono/6.12.0/environment @@ -1 +1 @@ -export PATH=$PWD/bin:$PATH \ No newline at end of file +export PATH=$PWD/bin:$PATH diff --git a/packages/mono/6.12.0/metadata.json b/packages/mono/6.12.0/metadata.json index a053884..4d09ae7 100644 --- a/packages/mono/6.12.0/metadata.json +++ b/packages/mono/6.12.0/metadata.json @@ -5,6 +5,10 @@ { "language": "csharp", "aliases": ["mono", "mono-csharp", "mono-c#", "mono-cs", "c#", "cs"] + }, + { + "language": "basic", + "aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"] } ] } diff --git a/packages/mono/6.12.0/test.vb b/packages/mono/6.12.0/test.vb new file mode 100644 index 0000000..291042e --- /dev/null +++ b/packages/mono/6.12.0/test.vb @@ -0,0 +1,9 @@ +Imports System + +Module Module1 + + Sub Main() + Console.WriteLine("OK") + End Sub + +End Module diff --git a/packages/pwsh/7.1.4/build.sh b/packages/pwsh/7.1.4/build.sh new file mode 100755 index 0000000..3f4b070 --- /dev/null +++ b/packages/pwsh/7.1.4/build.sh @@ -0,0 +1,6 @@ +#!/bin/bash +curl -L https://github.com/PowerShell/PowerShell/releases/download/v7.1.4/powershell-7.1.4-linux-x64.tar.gz -o powershell.tar.gz +tar zxf powershell.tar.gz +rm powershell.tar.gz + +chmod +x pwsh diff --git a/packages/pwsh/7.1.4/environment b/packages/pwsh/7.1.4/environment new file mode 100644 index 0000000..42644cd --- /dev/null +++ b/packages/pwsh/7.1.4/environment @@ -0,0 +1 @@ +export PATH=$PWD:$PATH diff --git a/packages/pwsh/7.1.4/metadata.json b/packages/pwsh/7.1.4/metadata.json new file mode 100644 index 0000000..da90f76 --- /dev/null +++ b/packages/pwsh/7.1.4/metadata.json @@ -0,0 +1,10 @@ +{ + "language": "pwsh", + "version": "7.1.4", + "provides": [ + { + "language": "powershell", + "aliases": ["ps", "pwsh", "ps1"] + } + ] +} diff --git a/packages/pwsh/7.1.4/run b/packages/pwsh/7.1.4/run new file mode 100644 index 0000000..02a0be7 --- /dev/null +++ b/packages/pwsh/7.1.4/run @@ -0,0 +1,3 @@ +#!/bin/bash + +pwsh "$@" diff --git a/packages/pwsh/7.1.4/test.ps1 b/packages/pwsh/7.1.4/test.ps1 new file mode 100644 index 0000000..f0a4be3 --- /dev/null +++ b/packages/pwsh/7.1.4/test.ps1 @@ -0,0 +1 @@ +echo "OK" diff --git a/packages/rscript/4.1.1/build.sh b/packages/rscript/4.1.1/build.sh new file mode 100755 index 0000000..9837c22 --- /dev/null +++ b/packages/rscript/4.1.1/build.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +PREFIX=$(realpath $(dirname $0)) + +mkdir build +cd build + +curl https://cloud.r-project.org/src/base/R-4/R-4.1.1.tar.gz -o R.tar.gz +tar xzf R.tar.gz --strip-components 1 + +./configure --prefix="$PREFIX" --with-x=no +make -j$(nproc) +make install -j$(nproc) + +cd ../ +rm -rf build diff --git a/packages/rscript/4.1.1/environment b/packages/rscript/4.1.1/environment new file mode 100644 index 0000000..977a5e8 --- /dev/null +++ b/packages/rscript/4.1.1/environment @@ -0,0 +1 @@ +export PATH=$PWD/bin:$PATH diff --git a/packages/rscript/4.1.1/metadata.json b/packages/rscript/4.1.1/metadata.json new file mode 100644 index 0000000..db16a76 --- /dev/null +++ b/packages/rscript/4.1.1/metadata.json @@ -0,0 +1,5 @@ +{ + "language": "rscript", + "version": "4.1.1", + "aliases": ["r"] +} diff --git a/packages/rscript/4.1.1/run b/packages/rscript/4.1.1/run new file mode 100644 index 0000000..d122eb8 --- /dev/null +++ b/packages/rscript/4.1.1/run @@ -0,0 +1,2 @@ +#/bin/bash +Rscript 
"$@" diff --git a/packages/rscript/4.1.1/test.r b/packages/rscript/4.1.1/test.r new file mode 100644 index 0000000..9273f27 --- /dev/null +++ b/packages/rscript/4.1.1/test.r @@ -0,0 +1 @@ +cat('OK') diff --git a/packages/sqlite3/3.36.0/build.sh b/packages/sqlite3/3.36.0/build.sh new file mode 100755 index 0000000..18d5b8f --- /dev/null +++ b/packages/sqlite3/3.36.0/build.sh @@ -0,0 +1,10 @@ +#!/bin/bash +PREFIX=$(realpath $(dirname $0)) + +curl https://www.sqlite.org/2021/sqlite-amalgamation-3360000.zip -o sqlite.zip +unzip -q sqlite.zip +rm -rf sqlite.zip + +gcc -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION sqlite-amalgamation-3360000/shell.c sqlite-amalgamation-3360000/sqlite3.c -o sqlite3 + +rm -rf sqlite-amalgamation-3360000 diff --git a/packages/sqlite3/3.36.0/environment b/packages/sqlite3/3.36.0/environment new file mode 100644 index 0000000..50242cc --- /dev/null +++ b/packages/sqlite3/3.36.0/environment @@ -0,0 +1,2 @@ +#!/bin/bash +export PATH=$PWD:$PATH diff --git a/packages/sqlite3/3.36.0/metadata.json b/packages/sqlite3/3.36.0/metadata.json new file mode 100644 index 0000000..d531aaf --- /dev/null +++ b/packages/sqlite3/3.36.0/metadata.json @@ -0,0 +1,5 @@ +{ + "language": "sqlite3", + "version": "3.36.0", + "aliases": ["sqlite", "sql"] +} diff --git a/packages/sqlite3/3.36.0/run b/packages/sqlite3/3.36.0/run new file mode 100644 index 0000000..8484f3d --- /dev/null +++ b/packages/sqlite3/3.36.0/run @@ -0,0 +1,3 @@ +#!/bin/bash + +sqlite3 < "$1" diff --git a/packages/sqlite3/3.36.0/test.sql b/packages/sqlite3/3.36.0/test.sql new file mode 100644 index 0000000..3a3c57b --- /dev/null +++ b/packages/sqlite3/3.36.0/test.sql @@ -0,0 +1 @@ +SELECT 'OK'; diff --git a/packages/vlang/0.1.13/run b/packages/vlang/0.1.13/run index 18b6c05..c999cca 100644 --- a/packages/vlang/0.1.13/run +++ b/packages/vlang/0.1.13/run @@ -6,8 +6,9 @@ export TMPDIR="$PWD" # Put instructions to run the runtime -rename 's/$/\.v/' "$@" # Add .v extension +filename=$1 + +rename 's/$/\.v/' $filename # Add .v extension -filename=$1.v shift -v run $filename "$@" \ No newline at end of file +v run $filename.v "$@" diff --git a/piston b/piston index 2dc36fa..a14e7f5 100755 --- a/piston +++ b/piston @@ -19,38 +19,45 @@ case $1 in echo "Commands:" echo " select Select the environment" echo " docker_compose Interact directly with the docker-compose for the selected environment" + echo " logs Show docker-compose logs" echo echo " start Starts piston" echo " stop Stops piston" echo " restart Restarts piston" + echo " bash Opens a bash shell for the piston_api container" echo echo " update Fetches and applies latest updates" echo echo " Passthrough to piston cli tool" - echo + echo echo "Development Commands:" - + if [ $PISTON_ENV == dev ]; then - + echo " clean-pkgs Clean any package build artifacts on disk" echo " clean-repo Remove all packages from local repo" echo " build-pkg Build a package" - + echo " rebuild Build and restart the docker container" + else - + echo " Switch to developement environment for more info" - echo " > piston switch dev" - + echo " > piston select dev" + fi ;; select) echo "$2" > .piston_env ;; docker_compose) shift; docker_compose "$@";; + logs) docker_compose logs -f ;; restart) docker_compose restart ;; start) docker_compose up -d ;; stop) docker_compose down ;; + bash) docker_compose exec api /bin/bash ;; + + rebuild) docker_compose build && docker_compose up -d ;; update) git pull @@ -74,4 +81,4 @@ case $1 in cd ../ node cli/index.js "$@" ;; -esac \ No newline at end of file +esac 
diff --git a/readme.md b/readme.md index e95de75..1240cbe 100644 --- a/readme.md +++ b/readme.md @@ -41,6 +41,20 @@
+# Notes About Hacktoberfest + +While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs. +If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests. + +We are accepting PRs for: +* Packages - updating package versions, adding new packages +* Documentation updates +* CLI/API improvements - please discuss these with us in the Discord first + +Any queries or concerns, ping @HexF#0015 in the Discord. + +
+ # About

@@ -88,7 +102,9 @@ POST https://emkc.org/api/v2/piston/execute > Important Note: The Piston API is rate limited to 5 requests per second. If you have a need for more requests than that > and it's for a good cause, please reach out to me (EngineerMan#0001) on [Discord](https://discord.gg/engineerman) -> so we can discuss potentially getting you an unlimited key. +> so we can discuss potentially getting you an unlimited key. What is and isn't a good cause is up to me, but in general, +> if your project is a) open source, b) helping people at no cost to them, and c) not likely to use tons of resources +> and thereby impair another's ability to enjoy Piston, you'll likely be granted a key.
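For reference, a minimal request against the public endpoint named above; the payload fields mirror the v2 validation in `api/src/api/v2.js` from this diff, and the language/version pair is illustrative rather than guaranteed to be installed:

```sh
# Hedged example: language/version must match a runtime available on the server
curl -s https://emkc.org/api/v2/piston/execute \
    -H 'Content-Type: application/json' \
    -d '{"language": "python", "version": "3.9.4", "files": [{"content": "print(\"OK\")"}]}'
```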
@@ -100,7 +116,7 @@ POST https://emkc.org/api/v2/piston/execute - Docker - Docker Compose -- Node JS +- Node JS (>= 13, preferably >= 15) ### After system dependencies are installed, clone this repository: @@ -139,6 +155,22 @@ docker run \ ghcr.io/engineer-man/piston ``` +## Piston for testing packages locally + +### Host System Package Dependencies + +- Same as [All In One](#All-In-One) + +### Installation + +```sh +# Build the Docker containers +./piston start + +# For more help +./piston help +``` +
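Once the containers are up, a freshly written package can be exercised end to end. A sketch, assuming the dev environment is selected and that the dev-only `build-pkg` command takes the package name and version as arguments (the argument order is an assumption, not documented in the script):

```sh
# Hypothetical local test of the rscript package added in this diff
./piston select dev
./piston build-pkg rscript 4.1.1                           # argument order assumed
./piston run rscript 4.1.1 packages/rscript/4.1.1/test.r   # expected to print OK
```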
# Usage @@ -312,6 +344,7 @@ Content-Type: application/json `golfscript`, `groovy`, `haskell`, +`iverilog`, `java`, `javascript`, `jelly`, @@ -331,6 +364,7 @@ Content-Type: application/json `perl`, `php`, `ponylang`, +`powershell`, `prolog`, `pure`, `pyth`, @@ -338,11 +372,14 @@ Content-Type: application/json `python2`, `raku`, `rockstar`, +`rscript`, `ruby`, `rust`, `scala`, +`sqlite3`, `swift`, `typescript`, +`basic`, `vlang`, `yeethon`, `zig`, diff --git a/repo/Dockerfile b/repo/Dockerfile index 106fef4..de28c11 100644 --- a/repo/Dockerfile +++ b/repo/Dockerfile @@ -8,7 +8,8 @@ RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-d util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \ libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev gfortran\ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev libfftw3-dev \ - libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev && \ + libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev \ + libbz2-dev liblzma-dev libpcre2-dev gperf bison flex g++ && \ ln -sf /bin/bash /bin/sh && \ rm -rf /var/lib/apt/lists/* && \ update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2 @@ -17,4 +18,3 @@ ADD entrypoint.sh mkindex.sh / ENTRYPOINT ["bash","/entrypoint.sh"] CMD ["--no-build"] -
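Finally, a sketch of driving the new interactive mode added to the CLI in this diff: `-t`/`--interactive` streams stdin and stdout over the new `/api/v2/connect` WebSocket route, and `-s`/`--status` prints runtime, stage, and exit information to stderr when attached to a TTY. The language, version, and file name are illustrative, and the positional argument order is assumed from the CLI's `language`/`language_version`/`file` options:

```sh
# Hedged sketch: run a program interactively via the WebSocket transport
./piston run python 3.9.4 program.py -t -s
```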