From de89acb617470beffff6be2a3343302a51183ce7 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Fri, 16 Jul 2021 23:10:44 +1200 Subject: [PATCH 01/35] api: Implement Websocket transport for live data --- api/package-lock.json | 49 +++++++ api/package.json | 1 + api/src/api/v2.js | 299 +++++++++++++++++++++++++++--------------- api/src/index.js | 4 + api/src/job.js | 59 ++++++++- 5 files changed, 299 insertions(+), 113 deletions(-) diff --git a/api/package-lock.json b/api/package-lock.json index c46ae87..83df240 100644 --- a/api/package-lock.json +++ b/api/package-lock.json @@ -12,6 +12,7 @@ "body-parser": "^1.19.0", "chownr": "^2.0.0", "express": "^4.17.1", + "express-ws": "^5.0.2", "is-docker": "^2.1.1", "logplease": "^1.2.15", "nocamel": "HexF/nocamel#patch-1", @@ -196,6 +197,20 @@ "node": ">= 0.10.0" } }, + "node_modules/express-ws": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz", + "integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==", + "dependencies": { + "ws": "^7.4.6" + }, + "engines": { + "node": ">=4.5.0" + }, + "peerDependencies": { + "express": "^4.0.0 || ^5.0.0-alpha.1" + } + }, "node_modules/finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -582,6 +597,26 @@ "node_modules/waitpid": { "resolved": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa" }, + "node_modules/ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", @@ -728,6 +763,14 @@ "vary": "~1.1.2" } }, + "express-ws": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/express-ws/-/express-ws-5.0.2.tgz", + "integrity": "sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==", + "requires": { + "ws": "^7.4.6" + } + }, "finalhandler": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.1.2.tgz", @@ -1010,6 +1053,12 @@ "version": "git+ssh://git@github.com/HexF/node-waitpid.git#a08d116a5d993a747624fe72ff890167be8c34aa", "from": "waitpid@git+https://github.com/HexF/node-waitpid.git" }, + "ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "requires": {} + }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", diff --git a/api/package.json b/api/package.json index ab34063..0c32198 100644 --- a/api/package.json +++ b/api/package.json @@ -7,6 +7,7 @@ "body-parser": "^1.19.0", "chownr": "^2.0.0", "express": "^4.17.1", + "express-ws": "^5.0.2", "is-docker": "^2.1.1", "logplease": "^1.2.15", "nocamel": "HexF/nocamel#patch-1", diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 948dccf..487b10b 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -1,12 +1,124 @@ const express = require('express'); const 
router = express.Router(); +const events = require('events'); + const config = require('../config'); const runtime = require('../runtime'); const { Job } = require('../job'); const package = require('../package'); const logger = require('logplease').create('api/v2'); + +function get_job(body){ + const { + language, + version, + args, + stdin, + files, + compile_memory_limit, + run_memory_limit, + run_timeout, + compile_timeout + } = body; + + return new Promise((resolve, reject) => { + if (!language || typeof language !== 'string') { + return reject({ + message: 'language is required as a string', + }); + } + + if (!version || typeof version !== 'string') { + return reject({ + message: 'version is required as a string', + }); + } + + if (!files || !Array.isArray(files)) { + return reject({ + message: 'files is required as an array', + }); + } + + for (const [i, file] of files.entries()) { + if (typeof file.content !== 'string') { + return reject({ + message: `files[${i}].content is required as a string`, + }); + } + } + + if (compile_memory_limit) { + if (typeof compile_memory_limit !== 'number') { + return reject({ + message: 'if specified, compile_memory_limit must be a number', + }); + } + + if ( + config.compile_memory_limit >= 0 && + (compile_memory_limit > config.compile_memory_limit || + compile_memory_limit < 0) + ) { + return reject({ + message: + 'compile_memory_limit cannot exceed the configured limit of ' + + config.compile_memory_limit, + }); + } + } + + if (run_memory_limit) { + if (typeof run_memory_limit !== 'number') { + return reject({ + message: 'if specified, run_memory_limit must be a number', + }); + } + + if ( + config.run_memory_limit >= 0 && + (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) + ) { + return reject({ + message: + 'run_memory_limit cannot exceed the configured limit of ' + + config.run_memory_limit, + }); + } + } + + const rt = runtime.get_latest_runtime_matching_language_version( + language, + version + ); + + if (rt === undefined) { + return reject({ + message: `${language}-${version} runtime is unknown`, + }); + } + + resolve(new Job({ + runtime: rt, + alias: language, + args: args || [], + stdin: '', + files, + timeouts: { + run: run_timeout || 3000, + compile: compile_timeout || 10000, + }, + memory_limits: { + run: run_memory_limit || config.run_memory_limit, + compile: compile_memory_limit || config.compile_memory_limit, + } + })); + }) + +} + router.use((req, res, next) => { if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) { return next(); @@ -21,118 +133,87 @@ router.use((req, res, next) => { next(); }); +router.ws('/connect', async (ws, req) => { + + let job = null; + let eventBus = new events.EventEmitter(); + + eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()}))) + eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()}))) + eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage}))) + eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status}))) + + ws.on("message", async (data) => { + + try{ + const msg = JSON.parse(data); + + if(msg.type === "init"){ + if(job === null){ + const job = await get_job(msg); + + await job.prime(); + + ws.send(JSON.stringify({ + type: "runtime", + language: job.runtime.language, + version: job.runtime.version.raw + })) + + await job.execute_interactive(eventBus); + + ws.close(4999, "Job Completed"); + + }else{ + 
ws.close(4000, "Already Initialized"); + } + + }else if(msg.type === "data"){ + if(job !== null){ + if(msg.stream === "stdin"){ + eventBus.emit("stdin", msg.data) + }else{ + ws.close(4004, "Can only write to stdin") + } + }else{ + ws.close(4003, "Not yet initialized") + } + } + }catch(error){ + ws.send(JSON.stringify({type: "error", message: error.message})) + ws.close(4002, "Notified Error") + // ws.close message is limited to 123 characters, so we notify over WS then close. + } + }) + + ws.on("close", async ()=>{ + if(job !== null){ + await job.cleanup() + } + }) + + setTimeout(()=>{ + //Terminate the socket after 1 second, if not initialized. + //if(job === null) + // ws.close(4001, "Initialization Timeout"); + }, 1000) +}) + router.post('/execute', async (req, res) => { - const { - language, - version, - files, - stdin, - args, - run_timeout, - compile_timeout, - compile_memory_limit, - run_memory_limit, - } = req.body; - if (!language || typeof language !== 'string') { - return res.status(400).send({ - message: 'language is required as a string', - }); + try{ + const job = await get_job(req.body); + + await job.prime(); + + const result = await job.execute(); + + await job.cleanup(); + + return res.status(200).send(result); + }catch(error){ + return res.status(400).json(error); } - - if (!version || typeof version !== 'string') { - return res.status(400).send({ - message: 'version is required as a string', - }); - } - - if (!files || !Array.isArray(files)) { - return res.status(400).send({ - message: 'files is required as an array', - }); - } - - for (const [i, file] of files.entries()) { - if (typeof file.content !== 'string') { - return res.status(400).send({ - message: `files[${i}].content is required as a string`, - }); - } - } - - if (compile_memory_limit) { - if (typeof compile_memory_limit !== 'number') { - return res.status(400).send({ - message: 'if specified, compile_memory_limit must be a number', - }); - } - - if ( - config.compile_memory_limit >= 0 && - (compile_memory_limit > config.compile_memory_limit || - compile_memory_limit < 0) - ) { - return res.status(400).send({ - message: - 'compile_memory_limit cannot exceed the configured limit of ' + - config.compile_memory_limit, - }); - } - } - - if (run_memory_limit) { - if (typeof run_memory_limit !== 'number') { - return res.status(400).send({ - message: 'if specified, run_memory_limit must be a number', - }); - } - - if ( - config.run_memory_limit >= 0 && - (run_memory_limit > config.run_memory_limit || run_memory_limit < 0) - ) { - return res.status(400).send({ - message: - 'run_memory_limit cannot exceed the configured limit of ' + - config.run_memory_limit, - }); - } - } - - const rt = runtime.get_latest_runtime_matching_language_version( - language, - version - ); - - if (rt === undefined) { - return res.status(400).send({ - message: `${language}-${version} runtime is unknown`, - }); - } - - const job = new Job({ - runtime: rt, - alias: language, - files: files, - args: args || [], - stdin: stdin || '', - timeouts: { - run: run_timeout || 3000, - compile: compile_timeout || 10000, - }, - memory_limits: { - run: run_memory_limit || config.run_memory_limit, - compile: compile_memory_limit || config.compile_memory_limit, - }, - }); - - await job.prime(); - - const result = await job.execute(); - - await job.cleanup(); - - return res.status(200).send(result); }); router.get('/runtimes', (req, res) => { diff --git a/api/src/index.js b/api/src/index.js index ef16916..afd4d15 100644 --- a/api/src/index.js +++ 
b/api/src/index.js @@ -2,6 +2,7 @@ require('nocamel'); const Logger = require('logplease'); const express = require('express'); +const expressWs = require('express-ws'); const globals = require('./globals'); const config = require('./config'); const path = require('path'); @@ -12,6 +13,9 @@ const runtime = require('./runtime'); const logger = Logger.create('index'); const app = express(); +expressWs(app); + + (async () => { logger.info('Setting loglevel to', config.log_level); diff --git a/api/src/job.js b/api/src/job.js index d4b90ea..8001a76 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -69,7 +69,7 @@ class Job { logger.debug('Primed job'); } - async safe_call(file, args, timeout, memory_limit) { + async safe_call(file, args, timeout, memory_limit, eventBus = null) { return new Promise((resolve, reject) => { const nonetwork = config.disable_networking ? ['nosocket'] : []; @@ -102,9 +102,15 @@ class Job { detached: true, //give this process its own process group }); - proc.stdin.write(this.stdin); - proc.stdin.end(); - proc.stdin.destroy(); + if(eventBus === null){ + proc.stdin.write(this.stdin); + proc.stdin.end(); + proc.stdin.destroy(); + }else{ + eventBus.on("stdin", (data) => { + proc.stdin.write(data); + }) + } const kill_timeout = set_timeout( _ => proc.kill('SIGKILL'), @@ -115,6 +121,7 @@ class Job { if (stderr.length > config.output_max_size) { proc.kill('SIGKILL'); } else { + if(eventBus !== null) eventBus.emit("stderr", data); stderr += data; output += data; } @@ -124,6 +131,7 @@ class Job { if (stdout.length > config.output_max_size) { proc.kill('SIGKILL'); } else { + if(eventBus !== null) eventBus.emit("stdout", data); stdout += data; output += data; } @@ -196,6 +204,49 @@ class Job { }; } + async execute_interactive(eventBus){ + if (this.state !== job_states.PRIMED) { + throw new Error( + 'Job must be in primed state, current state: ' + + this.state.toString() + ); + } + + logger.info( + `Interactively executing job uuid=${this.uuid} uid=${this.uid} gid=${ + this.gid + } runtime=${this.runtime.toString()}` + ); + + if(this.runtime.compiled){ + eventBus.emit("stage", "compile") + const {error, code, signal} = await this.safe_call( + path.join(this.runtime.pkgdir, 'compile'), + this.files.map(x => x.name), + this.timeouts.compile, + this.memory_limits.compile, + eventBus + ) + + eventBus.emit("exit", "compile", {error, code, signal}) + } + + logger.debug('Running'); + eventBus.emit("stage", "run") + const {error, code, signal} = await this.safe_call( + path.join(this.runtime.pkgdir, 'run'), + [this.files[0].name, ...this.args], + this.timeouts.run, + this.memory_limits.run, + eventBus + ); + + eventBus.emit("exit", "run", {error, code, signal}) + + + this.state = job_states.EXECUTED; + } + async cleanup_processes() { let processes = [1]; From f58927d79a6a3d57b1773d8d717c6204a142bba7 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Fri, 16 Jul 2021 23:12:46 +1200 Subject: [PATCH 02/35] script: correct typo and add additional rebuild command --- piston | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/piston b/piston index 2dc36fa..7e3a469 100755 --- a/piston +++ b/piston @@ -35,11 +35,12 @@ case $1 in echo " clean-pkgs Clean any package build artifacts on disk" echo " clean-repo Remove all packages from local repo" echo " build-pkg Build a package" + echo " rebuild Build and restart the docker container" else echo " Switch to developement environment for more info" - echo " > piston switch dev" + echo " > piston select dev" fi ;; @@ -52,6 +53,8 @@ 
case $1 in start) docker_compose up -d ;; stop) docker_compose down ;; + rebuild) docker_compose build && docker_compose up -d ;; + update) git pull docker_compose pull From 3436648add766bea162e46b3f160402701f973f1 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Sat, 17 Jul 2021 00:22:55 +1200 Subject: [PATCH 03/35] api: signaling process via ws --- api/src/api/v2.js | 48 +++++++++++++++++++++++++++++++---------------- api/src/job.js | 16 ++++++++++++---- 2 files changed, 44 insertions(+), 20 deletions(-) diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 487b10b..76bab08 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -9,6 +9,8 @@ const { Job } = require('../job'); const package = require('../package'); const logger = require('logplease').create('api/v2'); +const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"] +// ref: https://man7.org/linux/man-pages/man7/signal.7.html function get_job(body){ const { @@ -148,27 +150,28 @@ router.ws('/connect', async (ws, req) => { try{ const msg = JSON.parse(data); - if(msg.type === "init"){ - if(job === null){ - const job = await get_job(msg); + switch(msg.type){ + case "init": + if(job === null){ + job = await get_job(msg); - await job.prime(); + await job.prime(); - ws.send(JSON.stringify({ - type: "runtime", - language: job.runtime.language, - version: job.runtime.version.raw - })) + ws.send(JSON.stringify({ + type: "runtime", + language: job.runtime.language, + version: job.runtime.version.raw + })) - await job.execute_interactive(eventBus); + await job.execute_interactive(eventBus); - ws.close(4999, "Job Completed"); + ws.close(4999, "Job Completed"); - }else{ - ws.close(4000, "Already Initialized"); - } - - }else if(msg.type === "data"){ + }else{ + ws.close(4000, "Already Initialized"); + } + break; + case "data": if(job !== null){ if(msg.stream === "stdin"){ eventBus.emit("stdin", msg.data) @@ -178,7 +181,20 @@ router.ws('/connect', async (ws, req) => { }else{ ws.close(4003, "Not yet initialized") } + break; + case "signal": + if(job !== null){ + if(SIGNALS.includes(msg.signal)){ + eventBus.emit("signal", msg.signal) + }else{ + ws.close(4005, "Invalid signal") + } + }else{ + ws.close(4003, "Not yet initialized") + } + break; } + }catch(error){ ws.send(JSON.stringify({type: "error", message: error.message})) ws.close(4002, "Notified Error") diff --git a/api/src/job.js b/api/src/job.js index 8001a76..d213bbc 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -110,7 +110,13 @@ class Job { eventBus.on("stdin", (data) => { proc.stdin.write(data); }) + + eventBus.on("kill", (signal) => { + proc.kill(signal) + }) } + + const kill_timeout = set_timeout( _ => proc.kill('SIGKILL'), @@ -118,20 +124,22 @@ class Job { ); proc.stderr.on('data', data => { - if (stderr.length > config.output_max_size) { + if(eventBus !== null) { + eventBus.emit("stderr", data); + } else if (stderr.length > config.output_max_size) { proc.kill('SIGKILL'); } else { - if(eventBus !== null) eventBus.emit("stderr", data); stderr += data; output += data; } }); proc.stdout.on('data', data => { - if (stdout.length > config.output_max_size) { + if(eventBus !== null){ + eventBus.emit("stdout", data); + } else if (stdout.length > 
config.output_max_size) { proc.kill('SIGKILL'); } else { - if(eventBus !== null) eventBus.emit("stdout", data); stdout += data; output += data; } From 4933577dae4833db342c88d76800a24d5ae237c5 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Sat, 17 Jul 2021 00:23:45 +1200 Subject: [PATCH 04/35] cli: interactive run with -t flag --- cli/commands/execute.js | 134 +++++++++++++++++++++++++++++++++++++--- cli/package-lock.json | 27 ++++++++ cli/package.json | 1 + 3 files changed, 153 insertions(+), 9 deletions(-) diff --git a/cli/commands/execute.js b/cli/commands/execute.js index e273548..abb1f63 100644 --- a/cli/commands/execute.js +++ b/cli/commands/execute.js @@ -1,7 +1,10 @@ -//const fetch = require('node-fetch'); const fs = require('fs'); const path = require('path'); const chalk = require('chalk'); +const WebSocket = require('ws'); + +const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"] + exports.command = ['execute [args..]']; exports.aliases = ['run']; @@ -35,17 +38,115 @@ exports.builder = { alias: ['f'], array: true, desc: 'Additional files to add', + }, + interactive: { + boolean: true, + alias: ['t'], + desc: 'Run interactively using WebSocket transport' + }, + status: { + boolean: true, + alias: ['s'], + desc: 'Output additional status to stderr' } }; -exports.handler = async (argv) => { - const files = [...(argv.files || []),argv.file] - .map(file_path => { - return { - name: path.basename(file_path), - content: fs.readFileSync(file_path).toString() - }; - }); +async function handle_interactive(files, argv){ + const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect") + + const log_message = (process.stderr.isTTY && argv.status) ? 
console.error : ()=>{}; + + process.on("exit", ()=>{ + ws.close(); + process.stdin.end(); + process.stdin.destroy(); + process.exit(); + }) + + for(const signal of SIGNALS){ + process.on(signal, ()=>{ + ws.send(JSON.stringify({type: 'signal', signal})) + }) + } + + + + ws.on('open', ()=>{ + const request = { + type: "init", + language: argv.language, + version: argv['language_version'], + files: files, + args: argv.args, + compile_timeout: argv.ct, + run_timeout: argv.rt + } + + ws.send(JSON.stringify(request)) + log_message(chalk.white.bold("Connected")) + + process.stdin.resume(); + + process.stdin.on("data", (data) => { + ws.send(JSON.stringify({ + type: "data", + stream: "stdin", + data: data.toString() + })) + }) + }) + + ws.on("close", (code, reason)=>{ + log_message( + chalk.white.bold("Disconnected: "), + chalk.white.bold("Reason: "), + chalk.yellow(`"${reason}"`), + chalk.white.bold("Code: "), + chalk.yellow(`"${code}"`), + ) + process.stdin.pause() + }) + + ws.on('message', function(data){ + const msg = JSON.parse(data); + + switch(msg.type){ + case "runtime": + log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`)) + break; + case "stage": + log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage)) + break; + case "data": + if(msg.stream == "stdout") process.stdout.write(msg.data) + else if(msg.stream == "stderr") process.stderr.write(msg.data) + else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data) + break; + case "exit": + if(msg.signal === null) + log_message( + chalk.white.bold("Stage"), + chalk.yellow(msg.stage), + chalk.white.bold("exited with code"), + chalk.yellow(msg.code) + ) + else + log_message( + chalk.white.bold("Stage"), + chalk.yellow(msg.stage), + chalk.white.bold("exited with signal"), + chalk.yellow(msg.signal) + ) + break; + default: + log_message(chalk.red.bold("Unknown message:"), msg) + } + }) + +} + +async function run_non_interactively(files, argv) { + const stdin = (argv.stdin && await new Promise((resolve, _) => { let data = ''; @@ -99,3 +200,18 @@ exports.handler = async (argv) => { step('Run', response.run); } + +exports.handler = async (argv) => { + const files = [...(argv.files || []),argv.file] + .map(file_path => { + return { + name: path.basename(file_path), + content: fs.readFileSync(file_path).toString() + }; + }); + + if(argv.interactive) await handle_interactive(files, argv); + else await run_non_interactively(files, argv); +} + + diff --git a/cli/package-lock.json b/cli/package-lock.json index d564e5f..76a2cc6 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -14,6 +14,7 @@ "minimatch": "^3.0.4", "nocamel": "^1.0.2", "semver": "^7.3.5", + "ws": "^7.5.3", "yargs": "^16.2.0" } }, @@ -243,6 +244,26 @@ "node": ">=10" } }, + "node_modules/ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "engines": { + "node": ">=8.3.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": "^5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, "node_modules/y18n": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", @@ -455,6 +476,12 @@ "strip-ansi": "^6.0.0" } }, + "ws": { + "version": "7.5.3", + "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz", + "integrity": 
"sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==", + "requires": {} + }, "y18n": { "version": "5.0.5", "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz", diff --git a/cli/package.json b/cli/package.json index 6df989d..9dcf061 100644 --- a/cli/package.json +++ b/cli/package.json @@ -10,6 +10,7 @@ "minimatch": "^3.0.4", "nocamel": "^1.0.2", "semver": "^7.3.5", + "ws": "^7.5.3", "yargs": "^16.2.0" } } From 5ace2bf0e42e986f5d8639629dbde06fec891867 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Sat, 17 Jul 2021 00:25:19 +1200 Subject: [PATCH 05/35] api: websocket init timeout --- api/src/api/v2.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 76bab08..0c34d70 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -210,8 +210,8 @@ router.ws('/connect', async (ws, req) => { setTimeout(()=>{ //Terminate the socket after 1 second, if not initialized. - //if(job === null) - // ws.close(4001, "Initialization Timeout"); + if(job === null) + ws.close(4001, "Initialization Timeout"); }, 1000) }) From 230cb3abe1e5b57715bd60f6bd4760a7ca103bad Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Sat, 17 Jul 2021 01:00:44 +1200 Subject: [PATCH 06/35] bump versions major --- api/package.json | 2 +- cli/package.json | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/api/package.json b/api/package.json index 0c32198..e8e5b5d 100644 --- a/api/package.json +++ b/api/package.json @@ -1,6 +1,6 @@ { "name": "piston-api", - "version": "3.0.0", + "version": "3.1.0", "description": "API for piston - a high performance code execution engine", "main": "src/index.js", "dependencies": { diff --git a/cli/package.json b/cli/package.json index 9dcf061..90e3e12 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,6 +1,6 @@ { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "description": "Piston Execution Engine CLI tools", "main": "index.js", "license": "MIT", From 2386684a050109e5f5236bb340b805ac0a4c243c Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Fri, 20 Aug 2021 01:05:13 +1200 Subject: [PATCH 07/35] api: fix file cleanup edge case Processes could still be spawned writing files after the app has cleaned the dir out, and is ready to clear it out. Dumb edge case, but oh well. 
--- api/src/job.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/api/src/job.js b/api/src/job.js index bb78448..07a7ee4 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -280,10 +280,8 @@ class Job { async cleanup() { logger.info(`Cleaning up job uuid=${this.uuid}`); - await Promise.all([ - this.cleanup_processes(), - this.cleanup_filesystem(), - ]); + await this.cleanup_processes(); + await this.cleanup_filesystem(); } } From c699688b36b7050af3a5001ffa7f786c8f33cc07 Mon Sep 17 00:00:00 2001 From: Hydrazer <73801166+Hydrazer@users.noreply.github.com> Date: Tue, 31 Aug 2021 09:08:13 -0600 Subject: [PATCH 08/35] fixed slurp for COW lang forked the original repo by BigZaphod and fixed the slurping issue when reading stdin for the Moo and oom commands --- packages/cow/1.0.0/build.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/cow/1.0.0/build.sh b/packages/cow/1.0.0/build.sh index 3bf5938..4753cae 100755 --- a/packages/cow/1.0.0/build.sh +++ b/packages/cow/1.0.0/build.sh @@ -1,7 +1,7 @@ #!/usr/bin/env bash # Grab the latest cow source from github -git clone -q https://github.com/BigZaphod/COW.git cow +git clone -q https://github.com/Hydrazer/COW.git cow # Generate the cow binary into bin mkdir -p bin From 30f2715c0197c32f2ac709990278d0cccfcd89df Mon Sep 17 00:00:00 2001 From: Brikaa Date: Tue, 7 Sep 2021 13:41:51 +0200 Subject: [PATCH 09/35] Add NodeJS version to docs --- readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme.md b/readme.md index 01f3a2a..f092e16 100644 --- a/readme.md +++ b/readme.md @@ -100,7 +100,7 @@ POST https://emkc.org/api/v2/piston/execute - Docker - Docker Compose -- Node JS +- Node JS (>= 13, preferably >= 15) ### After system dependencies are installed, clone this repository: From dc80ed3a1d35d09cb73d2ba8a5e21e9815dc8eb1 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Tue, 7 Sep 2021 13:50:10 +0200 Subject: [PATCH 10/35] Document testing packages locally --- readme.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/readme.md b/readme.md index f092e16..1e45681 100644 --- a/readme.md +++ b/readme.md @@ -139,6 +139,21 @@ docker run \ ghcr.io/engineer-man/piston ``` +## Piston for testing packages locally + +### Host System Package Dependencies + +- Same as [All In One](#All-In-One) + + +```sh +# Build the Docker containers +./piston start + +# For more help +./piston help +``` +
# Usage From 528eb0e2630d9fcaec8776bf43fb01ba4bb6a9f8 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Tue, 7 Sep 2021 13:53:37 +0200 Subject: [PATCH 11/35] Installation --- readme.md | 1 + 1 file changed, 1 insertion(+) diff --git a/readme.md b/readme.md index 1e45681..3c9c8a8 100644 --- a/readme.md +++ b/readme.md @@ -145,6 +145,7 @@ docker run \ - Same as [All In One](#All-In-One) +### Installation ```sh # Build the Docker containers From 3c15de61447a89772ff1cd9978a4ec8e2fa156b5 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Tue, 7 Sep 2021 19:21:52 +0200 Subject: [PATCH 12/35] pkg(pwsh-7.1.4): Added Powershell 7.1.4 --- packages/pwsh/7.1.4/build.sh | 6 ++++++ packages/pwsh/7.1.4/environment | 1 + packages/pwsh/7.1.4/metadata.json | 10 ++++++++++ packages/pwsh/7.1.4/run | 3 +++ packages/pwsh/7.1.4/test.ps1 | 1 + 5 files changed, 21 insertions(+) create mode 100755 packages/pwsh/7.1.4/build.sh create mode 100644 packages/pwsh/7.1.4/environment create mode 100644 packages/pwsh/7.1.4/metadata.json create mode 100644 packages/pwsh/7.1.4/run create mode 100644 packages/pwsh/7.1.4/test.ps1 diff --git a/packages/pwsh/7.1.4/build.sh b/packages/pwsh/7.1.4/build.sh new file mode 100755 index 0000000..3f4b070 --- /dev/null +++ b/packages/pwsh/7.1.4/build.sh @@ -0,0 +1,6 @@ +#!/bin/bash +curl -L https://github.com/PowerShell/PowerShell/releases/download/v7.1.4/powershell-7.1.4-linux-x64.tar.gz -o powershell.tar.gz +tar zxf powershell.tar.gz +rm powershell.tar.gz + +chmod +x pwsh diff --git a/packages/pwsh/7.1.4/environment b/packages/pwsh/7.1.4/environment new file mode 100644 index 0000000..42644cd --- /dev/null +++ b/packages/pwsh/7.1.4/environment @@ -0,0 +1 @@ +export PATH=$PWD:$PATH diff --git a/packages/pwsh/7.1.4/metadata.json b/packages/pwsh/7.1.4/metadata.json new file mode 100644 index 0000000..da90f76 --- /dev/null +++ b/packages/pwsh/7.1.4/metadata.json @@ -0,0 +1,10 @@ +{ + "language": "pwsh", + "version": "7.1.4", + "provides": [ + { + "language": "powershell", + "aliases": ["ps", "pwsh", "ps1"] + } + ] +} diff --git a/packages/pwsh/7.1.4/run b/packages/pwsh/7.1.4/run new file mode 100644 index 0000000..02a0be7 --- /dev/null +++ b/packages/pwsh/7.1.4/run @@ -0,0 +1,3 @@ +#!/bin/bash + +pwsh "$@" diff --git a/packages/pwsh/7.1.4/test.ps1 b/packages/pwsh/7.1.4/test.ps1 new file mode 100644 index 0000000..f0a4be3 --- /dev/null +++ b/packages/pwsh/7.1.4/test.ps1 @@ -0,0 +1 @@ +echo "OK" From 0a6e5140954c98fd654bbc469ba7f886f556b727 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Tue, 7 Sep 2021 19:23:07 +0200 Subject: [PATCH 13/35] Add powershell to readme --- readme.md | 1 + 1 file changed, 1 insertion(+) diff --git a/readme.md b/readme.md index 3c9c8a8..2d1f3d6 100644 --- a/readme.md +++ b/readme.md @@ -346,6 +346,7 @@ Content-Type: application/json `perl`, `php`, `ponylang`, +`powershell`, `prolog`, `pure`, `pyth`, From 4577a02401cef45deb6b3c96e39b4cd7102dbff4 Mon Sep 17 00:00:00 2001 From: Brian Seymour Date: Wed, 8 Sep 2021 12:31:06 -0500 Subject: [PATCH 14/35] Update readme.md --- readme.md | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/readme.md b/readme.md index 3c9c8a8..f7dec40 100644 --- a/readme.md +++ b/readme.md @@ -88,7 +88,9 @@ POST https://emkc.org/api/v2/piston/execute > Important Note: The Piston API is rate limited to 5 requests per second. 
If you have a need for more requests than that > and it's for a good cause, please reach out to me (EngineerMan#0001) on [Discord](https://discord.gg/engineerman) -> so we can discuss potentially getting you an unlimited key. +> so we can discuss potentially getting you an unlimited key. What is and isn't a good cause is up to me, but, in general +> if your project is a) open source, b) helping people at not cost to them, and c) not likely to use tons of resources +> thereby impairing another's ability to enjoy Piston, you'll likely be granted a key.
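Given the documented 5 requests per second budget, callers that submit jobs in bulk usually need some client-side pacing. A rough sketch of one way to do that; the helper below is illustrative and not part of Piston or its CLI.

```js
// Illustrative only: space out calls to the public endpoint so they stay
// within the documented 5 requests/second budget.
const axios = require('axios');

const MIN_INTERVAL_MS = 200; // 5 requests per second
let queue = Promise.resolve();

function execute(body) {
    // Each call waits for the previous one to finish, plus a short gap.
    const result = queue.then(() =>
        axios.post('https://emkc.org/api/v2/piston/execute', body)
    );
    // Keep the chain alive even if a request fails.
    queue = result.catch(() => {}).then(
        () => new Promise(resolve => setTimeout(resolve, MIN_INTERVAL_MS))
    );
    return result;
}
```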
From e197ca5b7a382dd77bd10a5c180b1fc4033029c6 Mon Sep 17 00:00:00 2001 From: Brian Seymour Date: Wed, 8 Sep 2021 12:32:04 -0500 Subject: [PATCH 15/35] Update readme.md --- readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme.md b/readme.md index f7dec40..b9d8e12 100644 --- a/readme.md +++ b/readme.md @@ -89,7 +89,7 @@ POST https://emkc.org/api/v2/piston/execute > Important Note: The Piston API is rate limited to 5 requests per second. If you have a need for more requests than that > and it's for a good cause, please reach out to me (EngineerMan#0001) on [Discord](https://discord.gg/engineerman) > so we can discuss potentially getting you an unlimited key. What is and isn't a good cause is up to me, but, in general -> if your project is a) open source, b) helping people at not cost to them, and c) not likely to use tons of resources +> if your project is a) open source, b) helping people at no cost to them, and c) not likely to use tons of resources > thereby impairing another's ability to enjoy Piston, you'll likely be granted a key.
From 08b2fa094a12653af20846658b87d93d8fc533b6 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Thu, 9 Sep 2021 20:20:01 +0200 Subject: [PATCH 16/35] pkg(mono-6.12.0): Added mono-basic 7.4 --- packages/mono/6.12.0/build.sh | 18 +++++++++++++++--- packages/mono/6.12.0/compile | 16 ++++++++++++++-- packages/mono/6.12.0/environment | 2 +- packages/mono/6.12.0/metadata.json | 4 ++++ packages/mono/6.12.0/test.vb | 9 +++++++++ 5 files changed, 43 insertions(+), 6 deletions(-) create mode 100644 packages/mono/6.12.0/test.vb diff --git a/packages/mono/6.12.0/build.sh b/packages/mono/6.12.0/build.sh index 2cecc07..7bb4b63 100755 --- a/packages/mono/6.12.0/build.sh +++ b/packages/mono/6.12.0/build.sh @@ -2,19 +2,31 @@ PREFIX=$(realpath $(dirname $0)) -mkdir -p build/mono +mkdir -p build/mono build/mono-basic cd build curl "https://download.mono-project.com/sources/mono/mono-6.12.0.122.tar.xz" -o mono.tar.xz +curl -L "https://github.com/mono/mono-basic/archive/refs/tags/4.7.tar.gz" -o mono-basic.tar.gz tar xf mono.tar.xz --strip-components=1 -C mono +tar xf mono-basic.tar.gz --strip-components=1 -C mono-basic +# Compiling Mono cd mono ./configure --prefix "$PREFIX" -make -j$(nproc) +make -j$(nproc) make install -j$(nproc) +export PATH="$PREFIX/bin:$PATH" # To be able to use mono commands + +# Compiling mono-basic +cd ../mono-basic +./configure --prefix="$PREFIX" + +make -j$(nproc) PLATFORM="linux" # Avoids conflict with the $PLATFORM variable we have +make install -j$(nproc) PLATFORM="linux" + +# Remove redundant files cd ../../ rm -rf build - diff --git a/packages/mono/6.12.0/compile b/packages/mono/6.12.0/compile index 8728714..5246bc2 100644 --- a/packages/mono/6.12.0/compile +++ b/packages/mono/6.12.0/compile @@ -1,4 +1,16 @@ #!/bin/bash -rename 's/$/\.cs/' "$@" # Add .cs extension -csc -out:out *.cs +case "${PISTON_LANGUAGE}" in + csharp) + rename 's/$/\.cs/' "$@" # Add .cs extension + csc -out:out *.cs + ;; + basic) + rename 's/$/\.vb/' "$@" # Add .vb extension + vbnc -out:out *.vb + ;; + *) + echo "How did you get here? 
(${PISTON_LANGUAGE})" + exit 1 + ;; +esac diff --git a/packages/mono/6.12.0/environment b/packages/mono/6.12.0/environment index bd0ff98..977a5e8 100644 --- a/packages/mono/6.12.0/environment +++ b/packages/mono/6.12.0/environment @@ -1 +1 @@ -export PATH=$PWD/bin:$PATH \ No newline at end of file +export PATH=$PWD/bin:$PATH diff --git a/packages/mono/6.12.0/metadata.json b/packages/mono/6.12.0/metadata.json index a053884..4d09ae7 100644 --- a/packages/mono/6.12.0/metadata.json +++ b/packages/mono/6.12.0/metadata.json @@ -5,6 +5,10 @@ { "language": "csharp", "aliases": ["mono", "mono-csharp", "mono-c#", "mono-cs", "c#", "cs"] + }, + { + "language": "basic", + "aliases": ["vb", "mono-vb", "mono-basic", "visual-basic", "visual basic"] } ] } diff --git a/packages/mono/6.12.0/test.vb b/packages/mono/6.12.0/test.vb new file mode 100644 index 0000000..291042e --- /dev/null +++ b/packages/mono/6.12.0/test.vb @@ -0,0 +1,9 @@ +Imports System + +Module Module1 + + Sub Main() + Console.WriteLine("OK") + End Sub + +End Module From dbf89dbb582e51d856e54fd00a81a45562b7fe68 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Thu, 9 Sep 2021 20:21:41 +0200 Subject: [PATCH 17/35] Add basic to readme --- readme.md | 1 + 1 file changed, 1 insertion(+) diff --git a/readme.md b/readme.md index b9d8e12..4f75115 100644 --- a/readme.md +++ b/readme.md @@ -360,6 +360,7 @@ Content-Type: application/json `scala`, `swift`, `typescript`, +`basic`, `vlang`, `yeethon`, `zig`, From b3772c9502207f264357ca219b56d4d76ab376e3 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Fri, 10 Sep 2021 15:33:36 +0200 Subject: [PATCH 18/35] pkg(mono-6.12.0): redirect mono error to stderr --- packages/mono/6.12.0/compile | 11 +++++++++-- 1 file changed, 9 insertions(+), 2 deletions(-) diff --git a/packages/mono/6.12.0/compile b/packages/mono/6.12.0/compile index 5246bc2..e3ae230 100644 --- a/packages/mono/6.12.0/compile +++ b/packages/mono/6.12.0/compile @@ -1,13 +1,20 @@ #!/bin/bash +check_errors () { + grep -q 'error [A-Z]\+[0-9]\+:' check.txt && cat check.txt 1>&2 || cat check.txt + rm check.txt +} + case "${PISTON_LANGUAGE}" in csharp) rename 's/$/\.cs/' "$@" # Add .cs extension - csc -out:out *.cs + csc -out:out *.cs > check.txt + check_errors ;; basic) rename 's/$/\.vb/' "$@" # Add .vb extension - vbnc -out:out *.vb + vbnc -out:out *.vb > check.txt + check_errors ;; *) echo "How did you get here? 
(${PISTON_LANGUAGE})" From 5004635c55d5de04c60cb1e1e313cba4c210195e Mon Sep 17 00:00:00 2001 From: Brikaa Date: Sat, 11 Sep 2021 14:20:36 +0200 Subject: [PATCH 19/35] Added bash shell option in piston script --- piston | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/piston b/piston index 2dc36fa..f979f44 100755 --- a/piston +++ b/piston @@ -23,24 +23,25 @@ case $1 in echo " start Starts piston" echo " stop Stops piston" echo " restart Restarts piston" + echo " bash Opens a bash shell for the piston_api container" echo echo " update Fetches and applies latest updates" echo echo " Passthrough to piston cli tool" - echo + echo echo "Development Commands:" - + if [ $PISTON_ENV == dev ]; then - + echo " clean-pkgs Clean any package build artifacts on disk" echo " clean-repo Remove all packages from local repo" echo " build-pkg Build a package" - + else - + echo " Switch to developement environment for more info" echo " > piston switch dev" - + fi ;; @@ -51,6 +52,7 @@ case $1 in restart) docker_compose restart ;; start) docker_compose up -d ;; stop) docker_compose down ;; + bash) docker_compose exec api /bin/bash ;; update) git pull @@ -74,4 +76,4 @@ case $1 in cd ../ node cli/index.js "$@" ;; -esac \ No newline at end of file +esac From 1250cf213adce6bb5d26615506d9823463c0a371 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Sun, 12 Sep 2021 14:38:57 +0200 Subject: [PATCH 20/35] pkg(rscript-4.1.1): Added R 4.1.1 --- api/Dockerfile | 2 +- packages/rscript/4.1.1/build.sh | 16 ++++++++++++++++ packages/rscript/4.1.1/environment | 1 + packages/rscript/4.1.1/metadata.json | 5 +++++ packages/rscript/4.1.1/run | 2 ++ packages/rscript/4.1.1/test.r | 1 + repo/Dockerfile | 4 ++-- 7 files changed, 28 insertions(+), 3 deletions(-) create mode 100755 packages/rscript/4.1.1/build.sh create mode 100644 packages/rscript/4.1.1/environment create mode 100644 packages/rscript/4.1.1/metadata.json create mode 100644 packages/rscript/4.1.1/run create mode 100644 packages/rscript/4.1.1/test.r diff --git a/api/Dockerfile b/api/Dockerfile index 668c54a..ec0d2a8 100644 --- a/api/Dockerfile +++ b/api/Dockerfile @@ -13,7 +13,7 @@ RUN apt-get update && \ libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \ libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \ - libsundials-dev && \ + libsundials-dev libpcre2-dev && \ rm -rf /var/lib/apt/lists/* RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen diff --git a/packages/rscript/4.1.1/build.sh b/packages/rscript/4.1.1/build.sh new file mode 100755 index 0000000..9837c22 --- /dev/null +++ b/packages/rscript/4.1.1/build.sh @@ -0,0 +1,16 @@ +#!/bin/bash + +PREFIX=$(realpath $(dirname $0)) + +mkdir build +cd build + +curl https://cloud.r-project.org/src/base/R-4/R-4.1.1.tar.gz -o R.tar.gz +tar xzf R.tar.gz --strip-components 1 + +./configure --prefix="$PREFIX" --with-x=no +make -j$(nproc) +make install -j$(nproc) + +cd ../ +rm -rf build diff --git a/packages/rscript/4.1.1/environment b/packages/rscript/4.1.1/environment new file mode 100644 index 0000000..977a5e8 --- /dev/null +++ b/packages/rscript/4.1.1/environment @@ -0,0 +1 @@ +export PATH=$PWD/bin:$PATH diff --git a/packages/rscript/4.1.1/metadata.json b/packages/rscript/4.1.1/metadata.json new file mode 100644 index 0000000..db16a76 --- /dev/null +++ b/packages/rscript/4.1.1/metadata.json @@ -0,0 +1,5 @@ +{ + "language": "rscript", + "version": "4.1.1", + "aliases": 
["r"] +} diff --git a/packages/rscript/4.1.1/run b/packages/rscript/4.1.1/run new file mode 100644 index 0000000..d122eb8 --- /dev/null +++ b/packages/rscript/4.1.1/run @@ -0,0 +1,2 @@ +#/bin/bash +Rscript "$@" diff --git a/packages/rscript/4.1.1/test.r b/packages/rscript/4.1.1/test.r new file mode 100644 index 0000000..9273f27 --- /dev/null +++ b/packages/rscript/4.1.1/test.r @@ -0,0 +1 @@ +cat('OK') diff --git a/repo/Dockerfile b/repo/Dockerfile index 106fef4..56ca59d 100644 --- a/repo/Dockerfile +++ b/repo/Dockerfile @@ -8,7 +8,8 @@ RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-d util-linux pciutils usbutils coreutils binutils findutils grep libncurses5-dev \ libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev gfortran\ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev libfftw3-dev \ - libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev && \ + libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev \ + libbz2-dev liblzma-dev libpcre2-dev && \ ln -sf /bin/bash /bin/sh && \ rm -rf /var/lib/apt/lists/* && \ update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2 @@ -17,4 +18,3 @@ ADD entrypoint.sh mkindex.sh / ENTRYPOINT ["bash","/entrypoint.sh"] CMD ["--no-build"] - From 864e94739f29afa241b54b03055a3c15aa8d762e Mon Sep 17 00:00:00 2001 From: Brikaa Date: Sun, 12 Sep 2021 14:39:56 +0200 Subject: [PATCH 21/35] Add rscript to readme --- readme.md | 1 + 1 file changed, 1 insertion(+) diff --git a/readme.md b/readme.md index 919025c..f43f70a 100644 --- a/readme.md +++ b/readme.md @@ -356,6 +356,7 @@ Content-Type: application/json `python2`, `raku`, `rockstar`, +`rscript`, `ruby`, `rust`, `scala`, From 252987932c69cbb1104c94dbb2a253db481353d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Sun, 12 Sep 2021 12:57:09 +0000 Subject: [PATCH 22/35] build(deps): bump axios from 0.21.1 to 0.21.2 in /cli Bumps [axios](https://github.com/axios/axios) from 0.21.1 to 0.21.2. - [Release notes](https://github.com/axios/axios/releases) - [Changelog](https://github.com/axios/axios/blob/master/CHANGELOG.md) - [Commits](https://github.com/axios/axios/compare/v0.21.1...v0.21.2) --- updated-dependencies: - dependency-name: axios dependency-type: direct:production ... 
Signed-off-by: dependabot[bot] --- cli/package-lock.json | 41 ++++++++++++++++++++++++++--------------- cli/package.json | 2 +- 2 files changed, 27 insertions(+), 16 deletions(-) diff --git a/cli/package-lock.json b/cli/package-lock.json index d564e5f..ad65043 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -9,7 +9,7 @@ "version": "1.0.0", "license": "MIT", "dependencies": { - "axios": "^0.21.1", + "axios": "^0.21.2", "chalk": "^4.1.0", "minimatch": "^3.0.4", "nocamel": "^1.0.2", @@ -37,11 +37,11 @@ } }, "node_modules/axios": { - "version": "0.21.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", - "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", + "version": "0.21.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz", + "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==", "dependencies": { - "follow-redirects": "^1.10.0" + "follow-redirects": "^1.14.0" } }, "node_modules/balanced-match": { @@ -115,11 +115,22 @@ } }, "node_modules/follow-redirects": { - "version": "1.13.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", - "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==", + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz", + "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], "engines": { "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } } }, "node_modules/get-caller-file": { @@ -297,11 +308,11 @@ } }, "axios": { - "version": "0.21.1", - "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.1.tgz", - "integrity": "sha512-dKQiRHxGD9PPRIUNIWvZhPTPpl1rf/OxTYKsqKUDjBwYylTvV7SjSHJb9ratfyzM6wCdLCOYLzs73qpg5c4iGA==", + "version": "0.21.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.21.2.tgz", + "integrity": "sha512-87otirqUw3e8CzHTMO+/9kh/FSgXt/eVDvipijwDtEuwbkySWZ9SBm6VEubmJ/kLKEoLQV/POhxXFb66bfekfg==", "requires": { - "follow-redirects": "^1.10.0" + "follow-redirects": "^1.14.0" } }, "balanced-match": { @@ -366,9 +377,9 @@ "integrity": "sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==" }, "follow-redirects": { - "version": "1.13.3", - "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.13.3.tgz", - "integrity": "sha512-DUgl6+HDzB0iEptNQEXLx/KhTmDb8tZUHSeLqpnjpknR70H0nC2t9N73BK6fN4hOvJ84pKlIQVQ4k5FFlBedKA==" + "version": "1.14.3", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.14.3.tgz", + "integrity": "sha512-3MkHxknWMUtb23apkgz/83fDoe+y+qr0TdgacGIA7bew+QLBo3vdgEN2xEsuXNivpFy4CyDhBBZnNZOtalmenw==" }, "get-caller-file": { "version": "2.0.5", diff --git a/cli/package.json b/cli/package.json index 6df989d..c244403 100644 --- a/cli/package.json +++ b/cli/package.json @@ -5,7 +5,7 @@ "main": "index.js", "license": "MIT", "dependencies": { - "axios": "^0.21.1", + "axios": "^0.21.2", "chalk": "^4.1.0", "minimatch": "^3.0.4", "nocamel": "^1.0.2", From 1566b4957412658b4a4ee979388da065484da4cb Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Mon, 13 Sep 2021 01:00:54 +1200 Subject: [PATCH 23/35] Build repo-builder if required --- 
.github/workflows/package-pr.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml index b3027ec..a7f74c0 100644 --- a/.github/workflows/package-pr.yaml +++ b/.github/workflows/package-pr.yaml @@ -55,7 +55,9 @@ jobs: run: | PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u) echo "Packages: $PACKAGES" - docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES + docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest + docker build -t repo-builder repo + docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES ls -la packages - name: Upload package as artifact From d630b5ebe7bc6b50fd5ef2298f599d7d5d6a4cf7 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Mon, 13 Sep 2021 01:01:59 +1200 Subject: [PATCH 24/35] Build repo-builder if required --- .github/workflows/package-push.yaml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/package-push.yaml b/.github/workflows/package-push.yaml index ad33f3e..bbb44af 100644 --- a/.github/workflows/package-push.yaml +++ b/.github/workflows/package-push.yaml @@ -33,7 +33,9 @@ jobs: run: | PACKAGES=$(jq '.[]' -r ${HOME}/files.json | awk -F/ '{ print $2 "-" $3 }' | sort -u) echo "Packages: $PACKAGES" - docker run -v "${{ github.workspace }}:/piston" docker.pkg.github.com/engineer-man/piston/repo-builder:latest --no-server $PACKAGES + docker pull docker.pkg.github.com/engineer-man/piston/repo-builder:latest + docker build -t repo-builder repo + docker run -v "${{ github.workspace }}:/piston" repo-builder --no-server $PACKAGES ls -la packages - name: Upload Packages @@ -73,4 +75,4 @@ jobs: file: index tag: pkgs overwrite: true - file_glob: true \ No newline at end of file + file_glob: true From dc20ec2bda9ccf35cb660822a2cbd553b8e05b90 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Mon, 13 Sep 2021 01:09:38 +1200 Subject: [PATCH 25/35] ci: Rebuild API container if required when testing --- .github/workflows/package-pr.yaml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/package-pr.yaml b/.github/workflows/package-pr.yaml index a7f74c0..bb264a3 100644 --- a/.github/workflows/package-pr.yaml +++ b/.github/workflows/package-pr.yaml @@ -91,7 +91,9 @@ jobs: run: | ls -la docker run -v $(pwd)'/repo:/piston/repo' -v $(pwd)'/packages:/piston/packages' -d --name repo docker.pkg.github.com/engineer-man/piston/repo-builder --no-build - docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api docker.pkg.github.com/engineer-man/piston/api + docker pull docker.pkg.github.com/engineer-man/piston/api + docker build -t piston-api api + docker run --network container:repo -v $(pwd)'/data:/piston' -e PISTON_LOG_LEVEL=DEBUG -e 'PISTON_REPO_URL=http://localhost:8000/index' -d --name api piston-api echo Waiting for API to start.. 
docker run --network container:api appropriate/curl -s --retry 10 --retry-connrefused http://localhost:2000/api/v2/runtimes From 5e1a51c8133b9873a258822646a98571cc5d7b1b Mon Sep 17 00:00:00 2001 From: Brikaa Date: Wed, 15 Sep 2021 11:33:02 +0200 Subject: [PATCH 26/35] pkg(vlang-0.1.13): Fix command line arguments bug --- packages/vlang/0.1.13/run | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/packages/vlang/0.1.13/run b/packages/vlang/0.1.13/run index 18b6c05..c999cca 100644 --- a/packages/vlang/0.1.13/run +++ b/packages/vlang/0.1.13/run @@ -6,8 +6,9 @@ export TMPDIR="$PWD" # Put instructions to run the runtime -rename 's/$/\.v/' "$@" # Add .v extension +filename=$1 + +rename 's/$/\.v/' $filename # Add .v extension -filename=$1.v shift -v run $filename "$@" \ No newline at end of file +v run $filename.v "$@" From 7cf99fb4f2c822eb01add42e0b7201cc37d6a636 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Wed, 22 Sep 2021 11:26:35 +1200 Subject: [PATCH 27/35] Update readme.md --- readme.md | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/readme.md b/readme.md index f43f70a..a154fe6 100644 --- a/readme.md +++ b/readme.md @@ -41,6 +41,20 @@
+# Notes About Hacktoberfest + +While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs. +If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests. + +We are accepting PRs for: +* Packages - updating package versions, adding new packages +* Documentation updates +* CLI/API improvements - please discuss these with us in the Discord first + +Any queries or concerns, ping @HexF#0015 in the Discord. + +
+ # About

From 15e2e81d9681284e5c5043e86164644ec6cf653e Mon Sep 17 00:00:00 2001 From: Dan Vargas Date: Wed, 22 Sep 2021 16:31:19 -0500 Subject: [PATCH 28/35] fix stdin for normal execute --- api/src/api/v2.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 0c34d70..215453b 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -106,7 +106,7 @@ function get_job(body){ runtime: rt, alias: language, args: args || [], - stdin: '', + stdin: stdin || "", files, timeouts: { run: run_timeout || 3000, From 56a3c91a6af100fe83e8907220e17c28abbd74b2 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Sun, 26 Sep 2021 13:25:13 +0200 Subject: [PATCH 29/35] pkg(iverilog-11.0.0): Added iverilog 11.0.0 --- api/src/api/v2.js | 4 ++-- cli/package-lock.json | 4 ++-- packages/iverilog/11.0.0/build.sh | 17 +++++++++++++++++ packages/iverilog/11.0.0/compile | 4 ++++ packages/iverilog/11.0.0/environment | 2 ++ packages/iverilog/11.0.0/metadata.json | 5 +++++ packages/iverilog/11.0.0/run | 4 ++++ packages/iverilog/11.0.0/test.verilog | 7 +++++++ readme.md | 5 +++-- repo/Dockerfile | 2 +- 10 files changed, 47 insertions(+), 7 deletions(-) create mode 100755 packages/iverilog/11.0.0/build.sh create mode 100644 packages/iverilog/11.0.0/compile create mode 100644 packages/iverilog/11.0.0/environment create mode 100644 packages/iverilog/11.0.0/metadata.json create mode 100644 packages/iverilog/11.0.0/run create mode 100644 packages/iverilog/11.0.0/test.verilog diff --git a/api/src/api/v2.js b/api/src/api/v2.js index 215453b..e3e0522 100644 --- a/api/src/api/v2.js +++ b/api/src/api/v2.js @@ -146,7 +146,7 @@ router.ws('/connect', async (ws, req) => { eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status}))) ws.on("message", async (data) => { - + try{ const msg = JSON.parse(data); @@ -194,7 +194,7 @@ router.ws('/connect', async (ws, req) => { } break; } - + }catch(error){ ws.send(JSON.stringify({type: "error", message: error.message})) ws.close(4002, "Notified Error") diff --git a/cli/package-lock.json b/cli/package-lock.json index f7c2771..335ed21 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -1,12 +1,12 @@ { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "license": "MIT", "dependencies": { "axios": "^0.21.2", diff --git a/packages/iverilog/11.0.0/build.sh b/packages/iverilog/11.0.0/build.sh new file mode 100755 index 0000000..befb2fa --- /dev/null +++ b/packages/iverilog/11.0.0/build.sh @@ -0,0 +1,17 @@ +#!/bin/bash + +PREFIX=$(realpath $(dirname $0)) + +mkdir -p build/iverilog +cd build/iverilog +curl -L https://github.com/steveicarus/iverilog/archive/refs/tags/v11_0.tar.gz -o iverilog.tar.gz +tar xzf iverilog.tar.gz --strip-components=1 + +chmod +x ./autoconf.sh +./autoconf.sh +./configure --prefix="$PREFIX" +make -j$(nproc) +make install -j$(nproc) + +cd ../../ +rm -rf build diff --git a/packages/iverilog/11.0.0/compile b/packages/iverilog/11.0.0/compile new file mode 100644 index 0000000..56f4b4e --- /dev/null +++ b/packages/iverilog/11.0.0/compile @@ -0,0 +1,4 @@ +#!/bin/bash + +rename 's/$/\.v/' "$@" # Add .v extension +iverilog *.v diff --git a/packages/iverilog/11.0.0/environment b/packages/iverilog/11.0.0/environment new file mode 100644 index 0000000..b482830 --- /dev/null +++ b/packages/iverilog/11.0.0/environment @@ -0,0 +1,2 @@ +#!/bin/bash +export 
PATH=$PWD/bin:$PATH diff --git a/packages/iverilog/11.0.0/metadata.json b/packages/iverilog/11.0.0/metadata.json new file mode 100644 index 0000000..5a35bde --- /dev/null +++ b/packages/iverilog/11.0.0/metadata.json @@ -0,0 +1,5 @@ +{ + "language": "iverilog", + "version": "11.0.0", + "aliases": ["verilog", "vvp"] +} diff --git a/packages/iverilog/11.0.0/run b/packages/iverilog/11.0.0/run new file mode 100644 index 0000000..39e898c --- /dev/null +++ b/packages/iverilog/11.0.0/run @@ -0,0 +1,4 @@ +#!/bin/bash + +shift +vvp a.out "$@" diff --git a/packages/iverilog/11.0.0/test.verilog b/packages/iverilog/11.0.0/test.verilog new file mode 100644 index 0000000..88fcd7a --- /dev/null +++ b/packages/iverilog/11.0.0/test.verilog @@ -0,0 +1,7 @@ +module hello; + initial + begin + $display("OK"); + $finish ; + end +endmodule diff --git a/readme.md b/readme.md index a154fe6..2737bd9 100644 --- a/readme.md +++ b/readme.md @@ -42,10 +42,10 @@
# Notes About Hacktoberfest - + While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs. If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests. - + We are accepting PRs for: * Packages - updating package versions, adding new packages * Documentation updates @@ -343,6 +343,7 @@ Content-Type: application/json `golfscript`, `groovy`, `haskell`, +`iverilog`, `java`, `javascript`, `jelly`, diff --git a/repo/Dockerfile b/repo/Dockerfile index 56ca59d..de28c11 100644 --- a/repo/Dockerfile +++ b/repo/Dockerfile @@ -9,7 +9,7 @@ RUN apt-get update && apt-get install -y unzip autoconf build-essential libssl-d libncursesw5-dev python3-pip libgmp-dev libmpfr-dev python2 libffi-dev gfortran\ libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev libfftw3-dev \ libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev libsundials-dev \ - libbz2-dev liblzma-dev libpcre2-dev && \ + libbz2-dev liblzma-dev libpcre2-dev gperf bison flex g++ && \ ln -sf /bin/bash /bin/sh && \ rm -rf /var/lib/apt/lists/* && \ update-alternatives --install /usr/bin/python python /usr/bin/python3.7 2 From 474c986879886180a6ab72eb1497588610a29fa5 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Sun, 26 Sep 2021 14:02:03 +0200 Subject: [PATCH 30/35] Add semantic versioning in CONTRIBUTING.MD --- packages/CONTRIBUTING.MD | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/CONTRIBUTING.MD b/packages/CONTRIBUTING.MD index 813f71e..b1ed6d3 100644 --- a/packages/CONTRIBUTING.MD +++ b/packages/CONTRIBUTING.MD @@ -2,7 +2,7 @@ ## Naming Languages -Languages should be named after their interpreters, and the command line binaries you call. +Languages should be named after their interpreters, and the command line binaries you call. The language version should use semantic versioning. For example, the full name of the standard python interpreter is `CPython`, however we would name it `python`, after the main binary which it provides. In the example of NodeJS, we would call this `node`, after the main binary. 
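One practical reason to insist on semantic versions is that they let callers ask for a range rather than an exact build. A small sketch using the semver package (already a CLI dependency); the version list is hypothetical, and this is not necessarily how the API resolves versions internally.

```js
// Illustrative: semver-valid package versions allow range-based selection.
const semver = require('semver');

const installed = ['3.9.4', '3.10.0', '2.7.18']; // hypothetical python versions

console.log(semver.maxSatisfying(installed, '3.x')); // -> '3.10.0'
console.log(semver.maxSatisfying(installed, '*'));   // -> '3.10.0'
console.log(semver.maxSatisfying(installed, '2'));   // -> '2.7.18'
```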
From e5ac7a2acc3196f811fa1fe65828c5a4076f9667 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Sun, 26 Sep 2021 14:09:25 +0200 Subject: [PATCH 31/35] Add ./piston logs --- piston | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/piston b/piston index 28e18db..a14e7f5 100755 --- a/piston +++ b/piston @@ -19,6 +19,7 @@ case $1 in echo "Commands:" echo " select Select the environment" echo " docker_compose Interact directly with the docker-compose for the selected environment" + echo " logs Show docker-compose logs" echo echo " start Starts piston" echo " stop Stops piston" @@ -37,18 +38,19 @@ case $1 in echo " clean-repo Remove all packages from local repo" echo " build-pkg Build a package" echo " rebuild Build and restart the docker container" - + else echo " Switch to developement environment for more info" echo " > piston select dev" - + fi ;; select) echo "$2" > .piston_env ;; docker_compose) shift; docker_compose "$@";; + logs) docker_compose logs -f ;; restart) docker_compose restart ;; start) docker_compose up -d ;; From 1835ab5cab354e79f05e052b4c58c3ecc2ba9617 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Thu, 30 Sep 2021 08:11:47 +1300 Subject: [PATCH 32/35] Add self to license --- license | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/license b/license index 4f45aea..fd203f8 100644 --- a/license +++ b/license @@ -1,4 +1,4 @@ -Copyright (c) 2018-2021 Brian Seymour, EMKC Contributors +Copyright (c) 2018-2021 Brian Seymour, Thomas Hobson, EMKC Contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal From 5cdc71d268e8a972e8d30b224c4c191effbf5e61 Mon Sep 17 00:00:00 2001 From: Brikaa Date: Tue, 28 Sep 2021 17:29:56 +0200 Subject: [PATCH 33/35] pkg(sqlite3-3.36.0): Added sqlite3 3.36.0 --- cli/package-lock.json | 4 ++-- packages/sqlite3/3.36.0/build.sh | 10 ++++++++++ packages/sqlite3/3.36.0/environment | 2 ++ packages/sqlite3/3.36.0/metadata.json | 5 +++++ packages/sqlite3/3.36.0/run | 3 +++ packages/sqlite3/3.36.0/test.sql | 1 + readme.md | 5 +++-- 7 files changed, 26 insertions(+), 4 deletions(-) create mode 100755 packages/sqlite3/3.36.0/build.sh create mode 100644 packages/sqlite3/3.36.0/environment create mode 100644 packages/sqlite3/3.36.0/metadata.json create mode 100644 packages/sqlite3/3.36.0/run create mode 100644 packages/sqlite3/3.36.0/test.sql diff --git a/cli/package-lock.json b/cli/package-lock.json index f7c2771..335ed21 100644 --- a/cli/package-lock.json +++ b/cli/package-lock.json @@ -1,12 +1,12 @@ { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "piston-cli", - "version": "1.0.0", + "version": "1.1.0", "license": "MIT", "dependencies": { "axios": "^0.21.2", diff --git a/packages/sqlite3/3.36.0/build.sh b/packages/sqlite3/3.36.0/build.sh new file mode 100755 index 0000000..18d5b8f --- /dev/null +++ b/packages/sqlite3/3.36.0/build.sh @@ -0,0 +1,10 @@ +#!/bin/bash +PREFIX=$(realpath $(dirname $0)) + +curl https://www.sqlite.org/2021/sqlite-amalgamation-3360000.zip -o sqlite.zip +unzip -q sqlite.zip +rm -rf sqlite.zip + +gcc -DSQLITE_THREADSAFE=0 -DSQLITE_OMIT_LOAD_EXTENSION sqlite-amalgamation-3360000/shell.c sqlite-amalgamation-3360000/sqlite3.c -o sqlite3 + +rm -rf sqlite-amalgamation-3360000 diff --git a/packages/sqlite3/3.36.0/environment b/packages/sqlite3/3.36.0/environment new file mode 100644 index 0000000..50242cc --- /dev/null 
+++ b/packages/sqlite3/3.36.0/environment @@ -0,0 +1,2 @@ +#!/bin/bash +export PATH=$PWD:$PATH diff --git a/packages/sqlite3/3.36.0/metadata.json b/packages/sqlite3/3.36.0/metadata.json new file mode 100644 index 0000000..d531aaf --- /dev/null +++ b/packages/sqlite3/3.36.0/metadata.json @@ -0,0 +1,5 @@ +{ + "language": "sqlite3", + "version": "3.36.0", + "aliases": ["sqlite", "sql"] +} diff --git a/packages/sqlite3/3.36.0/run b/packages/sqlite3/3.36.0/run new file mode 100644 index 0000000..8484f3d --- /dev/null +++ b/packages/sqlite3/3.36.0/run @@ -0,0 +1,3 @@ +#!/bin/bash + +sqlite3 < "$1" diff --git a/packages/sqlite3/3.36.0/test.sql b/packages/sqlite3/3.36.0/test.sql new file mode 100644 index 0000000..3a3c57b --- /dev/null +++ b/packages/sqlite3/3.36.0/test.sql @@ -0,0 +1 @@ +SELECT 'OK'; diff --git a/readme.md b/readme.md index a154fe6..95fdb07 100644 --- a/readme.md +++ b/readme.md @@ -42,10 +42,10 @@
# Notes About Hacktoberfest - + While we are accepting pull requests for Hacktoberfest, we will reject any low-quality PRs. If we see PR abuse for Hacktoberfest, we will stop providing Hacktoberfest approval for pull requests. - + We are accepting PRs for: * Packages - updating package versions, adding new packages * Documentation updates @@ -374,6 +374,7 @@ Content-Type: application/json `ruby`, `rust`, `scala`, +`sqlite3`, `swift`, `typescript`, `basic`, From 7313958155b3a0e136b5a5df0946d3e78dfca081 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Fri, 1 Oct 2021 20:28:54 +1300 Subject: [PATCH 34/35] api: maximum concurrent jobs and potential fix for gcc --- api/src/config.js | 7 +++++++ api/src/job.js | 38 +++++++++++++++++++++++++++++++------- docs/configuration.md | 9 +++++++++ 3 files changed, 47 insertions(+), 7 deletions(-) diff --git a/api/src/config.js b/api/src/config.js index 84270aa..bbd7ae9 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -114,6 +114,13 @@ const options = [ 'https://github.com/engineer-man/piston/releases/download/pkgs/index', validators: [], }, + { + key: 'max_concurrent_jobs', + desc: 'Maximum number of concurrent jobs to run at one time', + default: 64, + parser: parse_int, + validators: [(x) => x > 0 || `${x} cannot be negative`] + } ]; logger.info(`Loading Configuration from environment`); diff --git a/api/src/job.js b/api/src/job.js index 683cda6..712dcd8 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -16,6 +16,19 @@ const job_states = { let uid = 0; let gid = 0; +let remainingJobSpaces = config.max_concurrent_jobs; +let jobQueue = []; + + +setInterval(()=>{ + // Every 10ms try resolve a new job, if there is an available slot + if(jobQueue.length > 0 && remainingJobSpaces > 0){ + jobQueue.shift()() + } +}, 10) + + + class Job { constructor({ runtime, files, args, stdin, timeouts, memory_limits }) { this.uuid = uuidv4(); @@ -48,8 +61,15 @@ class Job { } async prime() { - logger.info(`Priming job uuid=${this.uuid}`); + if(remainingJobSpaces < 1){ + logger.info(`Awaiting job slot uuid=${this.uuid}`) + await new Promise((resolve)=>{ + jobQueue.push(resolve) + }) + } + logger.info(`Priming job uuid=${this.uuid}`); + remainingJobSpaces--; logger.debug('Writing files to job cache'); logger.debug(`Transfering ownership uid=${this.uid} gid=${this.gid}`); @@ -152,21 +172,23 @@ class Job { } }); - const exit_cleanup = () => { + const exit_cleanup = async () => { clear_timeout(kill_timeout); proc.stderr.destroy(); proc.stdout.destroy(); + + await this.cleanup_processes() }; - proc.on('exit', (code, signal) => { - exit_cleanup(); + proc.on('exit', async (code, signal) => { + await exit_cleanup(); resolve({stdout, stderr, code, signal, output }); }); - proc.on('error', err => { - exit_cleanup(); + proc.on('error', async err => { + await exit_cleanup(); reject({ error: err, stdout, stderr, output }); }); @@ -339,11 +361,13 @@ class Job { async cleanup() { logger.info(`Cleaning up job uuid=${this.uuid}`); - await this.cleanup_processes(); await this.cleanup_filesystem(); + + remainingJobSpaces++; } } + module.exports = { Job, }; diff --git a/docs/configuration.md b/docs/configuration.md index 1388e9d..16a5df0 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -145,3 +145,12 @@ default: https://github.com/engineer-man/piston/releases/download/pkgs/index ``` URL for repository index, where packages will be downloaded from. 
+ +## Maximum Concurrent Jobs + +```yaml +key: PISTON_MAX_CONCURRENT_JOBS +default: 64 +``` + +Maximum number of jobs to run concurrently. From 1b6563d1817679ff487c6007c3745b9d463fd449 Mon Sep 17 00:00:00 2001 From: Thomas Hobson Date: Sat, 2 Oct 2021 00:07:37 +1300 Subject: [PATCH 35/35] rework process janitor Old process janitor required starting a `ps` process. This was problematic, as `ps` requires another entry in the process table, which in some cases was impossible as it was exhausted. --- api/src/job.js | 74 ++++++++++++++++++++++++++++++++------------------ 1 file changed, 47 insertions(+), 27 deletions(-) diff --git a/api/src/job.js b/api/src/job.js index 712dcd8..ecc4ab3 100644 --- a/api/src/job.js +++ b/api/src/job.js @@ -146,26 +146,31 @@ class Job { const kill_timeout = set_timeout( - _ => proc.kill('SIGKILL'), + async _ => { + logger.info(`Timeout exceeded timeout=${timeout} uuid=${this.uuid}`) + process.kill(proc.pid, 'SIGKILL') + }, timeout ); - proc.stderr.on('data', data => { + proc.stderr.on('data', async data => { if(eventBus !== null) { eventBus.emit("stderr", data); } else if (stderr.length > config.output_max_size) { - proc.kill('SIGKILL'); + logger.info(`stderr length exceeded uuid=${this.uuid}`) + process.kill(proc.pid, 'SIGKILL') } else { stderr += data; output += data; } }); - proc.stdout.on('data', data => { + proc.stdout.on('data', async data => { if(eventBus !== null){ eventBus.emit("stdout", data); } else if (stdout.length > config.output_max_size) { - proc.kill('SIGKILL'); + logger.info(`stdout length exceeded uuid=${this.uuid}`) + process.kill(proc.pid, 'SIGKILL') } else { stdout += data; output += data; @@ -179,6 +184,7 @@ class Job { proc.stdout.destroy(); await this.cleanup_processes() + logger.debug(`Finished exit cleanup uuid=${this.uuid}`) }; proc.on('exit', async (code, signal) => { @@ -284,36 +290,47 @@ class Job { this.state = job_states.EXECUTED; } - async cleanup_processes() { + async cleanup_processes(dont_wait = []) { let processes = [1]; + logger.debug(`Cleaning up processes uuid=${this.uuid}`) while (processes.length > 0) { - processes = await new Promise((resolve, reject) => - cp.execFile('ps', ['awwxo', 'pid,ruid'], (err, stdout) => { - if (err === null) { - const lines = stdout.split('\n').slice(1); //Remove header with slice - const procs = lines.map(line => { - const [pid, ruid] = line - .trim() - .split(/\s+/) - .map(n => parseInt(n)); + processes = [] - return { pid, ruid }; - }); - resolve(procs); - } else { - reject(error); - } - }) - ); + const proc_ids = await fs.readdir("/proc"); + + + processes = await Promise.all(proc_ids.map(async (proc_id) => { + if(isNaN(proc_id)) return -1; + try{ + const proc_status = await fs.read_file(path.join("/proc",proc_id,"status")); + const proc_lines = proc_status.to_string().split("\n") + const uid_line = proc_lines.find(line=>line.starts_with("Uid:")) + const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/); + + + if(ruid == this.uid || euid == this.uid) + return parse_int(proc_id) + + }catch{ + return -1 + } + + return -1 + })) + + processes = processes.filter(p => p > 0) + + if(processes.length > 0) + logger.debug(`Got processes to kill: ${processes} uuid=${this.uuid}`) + - processes = processes.filter(proc => proc.ruid === this.uid); for (const proc of processes) { // First stop the processes, but keep their resources allocated so they cant re-fork try { - process.kill(proc.pid, 'SIGSTOP'); + process.kill(proc, 'SIGSTOP'); } catch { // Could already be dead } @@ -322,14 +339,17 @@ class 
Job { for (const proc of processes) { // Then clear them out of the process tree try { - process.kill(proc.pid, 'SIGKILL'); + process.kill(proc, 'SIGKILL'); } catch { // Could already be dead and just needs to be waited on } - wait_pid(proc.pid); + if(!dont_wait.includes(proc)) + wait_pid(proc); } } + + logger.debug(`Cleaned up processes uuid=${this.uuid}`) } async cleanup_filesystem() {