Compare commits


No commits in common. "230cb3abe1e5b57715bd60f6bd4760a7ca103bad" and "de89acb617470beffff6be2a3343302a51183ce7" have entirely different histories.

7 changed files with 34 additions and 205 deletions

api/package.json

@@ -1,6 +1,6 @@
 {
     "name": "piston-api",
-    "version": "3.1.0",
+    "version": "3.0.0",
     "description": "API for piston - a high performance code execution engine",
     "main": "src/index.js",
     "dependencies": {

api/src/api/v2.js

@@ -9,8 +9,6 @@ const { Job } = require('../job');
 const package = require('../package');
 const logger = require('logplease').create('api/v2');
-const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
-// ref: https://man7.org/linux/man-pages/man7/signal.7.html
 
 function get_job(body){
     const {
@@ -150,10 +148,9 @@ router.ws('/connect', async (ws, req) => {
         try{
             const msg = JSON.parse(data);
 
-            switch(msg.type){
-            case "init":
+            if(msg.type === "init"){
                 if(job === null){
-                    job = await get_job(msg);
+                    const job = await get_job(msg);
                     await job.prime();
@@ -170,8 +167,8 @@ router.ws('/connect', async (ws, req) => {
                 }else{
                     ws.close(4000, "Already Initialized");
                 }
-                break;
-            case "data":
+            }else if(msg.type === "data"){
                 if(job !== null){
                     if(msg.stream === "stdin"){
                         eventBus.emit("stdin", msg.data)
@@ -181,20 +178,7 @@ router.ws('/connect', async (ws, req) => {
                 }else{
                     ws.close(4003, "Not yet initialized")
                 }
-                break;
-            case "signal":
-                if(job !== null){
-                    if(SIGNALS.includes(msg.signal)){
-                        eventBus.emit("signal", msg.signal)
-                    }else{
-                        ws.close(4005, "Invalid signal")
-                    }
-                }else{
-                    ws.close(4003, "Not yet initialized")
-                }
-                break;
-            }
+            }
         }catch(error){
             ws.send(JSON.stringify({type: "error", message: error.message}))
             ws.close(4002, "Notified Error")
@@ -210,8 +194,8 @@ router.ws('/connect', async (ws, req) => {
     setTimeout(()=>{
         //Terminate the socket after 1 second, if not initialized.
-        if(job === null)
-            ws.close(4001, "Initialization Timeout");
+        //if(job === null)
+        //    ws.close(4001, "Initialization Timeout");
     }, 1000)
 })
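For context: the left-hand side of this file implements the interactive WebSocket endpoint that the old CLI (removed further down in this compare) talks to. Below is a minimal client sketch built only from the message types visible in this diff (init, data, signal); the ws package, the localhost:2000 address and the runtime values are assumptions for illustration, not part of this change.

// Minimal sketch of a client for /api/v2/connect, based on the message
// types shown in this diff. Host, port and runtime are placeholders.
const WebSocket = require('ws');

const ws = new WebSocket('ws://localhost:2000/api/v2/connect');

ws.on('open', () => {
    // "init" primes a job; the field names mirror the request the old CLI built.
    ws.send(JSON.stringify({
        type: 'init',
        language: 'python',                       // placeholder runtime
        version: '*',                             // placeholder version selector
        files: [{ name: 'main.py', content: 'print(input()[::-1])' }],
    }));

    // "data" frames forward local stdin to the remote process.
    process.stdin.on('data', data => {
        ws.send(JSON.stringify({ type: 'data', stream: 'stdin', data: data.toString() }));
    });
});

// "signal" frames match the handler that the right-hand side of this diff drops.
process.on('SIGINT', () => ws.send(JSON.stringify({ type: 'signal', signal: 'SIGINT' })));

ws.on('message', raw => {
    const msg = JSON.parse(raw);
    if (msg.type === 'data') process.stdout.write(msg.data);  // stdout/stderr stream
    else console.error(msg);                                  // runtime/stage/exit/error
});

ws.on('close', (code, reason) => {
    console.error(`closed ${code}: ${reason}`);
    process.exit();
});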

api/src/job.js

@@ -110,36 +110,28 @@ class Job {
             eventBus.on("stdin", (data) => {
                 proc.stdin.write(data);
             })
-
-            eventBus.on("kill", (signal) => {
-                proc.kill(signal)
-            })
         }
 
         const kill_timeout = set_timeout(
             _ => proc.kill('SIGKILL'),
             timeout
         );
 
         proc.stderr.on('data', data => {
-            if(eventBus !== null) {
-                eventBus.emit("stderr", data);
-            } else if (stderr.length > config.output_max_size) {
+            if (stderr.length > config.output_max_size) {
                 proc.kill('SIGKILL');
             } else {
+                if(eventBus !== null) eventBus.emit("stderr", data);
                 stderr += data;
                 output += data;
             }
         });
 
         proc.stdout.on('data', data => {
-            if(eventBus !== null){
-                eventBus.emit("stdout", data);
-            } else if (stdout.length > config.output_max_size) {
+            if (stdout.length > config.output_max_size) {
                 proc.kill('SIGKILL');
             } else {
+                if(eventBus !== null) eventBus.emit("stdout", data);
                 stdout += data;
                 output += data;
             }
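The two sides of this hunk treat process output differently: on the left, an attached eventBus bypasses both the output_max_size cap and the stdout/stderr buffers; on the right, the cap always applies and data is mirrored to the bus in addition to being buffered. A generic sketch of the right-hand pattern follows; the names collect and limit and the throwaway child process are illustrative, not taken from this change.

// Sketch of the capped collect-and-mirror pattern used on the right-hand side.
// `limit` stands in for config.output_max_size, the emitter for the job's eventBus.
const { EventEmitter } = require('events');
const { spawn } = require('child_process');

function collect(stream, name, limit, emitter, on_overflow) {
    let buffered = '';
    stream.on('data', data => {
        if (buffered.length > limit) {
            on_overflow();                                    // e.g. proc.kill('SIGKILL')
        } else {
            if (emitter !== null) emitter.emit(name, data);   // live mirror to listeners
            buffered += data;                                 // still kept for the final result
        }
    });
    return () => buffered;
}

// Usage: cap the stream at 1 KiB while also streaming it live (assumes a POSIX echo).
const proc = spawn('echo', ['hello piston']);
const bus = new EventEmitter();
bus.on('stdout', chunk => process.stdout.write(chunk));
const get_stdout = collect(proc.stdout, 'stdout', 1024, bus, () => proc.kill('SIGKILL'));
proc.on('close', () => console.error('collected:', JSON.stringify(get_stdout())));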

cli/commands/execute.js

@@ -1,10 +1,7 @@
-//const fetch = require('node-fetch');
 const fs = require('fs');
 const path = require('path');
 const chalk = require('chalk');
-const WebSocket = require('ws');
-const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
 
 exports.command = ['execute <language> <file> [args..]'];
 exports.aliases = ['run'];
@@ -38,115 +35,17 @@ exports.builder = {
         alias: ['f'],
         array: true,
         desc: 'Additional files to add',
-    },
-    interactive: {
-        boolean: true,
-        alias: ['t'],
-        desc: 'Run interactively using WebSocket transport'
-    },
-    status: {
-        boolean: true,
-        alias: ['s'],
-        desc: 'Output additional status to stderr'
     }
 };
-
-async function handle_interactive(files, argv){
-    const ws = new WebSocket(argv.pistonUrl.replace("http", "ws") + "/api/v2/connect")
-
-    const log_message = (process.stderr.isTTY && argv.status) ? console.error : ()=>{};
-
-    process.on("exit", ()=>{
-        ws.close();
-        process.stdin.end();
-        process.stdin.destroy();
-        process.exit();
-    })
-
-    for(const signal of SIGNALS){
-        process.on(signal, ()=>{
-            ws.send(JSON.stringify({type: 'signal', signal}))
-        })
-    }
-
-    ws.on('open', ()=>{
-        const request = {
-            type: "init",
-            language: argv.language,
-            version: argv['language_version'],
-            files: files,
-            args: argv.args,
-            compile_timeout: argv.ct,
-            run_timeout: argv.rt
-        }
-
-        ws.send(JSON.stringify(request))
-        log_message(chalk.white.bold("Connected"))
-
-        process.stdin.resume();
-
-        process.stdin.on("data", (data) => {
-            ws.send(JSON.stringify({
-                type: "data",
-                stream: "stdin",
-                data: data.toString()
-            }))
-        })
-    })
-
-    ws.on("close", (code, reason)=>{
-        log_message(
-            chalk.white.bold("Disconnected: "),
-            chalk.white.bold("Reason: "),
-            chalk.yellow(`"${reason}"`),
-            chalk.white.bold("Code: "),
-            chalk.yellow(`"${code}"`),
-        )
-        process.stdin.pause()
-    })
-
-    ws.on('message', function(data){
-        const msg = JSON.parse(data);
-
-        switch(msg.type){
-            case "runtime":
-                log_message(chalk.bold.white("Runtime:"), chalk.yellow(`${msg.language} ${msg.version}`))
-                break;
-            case "stage":
-                log_message(chalk.bold.white("Stage:"), chalk.yellow(msg.stage))
-                break;
-            case "data":
-                if(msg.stream == "stdout") process.stdout.write(msg.data)
-                else if(msg.stream == "stderr") process.stderr.write(msg.data)
-                else log_message(chalk.bold.red(`(${msg.stream}) `), msg.data)
-                break;
-            case "exit":
-                if(msg.signal === null)
-                    log_message(
-                        chalk.white.bold("Stage"),
-                        chalk.yellow(msg.stage),
-                        chalk.white.bold("exited with code"),
-                        chalk.yellow(msg.code)
-                    )
-                else
-                    log_message(
-                        chalk.white.bold("Stage"),
-                        chalk.yellow(msg.stage),
-                        chalk.white.bold("exited with signal"),
-                        chalk.yellow(msg.signal)
-                    )
-                break;
-            default:
-                log_message(chalk.red.bold("Unknown message:"), msg)
-        }
-    })
-}
-
-async function run_non_interactively(files, argv) {
+
+exports.handler = async (argv) => {
+    const files = [...(argv.files || []),argv.file]
+        .map(file_path => {
+            return {
+                name: path.basename(file_path),
+                content: fs.readFileSync(file_path).toString()
+            };
+        });
+
     const stdin = (argv.stdin && await new Promise((resolve, _) => {
         let data = '';
@@ -200,18 +99,3 @@ async function run_non_interactively(files, argv) {
     step('Run', response.run);
 }
-
-exports.handler = async (argv) => {
-    const files = [...(argv.files || []),argv.file]
-        .map(file_path => {
-            return {
-                name: path.basename(file_path),
-                content: fs.readFileSync(file_path).toString()
-            };
-        });
-
-    if(argv.interactive) await handle_interactive(files, argv);
-    else await run_non_interactively(files, argv);
-}
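With interactive mode gone, exports.handler only maps the file arguments and falls through to run_non_interactively, whose body is not shown in this hunk. A rough sketch of that non-interactive flow, assuming Piston's documented /api/v2/execute endpoint, a local API on port 2000 and a global fetch (Node 18+); none of these appear in this diff.

// Sketch of the non-interactive path: read the local files, POST them to the
// engine, return the parsed result. Endpoint, port and global fetch are assumptions.
const fs = require('fs');
const path = require('path');

async function execute(api_url, language, version, file_paths, args = []) {
    const files = file_paths.map(file_path => ({
        name: path.basename(file_path),                  // same mapping as exports.handler
        content: fs.readFileSync(file_path).toString(),
    }));

    const response = await fetch(`${api_url}/api/v2/execute`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ language, version, files, args }),
    });
    return response.json();                              // compile (optional) and run stages
}

// Example: execute('http://localhost:2000', 'python', '*', ['main.py'])
//     .then(result => console.log(result.run.output));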

cli/package-lock.json (generated)

@@ -14,7 +14,6 @@
         "minimatch": "^3.0.4",
         "nocamel": "^1.0.2",
         "semver": "^7.3.5",
-        "ws": "^7.5.3",
         "yargs": "^16.2.0"
       }
     },
@@ -244,26 +243,6 @@
         "node": ">=10"
       }
     },
-    "node_modules/ws": {
-      "version": "7.5.3",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
-      "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
-      "engines": {
-        "node": ">=8.3.0"
-      },
-      "peerDependencies": {
-        "bufferutil": "^4.0.1",
-        "utf-8-validate": "^5.0.2"
-      },
-      "peerDependenciesMeta": {
-        "bufferutil": {
-          "optional": true
-        },
-        "utf-8-validate": {
-          "optional": true
-        }
-      }
-    },
     "node_modules/y18n": {
       "version": "5.0.5",
       "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",
@@ -476,12 +455,6 @@
         "strip-ansi": "^6.0.0"
       }
     },
-    "ws": {
-      "version": "7.5.3",
-      "resolved": "https://registry.npmjs.org/ws/-/ws-7.5.3.tgz",
-      "integrity": "sha512-kQ/dHIzuLrS6Je9+uv81ueZomEwH0qVYstcAQ4/Z93K8zeko9gtAbttJWzoC5ukqXY1PpoouV3+VSOqEAFt5wg==",
-      "requires": {}
-    },
     "y18n": {
       "version": "5.0.5",
       "resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.5.tgz",

cli/package.json

@@ -1,6 +1,6 @@
 {
     "name": "piston-cli",
-    "version": "1.1.0",
+    "version": "1.0.0",
     "description": "Piston Execution Engine CLI tools",
     "main": "index.js",
     "license": "MIT",
@@ -10,7 +10,6 @@
         "minimatch": "^3.0.4",
         "nocamel": "^1.0.2",
         "semver": "^7.3.5",
-        "ws": "^7.5.3",
         "yargs": "^16.2.0"
     }
 }

piston

@@ -35,12 +35,11 @@ case $1 in
            echo "  clean-pkgs                      Clean any package build artifacts on disk"
            echo "  clean-repo                      Remove all packages from local repo"
            echo "  build-pkg <package> <version>   Build a package"
-            echo "  rebuild                         Build and restart the docker container"
         else
            echo "  Switch to developement environment for more info"
-            echo "  > piston select dev"
+            echo "  > piston switch dev"
         fi
         ;;
@@ -53,8 +52,6 @@ case $1 in
     start) docker_compose up -d ;;
     stop) docker_compose down ;;
-    rebuild) docker_compose build && docker_compose up -d ;;
     update)
         git pull
         docker_compose pull