Mirror of https://github.com/engineer-man/piston.git (synced 2025-04-23 05:26:28 +02:00)
BREAKING: replace custom build scripts with nix
General:
- Switched to yarn to work better with nix-based tooling
- Switched the package system to use nix. This stops double dependencies and slow cloud compile times, while providing more compile/runtime support to the Nix project
- Removed the container builder in favor of internal container tooling
- Package versions no longer need to be SemVer compliant
- Removed "piston package spec" files, replaced with nix-flake based runtimes
- Exported nosocket and piston-api as packages within the nix-flake
- Removed the repo container
- Switched docker building to nix-based container output
- Removed docker compose, as this is a single container
- Removed package commands from the CLI

Packages:
- Moved bash, clojure, cobol, node, python2, python3 to the new format
- The remainder of the packages still need to be moved

v2 API:
- Removed the "version" specifier. To select specific versions, use the v3 API
- Removed the "/packages" endpoints, as they don't work with the new nix-based system

v3 API:
- Duplicate of the v2 API, except a runtime ID is passed in instead of a language name
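As an illustration of the v2/v3 difference described above, a minimal execute request under each API might look as follows (the field names come from the route handlers in this commit; the language value and the runtime_id value are hypothetical, and runtime IDs can be listed via GET /api/v3/runtimes, which now also returns an id field):

POST /api/v2/execute
{ "language": "python3", "files": [{ "content": "print(1)" }] }

POST /api/v3/execute
{ "runtime_id": 0, "files": [{ "content": "print(1)" }] }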
This commit is contained in:
parent 63de4850d8
commit 5bc793cd70

111 changed files with 2215 additions and 2720 deletions
@@ -1,37 +0,0 @@
FROM node:15.10.0-buster-slim

ENV DEBIAN_FRONTEND=noninteractive

RUN dpkg-reconfigure -p critical dash
RUN for i in $(seq 1001 1500); do \
    groupadd -g $i runner$i && \
    useradd -M runner$i -g $i -u $i ; \
    done
RUN apt-get update && \
    apt-get install -y libxml2 gnupg tar coreutils util-linux libc6-dev \
    binutils build-essential locales libpcre3-dev libevent-dev libgmp3-dev \
    libncurses6 libncurses5 libedit-dev libseccomp-dev rename procps python3 \
    libreadline-dev libblas-dev liblapack-dev libpcre3-dev libarpack2-dev \
    libfftw3-dev libglpk-dev libqhull-dev libqrupdate-dev libsuitesparse-dev \
    libsundials-dev libpcre2-dev curl sudo && \
    rm -rf /var/lib/apt/lists/*

RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen

RUN mkdir -m 0755 /nix && chown node /nix && touch /nix/piston_detected
RUN runuser -l node -c 'curl -L https://nixos.org/nix/install | sh -s -- --no-daemon '
RUN runuser -l node -c 'source ~/.profile; nix-env -iA nixpkgs.nixUnstable'
RUN runuser -l node -c 'mkdir -p /home/node/.config/nix/; echo "experimental-features = nix-command flakes" >> /home/node/.config/nix/nix.conf'
RUN cp -r /nix /var/nix

WORKDIR /piston_api
COPY ["package.json", "package-lock.json", "./"]
RUN npm install
COPY ./src ./src

RUN make -C ./src/nosocket/ all && make -C ./src/nosocket/ install

COPY ./entrypoint.sh .

CMD [ "./entrypoint.sh"]
EXPOSE 2000/tcp
28 api/default.nix Normal file
@@ -0,0 +1,28 @@
{pkgs, ...}:
with pkgs; {
    package = mkYarnPackage {
        name = "piston";
        src = ./.;

        yarnPreBuild = ''
            mkdir -p $HOME/.node-gyp/${nodejs.version}
            echo 9 > $HOME/.node-gyp/${nodejs.version}/installVersion
            ln -sfv ${nodejs}/include $HOME/.node-gyp/${nodejs.version}
            export npm_config_nodedir=${nodejs}
        '';

        pkgConfig = {
            waitpid = {
                buildInputs = [
                    gcc
                    gnumake
                    python3
                ];

                postInstall = ''
                    yarn --offline run install
                '';
            };
        };
    };
}
@@ -4,8 +4,11 @@ echo "Starting Piston API"
echo "Checking presense of nix store"
if [[ ! -f "/nix/piston_detected" ]]; then
    echo "Nix Store is not loaded, assuming /nix has been mounted - copying contents"
    cp -r /var/nix /nix
    cp -rp /var/nix/* /nix
fi

echo "Adding nix to env"
. ~/.profile

echo "Launching Piston API"
node src
node src
1068 api/package-lock.json generated
File diff suppressed because it is too large
@@ -1,26 +1,28 @@
{
    "name": "piston-api",
    "version": "3.1.0",
    "version": "4.0.0",
    "description": "API for piston - a high performance code execution engine",
    "main": "src/index.js",
    "main": "src/pistond.js",
    "dependencies": {
        "body-parser": "^1.19.0",
        "chownr": "^2.0.0",
        "express": "^4.17.1",
        "express-ws": "^5.0.2",
        "is-docker": "^2.1.1",
        "logplease": "^1.2.15",
        "nocamel": "HexF/nocamel#patch-1",
        "node-fetch": "^2.6.1",
        "semver": "^7.3.4",
        "uuid": "^8.3.2",
        "waitpid": "git+https://github.com/HexF/node-waitpid.git"
        "nocamel": "git://github.com/HexF/nocamel.git#patch-1",
        "waitpid": "git://github.com/HexF/node-waitpid.git"
    },
    "license": "MIT",
    "scripts": {
        "lint": "prettier . --write"
        "lint": "prettier . --write",
        "prepack": "yarn2nix > yarn.nix"
    },
    "devDependencies": {
        "node2nix": "^1.6.0",
        "prettier": "2.2.1"
    },
    "bin": {
        "pistond": "./src/pistond.js"
    }
}
@@ -6,8 +6,7 @@ const events = require('events');
const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const package = require('../package');
const logger = require('logplease').create('api/v2');
const logger = require('logplease').create('api/v3');

const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
// ref: https://man7.org/linux/man-pages/man7/signal.7.html

@@ -15,7 +14,6 @@ const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGE
function get_job(body){
    const {
        language,
        version,
        args,
        stdin,
        files,

@@ -32,12 +30,6 @@ function get_job(body){
        });
    }

    if (!version || typeof version !== 'string') {
        return reject({
            message: 'version is required as a string',
        });
    }

    if (!files || !Array.isArray(files)) {
        return reject({
            message: 'files is required as an array',

@@ -91,10 +83,10 @@ function get_job(body){
        }
    }

    const rt = runtime.get_latest_runtime_matching_language_version(
        language,
        version
    );
    const rt = runtime.find(rt => [
        ...rt.aliases,
        rt.language
    ].includes(rt.language))

    if (rt === undefined) {
        return reject({

@@ -245,77 +237,4 @@ router.get('/runtimes', (req, res) => {
    return res.status(200).send(runtimes);
});

router.get('/packages', async (req, res) => {
    logger.debug('Request to list packages');
    let packages = await package.get_package_list();

    packages = packages.map(pkg => {
        return {
            language: pkg.language,
            language_version: pkg.version.raw,
            installed: pkg.installed,
        };
    });

    return res.status(200).send(packages);
});

router.post('/packages', async (req, res) => {
    logger.debug('Request to install package');

    const { language, version } = req.body;

    const pkg = await package.get_package(language, version);

    if (pkg == null) {
        return res.status(404).send({
            message: `Requested package ${language}-${version} does not exist`,
        });
    }

    try {
        const response = await pkg.install();

        return res.status(200).send(response);
    } catch (e) {
        logger.error(
            `Error while installing package ${pkg.language}-${pkg.version}:`,
            e.message
        );

        return res.status(500).send({
            message: e.message,
        });
    }
});

router.delete('/packages', async (req, res) => {
    logger.debug('Request to uninstall package');

    const { language, version } = req.body;

    const pkg = await package.get_package(language, version);

    if (pkg == null) {
        return res.status(404).send({
            message: `Requested package ${language}-${version} does not exist`,
        });
    }

    try {
        const response = await pkg.uninstall();

        return res.status(200).send(response);
    } catch (e) {
        logger.error(
            `Error while uninstalling package ${pkg.language}-${pkg.version}:`,
            e.message
        );

        return res.status(500).send({
            message: e.message,
        });
    }
});

module.exports = router;
236 api/src/api/v3.js Normal file
@@ -0,0 +1,236 @@
const express = require('express');
const router = express.Router();

const events = require('events');

const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');

const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
// ref: https://man7.org/linux/man-pages/man7/signal.7.html

function get_job(body){
    const {
        runtime_id,
        args,
        stdin,
        files,
        compile_memory_limit,
        run_memory_limit,
        run_timeout,
        compile_timeout
    } = body;

    return new Promise((resolve, reject) => {
        if (typeof runtime_id !== 'number') {
            return reject({
                message: 'runtime_id is required as a number'
            });
        }

        if (!Array.isArray(files)) {
            return reject({
                message: 'files is required as an array',
            });
        }

        for (const [i, file] of files.entries()) {
            if (typeof file.content !== 'string') {
                return reject({
                    message: `files[${i}].content is required as a string`,
                });
            }
        }

        if (compile_memory_limit) {
            if (typeof compile_memory_limit !== 'number') {
                return reject({
                    message: 'if specified, compile_memory_limit must be a number',
                });
            }

            if (
                config.compile_memory_limit >= 0 &&
                (compile_memory_limit > config.compile_memory_limit ||
                    compile_memory_limit < 0)
            ) {
                return reject({
                    message:
                        'compile_memory_limit cannot exceed the configured limit of ' +
                        config.compile_memory_limit,
                });
            }
        }

        if (run_memory_limit) {
            if (typeof run_memory_limit !== 'number') {
                return reject({
                    message: 'if specified, run_memory_limit must be a number',
                });
            }

            if (
                config.run_memory_limit >= 0 &&
                (run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
            ) {
                return reject({
                    message:
                        'run_memory_limit cannot exceed the configured limit of ' +
                        config.run_memory_limit,
                });
            }
        }
        const rt = runtime[runtime_id];

        if (rt === undefined) {
            return reject({
                message: `Runtime #${runtime_id} is unknown`,
            });
        }

        resolve(new Job({
            runtime: rt,
            args: args || [],
            stdin: stdin || "",
            files,
            timeouts: {
                run: run_timeout || 3000,
                compile: compile_timeout || 10000,
            },
            memory_limits: {
                run: run_memory_limit || config.run_memory_limit,
                compile: compile_memory_limit || config.compile_memory_limit,
            }
        }));
    })

}

router.use((req, res, next) => {
    if (['GET', 'HEAD', 'OPTIONS'].includes(req.method)) {
        return next();
    }

    if (!req.headers['content-type'].startsWith('application/json')) {
        return res.status(415).send({
            message: 'requests must be of type application/json',
        });
    }

    next();
});

router.ws('/connect', async (ws, req) => {

    let job = null;
    let eventBus = new events.EventEmitter();

    eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
    eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
    eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
    eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))

    ws.on("message", async (data) => {

        try{
            const msg = JSON.parse(data);

            switch(msg.type){
                case "init":
                    if(job === null){
                        job = await get_job(msg);

                        await job.prime();

                        ws.send(JSON.stringify({
                            type: "runtime",
                            language: job.runtime.language,
                            version: job.runtime.version.raw
                        }))

                        await job.execute_interactive(eventBus);

                        ws.close(4999, "Job Completed");

                    }else{
                        ws.close(4000, "Already Initialized");
                    }
                    break;
                case "data":
                    if(job !== null){
                        if(msg.stream === "stdin"){
                            eventBus.emit("stdin", msg.data)
                        }else{
                            ws.close(4004, "Can only write to stdin")
                        }
                    }else{
                        ws.close(4003, "Not yet initialized")
                    }
                    break;
                case "signal":
                    if(job !== null){
                        if(SIGNALS.includes(msg.signal)){
                            eventBus.emit("signal", msg.signal)
                        }else{
                            ws.close(4005, "Invalid signal")
                        }
                    }else{
                        ws.close(4003, "Not yet initialized")
                    }
                    break;
            }

        }catch(error){
            ws.send(JSON.stringify({type: "error", message: error.message}))
            ws.close(4002, "Notified Error")
            // ws.close message is limited to 123 characters, so we notify over WS then close.
        }
    })

    ws.on("close", async ()=>{
        if(job !== null){
            await job.cleanup()
        }
    })

    setTimeout(()=>{
        //Terminate the socket after 1 second, if not initialized.
        if(job === null)
            ws.close(4001, "Initialization Timeout");
    }, 1000)
})

router.post('/execute', async (req, res) => {

    try{
        const job = await get_job(req.body);
        await job.prime();

        const result = await job.execute();

        await job.cleanup();

        return res.status(200).send(result);
    }catch(error){
        return res.status(400).json(error);
    }
});

router.get('/runtimes', (req, res) => {
    const runtimes = runtime.map(rt => {
        return {
            language: rt.language,
            version: rt.version.raw,
            aliases: rt.aliases,
            runtime: rt.runtime,
            id: rt.id
        };
    });

    return res.status(200).send(runtimes);
});

module.exports = router;
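For reference, a minimal interactive client for the /connect route above could look roughly like the following sketch. It assumes the ws npm package and an API reachable on port 2000 (the port the removed Dockerfile exposed); the message shapes mirror the handler in this file, while the runtime_id and the file content are hypothetical placeholders.

const WebSocket = require('ws');

const socket = new WebSocket('ws://localhost:2000/api/v3/connect');

socket.on('open', () => {
    // The first message must be an "init" payload, validated by get_job() above.
    socket.send(JSON.stringify({
        type: 'init',
        runtime_id: 0,
        files: [{ content: 'process.stdin.pipe(process.stdout);' }],
    }));
});

socket.on('message', raw => {
    const msg = JSON.parse(raw.toString());

    if (msg.type === 'runtime') {
        // The job is primed; "data" messages may only target the stdin stream.
        socket.send(JSON.stringify({ type: 'data', stream: 'stdin', data: 'hello\n' }));
    } else if (msg.type === 'data') {
        process.stdout.write(`[${msg.stream}] ${msg.data}`);
    } else if (msg.type === 'exit') {
        console.log('job exited at stage', msg.stage);
    }
});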
@@ -108,10 +108,9 @@ const options = [
        validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
    },
    {
        key: 'repo_url',
        desc: 'URL of repo index',
        default:
            'https://github.com/engineer-man/piston/releases/download/pkgs/index',
        key: 'flake_path',
        desc: 'Path to nix flake defining runtimes to install',
        default: 'github:engineer-man/piston?directory=packages',
        validators: [],
    },
    {
@@ -1,20 +1,9 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require('is-docker');
const fs = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${fs
    .read_file_sync('/etc/os-release')
    .toString()
    .split('\n')
    .find(x => x.startsWith('ID'))
    .replace('ID=', '')}`;

module.exports = {
    data_directories: {
        packages: 'packages',
        jobs: 'jobs',
    },
    version: require('../package.json').version,
    platform,
    pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
    clean_directories: ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'],
};
@@ -221,7 +221,7 @@ class Job {

    if (this.runtime.compiled) {
        compile = await this.safe_call(
            path.join(this.runtime.pkgdir, 'compile'),
            this.runtime.compile,
            this.files.map(x => x.name),
            this.timeouts.compile,
            this.memory_limits.compile

@@ -231,7 +231,7 @@ class Job {
    logger.debug('Running');

    const run = await this.safe_call(
        path.join(this.runtime.pkgdir, 'run'),
        this.runtime.run,
        [this.files[0].name, ...this.args],
        this.timeouts.run,
        this.memory_limits.run

@@ -243,7 +243,7 @@ class Job {
        compile,
        run,
        language: this.runtime.language,
        version: this.runtime.version.raw,
        version: this.runtime.version,
    };
}

@@ -264,7 +264,7 @@ class Job {
    if(this.runtime.compiled){
        eventBus.emit("stage", "compile")
        const {error, code, signal} = await this.safe_call(
            path.join(this.runtime.pkgdir, 'compile'),
            this.runtime.compile,
            this.files.map(x => x.name),
            this.timeouts.compile,
            this.memory_limits.compile,

@@ -277,7 +277,7 @@ class Job {
    logger.debug('Running');
    eventBus.emit("stage", "run")
    const {error, code, signal} = await this.safe_call(
        path.join(this.runtime.pkgdir, 'run'),
        this.runtime.run,
        [this.files[0].name, ...this.args],
        this.timeouts.run,
        this.memory_limits.run,
@@ -1,19 +0,0 @@
CC = gcc
CFLAGS = -O2 -Wall -lseccomp
TARGET = nosocket
BUILD_PATH = ./
INSTALL_PATH = /usr/local/bin/
SOURCE = nosocket.c

all: $(TARGET)

$(TARGET): $(SOURCE)
	$(CC) $(BUILD_PATH)$(SOURCE) $(CFLAGS) -o $(TARGET)

install:
	mv $(TARGET) $(INSTALL_PATH)

clean:
	$(RM) $(TARGET)
	$(RM) $(INSTALL_PATH)$(TARGET)
@@ -1,62 +0,0 @@
/*
nosocket.c

Disables access to the `socket` syscall and runs a program provided as the first
commandline argument.
*/
#include <stdio.h>
#include <errno.h>
#include <unistd.h>
#include <sys/prctl.h>
#include <seccomp.h>

int main(int argc, char *argv[])
{
    // Disallow any new capabilities from being added
    prctl(PR_SET_NO_NEW_PRIVS, 1, 0, 0, 0);

    // SCMP_ACT_ALLOW lets the filter have no effect on syscalls not matching a
    // configured filter rule (allow all by default)
    scmp_filter_ctx ctx = seccomp_init(SCMP_ACT_ALLOW);
    if (!ctx)
    {
        fprintf(stderr, "Unable to initialize seccomp filter context\n");
        return 1;
    }

    // Add 32 bit and 64 bit architectures to seccomp filter
    int rc;
    uint32_t arch[] = {SCMP_ARCH_X86_64, SCMP_ARCH_X86, SCMP_ARCH_X32};
    // We first remove the existing arch, otherwise our subsequent call to add
    // it will fail
    seccomp_arch_remove(ctx, seccomp_arch_native());
    for (int i = 0; i < sizeof(arch) / sizeof(arch[0]); i++)
    {
        rc = seccomp_arch_add(ctx, arch[i]);
        if (rc != 0)
        {
            fprintf(stderr, "Unable to add arch: %d\n", arch[i]);
            return 1;
        }
    }

    // Add a seccomp rule to the syscall blacklist - blacklist the socket syscall
    if (seccomp_rule_add(ctx, SCMP_ACT_ERRNO(EACCES), SCMP_SYS(socket), 0) < 0)
    {
        fprintf(stderr, "Unable to add seccomp rule to context\n");
        return 1;
    }

#ifdef DEBUG
    seccomp_export_pfc(ctx, 0);
#endif

    if (argc < 2)
    {
        fprintf(stderr, "Usage %s: %s <program name> <arguments>\n", argv[0], argv[0]);
        return 1;
    }
    seccomp_load(ctx);
    execvp(argv[1], argv + 1);
    return 1;
}
@@ -1,227 +0,0 @@
const logger = require('logplease').create('package');
const semver = require('semver');
const config = require('./config');
const globals = require('./globals');
const fetch = require('node-fetch');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const cp = require('child_process');
const crypto = require('crypto');
const runtime = require('./runtime');
const chownr = require('chownr');
const util = require('util');

class Package {
    constructor({ language, version, download, checksum }) {
        this.language = language;
        this.version = semver.parse(version);
        this.checksum = checksum;
        this.download = download;
    }

    get installed() {
        return fss.exists_sync(
            path.join(this.install_path, globals.pkg_installed_file)
        );
    }

    get install_path() {
        return path.join(
            config.data_directory,
            globals.data_directories.packages,
            this.language,
            this.version.raw
        );
    }

    async install() {
        if (this.installed) {
            throw new Error('Already installed');
        }

        logger.info(`Installing ${this.language}-${this.version.raw}`);

        if (fss.exists_sync(this.install_path)) {
            logger.warn(
                `${this.language}-${this.version.raw} has residual files. Removing them.`
            );
            await fs.rm(this.install_path, { recursive: true, force: true });
        }

        logger.debug(`Making directory ${this.install_path}`);
        await fs.mkdir(this.install_path, { recursive: true });

        logger.debug(
            `Downloading package from ${this.download} in to ${this.install_path}`
        );
        const pkgpath = path.join(this.install_path, 'pkg.tar.gz');
        const download = await fetch(this.download);

        const file_stream = fss.create_write_stream(pkgpath);
        await new Promise((resolve, reject) => {
            download.body.pipe(file_stream);
            download.body.on('error', reject);

            file_stream.on('finish', resolve);
        });

        logger.debug('Validating checksums');
        logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`);
        const hash = crypto.create_hash('sha256');

        const read_stream = fss.create_read_stream(pkgpath);
        await new Promise((resolve, reject) => {
            read_stream.on('data', chunk => hash.update(chunk));
            read_stream.on('end', () => resolve());
            read_stream.on('error', error => reject(error))
        });

        const cs = hash.digest('hex');

        if (cs !== this.checksum) {
            throw new Error(`Checksum miss-match want: ${val} got: ${cs}`);
        }

        logger.debug(
            `Extracting package files from archive ${pkgpath} in to ${this.install_path}`
        );

        await new Promise((resolve, reject) => {
            const proc = cp.exec(
                `bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`
            );

            proc.once('exit', (code, _) => {
                code === 0 ? resolve() : reject();
            });

            proc.stdout.pipe(process.stdout);
            proc.stderr.pipe(process.stderr);

            proc.once('error', reject);
        });

        logger.debug('Registering runtime');
        runtime.load_package(this.install_path);

        logger.debug('Caching environment');
        const get_env_command = `cd ${this.install_path}; source environment; env`;

        const envout = await new Promise((resolve, reject) => {
            let stdout = '';

            const proc = cp.spawn(
                'env',
                ['-i', 'bash', '-c', `${get_env_command}`],
                {
                    stdio: ['ignore', 'pipe', 'pipe'],
                }
            );

            proc.once('exit', (code, _) => {
                code === 0 ? resolve(stdout) : reject();
            });

            proc.stdout.on('data', data => {
                stdout += data;
            });

            proc.once('error', reject);
        });

        const filtered_env = envout
            .split('\n')
            .filter(
                l =>
                    !['PWD', 'OLDPWD', '_', 'SHLVL'].includes(
                        l.split('=', 2)[0]
                    )
            )
            .join('\n');

        await fs.write_file(path.join(this.install_path, '.env'), filtered_env);

        logger.debug('Changing Ownership of package directory');
        await util.promisify(chownr)(this.install_path, 0, 0);

        logger.debug('Writing installed state to disk');
        await fs.write_file(
            path.join(this.install_path, globals.pkg_installed_file),
            Date.now().toString()
        );

        logger.info(`Installed ${this.language}-${this.version.raw}`);

        return {
            language: this.language,
            version: this.version.raw,
        };
    }

    async uninstall() {
        logger.info(`Uninstalling ${this.language}-${this.version.raw}`);

        logger.debug('Finding runtime');
        const found_runtime = runtime.get_runtime_by_name_and_version(
            this.language,
            this.version.raw
        );

        if (!found_runtime) {
            logger.error(
                `Uninstalling ${this.language}-${this.version.raw} failed: Not installed`
            );
            throw new Error(
                `${this.language}-${this.version.raw} is not installed`
            );
        }

        logger.debug('Unregistering runtime');
        found_runtime.unregister();

        logger.debug('Cleaning files from disk');
        await fs.rmdir(this.install_path, { recursive: true });

        logger.info(`Uninstalled ${this.language}-${this.version.raw}`);

        return {
            language: this.language,
            version: this.version.raw,
        };
    }

    static async get_package_list() {
        const repo_content = await fetch(config.repo_url).then(x => x.text());

        const entries = repo_content.split('\n').filter(x => x.length > 0);

        return entries.map(line => {
            const [language, version, checksum, download] = line.split(',', 4);

            return new Package({
                language,
                version,
                checksum,
                download,
            });
        });
    }

    static async get_package(lang, version) {
        const packages = await Package.get_package_list();

        const candidates = packages.filter(pkg => {
            return (
                pkg.language == lang && semver.satisfies(pkg.version, version)
            );
        });

        candidates.sort((a, b) => semver.rcompare(a.version, b.version));

        return candidates[0] || null;
    }
}

module.exports = Package;
29 api/src/index.js → api/src/pistond.js Normal file → Executable file
@@ -5,6 +5,7 @@ const express = require('express');
const expressWs = require('express-ws');
const globals = require('./globals');
const config = require('./config');
const cp = require('child_process');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');

@@ -39,28 +40,11 @@ expressWs(app);
    });

    logger.info('Loading packages');
    const pkgdir = path.join(
        config.data_directory,
        globals.data_directories.packages
    );

    const pkglist = await fs.readdir(pkgdir);

    const languages = await Promise.all(
        pkglist.map(lang => {
            return fs.readdir(path.join(pkgdir, lang)).then(x => {
                return x.map(y => path.join(pkgdir, lang, y));
            });
        })
    );

    const installed_languages = languages
        .flat()
        .filter(pkg =>
            fss.exists_sync(path.join(pkg, globals.pkg_installed_file))
        );

    installed_languages.for_each(pkg => runtime.load_package(pkg));
    const runtimes_data = cp.execSync(`nix eval --json ${config.flake_path}#pistonRuntimes --apply builtins.attrNames`).toString();
    const runtimes = JSON.parse(runtimes_data);

    runtimes.for_each(pkg => runtime.load_runtime(pkg));

    logger.info('Starting API Server');
    logger.debug('Constructing Express App');

@@ -78,8 +62,9 @@ expressWs(app);
    logger.debug('Registering Routes');

    const api_v2 = require('./api/v2');
    const api_v3 = require('./api/v3');
    app.use('/api/v2', api_v2);
    app.use('/api/v2', api_v2);
    app.use('/api/v3', api_v3);

    app.use((req, res, next) => {
        return res.status(404).send({ message: 'Not Found' });
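For context, the nix eval call above asks the flake for the attribute names under pistonRuntimes (via --apply builtins.attrNames) and prints them as a JSON array, which is then parsed and handed to runtime.load_runtime one key at a time. With the packages ported in this commit, the output would look roughly like the array below; the exact keys depend on the flake referenced by flake_path.

["bash", "clojure", "cobol", "node", "python2", "python3"]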
@@ -1,5 +1,5 @@
const logger = require('logplease').create('runtime');
const semver = require('semver');
const cp = require('child_process');
const config = require('./config');
const globals = require('./globals');
const fss = require('fs');

@@ -7,119 +7,72 @@ const path = require('path');

const runtimes = [];


class Runtime {
    constructor({ language, version, aliases, pkgdir, runtime }) {
    constructor({ language, version, aliases, runtime, run, compile, packageSupport, flake_key }) {
        this.language = language;
        this.version = version;
        this.aliases = aliases || [];
        this.pkgdir = pkgdir;
        this.runtime = runtime;
        this.aliases = aliases;
        this.version = version;

        this.run = run;
        this.compile = compile;

        this.flake_key = flake_key;
        this.package_support = packageSupport;
    }

    static load_package(package_dir) {
        let info = JSON.parse(
            fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
        );
    ensure_built(){
        logger.info(`Ensuring ${this} is built`);

        let { language, version, build_platform, aliases, provides } = info;
        version = semver.parse(version);
        const flake_key = this.flake_key;

        if (build_platform !== globals.platform) {
            logger.warn(
                `Package ${language}-${version} was built for platform ${build_platform}, ` +
                    `but our platform is ${globals.platform}`
            );
        function _ensure_built(key){
            const command = `nix build ${config.flake_path}#pistonRuntimes.${flake_key}.metadata.${key} --no-link`;
            cp.execSync(command, {stdio: "pipe"})
        }

        if (provides) {
            // Multiple languages in 1 package
            provides.forEach(lang => {
                runtimes.push(
                    new Runtime({
                        language: lang.language,
                        aliases: lang.aliases,
                        version,
                        pkgdir: package_dir,
                        runtime: language,
                    })
                );
            });
        } else {
            runtimes.push(
                new Runtime({
                    language,
                    version,
                    aliases,
                    pkgdir: package_dir,
                })
            );
        }
        _ensure_built("run");
        if(this.compiled) _ensure_built("compile");

        logger.debug(`Finished ensuring ${this} is installed`)

    }

    static load_runtime(flake_key){
        logger.info(`Loading ${flake_key}`)
        const metadata_command = `nix eval --json ${config.flake_path}#pistonRuntimes.${flake_key}.metadata`;
        const metadata = JSON.parse(cp.execSync(metadata_command));

        const this_runtime = new Runtime({
            ...metadata,
            flake_key
        });

        this_runtime.ensure_built();

        runtimes.push(this_runtime);


        logger.debug(`Package ${flake_key} was loaded`);

        logger.debug(`Package ${language}-${version} was loaded`);
    }

    get compiled() {
        if (this._compiled === undefined) {
            this._compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
        }

        return this._compiled;
        return this.compile !== null;
    }

    get env_vars() {
        if (!this._env_vars) {
            const env_file = path.join(this.pkgdir, '.env');
            const env_content = fss.read_file_sync(env_file).toString();

            this._env_vars = {};

            env_content
                .trim()
                .split('\n')
                .map(line => line.split('=', 2))
                .forEach(([key, val]) => {
                    this._env_vars[key.trim()] = val.trim();
                });
        }

        return this._env_vars;
    get id(){
        return runtimes.indexOf(this);
    }

    toString() {
        return `${this.language}-${this.version.raw}`;
        return `${this.language}-${this.version}`;
    }

    unregister() {
        const index = runtimes.indexOf(this);
        runtimes.splice(index, 1); //Remove from runtimes list
    }
}

module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.get_runtimes_matching_language_version = function (lang, ver) {
    return runtimes.filter(
        rt =>
            (rt.language == lang || rt.aliases.includes(lang)) &&
            semver.satisfies(rt.version, ver)
    );
};
module.exports.get_latest_runtime_matching_language_version = function (
    lang,
    ver
) {
    return module.exports
        .get_runtimes_matching_language_version(lang, ver)
        .sort((a, b) => semver.rcompare(a.version, b.version))[0];
};
module.exports.load_runtime = Runtime.load_runtime;

module.exports.get_runtime_by_name_and_version = function (runtime, ver) {
    return runtimes.find(
        rt =>
            (rt.runtime == runtime ||
                (rt.runtime === undefined && rt.language == runtime)) &&
            semver.satisfies(rt.version, ver)
    );
};

module.exports.load_package = Runtime.load_package;
1406 api/yarn.lock Normal file
File diff suppressed because it is too large