fix: add incremental typing to piston

This commit is contained in:
Endercheif 2023-02-27 18:08:05 -08:00
parent 593f59a184
commit 2962e0cf99
No known key found for this signature in database
GPG key ID: 7767459A0C8BEE00
14 changed files with 1039 additions and 1318 deletions

View file

@ -1,12 +1,14 @@
const express = require('express');
const router = express.Router();
import { Router } from 'express';
const events = require('events');
import { EventEmitter } from 'events';
const runtime = require('../runtime');
const { Job } = require('../job');
const package = require('../package');
const logger = require('logplease').create('api/v2');
import { get_latest_runtime_matching_language_version, runtimes as _runtimes } from '../runtime';
import Job from '../job';
import package_ from '../package';
import { create } from 'logplease'
const logger = create('api/v2', {});
const router = Router();
const SIGNALS = [
'SIGABRT',
@ -87,7 +89,7 @@ function get_job(body) {
}
}
const rt = runtime.get_latest_runtime_matching_language_version(
const rt = get_latest_runtime_matching_language_version(
language,
version
);
@ -174,7 +176,7 @@ router.use((req, res, next) => {
router.ws('/connect', async (ws, req) => {
let job = null;
let eventBus = new events.EventEmitter();
let eventBus = new EventEmitter();
eventBus.on('stdout', data =>
ws.send(
@ -203,6 +205,7 @@ router.ws('/connect', async (ws, req) => {
ws.on('message', async data => {
try {
// @ts-ignore
const msg = JSON.parse(data);
switch (msg.type) {
@ -286,7 +289,7 @@ router.post('/execute', async (req, res) => {
});
router.get('/runtimes', (req, res) => {
const runtimes = runtime.map(rt => {
const runtimes = _runtimes.map(rt => {
return {
language: rt.language,
version: rt.version.raw,
@ -300,9 +303,9 @@ router.get('/runtimes', (req, res) => {
router.get('/packages', async (req, res) => {
logger.debug('Request to list packages');
let packages = await package.get_package_list();
let packages = await package_.get_package_list();
packages = packages.map(pkg => {
const pkgs = packages.map(pkg => {
return {
language: pkg.language,
language_version: pkg.version.raw,
@ -310,7 +313,7 @@ router.get('/packages', async (req, res) => {
};
});
return res.status(200).send(packages);
return res.status(200).send(pkgs);
});
router.post('/packages', async (req, res) => {
@ -318,7 +321,7 @@ router.post('/packages', async (req, res) => {
const { language, version } = req.body;
const pkg = await package.get_package(language, version);
const pkg = await package_.get_package(language, version);
if (pkg == null) {
return res.status(404).send({
@ -347,7 +350,7 @@ router.delete('/packages', async (req, res) => {
const { language, version } = req.body;
const pkg = await package.get_package(language, version);
const pkg = await package_.get_package(language, version);
if (pkg == null) {
return res.status(404).send({
@ -371,4 +374,4 @@ router.delete('/packages', async (req, res) => {
}
});
module.exports = router;
export default router;

View file

@ -1,6 +1,6 @@
const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');
import { existsSync } from 'fs';
import { create, LogLevels } from 'logplease';
const logger = create('config', {});
const options = {
log_level: {
@ -8,7 +8,7 @@ const options = {
default: 'INFO',
validators: [
x =>
Object.values(Logger.LogLevels).includes(x) ||
Object.values(LogLevels).includes(x) ||
`Log level ${x} does not exist`,
],
},
@ -20,33 +20,31 @@ const options = {
data_directory: {
desc: 'Absolute path to store all piston related data at',
default: '/piston',
validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
validators: [x => existsSync(x) || `Directory ${x} does not exist`],
},
runner_uid_min: {
desc: 'Minimum uid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
runner_uid_max: {
desc: 'Maximum uid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
runner_gid_min: {
desc: 'Minimum gid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
runner_gid_max: {
desc: 'Maximum gid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
disable_networking: {
desc: 'Set to true to disable networking',
@ -57,50 +55,50 @@ const options = {
output_max_size: {
desc: 'Max size of each stdio buffer',
default: 1024,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
max_process_count: {
desc: 'Max number of processes per job',
default: 64,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
max_open_files: {
desc: 'Max number of open files per job',
default: 2048,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
max_file_size: {
desc: 'Max file size in bytes for a file',
default: 10000000, //10MB
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
compile_timeout: {
desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
run_timeout: {
desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
compile_memory_limit: {
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
run_memory_limit: {
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
parser: parseInt,
validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
},
repo_url: {
desc: 'URL of repo index',
@ -111,7 +109,7 @@ const options = {
max_concurrent_jobs: {
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
parser: parseInt,
validators: [x => x > 0 || `${x} cannot be negative`],
},
limit_overrides: {
@ -201,12 +199,14 @@ function validate_overrides(overrides) {
logger.info(`Loading Configuration from environment`);
/** @type {import('./types').ObjectType<typeof options, "default">} */
// @ts-ignore
let config = {};
for (const option_name in options) {
const env_key = 'PISTON_' + option_name.to_upper_case();
const env_key = 'PISTON_' + option_name.toUpperCase();
const option = options[option_name];
const parser = option.parser || (x => x);
const parser = option.parser || ((/** @type {any} */ x) => x);
const env_val = process.env[env_key];
const parsed_val = parser(env_val);
const value = env_val === undefined ? option.default : parsed_val;
@ -228,4 +228,4 @@ for (const option_name in options) {
logger.info('Configuration successfully loaded');
module.exports = config;
export default config;

View file

@ -1,20 +1,20 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require('is-docker');
const fs = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${fs
.read_file_sync('/etc/os-release')
import is_docker from 'is-docker';
import { readFileSync } from 'fs';
export const platform = `${is_docker() ? 'docker' : 'baremetal'}-${readFileSync('/etc/os-release')
.toString()
.split('\n')
.find(x => x.startsWith('ID'))
.replace('ID=', '')}`;
module.exports = {
data_directories: {
packages: 'packages',
jobs: 'jobs',
},
version: require('../package.json').version,
platform,
pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
clean_directories: ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'],
};
export const data_directories = {
packages: 'packages',
jobs: 'jobs',
}
export const version = require('../package.json').version
export const pkg_installed_file = '.ppman-installed' //Used as indication for if a package was installed
export const clean_directories = ['/dev/shm', '/run/lock', '/tmp', '/var/tmp']

View file

@ -1,72 +1,71 @@
#!/usr/bin/env node
require('nocamel');
const Logger = require('logplease');
const express = require('express');
const expressWs = require('express-ws');
const globals = require('./globals');
const config = require('./config');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const body_parser = require('body-parser');
const runtime = require('./runtime');
import { create, setLogLevel } from 'logplease';
import express from 'express';
import expressWs from 'express-ws';
import * as globals from './globals';
import config from './config';
import { join } from 'path';
import { readdir } from 'fs/promises';
import { existsSync, mkdirSync, chmodSync } from 'fs';
import { urlencoded, json } from 'body-parser';
import { load_package } from './runtime';
const logger = Logger.create('index');
const logger = create('index', {});
const app = express();
expressWs(app);
(async () => {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
logger.info('Setting loglevel to');
// @ts-ignore
setLogLevel(config.log_level);
logger.debug('Ensuring data directories exist');
Object.values(globals.data_directories).for_each(dir => {
let data_path = path.join(config.data_directory, dir);
Object.values(globals.data_directories).forEach(dir => {
let data_path = join(config.data_directory, dir);
logger.debug(`Ensuring ${data_path} exists`);
if (!fss.exists_sync(data_path)) {
if (!existsSync(data_path)) {
logger.info(`${data_path} does not exist.. Creating..`);
try {
fss.mkdir_sync(data_path);
mkdirSync(data_path);
} catch (e) {
logger.error(`Failed to create ${data_path}: `, e.message);
}
}
});
fss.chmodSync(path.join(config.data_directory, globals.data_directories.jobs), 0o711)
chmodSync(join(config.data_directory, globals.data_directories.jobs), 0o711)
logger.info('Loading packages');
const pkgdir = path.join(
const pkgdir = join(
config.data_directory,
globals.data_directories.packages
);
const pkglist = await fs.readdir(pkgdir);
const pkglist = await readdir(pkgdir);
const languages = await Promise.all(
pkglist.map(lang => {
return fs.readdir(path.join(pkgdir, lang)).then(x => {
return x.map(y => path.join(pkgdir, lang, y));
});
pkglist.map(async lang => {
const x = await readdir(join(pkgdir, lang));
return x.map(y => join(pkgdir, lang, y));
})
);
const installed_languages = languages
.flat()
.filter(pkg =>
fss.exists_sync(path.join(pkg, globals.pkg_installed_file))
existsSync(join(pkg, globals.pkg_installed_file))
);
installed_languages.for_each(pkg => runtime.load_package(pkg));
installed_languages.forEach(pkg => load_package(pkg));
logger.info('Starting API Server');
logger.debug('Constructing Express App');
logger.debug('Registering middleware');
app.use(body_parser.urlencoded({ extended: true }));
app.use(body_parser.json());
app.use(urlencoded({ extended: true }));
app.use(json());
app.use((err, req, res, next) => {
return res.status(400).send({
@ -76,7 +75,7 @@ expressWs(app);
logger.debug('Registering Routes');
const api_v2 = require('./api/v2');
const api_v2 = require('./api/v2').default;
app.use('/api/v2', api_v2);
app.use((req, res, next) => {
@ -86,7 +85,7 @@ expressWs(app);
logger.debug('Calling app.listen');
const [address, port] = config.bind_address.split(':');
app.listen(port, address, () => {
app.listen(+port, address, () => {
logger.info('API server started on', config.bind_address);
});
})();

View file

@ -1,13 +1,14 @@
const logplease = require('logplease');
const logger = logplease.create('job');
const { v4: uuidv4 } = require('uuid');
const cp = require('child_process');
const path = require('path');
const config = require('./config');
const globals = require('./globals');
const fs = require('fs/promises');
const fss = require('fs');
const wait_pid = require('waitpid');
import { create } from 'logplease';
// @ts-ignore
const logger = create('job');
import { v4 as uuidv4 } from 'uuid';
import { spawn } from 'child_process';
import { join, relative, dirname } from 'path';
import config from './config';
import * as globals from './globals';
import { mkdir, chown, writeFile, readdir, stat as _stat, rm } from 'fs/promises';
import { readdirSync, readFileSync } from 'fs';
import wait_pid from 'waitpid';
const job_states = {
READY: Symbol('Ready to be primed'),
@ -28,11 +29,11 @@ setInterval(() => {
}
}, 10);
class Job {
export default class Job {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) {
this.uuid = uuidv4();
this.logger = logplease.create(`job/${this.uuid}`);
this.logger = create(`job/${this.uuid}`, {});
this.runtime = runtime;
this.files = files.map((file, i) => ({
@ -61,7 +62,7 @@ class Job {
this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`);
this.state = job_states.READY;
this.dir = path.join(
this.dir = join(
config.data_directory,
globals.data_directories.jobs,
this.uuid
@ -82,12 +83,12 @@ class Job {
this.logger.debug(`Transfering ownership`);
await fs.mkdir(this.dir, { mode: 0o700 });
await fs.chown(this.dir, this.uid, this.gid);
await mkdir(this.dir, { mode: 0o700 });
await chown(this.dir, this.uid, this.gid);
for (const file of this.files) {
const file_path = path.join(this.dir, file.name);
const rel = path.relative(this.dir, file_path);
const file_path = join(this.dir, file.name);
const rel = relative(this.dir, file_path);
const file_content = Buffer.from(file.content, file.encoding);
if (rel.startsWith('..'))
@ -95,14 +96,14 @@ class Job {
`File path "${file.name}" tries to escape parent directory: ${rel}`
);
await fs.mkdir(path.dirname(file_path), {
await mkdir(dirname(file_path), {
recursive: true,
mode: 0o700,
});
await fs.chown(path.dirname(file_path), this.uid, this.gid);
await chown(dirname(file_path), this.uid, this.gid);
await fs.write_file(file_path, file_content);
await fs.chown(file_path, this.uid, this.gid);
await writeFile(file_path, file_content);
await chown(file_path, this.uid, this.gid);
}
this.state = job_states.PRIMED;
@ -120,16 +121,19 @@ class Job {
'--nofile=' + this.runtime.max_open_files,
'--fsize=' + this.runtime.max_file_size,
];
const timeout_call = [
'timeout', '-s', '9', Math.ceil(timeout / 1000),
'timeout',
'-s',
'9',
Math.ceil(timeout / 1000),
];
if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit);
}
const proc_call = [
const proc_call = [
'nice',
...timeout_call,
...prlimit,
@ -143,7 +147,7 @@ class Job {
var stderr = '';
var output = '';
const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
const proc = spawn(proc_call[0], proc_call.splice(1), {
env: {
...this.runtime.env_vars,
PISTON_LANGUAGE: this.runtime.language,
@ -171,7 +175,7 @@ class Job {
const kill_timeout =
(timeout >= 0 &&
set_timeout(async _ => {
setTimeout(async _ => {
this.logger.info(`Timeout exceeded timeout=${timeout}`);
process.kill(proc.pid, 'SIGKILL');
}, timeout)) ||
@ -202,7 +206,7 @@ class Job {
});
const exit_cleanup = () => {
clear_timeout(kill_timeout);
clearTimeout(kill_timeout);
proc.stderr.destroy();
proc.stdout.destroy();
@ -245,7 +249,7 @@ class Job {
if (this.runtime.compiled) {
compile = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'),
join(this.runtime.pkgdir, 'compile'),
code_files.map(x => x.name),
this.timeouts.compile,
this.memory_limits.compile
@ -255,7 +259,7 @@ class Job {
this.logger.debug('Running');
const run = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'),
join(this.runtime.pkgdir, 'run'),
[code_files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run
@ -290,7 +294,7 @@ class Job {
if (this.runtime.compiled) {
eventBus.emit('stage', 'compile');
const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'),
join(this.runtime.pkgdir, 'compile'),
code_files.map(x => x.name),
this.timeouts.compile,
this.memory_limits.compile,
@ -303,7 +307,7 @@ class Job {
this.logger.debug('Running');
eventBus.emit('stage', 'run');
const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'),
join(this.runtime.pkgdir, 'run'),
[code_files[0].name, ...this.args],
this.timeouts.run,
this.memory_limits.run,
@ -323,35 +327,36 @@ class Job {
while (processes.length > 0) {
processes = [];
const proc_ids = fss.readdir_sync('/proc');
const proc_ids = readdirSync('/proc');
processes = proc_ids.map(proc_id => {
if (isNaN(proc_id)) return -1;
if (isNaN(+proc_id)) return -1;
try {
const proc_status = fss.read_file_sync(
path.join('/proc', proc_id, 'status')
const proc_status = readFileSync(
join('/proc', proc_id, 'status')
);
const proc_lines = proc_status.to_string().split('\n');
const proc_lines = proc_status.toString().split('\n');
const state_line = proc_lines.find(line =>
line.starts_with('State:')
line.startsWith('State:')
);
const uid_line = proc_lines.find(line =>
line.starts_with('Uid:')
line.startsWith('Uid:')
);
const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
const [_1, state, user_friendly] = state_line.split(/\s+/);
const proc_id_int = parse_int(proc_id);
// Skip over any processes that aren't ours.
if(ruid != this.uid && euid != this.uid) return -1;
if (state == 'Z'){
const proc_id_int = parseInt(proc_id);
// Skip over any processes that aren't ours.
// @ts-ignore: dont want to risk fixing this
if (ruid != this.uid && euid != this.uid) return -1;
if (state == 'Z') {
// Zombie process, just needs to be waited, regardless of the user id
if(!to_wait.includes(proc_id_int))
if (!to_wait.includes(proc_id_int))
to_wait.push(proc_id_int);
return -1;
}
// We should kill in all other state (Sleep, Stopped & Running)
@ -386,7 +391,7 @@ class Job {
// Then clear them out of the process tree
try {
process.kill(proc, 'SIGKILL');
} catch(e) {
} catch (e) {
// Could already be dead and just needs to be waited on
this.logger.debug(
`Got error while SIGKILLing process ${proc}:`,
@ -413,16 +418,16 @@ class Job {
async cleanup_filesystem() {
for (const clean_path of globals.clean_directories) {
const contents = await fs.readdir(clean_path);
const contents = await readdir(clean_path);
for (const file of contents) {
const file_path = path.join(clean_path, file);
const file_path = join(clean_path, file);
try {
const stat = await fs.stat(file_path);
const stat = await _stat(file_path);
if (stat.uid === this.uid) {
await fs.rm(file_path, {
await rm(file_path, {
recursive: true,
force: true,
});
@ -434,7 +439,7 @@ class Job {
}
}
await fs.rm(this.dir, { recursive: true, force: true });
await rm(this.dir, { recursive: true, force: true });
}
async cleanup() {
@ -446,7 +451,3 @@ class Job {
remaining_job_spaces++;
}
}
module.exports = {
Job,
};

View file

@ -1,33 +1,34 @@
const logger = require('logplease').create('package');
const semver = require('semver');
const config = require('./config');
const globals = require('./globals');
const fetch = require('node-fetch');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const cp = require('child_process');
const crypto = require('crypto');
const runtime = require('./runtime');
const chownr = require('chownr');
const util = require('util');
import { create } from 'logplease'
import { parse, satisfies, rcompare } from 'semver';
import config from './config';
import * as globals from './globals';
import fetch from 'node-fetch';
import { join } from 'path';
import { rm, mkdir, writeFile, rmdir } from 'fs/promises';
import { existsSync, createWriteStream, createReadStream } from 'fs';
import { exec, spawn } from 'child_process';
import { createHash } from 'crypto';
import { load_package, get_runtime_by_name_and_version } from './runtime';
import chownr from 'chownr';
import { promisify } from 'util';
const logger = create('package', {});
class Package {
constructor({ language, version, download, checksum }) {
this.language = language;
this.version = semver.parse(version);
this.version = parse(version);
this.checksum = checksum;
this.download = download;
}
get installed() {
return fss.exists_sync(
path.join(this.install_path, globals.pkg_installed_file)
return existsSync(
join(this.install_path, globals.pkg_installed_file)
);
}
get install_path() {
return path.join(
return join(
config.data_directory,
globals.data_directories.packages,
this.language,
@ -42,23 +43,23 @@ class Package {
logger.info(`Installing ${this.language}-${this.version.raw}`);
if (fss.exists_sync(this.install_path)) {
if (existsSync(this.install_path)) {
logger.warn(
`${this.language}-${this.version.raw} has residual files. Removing them.`
);
await fs.rm(this.install_path, { recursive: true, force: true });
await rm(this.install_path, { recursive: true, force: true });
}
logger.debug(`Making directory ${this.install_path}`);
await fs.mkdir(this.install_path, { recursive: true });
await mkdir(this.install_path, { recursive: true });
logger.debug(
`Downloading package from ${this.download} in to ${this.install_path}`
);
const pkgpath = path.join(this.install_path, 'pkg.tar.gz');
const pkgpath = join(this.install_path, 'pkg.tar.gz');
const download = await fetch(this.download);
const file_stream = fss.create_write_stream(pkgpath);
const file_stream = createWriteStream(pkgpath);
await new Promise((resolve, reject) => {
download.body.pipe(file_stream);
download.body.on('error', reject);
@ -68,9 +69,9 @@ class Package {
logger.debug('Validating checksums');
logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`);
const hash = crypto.create_hash('sha256');
const hash = createHash('sha256');
const read_stream = fss.create_read_stream(pkgpath);
const read_stream = createReadStream(pkgpath);
await new Promise((resolve, reject) => {
read_stream.on('data', chunk => hash.update(chunk));
read_stream.on('end', () => resolve());
@ -90,7 +91,7 @@ class Package {
);
await new Promise((resolve, reject) => {
const proc = cp.exec(
const proc = exec(
`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`
);
@ -105,7 +106,7 @@ class Package {
});
logger.debug('Registering runtime');
runtime.load_package(this.install_path);
load_package(this.install_path);
logger.debug('Caching environment');
const get_env_command = `cd ${this.install_path}; source environment; env`;
@ -113,7 +114,7 @@ class Package {
const envout = await new Promise((resolve, reject) => {
let stdout = '';
const proc = cp.spawn(
const proc = spawn(
'env',
['-i', 'bash', '-c', `${get_env_command}`],
{
@ -142,14 +143,14 @@ class Package {
)
.join('\n');
await fs.write_file(path.join(this.install_path, '.env'), filtered_env);
await writeFile(join(this.install_path, '.env'), filtered_env);
logger.debug('Changing Ownership of package directory');
await util.promisify(chownr)(this.install_path, 0, 0);
await promisify(chownr)(this.install_path, 0, 0);
logger.debug('Writing installed state to disk');
await fs.write_file(
path.join(this.install_path, globals.pkg_installed_file),
await writeFile(
join(this.install_path, globals.pkg_installed_file),
Date.now().toString()
);
@ -165,7 +166,7 @@ class Package {
logger.info(`Uninstalling ${this.language}-${this.version.raw}`);
logger.debug('Finding runtime');
const found_runtime = runtime.get_runtime_by_name_and_version(
const found_runtime = get_runtime_by_name_and_version(
this.language,
this.version.raw
);
@ -183,7 +184,7 @@ class Package {
found_runtime.unregister();
logger.debug('Cleaning files from disk');
await fs.rmdir(this.install_path, { recursive: true });
await rmdir(this.install_path, { recursive: true });
logger.info(`Uninstalled ${this.language}-${this.version.raw}`);
@ -215,14 +216,14 @@ class Package {
const candidates = packages.filter(pkg => {
return (
pkg.language == lang && semver.satisfies(pkg.version, version)
pkg.language == lang && satisfies(pkg.version, version)
);
});
candidates.sort((a, b) => semver.rcompare(a.version, b.version));
candidates.sort((a, b) => rcompare(a.version, b.version));
return candidates[0] || null;
}
}
module.exports = Package;
export default Package;

View file

@ -1,10 +1,13 @@
const logger = require('logplease').create('runtime');
const semver = require('semver');
const config = require('./config');
const globals = require('./globals');
const fss = require('fs');
const path = require('path');
import { create } from 'logplease';
import { parse, satisfies, rcompare } from 'semver';
import config from './config';
import { platform } from './globals';
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
const logger = create('runtime', {});
/** @type {Array<Runtime>} */
const runtimes = [];
class Runtime {
@ -99,7 +102,8 @@ class Runtime {
static load_package(package_dir) {
let info = JSON.parse(
fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
// @ts-ignore
readFileSync(join(package_dir, 'pkg-info.json'))
);
let {
@ -110,12 +114,12 @@ class Runtime {
provides,
limit_overrides,
} = info;
version = semver.parse(version);
version = parse(version);
if (build_platform !== globals.platform) {
if (build_platform !== platform) {
logger.warn(
`Package ${language}-${version} was built for platform ${build_platform}, ` +
`but our platform is ${globals.platform}`
`but our platform is ${platform}`
);
}
@ -138,6 +142,7 @@ class Runtime {
});
} else {
runtimes.push(
// @ts-ignore
new Runtime({
language,
version,
@ -153,7 +158,7 @@ class Runtime {
get compiled() {
if (this._compiled === undefined) {
this._compiled = fss.exists_sync(path.join(this.pkgdir, 'compile'));
this._compiled = existsSync(join(this.pkgdir, 'compile'));
}
return this._compiled;
@ -161,8 +166,8 @@ class Runtime {
get env_vars() {
if (!this._env_vars) {
const env_file = path.join(this.pkgdir, '.env');
const env_content = fss.read_file_sync(env_file).toString();
const env_file = join(this.pkgdir, '.env');
const env_content = readFileSync(env_file).toString();
this._env_vars = {};
@ -188,31 +193,32 @@ class Runtime {
}
}
module.exports = runtimes;
module.exports.Runtime = Runtime;
module.exports.get_runtimes_matching_language_version = function (lang, ver) {
const _runtimes = runtimes;
export { _runtimes as runtimes };
const _Runtime = Runtime;
export { _Runtime as Runtime };
export function get_runtimes_matching_language_version(lang, ver) {
return runtimes.filter(
rt =>
(rt.language == lang || rt.aliases.includes(lang)) &&
semver.satisfies(rt.version, ver)
satisfies(rt.version, ver)
);
};
module.exports.get_latest_runtime_matching_language_version = function (
}
export function get_latest_runtime_matching_language_version(
lang,
ver
) {
return module.exports
.get_runtimes_matching_language_version(lang, ver)
.sort((a, b) => semver.rcompare(a.version, b.version))[0];
};
return get_runtimes_matching_language_version(lang, ver)
.sort((a, b) => rcompare(a.version, b.version))[0];
}
module.exports.get_runtime_by_name_and_version = function (runtime, ver) {
export function get_runtime_by_name_and_version(runtime, ver) {
return runtimes.find(
rt =>
(rt.runtime == runtime ||
(rt.runtime === undefined && rt.language == runtime)) &&
semver.satisfies(rt.version, ver)
satisfies(rt.version, ver)
);
};
}
module.exports.load_package = Runtime.load_package;
export const load_package = Runtime.load_package;

21
api/src/types.ts Normal file
View file

@ -0,0 +1,21 @@
/** A single file submitted as part of an execution request. */
export type File = {
// Raw file contents, decoded according to `encoding`.
content: string;
// Optional file name; how a missing name is defaulted is decided by the caller — TODO confirm.
name?: string;
// Encoding of `content`; presumably treated as 'utf8' when omitted — TODO confirm against job.js.
encoding?: 'base64' | 'hex' | 'utf8';
};
/**
 * Shape of the JSON body accepted by the execute endpoints
 * (language/version selection plus the files and limits for one job).
 */
export interface Body {
// Language identifier used to look up a matching runtime.
language: string;
// Semver (or semver range) of the requested runtime version.
version: string;
// Files to write into the job sandbox; at least one is expected — TODO confirm validation site.
files: Array<File>;
// Data piped to the process's stdin.
stdin?: string;
// Command-line arguments passed to the program.
args?: Array<string>;
// Per-request limit overrides; config defaults use milliseconds for
// timeouts and bytes for memory limits (-1 = no limit) — presumably the
// same units apply here. TODO confirm.
run_timeout?: number;
compile_timeout?: number;
compile_memory_limit?: number;
run_memory_limit?: number;
}
/**
 * Maps each property of `TObject` to the value of its `Key` sub-property.
 * Used by config.js as `ObjectType<typeof options, "default">` to derive the
 * config object's type from each option's `default` field.
 */
export type ObjectType<TObject extends Record<any, Record<Key, any>>, Key extends string> = {
[K in keyof TObject]: TObject[K][Key];
};