Merge pull request #442 from Brikaa/refactor-runtimes
Refactor runtimes
commit d244f0138c
@@ -48,7 +48,7 @@ const SIGNALS = [
 ];
 // ref: https://man7.org/linux/man-pages/man7/signal.7.html

-function get_job(body) {
+function get_job(job_info, available_runtimes) {
     let {
         language,
         args,
@@ -58,7 +58,7 @@ function get_job(body) {
         run_memory_limit,
         run_timeout,
         compile_timeout,
-    } = body;
+    } = job_info;

     return new Promise((resolve, reject) => {
         if (!language || typeof language !== 'string') {
@@ -80,7 +80,7 @@ function get_job(body) {
             }
         }

-        const rt = runtime.find(rt =>
+        const rt = available_runtimes.find(rt =>
             [...rt.aliases, rt.language].includes(rt.language)
         );

@@ -102,7 +102,7 @@ function get_job(body) {
         for (const constraint of ['memory_limit', 'timeout']) {
             for (const type of ['compile', 'run']) {
                 const constraint_name = `${type}_${constraint}`;
-                const constraint_value = body[constraint_name];
+                const constraint_value = job_info[constraint_name];
                 const configured_limit = rt[`${constraint}s`][type];
                 if (!constraint_value) {
                     continue;
@@ -201,7 +201,7 @@ router.ws('/connect', async (ws, req) => {
             switch (msg.type) {
                 case 'init':
                     if (job === null) {
-                        job = await get_job(msg);
+                        job = await get_job(msg, req.app.locals.runtimes);

                         await job.prime();

@@ -264,7 +264,7 @@ router.ws('/connect', async (ws, req) => {

 router.post('/execute', async (req, res) => {
     try {
-        const job = await get_job(req.body);
+        const job = await get_job(req.body, req.app.locals.runtimes);

         await job.prime();

@@ -279,7 +279,7 @@ router.post('/execute', async (req, res) => {
 });

 router.get('/runtimes', (req, res) => {
-    const runtimes = runtime.map(rt => {
+    const runtimes = req.app.locals.runtimes.map(rt => {
         return {
             language: rt.language,
             version: rt.version.raw,
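The hunks above all push in one direction: get_job no longer reaches for a module-level runtime array, and the routes hand it req.app.locals.runtimes instead. A minimal sketch of that sharing pattern, outside this commit, with an invented runtime object and route purely for illustration:

// Illustrative sketch only, not part of the commit. The runtime object and the
// /which route are invented; real runtime objects come from the runtime module.
const express = require('express');
const app = express();

// The entry point builds the list once and shares it through app.locals.
app.locals.runtimes = Object.freeze([
    { language: 'node', aliases: ['javascript', 'js'], version: { raw: '15.10.0' } },
]);

// Any router mounted on this app sees the same frozen list via req.app.locals.
app.get('/which', (req, res) => {
    const available_runtimes = req.app.locals.runtimes;
    const rt = available_runtimes.find(rt =>
        [...rt.aliases, rt.language].includes(req.query.language)
    );
    res.json(rt ? { language: rt.language, version: rt.version.raw } : null);
});

app.listen(2000);

Passing the list into get_job instead of importing it also makes the resolver easy to exercise against a hand-built array.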
@@ -3,7 +3,6 @@ const router = express.Router();

 const events = require('events');

-const runtime = require('../runtime');
 const { Job } = require('../job');

 const SIGNALS = [
@@ -48,7 +47,7 @@ const SIGNALS = [
 ];
 // ref: https://man7.org/linux/man-pages/man7/signal.7.html

-function get_job(body) {
+function get_job(job_info, available_runtimes) {
     const {
         runtime_id,
         args,
@@ -58,7 +57,7 @@ function get_job(body) {
         run_memory_limit,
         run_timeout,
         compile_timeout,
-    } = body;
+    } = job_info;

     return new Promise((resolve, reject) => {
         if (typeof runtime_id !== 'number') {
@@ -73,7 +72,7 @@ function get_job(body) {
             });
         }

-        const rt = runtime[runtime_id];
+        const rt = available_runtimes[runtime_id];

         if (rt === undefined) {
             return reject({
@@ -99,7 +98,7 @@ function get_job(body) {
         for (const constraint of ['memory_limit', 'timeout']) {
             for (const type of ['compile', 'run']) {
                 const constraint_name = `${type}_${constraint}`;
-                const constraint_value = body[constraint_name];
+                const constraint_value = job_info[constraint_name];
                 const configured_limit = rt[`${constraint}s`][type];
                 if (!constraint_value) {
                     continue;
@@ -199,7 +198,7 @@ router.ws('/connect', async (ws, req) => {
             switch (msg.type) {
                 case 'init':
                     if (job === null) {
-                        job = await get_job(msg);
+                        job = await get_job(msg, req.app.locals.runtimes);

                         await job.prime();

@@ -262,7 +261,7 @@ router.ws('/connect', async (ws, req) => {

 router.post('/execute', async (req, res) => {
     try {
-        const job = await get_job(req.body);
+        const job = await get_job(req.body, req.app.locals.runtimes);

         await job.prime();

@@ -277,13 +276,13 @@ router.post('/execute', async (req, res) => {
 });

 router.get('/runtimes', (req, res) => {
-    const runtimes = runtime.map(rt => {
+    const runtimes = req.app.locals.runtimes.map((rt, index) => {
         return {
             language: rt.language,
             version: rt.version.raw,
             aliases: rt.aliases,
             runtime: rt.runtime,
-            id: rt.id,
+            id: index,
         };
     });

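In this file the /runtimes listing now reports id as the index into req.app.locals.runtimes, which is exactly what get_job uses for the available_runtimes[runtime_id] lookup. A hedged client-side sketch of that round trip; the base URL, the mount path, and the files field are assumptions, not taken from this diff:

// Sketch only, not part of the commit. Assumes Node 18+ (global fetch), a local
// server, and a hypothetical /api/v3 mount; request fields beyond runtime_id
// mirror the public Piston API and may differ on this branch.
const base = 'http://localhost:2000/api/v3';

async function run_hello() {
    const runtimes = await (await fetch(`${base}/runtimes`)).json();
    // Each entry now carries id = its index in the server's runtime list.
    const node = runtimes.find(rt => rt.language === 'node');

    const response = await fetch(`${base}/execute`, {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({
            runtime_id: node.id,
            files: [{ content: "console.log('hello')" }],
        }),
    });
    console.log(await response.json());
}

run_hello();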
@@ -7,7 +7,6 @@ const globals = require('../globals');
 const config = require('../config');
 const cp = require('child_process');
 const path = require('path');
 const fs = require('fs/promises');
 const fss = require('fs');
 const body_parser = require('body-parser');
 const runtime = require('../runtime');
@@ -39,10 +38,23 @@ expressWs(app);

 logger.info('Loading packages');

-const runtimes_data = cp.execSync(`nix eval --json ${config.flake_path}#pistonRuntimeSets.${config.runtime_set} --apply builtins.attrNames`).toString();
-const runtimes = JSON.parse(runtimes_data);
-
-runtimes.for_each(pkg => runtime.load_runtime(pkg));
+const runtimes_data = cp
+    .execSync(
+        `nix eval --json ${config.flake_path}#pistonRuntimeSets.${config.runtime_set} --apply builtins.attrNames`
+    )
+    .toString();
+const runtime_names = JSON.parse(runtimes_data);
+
+logger.info('Loading the runtimes from the flakes');
+const runtimes = runtime_names.map(runtime_name => {
+    logger.info(`Loading ${runtime_name}`);
+    return runtime.get_runtime_from_flakes(runtime_name);
+});
+logger.info('Ensuring all of the runtimes are built');
+runtimes.for_each(r => r.ensure_built());
+
+Object.freeze(runtimes);
+app.locals.runtimes = runtimes;

 logger.info('Starting API Server');
 logger.debug('Constructing Express App');
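For orientation on the new loading sequence: nix eval --json <flake>#pistonRuntimeSets.<set> --apply builtins.attrNames prints the attribute names of the chosen runtime set as a JSON array, so runtime_names is just a list of strings that gets mapped onto Runtime objects. A standalone sketch of the same pipeline with the nix call stubbed out (the stubbed values are invented):

// Sketch only, not part of the commit. The execSync output is replaced with a
// hypothetical runtime set so the map/build/freeze steps can run on their own.
const runtimes_data = '["node", "python3"]'; // shape of the --apply builtins.attrNames output
const runtime_names = JSON.parse(runtimes_data);

const runtimes = runtime_names.map(runtime_name => ({
    name: runtime_name,
    // Stand-in for Runtime#ensure_built.
    ensure_built: () => console.log(`building ${runtime_name}`),
}));
// The diff spells this for_each via the project's snake_case helpers;
// plain forEach keeps the sketch runnable on stock Node.
runtimes.forEach(r => r.ensure_built());

// Shallow freeze: the array can no longer grow or shrink, though the runtime
// objects inside it remain mutable.
Object.freeze(runtimes);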
@@ -30,29 +30,17 @@ const { Job } = require('../job');
     const runtime_path = `${config.flake_path}#pistonRuntimes.${runtime_name}`;
     logger.info(`Testing runtime ${runtime_path}`);

-    logger.debug(`Loading runtime metadata`);
-    const metadata = JSON.parse(
-        cp.execSync(`nix eval --json ${runtime_path}.metadata --json`)
-    );
+    logger.debug(`Loading runtime`);
+
+    const testable_runtime = runtime.get_runtime_from_flakes(runtime_name);
+
+    testable_runtime.ensure_built();

     logger.debug(`Loading runtime tests`);
     const tests = JSON.parse(
         cp.execSync(`nix eval --json ${runtime_path}.tests --json`)
     );

-    logger.debug(`Loading runtime`);
-
-    const testable_runtime = new runtime.Runtime({
-        ...metadata,
-        ...runtime.Runtime.compute_all_limits(
-            metadata.language,
-            metadata.limitOverrides
-        ),
-        flake_path: runtime_path,
-    });
-
-    testable_runtime.ensure_built();
-
     logger.info(`Running tests`);

     for (const test of tests) {
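After this change the API entry point and this test runner share a single loading path, runtime.get_runtime_from_flakes. A small usage sketch, assuming the '../runtime' module path from the diff, an installed nix, and a placeholder runtime name:

// Sketch only, not part of the commit; 'node' is a placeholder runtime name.
const runtime = require('../runtime');

const testable_runtime = runtime.get_runtime_from_flakes('node');
testable_runtime.ensure_built();

// Runtime#toString() formats as `${language}-${version}`.
console.log(`prepared ${testable_runtime}`);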
@@ -2,8 +2,6 @@ const logger = require('logplease').create('runtime');
 const cp = require('child_process');
 const config = require('./config');

-const runtimes = [];
-
 class Runtime {
     constructor({
         language,
@@ -41,69 +39,6 @@ class Runtime {
         this.package_support = packageSupport;
     }

-    static compute_single_limit(
-        language_name,
-        limit_name,
-        language_limit_overrides
-    ) {
-        return (
-            (config.limit_overrides[language_name] &&
-                config.limit_overrides[language_name][limit_name]) ||
-            (language_limit_overrides &&
-                language_limit_overrides[limit_name]) ||
-            config[limit_name]
-        );
-    }
-
-    static compute_all_limits(language_name, language_limit_overrides) {
-        return {
-            timeouts: {
-                compile: this.compute_single_limit(
-                    language_name,
-                    'compile_timeout',
-                    language_limit_overrides
-                ),
-                run: this.compute_single_limit(
-                    language_name,
-                    'run_timeout',
-                    language_limit_overrides
-                ),
-            },
-            memory_limits: {
-                compile: this.compute_single_limit(
-                    language_name,
-                    'compile_memory_limit',
-                    language_limit_overrides
-                ),
-                run: this.compute_single_limit(
-                    language_name,
-                    'run_memory_limit',
-                    language_limit_overrides
-                ),
-            },
-            max_process_count: this.compute_single_limit(
-                language_name,
-                'max_process_count',
-                language_limit_overrides
-            ),
-            max_open_files: this.compute_single_limit(
-                language_name,
-                'max_open_files',
-                language_limit_overrides
-            ),
-            max_file_size: this.compute_single_limit(
-                language_name,
-                'max_file_size',
-                language_limit_overrides
-            ),
-            output_max_size: this.compute_single_limit(
-                language_name,
-                'output_max_size',
-                language_limit_overrides
-            ),
-        };
-    }
-
     ensure_built() {
         logger.info(`Ensuring ${this} is built`);

@@ -120,41 +55,94 @@ class Runtime {
         logger.debug(`Finished ensuring ${this} is installed`);
     }

-    static load_runtime(flake_key) {
-        logger.info(`Loading ${flake_key}`);
-        const flake_path = `${config.flake_path}#pistonRuntimeSets.${config.runtime_set}.${flake_key}`;
-        const metadata_command = `nix eval --json ${flake_path}.metadata`;
-        const metadata = JSON.parse(cp.execSync(metadata_command));
-
-        const this_runtime = new Runtime({
-            ...metadata,
-            ...Runtime.compute_all_limits(
-                metadata.language,
-                metadata.limitOverrides
-            ),
-            flake_path,
-        });
-
-        this_runtime.ensure_built();
-
-        runtimes.push(this_runtime);
-
-        logger.debug(`Package ${flake_key} was loaded`);
-    }
-
     get compiled() {
         return this.compile !== null;
     }

-    get id() {
-        return runtimes.indexOf(this);
-    }
-
     toString() {
         return `${this.language}-${this.version}`;
     }
 }

-module.exports = runtimes;
+function compute_single_limit(
+    language_name,
+    limit_name,
+    language_limit_overrides
+) {
+    return (
+        (config.limit_overrides[language_name] &&
+            config.limit_overrides[language_name][limit_name]) ||
+        (language_limit_overrides &&
+            language_limit_overrides[limit_name]) ||
+        config[limit_name]
+    );
+}
+
+function compute_all_limits(language_name, language_limit_overrides) {
+    return {
+        timeouts: {
+            compile: compute_single_limit(
+                language_name,
+                'compile_timeout',
+                language_limit_overrides
+            ),
+            run: compute_single_limit(
+                language_name,
+                'run_timeout',
+                language_limit_overrides
+            ),
+        },
+        memory_limits: {
+            compile: compute_single_limit(
+                language_name,
+                'compile_memory_limit',
+                language_limit_overrides
+            ),
+            run: compute_single_limit(
+                language_name,
+                'run_memory_limit',
+                language_limit_overrides
+            ),
+        },
+        max_process_count: compute_single_limit(
+            language_name,
+            'max_process_count',
+            language_limit_overrides
+        ),
+        max_open_files: compute_single_limit(
+            language_name,
+            'max_open_files',
+            language_limit_overrides
+        ),
+        max_file_size: compute_single_limit(
+            language_name,
+            'max_file_size',
+            language_limit_overrides
+        ),
+        output_max_size: compute_single_limit(
+            language_name,
+            'output_max_size',
+            language_limit_overrides
+        ),
+    };
+}
+
+function get_runtime_from_flakes(runtime_name) {
+    const flake_path = `${config.flake_path}#pistonRuntimeSets.${config.runtime_set}.${runtime_name}`;
+    const metadata_command = `nix eval --json ${flake_path}.metadata`;
+    const metadata = JSON.parse(cp.execSync(metadata_command));
+
+    const this_runtime = new Runtime({
+        ...metadata,
+        ...compute_all_limits(
+            metadata.language,
+            metadata.limitOverrides
+        ),
+        flake_path,
+    });
+
+    return this_runtime
+}
+
 module.exports.Runtime = Runtime;
-module.exports.load_runtime = Runtime.load_runtime;
+module.exports.get_runtime_from_flakes = get_runtime_from_flakes;
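compute_single_limit, moved out of the class but with the same logic, resolves each limit with a fixed precedence: a per-language value in config.limit_overrides wins, then the runtime's own metadata override, then the global config default. A worked example with invented numbers; config here is a stand-in for the real ./config module:

// Sketch only, not part of the commit. Hypothetical config values; the function
// logic matches the one added by this diff.
const config = {
    run_timeout: 3000,
    limit_overrides: { 'my-lang': { run_timeout: 10000 } },
};

function compute_single_limit(language_name, limit_name, language_limit_overrides) {
    return (
        (config.limit_overrides[language_name] &&
            config.limit_overrides[language_name][limit_name]) ||
        (language_limit_overrides && language_limit_overrides[limit_name]) ||
        config[limit_name]
    );
}

console.log(compute_single_limit('my-lang', 'run_timeout', { run_timeout: 5000 })); // 10000: config override wins
console.log(compute_single_limit('other', 'run_timeout', { run_timeout: 5000 }));   // 5000: runtime metadata override
console.log(compute_single_limit('other', 'run_timeout', undefined));               // 3000: global default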