Refactor config.js

This commit is contained in:
Omar Brikaa 2022-02-05 14:31:29 +02:00
commit cdd87ca9a1
1 changed file with 34 additions and 54 deletions

View File

@ -2,144 +2,123 @@ const fss = require('fs');
const Logger = require('logplease'); const Logger = require('logplease');
const logger = Logger.create('config'); const logger = Logger.create('config');
const options = [ const options = {
{ log_level: {
key: 'log_level',
desc: 'Level of data to log', desc: 'Level of data to log',
default: 'INFO', default: 'INFO',
options: Object.values(Logger.LogLevels),
validators: [ validators: [
x => x =>
Object.values(Logger.LogLevels).includes(x) || Object.values(Logger.LogLevels).includes(x) ||
`Log level ${x} does not exist`, `Log level ${x} does not exist`,
], ],
}, },
{ bind_address: {
key: 'bind_address',
desc: 'Address to bind REST API on', desc: 'Address to bind REST API on',
default: `0.0.0.0:${process.env['PORT'] || 2000}`, default: `0.0.0.0:${process.env['PORT'] || 2000}`,
validators: [], validators: [],
}, },
{ data_directory: {
key: 'data_directory',
desc: 'Absolute path to store all piston related data at', desc: 'Absolute path to store all piston related data at',
default: '/piston', default: '/piston',
validators: [ validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`, x => fss.exists_sync(x) || `Directory ${x} does not exist`,
], ],
}, },
{ runner_uid_min: {
key: 'runner_uid_min',
desc: 'Minimum uid to use for runner', desc: 'Minimum uid to use for runner',
default: 1001, default: 1001,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ runner_uid_max: {
key: 'runner_uid_max',
desc: 'Maximum uid to use for runner', desc: 'Maximum uid to use for runner',
default: 1500, default: 1500,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ runner_gid_min: {
key: 'runner_gid_min',
desc: 'Minimum gid to use for runner', desc: 'Minimum gid to use for runner',
default: 1001, default: 1001,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ runner_gid_max: {
key: 'runner_gid_max',
desc: 'Maximum gid to use for runner', desc: 'Maximum gid to use for runner',
default: 1500, default: 1500,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ disable_networking: {
key: 'disable_networking',
desc: 'Set to true to disable networking', desc: 'Set to true to disable networking',
default: true, default: true,
parser: x => x === 'true', parser: x => x === 'true',
validators: [x => typeof x === 'boolean' || `${x} is not a boolean`], validators: [x => typeof x === 'boolean' || `${x} is not a boolean`],
}, },
{ output_max_size: {
key: 'output_max_size',
desc: 'Max size of each stdio buffer', desc: 'Max size of each stdio buffer',
default: 1024, default: 1024,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ max_process_count: {
key: 'max_process_count',
desc: 'Max number of processes per job', desc: 'Max number of processes per job',
default: 64, default: 64,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ max_open_files: {
key: 'max_open_files',
desc: 'Max number of open files per job', desc: 'Max number of open files per job',
default: 2048, default: 2048,
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ max_file_size: {
key: 'max_file_size',
desc: 'Max file size in bytes for a file', desc: 'Max file size in bytes for a file',
default: 10000000, //10MB default: 10000000, //10MB
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ compile_timeout: {
key: 'compile_timeout',
desc: 'Max time allowed for compile stage in milliseconds', desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds default: 10000, // 10 seconds
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ run_timeout: {
key: 'run_timeout',
desc: 'Max time allowed for run stage in milliseconds', desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds default: 3000, // 3 seconds
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ compile_memory_limit: {
key: 'compile_memory_limit',
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)', desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit default: -1, // no limit
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ run_memory_limit: {
key: 'run_memory_limit',
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)', desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit default: -1, // no limit
parser: parse_int, parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
}, },
{ flake_path: {
key: 'flake_path',
desc: 'Path to nix flake defining runtimes to install', desc: 'Path to nix flake defining runtimes to install',
default: 'github:engineer-man/piston?directory=packages', default: 'github:engineer-man/piston?directory=packages',
validators: [], validators: [],
}, },
{ runtime_set: {
key: 'runtime_set',
desc: 'Key on the flake specified by flake_path to access runtimes from', desc: 'Key on the flake specified by flake_path to access runtimes from',
default: 'all', default: 'all',
validators: [], validators: [],
}, },
{ max_concurrent_jobs: {
key: 'max_concurrent_jobs',
desc: 'Maximum number of concurrent jobs to run at one time', desc: 'Maximum number of concurrent jobs to run at one time',
default: 64, default: 64,
parser: parse_int, parser: parse_int,
validators: [x => x > 0 || `${x} cannot be negative`], validators: [x => x > 0 || `${x} cannot be negative`],
}, },
{ limit_overrides: {
key: 'limit_overrides',
desc: 'Per-language exceptions in JSON format for each of:\ desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\ max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size', run_memory_limit, compile_timeout, run_timeout, output_max_size',
@ -150,7 +129,7 @@ const options = [
validate_overrides, validate_overrides,
], ],
}, },
]; };
Object.freeze(options); Object.freeze(options);
@ -174,7 +153,7 @@ function parse_overrides(overrides_string) {
} }
const overrides = get_parsed_json_or_null(overrides_string); const overrides = get_parsed_json_or_null(overrides_string);
if (typeof overrides === null) { if (overrides === null) {
return null; return null;
} }
const parsed_overrides = {}; const parsed_overrides = {};
@ -196,11 +175,11 @@ function parse_overrides(overrides_string) {
return null; return null;
} }
// Find the option for the override // Find the option for the override
const option = options.find(o => o.key === key); const option = options[key];
const parser = option.parser; const parser = option.parser;
const raw = overrides[language][key]; const raw_value = overrides[language][key];
const value = parser(raw); const parsed_value = parser(raw_value);
parsed_overrides[language][key] = value; parsed_overrides[language][key] = parsed_value;
} }
} }
return parsed_overrides; return parsed_overrides;
@ -210,7 +189,7 @@ function validate_overrides(overrides) {
for (const language in overrides) { for (const language in overrides) {
for (const key in overrides[language]) { for (const key in overrides[language]) {
const value = overrides[language][key]; const value = overrides[language][key];
const option = options.find(o => o.key === key); const option = options[key];
const validators = option.validators; const validators = option.validators;
const validation_response = apply_validators(validators, [ const validation_response = apply_validators(validators, [
value, value,
@ -228,8 +207,9 @@ logger.info(`Loading Configuration from environment`);
let config = {}; let config = {};
options.forEach(option => { for (const option_name in options) {
const env_key = 'PISTON_' + option.key.to_upper_case(); const env_key = 'PISTON_' + option_name.to_upper_case();
const option = options[option_name];
const parser = option.parser || (x => x); const parser = option.parser || (x => x);
const env_val = process.env[env_key]; const env_val = process.env[env_key];
const parsed_val = parser(env_val); const parsed_val = parser(env_val);
@ -242,13 +222,13 @@ options.forEach(option => {
); );
if (validation_response !== true) { if (validation_response !== true) {
logger.error( logger.error(
`Config option ${option.key} failed validation:`, `Config option ${option_name} failed validation:`,
validation_response validation_response
); );
process.exit(1); process.exit(1);
} }
config[option.key] = value; config[option_name] = value;
}); }
logger.info('Configuration successfully loaded'); logger.info('Configuration successfully loaded');