Merge branch 'master' into emojicode

Thomas Hobson 2022-02-24 19:18:08 +13:00 committed by GitHub
commit 3eff371bd4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
3 changed files with 193 additions and 204 deletions

.gitignore (1 addition)

@@ -1,3 +1,4 @@
data/
.piston_env
node_modules
.vscode/

api/src/config.js

@@ -2,16 +2,159 @@ const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');
function parse_overrides(overrides) {
try {
return JSON.parse(overrides);
} catch (e) {
return null;
const options = {
log_level: {
desc: 'Level of data to log',
default: 'INFO',
validators: [
x =>
Object.values(Logger.LogLevels).includes(x) ||
`Log level ${x} does not exist`,
],
},
bind_address: {
desc: 'Address to bind REST API on',
default: `0.0.0.0:${process.env['PORT'] || 2000}`,
validators: [],
},
data_directory: {
desc: 'Absolute path to store all piston related data at',
default: '/piston',
validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
},
runner_uid_min: {
desc: 'Minimum uid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
runner_uid_max: {
desc: 'Maximum uid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
runner_gid_min: {
desc: 'Minimum gid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
runner_gid_max: {
desc: 'Maximum gid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
disable_networking: {
desc: 'Set to true to disable networking',
default: true,
parser: x => x === 'true',
validators: [x => typeof x === 'boolean' || `${x} is not a boolean`],
},
output_max_size: {
desc: 'Max size of each stdio buffer',
default: 1024,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
max_process_count: {
desc: 'Max number of processes per job',
default: 64,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
max_open_files: {
desc: 'Max number of open files per job',
default: 2048,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
max_file_size: {
desc: 'Max file size in bytes for a file',
default: 10000000, //10MB
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
compile_timeout: {
desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
run_timeout: {
desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
compile_memory_limit: {
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
run_memory_limit: {
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
repo_url: {
desc: 'URL of repo index',
default:
'https://github.com/engineer-man/piston/releases/download/pkgs/index',
validators: [],
},
max_concurrent_jobs: {
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
validators: [x => x > 0 || `${x} cannot be negative`],
},
limit_overrides: {
desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size',
default: {},
parser: parse_overrides,
validators: [
x => !!x || `Failed to parse the overrides\n${x}`,
validate_overrides,
],
},
};
Object.freeze(options);
function apply_validators(validators, validator_parameters) {
for (const validator of validators) {
const validation_response = validator(...validator_parameters);
if (validation_response !== true) {
return validation_response;
}
}
return true;
}
function validate_overrides(overrides, options) {
function parse_overrides(overrides_string) {
function get_parsed_json_or_null(overrides) {
try {
return JSON.parse(overrides);
} catch (e) {
return null;
}
}
const overrides = get_parsed_json_or_null(overrides_string);
if (overrides === null) {
return null;
}
const parsed_overrides = {};
for (const language in overrides) {
parsed_overrides[language] = {};
for (const key in overrides[language]) {
if (
![
@@ -25,218 +168,62 @@ function validate_overrides(overrides, options) {
'output_max_size',
].includes(key)
) {
logger.error(`Invalid overridden option: ${key}`);
return false;
return null;
}
const option = options.find(o => o.key === key);
// Find the option for the override
const option = options[key];
const parser = option.parser;
const raw = overrides[language][key];
const value = parser(raw);
const validators = option.validators;
for (const validator of validators) {
const response = validator(value, raw);
if (response !== true) {
logger.error(
`Failed to validate overridden option: ${key}`,
response
);
return false;
}
}
overrides[language][key] = value;
const raw_value = overrides[language][key];
const parsed_value = parser(raw_value);
parsed_overrides[language][key] = parsed_value;
}
}
return parsed_overrides;
}
function validate_overrides(overrides) {
for (const language in overrides) {
for (const key in overrides[language]) {
const value = overrides[language][key];
const option = options[key];
const validators = option.validators;
const validation_response = apply_validators(validators, [
value,
value,
]);
if (validation_response !== true) {
return `In overridden option ${key} for ${language}, ${validation_response}`;
}
}
// Modifies the reference
options[
options.index_of(options.find(o => o.key === 'limit_overrides'))
] = overrides;
}
return true;
}
const options = [
{
key: 'log_level',
desc: 'Level of data to log',
default: 'INFO',
options: Object.values(Logger.LogLevels),
validators: [
x =>
Object.values(Logger.LogLevels).includes(x) ||
`Log level ${x} does not exist`,
],
},
{
key: 'bind_address',
desc: 'Address to bind REST API on',
default: `0.0.0.0:${process.env["PORT"] || 2000}`,
validators: [],
},
{
key: 'data_directory',
desc: 'Absolute path to store all piston related data at',
default: '/piston',
validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
},
{
key: 'runner_uid_min',
desc: 'Minimum uid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'runner_uid_max',
desc: 'Maximum uid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'runner_gid_min',
desc: 'Minimum gid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'runner_gid_max',
desc: 'Maximum gid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'disable_networking',
desc: 'Set to true to disable networking',
default: true,
parser: x => x === 'true',
validators: [x => typeof x === 'boolean' || `${x} is not a boolean`],
},
{
key: 'output_max_size',
desc: 'Max size of each stdio buffer',
default: 1024,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'max_process_count',
desc: 'Max number of processes per job',
default: 64,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'max_open_files',
desc: 'Max number of open files per job',
default: 2048,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'max_file_size',
desc: 'Max file size in bytes for a file',
default: 10000000, //10MB
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_timeout',
desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_timeout',
desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_memory_limit',
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_memory_limit',
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'repo_url',
desc: 'URL of repo index',
default:
'https://github.com/engineer-man/piston/releases/download/pkgs/index',
validators: [],
},
{
key: 'max_concurrent_jobs',
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
validators: [x => x > 0 || `${x} cannot be negative`],
},
{
key: 'limit_overrides',
desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size',
default: {},
parser: parse_overrides,
validators: [
x => !!x || `Invalid JSON format for the overrides\n${x}`,
(overrides, _, options) =>
validate_overrides(overrides, options) ||
`Failed to validate the overrides`,
],
},
];
logger.info(`Loading Configuration from environment`);
let errored = false;
let config = {};
options.forEach(option => {
const env_key = 'PISTON_' + option.key.to_upper_case();
for (const option_name in options) {
const env_key = 'PISTON_' + option_name.to_upper_case();
const option = options[option_name];
const parser = option.parser || (x => x);
const env_val = process.env[env_key];
const parsed_val = parser(env_val);
const value = env_val === undefined ? option.default : parsed_val;
option.validators.for_each(validator => {
let response = null;
if (env_val) response = validator(parsed_val, env_val, options);
else response = validator(value, value, options);
if (response !== true) {
errored = true;
logger.error(
`Config option ${option.key} failed validation:`,
response
);
return;
}
});
config[option.key] = value;
});
if (errored) {
process.exit(1);
const validator_parameters =
env_val === undefined ? [value, value] : [parsed_val, env_val];
const validation_response = apply_validators(
option.validators,
validator_parameters
);
if (validation_response !== true) {
logger.error(
`Config option ${option_name} failed validation:`,
validation_response
);
process.exit(1);
}
config[option_name] = value;
}
logger.info('Configuration successfully loaded');
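
For context (not part of the commit): in the rewritten config.js above, each option name maps to a PISTON_-prefixed environment variable, and limit_overrides is parsed from JSON by parse_overrides and checked by validate_overrides. A minimal sketch of how the new override path might be exercised, assuming config.js is required from the same directory; the 'python' key and the limit values are illustrative, not part of the diff:

// Sketch only: set an override before config.js is first required.
// 'python' is a hypothetical language key; the limits are example values.
process.env['PISTON_LIMIT_OVERRIDES'] = JSON.stringify({
    python: { run_timeout: 5000, max_open_files: 4096 },
});
const config = require('./config'); // path assumed relative to api/src
// If parsing and validation succeed, config.limit_overrides.python.run_timeout === 5000;
// malformed JSON or an unknown override key instead logs an error and exits the process.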


@@ -246,11 +246,12 @@ async function run_non_interactively(files, argv) {
exports.handler = async argv => {
const files = [...(argv.files || []), argv.file].map(file_path => {
const buffer = fs.readFileSync(file_path);
// Checks for the replacement character (U+FFFD) after decoding the buffer as utf8
const encoding =
(buffer
.toString()
.split('')
.some(x => x.charCodeAt(0) >= 128) &&
.some(x => x.charCodeAt(0) === 65533) &&
'base64') ||
'utf8';
return {
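
For context (not part of the commit): the hunk above switches the CLI's binary-file detection from "any char code >= 128" to "contains the replacement character" (char code 65533, U+FFFD), which Node substitutes for byte sequences that are not valid UTF-8; such files are then sent base64-encoded. A standalone sketch of the same check, with a hypothetical file path:

// Sketch only: choose between utf8 and base64 the way the diff above does.
const fs = require('fs');
const buffer = fs.readFileSync('some_file.bin'); // hypothetical path
const has_replacement_char = buffer
    .toString() // defaults to utf8; invalid bytes become U+FFFD
    .split('')
    .some(x => x.charCodeAt(0) === 65533);
const encoding = has_replacement_char ? 'base64' : 'utf8';
console.log(`sending file as ${encoding}`);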