Compare commits

...

12 Commits

Author SHA1 Message Date
Thomas Hobson 286fb57415
Merge pull request #439 from Brikaa/nix-refactor
Include master new features in Nix Piston
2022-02-07 15:35:46 +13:00
Omar Brikaa 7de631383f Add master features in V3 API, refactor 2022-02-05 15:30:44 +02:00
Omar Brikaa cdd87ca9a1 Refactor config.js 2022-02-05 14:31:29 +02:00
Omar Brikaa 3d61d10373 Merge master 2022-02-05 14:17:46 +02:00
Omar Brikaa 9760f8fcf9 config.js: index by key, bug fix and more refactoring 2022-01-30 13:35:16 +02:00
Thomas Hobson a965df2eb9
Merge pull request #427 from Brikaa/refactor-config
Refactor config.js
2022-01-29 14:40:51 +13:00
Omar Brikaa 416ade1b76 Refactor config.js 2022-01-28 17:58:00 +02:00
Omar Brikaa fe7f66a754 Add .vscode to gitignore 2022-01-28 11:43:56 +02:00
Thomas Hobson 9057e3c8d1
Merge pull request #419 from ShaneLee/bug/builder-directory
Make builder script run relative to directory it was called from
2022-01-13 16:42:33 +13:00
Thomas Hobson f4b366978d
Merge pull request #412 from Hydrazer/master
Added MATL
2022-01-12 14:13:25 +13:00
Shane f6fa9cb968 Make builder script run relative to directory it was called from 2022-01-02 09:39:24 +00:00
Hydrazer 021ec1aa94 pkg(MATL-22.5.0): added MATL 22.5.0 2021-12-30 11:12:20 -07:00
11 changed files with 439 additions and 405 deletions

1
.gitignore vendored
View File

@@ -2,3 +2,4 @@ data/
.piston_env
node_modules
result
.vscode/

View File

@@ -5,7 +5,6 @@ const events = require('events');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');
const SIGNALS = [
'SIGABRT',
@@ -81,49 +80,9 @@ function get_job(body) {
}
}
if (compile_memory_limit) {
if (typeof compile_memory_limit !== 'number') {
return reject({
message: 'if specified, compile_memory_limit must be a number',
});
}
if (
config.compile_memory_limit >= 0 &&
(compile_memory_limit > config.compile_memory_limit ||
compile_memory_limit < 0)
) {
return reject({
message:
'compile_memory_limit cannot exceed the configured limit of ' +
config.compile_memory_limit,
});
}
}
if (run_memory_limit) {
if (typeof run_memory_limit !== 'number') {
return reject({
message: 'if specified, run_memory_limit must be a number',
});
}
if (
config.run_memory_limit >= 0 &&
(run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
) {
return reject({
message:
'run_memory_limit cannot exceed the configured limit of ' +
config.run_memory_limit,
});
}
}
const rt = runtime.find(rt => [
...rt.aliases,
rt.language
].includes(rt.language))
const rt = runtime.find(rt =>
[...rt.aliases, rt.language].includes(rt.language)
);
if (rt === undefined) {
return reject({

View File

@@ -3,15 +3,52 @@ const router = express.Router();
const events = require('events');
const config = require('../config');
const runtime = require('../runtime');
const { Job } = require('../job');
const logger = require('logplease').create('api/v3');
const SIGNALS = ["SIGABRT","SIGALRM","SIGBUS","SIGCHLD","SIGCLD","SIGCONT","SIGEMT","SIGFPE","SIGHUP","SIGILL","SIGINFO","SIGINT","SIGIO","SIGIOT","SIGKILL","SIGLOST","SIGPIPE","SIGPOLL","SIGPROF","SIGPWR","SIGQUIT","SIGSEGV","SIGSTKFLT","SIGSTOP","SIGTSTP","SIGSYS","SIGTERM","SIGTRAP","SIGTTIN","SIGTTOU","SIGUNUSED","SIGURG","SIGUSR1","SIGUSR2","SIGVTALRM","SIGXCPU","SIGXFSZ","SIGWINCH"]
const SIGNALS = [
'SIGABRT',
'SIGALRM',
'SIGBUS',
'SIGCHLD',
'SIGCLD',
'SIGCONT',
'SIGEMT',
'SIGFPE',
'SIGHUP',
'SIGILL',
'SIGINFO',
'SIGINT',
'SIGIO',
'SIGIOT',
'SIGKILL',
'SIGLOST',
'SIGPIPE',
'SIGPOLL',
'SIGPROF',
'SIGPWR',
'SIGQUIT',
'SIGSEGV',
'SIGSTKFLT',
'SIGSTOP',
'SIGTSTP',
'SIGSYS',
'SIGTERM',
'SIGTRAP',
'SIGTTIN',
'SIGTTOU',
'SIGUNUSED',
'SIGURG',
'SIGUSR1',
'SIGUSR2',
'SIGVTALRM',
'SIGXCPU',
'SIGXFSZ',
'SIGWINCH',
];
// ref: https://man7.org/linux/man-pages/man7/signal.7.html
function get_job(body){
function get_job(body) {
const {
runtime_id,
args,
@@ -20,93 +57,96 @@ function get_job(body){
compile_memory_limit,
run_memory_limit,
run_timeout,
compile_timeout
compile_timeout,
} = body;
return new Promise((resolve, reject) => {
if (typeof runtime_id !== 'number') {
return reject({
message: 'runtime_id is required as a number'
message: 'runtime_id is required as a number',
});
}
if (!Array.isArray(files)) {
if (!files || !Array.isArray(files)) {
return reject({
message: 'files is required as an array',
});
}
for (const [i, file] of files.entries()) {
if (typeof file.content !== 'string') {
return reject({
message: `files[${i}].content is required as a string`,
});
}
}
if (compile_memory_limit) {
if (typeof compile_memory_limit !== 'number') {
return reject({
message: 'if specified, compile_memory_limit must be a number',
});
}
if (
config.compile_memory_limit >= 0 &&
(compile_memory_limit > config.compile_memory_limit ||
compile_memory_limit < 0)
) {
return reject({
message:
'compile_memory_limit cannot exceed the configured limit of ' +
config.compile_memory_limit,
});
}
}
if (run_memory_limit) {
if (typeof run_memory_limit !== 'number') {
return reject({
message: 'if specified, run_memory_limit must be a number',
});
}
if (
config.run_memory_limit >= 0 &&
(run_memory_limit > config.run_memory_limit || run_memory_limit < 0)
) {
return reject({
message:
'run_memory_limit cannot exceed the configured limit of ' +
config.run_memory_limit,
});
}
}
const rt = runtime[runtime_id];
if (rt === undefined) {
return reject({
message: `Runtime #${runtime_id} is unknown`,
});
}
resolve(new Job({
if (
rt.language !== 'file' &&
!files.some(file => !file.encoding || file.encoding === 'utf8')
) {
return reject({
message: 'files must include at least one utf8 encoded file',
});
}
if (files.some(file => typeof file.content !== 'string')) {
return reject({
message: 'file.content is required as a string',
});
}
for (const constraint of ['memory_limit', 'timeout']) {
for (const type of ['compile', 'run']) {
const constraint_name = `${type}_${constraint}`;
const constraint_value = body[constraint_name];
const configured_limit = rt[`${constraint}s`][type];
if (!constraint_value) {
continue;
}
if (typeof constraint_value !== 'number') {
return reject({
message: `If specified, ${constraint_name} must be a number`,
});
}
if (configured_limit <= 0) {
continue;
}
if (constraint_value > configured_limit) {
return reject({
message: `${constraint_name} cannot exceed the configured limit of ${configured_limit}`,
});
}
if (constraint_value < 0) {
return reject({
message: `${constraint_name} must be non-negative`,
});
}
}
}
const job_compile_timeout = compile_timeout || rt.timeouts.compile;
const job_run_timeout = run_timeout || rt.timeouts.run;
const job_compile_memory_limit =
compile_memory_limit || rt.memory_limits.compile;
const job_run_memory_limit = run_memory_limit || rt.memory_limits.run;
resolve(
new Job({
runtime: rt,
args: args || [],
stdin: stdin || "",
stdin: stdin || '',
files,
timeouts: {
run: run_timeout || 3000,
compile: compile_timeout || 10000,
run: job_run_timeout,
compile: job_compile_timeout,
},
memory_limits: {
run: run_memory_limit || config.run_memory_limit,
compile: compile_memory_limit || config.compile_memory_limit,
}
}));
run: job_run_memory_limit,
compile: job_compile_memory_limit,
},
})
);
});
}
router.use((req, res, next) => {
@@ -124,89 +164,106 @@ router.use((req, res, next) => {
});
router.ws('/connect', async (ws, req) => {
let job = null;
let eventBus = new events.EventEmitter();
eventBus.on("stdout", (data) => ws.send(JSON.stringify({type: "data", stream: "stdout", data: data.toString()})))
eventBus.on("stderr", (data) => ws.send(JSON.stringify({type: "data", stream: "stderr", data: data.toString()})))
eventBus.on("stage", (stage)=> ws.send(JSON.stringify({type: "stage", stage})))
eventBus.on("exit", (stage, status) => ws.send(JSON.stringify({type: "exit", stage, ...status})))
eventBus.on('stdout', data =>
ws.send(
JSON.stringify({
type: 'data',
stream: 'stdout',
data: data.toString(),
})
)
);
eventBus.on('stderr', data =>
ws.send(
JSON.stringify({
type: 'data',
stream: 'stderr',
data: data.toString(),
})
)
);
eventBus.on('stage', stage =>
ws.send(JSON.stringify({ type: 'stage', stage }))
);
eventBus.on('exit', (stage, status) =>
ws.send(JSON.stringify({ type: 'exit', stage, ...status }))
);
ws.on("message", async (data) => {
try{
ws.on('message', async data => {
try {
const msg = JSON.parse(data);
switch(msg.type){
case "init":
if(job === null){
switch (msg.type) {
case 'init':
if (job === null) {
job = await get_job(msg);
await job.prime();
ws.send(JSON.stringify({
type: "runtime",
ws.send(
JSON.stringify({
type: 'runtime',
language: job.runtime.language,
version: job.runtime.version.raw
}))
version: job.runtime.version.raw,
})
);
await job.execute_interactive(eventBus);
ws.close(4999, "Job Completed");
}else{
ws.close(4000, "Already Initialized");
ws.close(4999, 'Job Completed');
} else {
ws.close(4000, 'Already Initialized');
}
break;
case "data":
if(job !== null){
if(msg.stream === "stdin"){
eventBus.emit("stdin", msg.data)
}else{
ws.close(4004, "Can only write to stdin")
case 'data':
if (job !== null) {
if (msg.stream === 'stdin') {
eventBus.emit('stdin', msg.data);
} else {
ws.close(4004, 'Can only write to stdin');
}
}else{
ws.close(4003, "Not yet initialized")
} else {
ws.close(4003, 'Not yet initialized');
}
break;
case "signal":
if(job !== null){
if(SIGNALS.includes(msg.signal)){
eventBus.emit("signal", msg.signal)
}else{
ws.close(4005, "Invalid signal")
case 'signal':
if (job !== null) {
if (SIGNALS.includes(msg.signal)) {
eventBus.emit('signal', msg.signal);
} else {
ws.close(4005, 'Invalid signal');
}
}else{
ws.close(4003, "Not yet initialized")
} else {
ws.close(4003, 'Not yet initialized');
}
break;
}
}catch(error){
ws.send(JSON.stringify({type: "error", message: error.message}))
ws.close(4002, "Notified Error")
} catch (error) {
ws.send(JSON.stringify({ type: 'error', message: error.message }));
ws.close(4002, 'Notified Error');
// ws.close message is limited to 123 characters, so we notify over WS then close.
}
})
});
ws.on("close", async ()=>{
if(job !== null){
await job.cleanup()
ws.on('close', async () => {
if (job !== null) {
await job.cleanup();
}
})
});
setTimeout(()=>{
setTimeout(() => {
//Terminate the socket after 1 second, if not initialized.
if(job === null)
ws.close(4001, "Initialization Timeout");
}, 1000)
})
if (job === null) ws.close(4001, 'Initialization Timeout');
}, 1000);
});
router.post('/execute', async (req, res) => {
try{
try {
const job = await get_job(req.body);
await job.prime();
const result = await job.execute();
@@ -214,7 +271,7 @@ router.post('/execute', async (req, res) => {
await job.cleanup();
return res.status(200).send(result);
}catch(error){
} catch (error) {
return res.status(400).json(error);
}
});
@@ -226,7 +283,7 @@ router.get('/runtimes', (req, res) => {
version: rt.version.raw,
aliases: rt.aliases,
runtime: rt.runtime,
id: rt.id
id: rt.id,
};
});

View File

@@ -2,16 +2,163 @@ const fss = require('fs');
const Logger = require('logplease');
const logger = Logger.create('config');
function parse_overrides(overrides) {
const options = {
log_level: {
desc: 'Level of data to log',
default: 'INFO',
validators: [
x =>
Object.values(Logger.LogLevels).includes(x) ||
`Log level ${x} does not exist`,
],
},
bind_address: {
desc: 'Address to bind REST API on',
default: `0.0.0.0:${process.env['PORT'] || 2000}`,
validators: [],
},
data_directory: {
desc: 'Absolute path to store all piston related data at',
default: '/piston',
validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
},
runner_uid_min: {
desc: 'Minimum uid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
runner_uid_max: {
desc: 'Maximum uid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
runner_gid_min: {
desc: 'Minimum gid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
runner_gid_max: {
desc: 'Maximum gid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
disable_networking: {
desc: 'Set to true to disable networking',
default: true,
parser: x => x === 'true',
validators: [x => typeof x === 'boolean' || `${x} is not a boolean`],
},
output_max_size: {
desc: 'Max size of each stdio buffer',
default: 1024,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
max_process_count: {
desc: 'Max number of processes per job',
default: 64,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
max_open_files: {
desc: 'Max number of open files per job',
default: 2048,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
max_file_size: {
desc: 'Max file size in bytes for a file',
default: 10000000, //10MB
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
compile_timeout: {
desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
run_timeout: {
desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
compile_memory_limit: {
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
run_memory_limit: {
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
flake_path: {
desc: 'Path to nix flake defining runtimes to install',
default: 'github:engineer-man/piston?directory=packages',
validators: [],
},
runtime_set: {
desc: 'Key on the flake specified by flake_path to access runtimes from',
default: 'all',
validators: [],
},
max_concurrent_jobs: {
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
validators: [x => x > 0 || `${x} cannot be negative`],
},
limit_overrides: {
desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size',
default: {},
parser: parse_overrides,
validators: [
x => !!x || `Failed to parse the overrides\n${x}`,
validate_overrides,
],
},
};
Object.freeze(options);
function apply_validators(validators, validator_parameters) {
for (const validator of validators) {
const validation_response = validator(...validator_parameters);
if (validation_response !== true) {
return validation_response;
}
}
return true;
}
function parse_overrides(overrides_string) {
function get_parsed_json_or_null(overrides) {
try {
return JSON.parse(overrides);
} catch (e) {
return null;
}
}
}
function validate_overrides(overrides, options) {
const overrides = get_parsed_json_or_null(overrides_string);
if (overrides === null) {
return null;
}
const parsed_overrides = {};
for (const language in overrides) {
parsed_overrides[language] = {};
for (const key in overrides[language]) {
if (
![
@@ -25,223 +172,62 @@ function validate_overrides(overrides, options) {
'output_max_size',
].includes(key)
) {
logger.error(`Invalid overridden option: ${key}`);
return false;
return null;
}
const option = options.find(o => o.key === key);
// Find the option for the override
const option = options[key];
const parser = option.parser;
const raw = overrides[language][key];
const value = parser(raw);
const raw_value = overrides[language][key];
const parsed_value = parser(raw_value);
parsed_overrides[language][key] = parsed_value;
}
}
return parsed_overrides;
}
function validate_overrides(overrides) {
for (const language in overrides) {
for (const key in overrides[language]) {
const value = overrides[language][key];
const option = options[key];
const validators = option.validators;
for (const validator of validators) {
const response = validator(value, raw);
if (response !== true) {
logger.error(
`Failed to validate overridden option: ${key}`,
response
);
return false;
const validation_response = apply_validators(validators, [
value,
value,
]);
if (validation_response !== true) {
return `In overridden option ${key} for ${language}, ${validation_response}`;
}
}
overrides[language][key] = value;
}
// Modifies the reference
options[
options.index_of(options.find(o => o.key === 'limit_overrides'))
] = overrides;
}
return true;
}
const options = [
{
key: 'log_level',
desc: 'Level of data to log',
default: 'INFO',
options: Object.values(Logger.LogLevels),
validators: [
x =>
Object.values(Logger.LogLevels).includes(x) ||
`Log level ${x} does not exist`,
],
},
{
key: 'bind_address',
desc: 'Address to bind REST API on',
default: `0.0.0.0:${process.env["PORT"] || 2000}`,
validators: [],
},
{
key: 'data_directory',
desc: 'Absolute path to store all piston related data at',
default: '/piston',
validators: [
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
},
{
key: 'runner_uid_min',
desc: 'Minimum uid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'runner_uid_max',
desc: 'Maximum uid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'runner_gid_min',
desc: 'Minimum gid to use for runner',
default: 1001,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'runner_gid_max',
desc: 'Maximum gid to use for runner',
default: 1500,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'disable_networking',
desc: 'Set to true to disable networking',
default: true,
parser: x => x === 'true',
validators: [x => typeof x === 'boolean' || `${x} is not a boolean`],
},
{
key: 'output_max_size',
desc: 'Max size of each stdio buffer',
default: 1024,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'max_process_count',
desc: 'Max number of processes per job',
default: 64,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'max_open_files',
desc: 'Max number of open files per job',
default: 2048,
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'max_file_size',
desc: 'Max file size in bytes for a file',
default: 10000000, //10MB
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_timeout',
desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_timeout',
desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'compile_memory_limit',
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'run_memory_limit',
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit
parser: parse_int,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`],
},
{
key: 'flake_path',
desc: 'Path to nix flake defining runtimes to install',
default: 'github:engineer-man/piston?directory=packages',
validators: [],
},
{
key: 'runtime_set',
desc: 'Key on the flake specified by flake_path to access runtimes from',
default: 'all',
validators: []
},
{
key: 'max_concurrent_jobs',
desc: 'Maximum number of concurrent jobs to run at one time',
default: 64,
parser: parse_int,
validators: [x => x > 0 || `${x} cannot be negative`],
},
{
key: 'limit_overrides',
desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size',
default: {},
parser: parse_overrides,
validators: [
x => !!x || `Invalid JSON format for the overrides\n${x}`,
(overrides, _, options) =>
validate_overrides(overrides, options) ||
`Failed to validate the overrides`,
],
},
];
logger.info(`Loading Configuration from environment`);
let errored = false;
let config = {};
options.forEach(option => {
const env_key = 'PISTON_' + option.key.to_upper_case();
for (const option_name in options) {
const env_key = 'PISTON_' + option_name.to_upper_case();
const option = options[option_name];
const parser = option.parser || (x => x);
const env_val = process.env[env_key];
const parsed_val = parser(env_val);
const value = env_val === undefined ? option.default : parsed_val;
option.validators.for_each(validator => {
let response = null;
if (env_val) response = validator(parsed_val, env_val, options);
else response = validator(value, value, options);
if (response !== true) {
errored = true;
logger.error(
`Config option ${option.key} failed validation:`,
response
const validator_parameters =
env_val === undefined ? [value, value] : [parsed_val, env_val];
const validation_response = apply_validators(
option.validators,
validator_parameters
);
if (validation_response !== true) {
logger.error(
`Config option ${option_name} failed validation:`,
validation_response
);
return;
}
});
config[option.key] = value;
});
if (errored) {
process.exit(1);
}
config[option_name] = value;
}
logger.info('Configuration successfully loaded');

View File

@@ -1,9 +1,6 @@
const logger = require('logplease').create('runtime');
const cp = require('child_process');
const config = require('./config');
const globals = require('./globals');
const fss = require('fs');
const path = require('path');
const runtimes = [];

9
packages/MATL/22.5.0/build.sh vendored Normal file
View File

@@ -0,0 +1,9 @@
#!/usr/bin/env bash

# Build script for the MATL 22.5.0 package.
# MATL runs on top of Octave, so build Octave first as a dependency.
source ../../octave/6.2.0/build.sh

# Fetch the MATL 22.5.0 release tarball (-L follows GitHub's redirect to the
# actual archive). The archive is gzip-compressed, so name it .tar.gz —
# the previous .tar.xz name was misleading (tar only extracted it because
# GNU tar auto-detects the compression format regardless of extension).
curl -L "https://github.com/lmendo/MATL/archive/refs/tags/22.5.0.tar.gz" -o MATL.tar.gz

# Unpack into the current directory, dropping the top-level
# "MATL-22.5.0/" folder from the archive, then clean up the tarball.
tar xf MATL.tar.gz --strip-components=1
rm MATL.tar.gz

5
packages/MATL/22.5.0/environment vendored Normal file
View File

@@ -0,0 +1,5 @@
#!/usr/bin/env bash

# Environment for the MATL runtime.
# Root of the MATL install; the run script passes this to octave via -p.
export MATL_PATH="$PWD"
# Expose MATL's bundled binaries on PATH.
export PATH="$PWD/bin:$PATH"

5
packages/MATL/22.5.0/metadata.json vendored Normal file
View File

@@ -0,0 +1,5 @@
{
"language": "matl",
"version": "22.5.0",
"aliases": []
}

13
packages/MATL/22.5.0/run vendored Normal file
View File

@@ -0,0 +1,13 @@
#!/usr/bin/env bash

# Run script for MATL: execute the given source file under Octave.
# First argument is the MATL source file; everything after it is fed
# to the program as stdin, one argument per line.
file="$1"
shift

# Join the remaining arguments into newline-separated program input.
program_input=$(printf "%s\n" "$@")

# Launch Octave headless (-W), with the MATL install dir on its path (-p),
# and have it invoke the matl interpreter on the source file.
echo "$program_input" | octave -W -p "$MATL_PATH" --eval "matl -of '$file'"

1
packages/MATL/22.5.0/test.matl vendored Normal file
View File

@@ -0,0 +1 @@
'OK'

View File

@@ -350,6 +350,7 @@ Content-Type: application/json
`llvm_ir`,
`lolcode`,
`lua`,
`matl`,
`nasm`,
`nasm64`,
`nim`,