api: code cleanup

parent a91f3af1fb
commit af1741becc

@@ -1,7 +1,6 @@
 const globals = require('./globals');
 const logger = require('logplease').create('cache');
 const fs = require('fs/promises'),
-    fss = require('fs'),
     path = require('path');
 
 const cache = new Map();
@@ -13,43 +12,54 @@ module.exports = {
     },
     async get(key, callback, ttl=globals.cache_ttl){
         logger.debug('get:', key);
 
         if(module.exports.has(key)){
             logger.debug('hit:',key);
             return cache.get(key).data;
         }
 
         logger.debug('miss:', key);
         var data = await callback();
         cache.set(key, {data, expiry: Date.now() + ttl});
 
         return data;
     },
     async flush(cache_dir){
         logger.info('Flushing cache');
-        cache.forEach((v,k)=>{
-            var file_path = path.join(cache_dir, k);
-            if(v.expiry < Date.now()){
-                //remove from cache
-                cache.delete(k);
-                fs.stat(file_path, (err, stats)=>{
-                    if(err) return; //ignore - probably hasn't been flushed yet
+
+        async function flush_single(value, key){
+            const file_path = path.join(cache_dir, key);
+
+            if(value.expiry < Date.now()){
+                cache.delete(key);
+                try {
+                    const stats = await fs.stat(file_path);
                     if(stats.is_file())
-                        fs.rm(file_path, (err)=>{
-                            if(err) logger.warn(`Couldn't clean up on-disk cache file ${k}`);
-                        });
-                });
+                        await fs.rm(file_path);
+                }catch{
+                    // Ignore, file hasn't been flushed yet
+                }
             }else{
-                //flush to disk
-                fs.write_file(file_path, JSON.stringify(v),()=>{});
+                await fs.write_file(file_path, JSON.stringify(value));
             }
-        });
+        }
+
+        return Promise.all(
+            Array.from(cache).map(flush_single)
+        );
     },
     async load(cache_dir){
-        return fs.readdir(cache_dir)
-            .then(files => Promise.all(files.map(
-                async file => {
-                    cache.set(file, JSON.parse(fss.read_file_sync(path.join(cache_dir,file)).toString()));
-                }
-            )));
+        const files = await fs.readdir(cache_dir);
+
+        async function load_single(file_name){
+            const file_path = path.join(cache_dir,file_name);
+            const file_content = await fs.read_file(file_path).toString();
+            cache.set(file_name, JSON.parse(file_content));
+        }
+
+        return Promise.all(files.map(load_single));
     }
 
 };
@@ -122,61 +122,70 @@ const options = [
     }
 ];
 
-const default_config = [
-    ...header.split('\n'),
-    ...options.map(option => `
-${[
-    ...option.desc.split('\n'),
-    option.options?('Options: ' + option.options.join(', ')):''
-].filter(x=>x.length>0).map(x=>`# ${x}`).join('\n')}
-${option.key}: ${option.default}
-`)].join('\n');
+function make_default_config(){
+    let content = header.split('\n');
+
+    options.forEach(option => {
+        content.concat(option.desc.split('\n').map(x=>`# ${x}`));
+
+        if(option.options)
+            content.append('# Options: ' + option.options.join(', '));
+
+        content.append(`${option.key}: ${option.default}`);
+
+        content.append(''); // New line between
+    });
+
+    return content.join('\n');
+}
 
 logger.info(`Loading Configuration from ${argv.config}`);
-!!argv['make-config'] && logger.debug('Make configuration flag is set');
+
+if(argv['make-config'])
+    logger.debug('Make configuration flag is set');
 
 if(!!argv['make-config'] && !fss.exists_sync(argv.config)){
     logger.info('Writing default configuration...');
     try {
-        fss.write_file_sync(argv.config, default_config);
+        fss.write_file_sync(argv.config, make_default_config());
    } catch (err) {
        logger.error('Error writing default configuration:', err.message);
        process.exit(1);
    }
 
 
 }
 
 var config = {};
 
 logger.debug('Reading config file');
 
 try{
     const cfg_content = fss.read_file_sync(argv.config);
-    try{
-        config = yaml.load(cfg_content);
-    }catch(err){
-        logger.error('Error parsing configuration file:', err.message);
-        process.exit(1);
-    }
-
+    config = yaml.load(cfg_content);
 }catch(err){
-    logger.error('Error reading configuration from disk:', err.message);
+    logger.error('Error reading configuration file:', err.message);
     process.exit(1);
 }
 
 logger.debug('Validating config entries');
 
 var errored=false;
-options.forEach(opt => {
-    logger.debug('Checking key',opt.key);
-    var cfg_val = config[opt.key];
+
+options.forEach(option => {
+    logger.debug('Checking option', option.key);
+
+    var cfg_val = config[option.key];
 
     if(cfg_val == undefined){
         errored = true;
-        logger.error(`Config key ${opt.key} does not exist on currently loaded configuration`);
+        logger.error(`Config key ${option.key} does not exist on currently loaded configuration`);
         return;
     }
-    opt.validators.forEach(validator => {
+
+    option.validators.forEach(validator => {
         var response = validator(cfg_val);
         if(response !== true){
             errored = true;
-            logger.error(`Config key ${opt.key} failed validation:`, response);
+            logger.error(`Config option ${option.key} failed validation:`, response);
             return;
         }
     });
 
@@ -6,7 +6,6 @@ const config = require('../config');
 const globals = require('../globals');
 const fs = require('fs/promises');
 
-
 const job_states = {
     READY: Symbol('Ready to be primed'),
     PRIMED: Symbol('Primed and ready for execution'),
@@ -17,7 +16,7 @@ var uid=0;
 var gid=0;
 
 class Job {
-    constructor(runtime, files, args, stdin, timeouts, main){
+    constructor({runtime, files, args, stdin, timeouts, main}){
         this.uuid = uuidv4();
         this.runtime = runtime;
         this.files = files;
@@ -71,6 +70,7 @@ class Job {
     async safe_call(file, args, timeout){
         return await new Promise((resolve, reject) => {
             const unshare = config.enable_unshare ? ['unshare','-n','-r'] : [];
 
             const prlimit = [
                 'prlimit',
                 '--nproc=' + config.max_process_count,
@@ -82,15 +82,17 @@ class Job {
                 ...unshare,
                 'bash',file, ...args
             ];
 
             var stdout = '';
             var stderr = '';
 
             const proc = cp.spawn(proc_call[0], proc_call.splice(1) ,{
                 env: this.runtime.env_vars,
                 stdio: 'pipe',
                 cwd: this.dir,
                 uid: this.uid,
                 gid: this.gid,
-                detached: true //dont kill the main process when we kill the group
+                detached: true //give this process its own process group
             });
 
             proc.stdin.write(this.stdin);
@@ -109,7 +111,7 @@ class Job {
             try{
                 process.kill(-proc.pid, 'SIGKILL');
             }catch{
-                // Process will be dead alread, so nothing to kill.
+                // Process will be dead already, so nothing to kill.
             }
         }
 
@@ -128,15 +130,20 @@ class Job {
     }
 
     async execute(){
-        if(this.state != job_states.PRIMED) throw new Error('Job must be in primed state, current state: ' + this.state.toString());
+        if(this.state != job_states.PRIMED)
+            throw new Error('Job must be in primed state, current state: ' + this.state.toString());
 
         logger.info(`Executing job uuid=${this.uuid} uid=${this.uid} gid=${this.gid} runtime=${this.runtime.toString()}`);
 
         logger.debug('Compiling');
 
         var compile = undefined;
         if(this.runtime.compiled)
             compile = await this.safe_call(
                 path.join(this.runtime.pkgdir, 'compile'),
                 this.files.map(x=>x.name),
                 this.timeouts.compile);
 
 
         logger.debug('Running');
 
@@ -11,7 +11,7 @@ module.exports = {
         .isString(), // eslint-disable-line snakecasejs/snakecasejs
     body('version')
         .isString(), // eslint-disable-line snakecasejs/snakecasejs
         // isSemVer requires it to be a version, not a selector
     body('files')
         .isArray(), // eslint-disable-line snakecasejs/snakecasejs
     body('files.*.name')
|
@ -39,15 +39,23 @@ module.exports = {
|
||||||
const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version);
|
const runtime = get_latest_runtime_matching_language_version(req.body.language, req.body.version);
|
||||||
if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400);
|
if(runtime == undefined) return res.json_error(`${req.body.language}-${req.body.version} runtime is unknown`, 400);
|
||||||
|
|
||||||
const job = new Job(runtime, req.body.files, req.body.args, req.body.stdin, {run: req.body.run_timeout, compile: req.body.compile_timeout}, req.body.main);
|
const job = new Job({
|
||||||
|
runtime,
|
||||||
|
files: req.body.files,
|
||||||
|
args: req.body.args,
|
||||||
|
stdin: req.body.stdin,
|
||||||
|
timeouts: {
|
||||||
|
run: req.body.run_timeout,
|
||||||
|
compile: req.body.compile_timeout
|
||||||
|
},
|
||||||
|
main: req.body.main
|
||||||
|
});
|
||||||
|
|
||||||
await job.prime();
|
await job.prime();
|
||||||
|
|
||||||
const result = await job.execute();
|
const result = await job.execute();
|
||||||
res.json_success(result);
|
res.json_success(result);
|
||||||
|
|
||||||
await job.cleanup();
|
await job.cleanup();
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
}
|
}
|
||||||
};
|
};
|

api/src/index.js

@@ -44,16 +44,17 @@ const app = express();
 
     logger.info('Loading packages');
     const pkgdir = path.join(config.data_directory,globals.data_directories.packages);
-    await fs.readdir(pkgdir)
-        .then(langs => Promise.all(
-            langs.map(lang=>
-                fs.readdir(path.join(pkgdir,lang))
-                    .then(x=>x.map(y=>path.join(pkgdir, lang, y)))
-            )))
-        .then(pkgs=>pkgs.flat().filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file))))
-        .then(pkgs=>pkgs.forEach(pkg => new runtime.Runtime(pkg)));
 
+    const pkglist = await fs.readdir(pkgdir);
+    const languages = await Promise.all(
+        pkglist.map(lang=>
+            fs.readdir(path.join(pkgdir,lang))
+                .then(x=>x.map(y=>path.join(pkgdir, lang, y)))
+        ));
+    const installed_languages = languages.flat()
+        .filter(pkg=>fss.exists_sync(path.join(pkg, globals.pkg_installed_file)));
+
+    installed_languages.forEach(pkg => new runtime.Runtime(pkg));
 
     logger.info('Starting API Server');
@@ -86,23 +87,69 @@ const app = express();
     logger.debug('Registering Routes');
 
     const ppman_routes = require('./ppman/routes');
-
-    app.get   ('/repos', validate, ppman_routes.repo_list);
-    app.post  ('/repos', ppman_routes.repo_add_validators, validate, ppman_routes.repo_add);
-    app.get   ('/repos/:repo_slug', ppman_routes.repo_info_validators, validate, ppman_routes.repo_info);
-    app.get   ('/repos/:repo_slug/packages', ppman_routes.repo_packages_validators, validate, ppman_routes.repo_packages);
-    app.get   ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_info);
-    app.post  ('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_install);
-    app.delete('/repos/:repo_slug/packages/:language/:version', ppman_routes.package_info_validators, validate, ppman_routes.package_uninstall);
-
     const executor_routes = require('./executor/routes');
-    app.post  ('/jobs', executor_routes.run_job_validators, validate, executor_routes.run_job);
 
-    app.get   ('/runtimes', (_, res) => res.json_success({runtimes: runtime.map(rt=>({
-        language: rt.language,
-        version: rt.version.raw,
-        author: rt.author
-    }))}))
+    app.get('/repos',
+        validate,
+        ppman_routes.repo_list
+    );
+
+    app.post('/repos',
+        ppman_routes.repo_add_validators,
+        validate,
+        ppman_routes.repo_add
+    );
+
+    app.get('/repos/:repo_slug',
+        ppman_routes.repo_info_validators,
+        validate,
+        ppman_routes.repo_info
+    );
+
+    app.get('/repos/:repo_slug/packages',
+        ppman_routes.repo_packages_validators,
+        validate,
+        ppman_routes.repo_packages
+    );
+
+    app.get('/repos/:repo_slug/packages/:language/:version',
+        ppman_routes.package_info_validators,
+        validate,
+        ppman_routes.package_info
+    );
+
+    app.post('/repos/:repo_slug/packages/:language/:version',
+        ppman_routes.package_info_validators,
+        validate,
+        ppman_routes.package_install
+    );
+
+    app.delete('/repos/:repo_slug/packages/:language/:version',
+        ppman_routes.package_info_validators,
+        validate,
+        ppman_routes.package_uninstall
+    );
+
+    app.post('/jobs',
+        executor_routes.run_job_validators,
+        validate,
+        executor_routes.run_job);
+
+    function list_runtimes(_, res){
+        const runtimes = runtime.map(rt => (
+            {
+                language: rt.language,
+                version: rt.version.raw,
+                author: rt.author
+            }
+        ));
+
+        return res.json_success({
+            runtimes
+        });
+    }
+
+    app.get('/runtimes', list_runtimes);
 
     logger.debug('Calling app.listen');
     const [address,port] = config.bind_address.split(':');
@@ -112,6 +159,16 @@ const app = express();
     });
 
     logger.debug('Setting up flush timers');
-    setInterval(cache.flush,config.cache_flush_time,path.join(config.data_directory,globals.data_directories.cache));
-    setInterval(state.save,config.state_flush_time,path.join(config.data_directory,globals.data_files.state));
+    setInterval(
+        cache.flush,
+        config.cache_flush_time,
+        path.join(config.data_directory,globals.data_directories.cache)
+    );
+
+    setInterval(
+        state.save,
+        config.state_flush_time,
+        path.join(config.data_directory,globals.data_files.state)
+    );
 })();
@@ -63,7 +63,9 @@ class Package {
         logger.debug('Validating checksums');
         Object.keys(this.checksums).forEach(algo => {
             var val = this.checksums[algo];
+
             logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`);
+
             var cs = crypto.create_hash(algo)
                 .update(fss.read_file_sync(pkgpath))
                 .digest('hex');
@@ -72,7 +74,8 @@ class Package {
 
         await this.repo.import_keys();
 
-        logger.debug('Validating signatutes');
+        logger.debug('Validating signatures');
+
         if(this.signature != '')
             await new Promise((resolve,reject)=>{
                 const gpgspawn = cp.spawn('gpg', ['--verify', '-', pkgpath], {
@@ -94,6 +97,7 @@ class Package {
             logger.warn('Package does not contain a signature - allowing install, but proceed with caution');
+
         logger.debug(`Extracting package files from archive ${pkgfile} in to ${this.install_path}`);
 
         await new Promise((resolve, reject)=>{
             const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`);
             proc.once('exit', (code,_)=>{
@@ -109,30 +113,35 @@ class Package {
         logger.debug('Ensuring binary files exist for package');
         const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`);
         try{
-            const pkgbinstat = await fs.stat(pkgbin);
+            const pkgbin_stat = await fs.stat(pkgbin);
             //eslint-disable-next-line snakecasejs/snakecasejs
-            if(!pkgbinstat.isDirectory()) throw new Error();
+            if(!pkgbin_stat.isDirectory()) throw new Error();
+            // Throw a blank error here, so it will be caught by the following catch, and output the correct error message
+            // The catch is used to catch fs.stat
         }catch(err){
             throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`);
         }
 
         logger.debug('Symlinking into runtimes');
 
         await fs.symlink(
             pkgbin,
             path.join(config.data_directory,
                 globals.data_directories.runtimes,
                 `${this.language}-${this.version.raw}`)
-        ).catch((err)=>err); //catch
+        ).catch((err)=>err); //Ignore if we fail - probably means its already been installed and not cleaned up right
+
 
         logger.debug('Registering runtime');
-        const pkgruntime = new runtime.Runtime(this.install_path);
+        const pkg_runtime = new runtime.Runtime(this.install_path);
 
         logger.debug('Caching environment');
-        const required_pkgs = [pkgruntime, ...pkgruntime.get_all_dependencies()];
-        const get_env_command = [...required_pkgs.map(p=>`cd "${p.runtime_dir}"; source environment; `),
-            'env' ].join(' ');
+        const required_pkgs = [pkg_runtime, ...pkg_runtime.get_all_dependencies()];
+        const get_env_command = [
+            ...required_pkgs.map(pkg=>`cd "${pkg.runtime_dir}"; source environment; `),
+            'env'
+        ].join(' ');
 
         const envout = await new Promise((resolve, reject)=>{
             var stdout = '';
@@ -30,12 +30,12 @@ module.exports = {
     async load(data_file){
         if(fss.exists_sync(data_file)){
             logger.info('Loading state from file');
-            var content = await fs.read_file(data_file);
-            var obj = JSON.parse(content.toString(), reviver);
+            const content = await fs.read_file(data_file);
+            const obj = JSON.parse(content.toString(), reviver);
             [...obj.keys()].forEach(k => state.set(k, obj.get(k)));
         }else{
-            logger.info('Creating new statefile');
-            state.set('repositories', new Map().set('offical', 'https://repo.pistonee.org/index.yaml'));
+            logger.info('Creating new state file');
         }
     },
     async save(data_file){