api: remove repos from ppman

commit 812069cc3f
parent 22dcad0dd9
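In short: the repository layer is removed from ppman. The `Repository` class, the persistent state store, the on-disk cache, and the URL helpers are deleted, and the package list is instead read from a flat index fetched from the new `repo_url` config option, one `language,version,checksum,download` line per package. The HTTP surface shrinks to match: the `/repos/:repo_slug/...` routes collapse into `/packages` and `/packages/:language/:version`, GPG signature verification and dependency metadata are dropped in favour of a single sha256 checksum, and runtimes gain an `aliases` list read from `pkg-info.json`.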
@@ -1,65 +0,0 @@
-const globals = require('./globals');
-const logger = require('logplease').create('cache');
-const fs = require('fs/promises'),
-    path = require('path');
-
-const cache = new Map();
-
-module.exports = {
-    cache_key: (context, key) => Buffer.from(`${context}-${key}`).toString('base64'),
-    has(key){
-        return cache.has(key) && cache.get(key).expiry > Date.now();
-    },
-    async get(key, callback, ttl=globals.cache_ttl){
-        logger.debug('get:', key);
-
-        if(module.exports.has(key)){
-            logger.debug('hit:',key);
-            return cache.get(key).data;
-        }
-
-        logger.debug('miss:', key);
-        var data = await callback();
-        cache.set(key, {data, expiry: Date.now() + ttl});
-
-        return data;
-    },
-    async flush(cache_dir){
-        logger.info('Flushing cache');
-
-        async function flush_single(value, key){
-            const file_path = path.join(cache_dir, key);
-
-            if(value.expiry < Date.now()){
-                cache.delete(key);
-                try {
-                    const stats = await fs.stat(file_path);
-                    if(stats.is_file())
-                        await fs.rm(file_path);
-                }catch{
-                    // Ignore, file hasn't been flushed yet
-                }
-            }else{
-                await fs.write_file(file_path, JSON.stringify(value));
-            }
-
-        }
-
-        return Promise.all(
-            Array.from(cache).map(flush_single)
-        );
-
-    },
-    async load(cache_dir){
-        const files = await fs.readdir(cache_dir);
-
-        async function load_single(file_name){
-            const file_path = path.join(cache_dir,file_name);
-            const file_content = await fs.read_file(file_path).toString();
-            cache.set(file_name, JSON.parse(file_content));
-        }
-
-        return Promise.all(files.map(load_single));
-    }
-
-};

@@ -54,24 +54,6 @@ const options = [
         default: '/piston',
         validators: [x=> fss.exists_sync(x) || `Directory ${x} does not exist`]
     },
-    {
-        key: 'cache_ttl',
-        desc: 'Time in milliseconds to keep data in cache for at a maximum',
-        default: 60 * 60 * 1000,
-        validators: []
-    },
-    {
-        key: 'cache_flush_time',
-        desc: 'Interval in milliseconds to flush cache to disk at',
-        default: 90 * 60 * 1000, //90 minutes
-        validators: []
-    },
-    {
-        key: 'state_flush_time',
-        desc: 'Interval in milliseconds to flush state to disk at',
-        default: 5000, // 5 seconds (file is tiny)
-        validators: []
-    },
     {
         key: 'runner_uid_min',
         desc: 'Minimum uid to use for runner',

@@ -119,6 +101,12 @@ const options = [
         desc: 'Max number of open files per job',
         default: 2048,
         validators: []
+    },
+    {
+        key: 'repo_url',
+        desc: 'URL of repo index',
+        default: 'https://github.com',
+        validators: []
     }
 ];

@@ -126,7 +114,7 @@ function make_default_config(){
     let content = header.split('\n');

     options.forEach(option => {
-        content.concat(option.desc.split('\n').map(x=>`# ${x}`));
+        content = content.concat(option.desc.split('\n').map(x=>`# ${x}`));

         if(option.options)
             content.push('# Options: ' + option.options.join(', '));

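The `concat` change above is a real bug fix, not a style tweak: `Array.prototype.concat` returns a new array and never mutates the receiver, so the old call threw away the generated comment lines. A minimal illustration:

```js
// concat returns a new array; the receiver is untouched.
let content = ['# header'];

content.concat(['# description']); // result discarded, content unchanged
console.log(content);              // ['# header']

content = content.concat(['# description']); // the fix: keep the result
console.log(content);              // ['# header', '# description']
```
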
@@ -11,14 +11,9 @@ const platform = `${is_docker() ? 'docker' : 'baremetal'}-${

 module.exports = {
     data_directories: {
-        cache: 'cache',
         packages: 'packages',
-        runtimes: 'runtimes',
         jobs: 'jobs'
     },
-    data_files:{
-        state: 'state.json'
-    },
     version: require('../package.json').version,
     platform,
     pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed

@@ -1,33 +0,0 @@
-const fs = require('fs/promises'),
-    path= require('path'),
-    fetch = require('node-fetch'),
-    urlp = require('url');
-
-
-
-module.exports = {
-    async buffer_from_url(url){
-        if(!(url instanceof URL))
-            url = new URL(url);
-        if(url.protocol == 'file:'){
-            //eslint-disable-next-line snakecasejs/snakecasejs
-            return await fs.read_file(urlp.fileURLToPath(url));
-        }else{
-            return await fetch({
-                url: url.toString()
-            });
-        }
-    },
-    add_url_base_if_required(url, base){
-        try{
-            return new URL(url);
-        }catch{
-            //Assume this is a file name
-            return new URL(url, base + '/');
-        }
-    },
-    url_basename(url){
-        return path.basename(url.pathname);
-    },
-
-};

@@ -4,8 +4,6 @@ const Logger = require('logplease');
 const express = require('express');
 const globals = require('./globals');
 const config = require('./config');
-const cache = require('./cache');
-const state = require('./state');
 const path = require('path');
 const fs = require('fs/promises');
 const fss = require('fs');

@@ -35,13 +33,6 @@ const app = express();

     });

-
-    logger.info('Loading state');
-    await state.load(path.join(config.data_directory,globals.data_files.state));
-
-    logger.info('Loading cache');
-    await cache.load(path.join(config.data_directory,globals.data_directories.cache));
-
     logger.info('Loading packages');
     const pkgdir = path.join(config.data_directory,globals.data_directories.packages);

@@ -89,44 +80,16 @@ const app = express();
     const ppman_routes = require('./ppman/routes');
     const executor_routes = require('./executor/routes');

-    app.get('/repos',
-        validate,
-        ppman_routes.repo_list
+    app.get('/packages',
+        ppman_routes.package_list
     );

-    app.post('/repos',
-        ppman_routes.repo_add_validators,
-        validate,
-        ppman_routes.repo_add
-    );
-
-    app.get('/repos/:repo_slug',
-        ppman_routes.repo_info_validators,
-        validate,
-        ppman_routes.repo_info
-    );
-
-    app.get('/repos/:repo_slug/packages',
-        ppman_routes.repo_packages_validators,
-        validate,
-        ppman_routes.repo_packages
-    );
-
-    app.get('/repos/:repo_slug/packages/:language/:version',
-        ppman_routes.package_info_validators,
-        validate,
-        ppman_routes.package_info
-    );
-
-    app.post('/repos/:repo_slug/packages/:language/:version',
-        ppman_routes.package_info_validators,
-        validate,
+    app.post('/packages/:language/:version',
         ppman_routes.package_install
     );

-    app.delete('/repos/:repo_slug/packages/:language/:version',
-        ppman_routes.package_info_validators,
-        validate,
+    app.delete('/packages/:language/:version',
         ppman_routes.package_uninstall
     );

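The package manager surface collapses from seven repo-scoped routes to three flat ones, and the express-validator chains (`validate`, `*_validators`) disappear with them. A hypothetical client session against the new surface; the paths come from the diff, while the host, port, and package coordinates are made up for illustration:

```js
const fetch = require('node-fetch');

const base = 'http://localhost:2000'; // hypothetical bind address

(async () => {
    // GET /packages - list every package in the index
    const list = await fetch(`${base}/packages`).then(r => r.json());
    console.log(list);

    // POST /packages/:language/:version - install a package
    await fetch(`${base}/packages/node/15.10.0`, { method: 'POST' });

    // DELETE /packages/:language/:version - uninstall (still answers 'not implemented')
    await fetch(`${base}/packages/node/15.10.0`, { method: 'DELETE' });
})();
```
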
@@ -140,7 +103,8 @@ const app = express();
         {
             language: rt.language,
             version: rt.version.raw,
-            author: rt.author
+            author: rt.author,
+            aliases: rt.aliases
         }
     ));

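With `aliases` included, one entry of the runtimes listing would now look roughly like this (field names from the diff, values hypothetical):

```js
const runtime_entry = {
    language: 'node',
    version: '15.10.0',
    author: 'example-author',
    aliases: ['js', 'javascript'] // hypothetical alias list
};
```
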
@@ -158,17 +122,4 @@ const app = express();
         logger.info('API server started on', config.bind_address);
     });

-    logger.debug('Setting up flush timers');
-
-    setInterval(
-        cache.flush,
-        config.cache_flush_time,
-        path.join(config.data_directory,globals.data_directories.cache)
-    );
-
-    setInterval(
-        state.save,
-        config.state_flush_time,
-        path.join(config.data_directory,globals.data_files.state)
-    );
-
 })();

@@ -2,7 +2,7 @@ const logger = require('logplease').create('ppman/package');
 const semver = require('semver');
 const config = require('../config');
 const globals = require('../globals');
-const helpers = require('../helpers');
+const fetch = require('node-fetch');
 const path = require('path');
 const fs = require('fs/promises');
 const fss = require('fs');

@@ -11,19 +11,11 @@ const crypto = require('crypto');
 const runtime = require('../runtime');

 class Package {
-    constructor(repo, {author, language, version, checksums, dependencies, size, buildfile, download, signature}){
-        this.author = author;
+    constructor({language, version, download, checksum}){
         this.language = language;
         this.version = semver.parse(version);
-        this.checksums = checksums;
-        this.dependencies = dependencies;
-        this.size = size;
-        this.buildfile = buildfile;
+        this.checksum = checksum;
         this.download = download;
-        this.signature = signature;
-
-        this.repo = repo;
-
     }

     get installed(){

@@ -31,7 +23,7 @@ class Package {
     }

     get download_url(){
-        return helpers.add_url_base_if_required(this.download, this.repo.base_u_r_l);
+        return this.download;
     }

     get install_path(){

@@ -55,51 +47,26 @@ class Package {


         logger.debug(`Downloading package from ${this.download_url} in to ${this.install_path}`);
-        const pkgfile = helpers.url_basename(this.download_url);
-        const pkgpath = path.join(this.install_path, pkgfile);
-        await helpers.buffer_from_url(this.download_url)
-            .then(buf=> fs.write_file(pkgpath, buf));
-
-        logger.debug('Validating checksums');
-        Object.keys(this.checksums).forEach(algo => {
-            var val = this.checksums[algo];
-
-            logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`);
-
-            var cs = crypto.create_hash(algo)
-                .update(fss.read_file_sync(pkgpath))
-                .digest('hex');
-            if(cs != val) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`);
+        const pkgpath = path.join(this.install_path, "pkg.tar.gz");
+        const download = await fetch(this.download_url);
+        const file_stream = fss.create_write_stream(pkgpath);
+        await new Promise((resolve, reject) => {
+            download.body.pipe(file_stream)
+            download.body.on("error", reject)
+            file_stream.on("finish", resolve)
         });

-        await this.repo.import_keys();
+        logger.debug('Validating checksums');
+        logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`)
+        const cs = crypto.create_hash("sha256")
+            .update(fss.readFileSync(pkgpath))
+            .digest('hex');
+        if(cs != this.checksum) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`);

-        logger.debug('Validating signatures');
-
-        if(this.signature != '')
-            await new Promise((resolve,reject)=>{
-                const gpgspawn = cp.spawn('gpg', ['--verify', '-', pkgpath], {
-                    stdio: ['pipe', 'ignore', 'ignore']
-                });
-
-                gpgspawn.once('exit', (code, _) => {
-                    if(code == 0) resolve();
-                    else reject(new Error('Invalid signature'));
-                });
-
-                gpgspawn.once('error', reject);
-
-                gpgspawn.stdin.write(this.signature);
-                gpgspawn.stdin.end();
-
-            });
-        else
-            logger.warn('Package does not contain a signature - allowing install, but proceed with caution');
-
-        logger.debug(`Extracting package files from archive ${pkgfile} in to ${this.install_path}`);
+        logger.debug(`Extracting package files from archive ${pkgpath} in to ${this.install_path}`);

         await new Promise((resolve, reject)=>{
-            const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`);
+            const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`);
             proc.once('exit', (code,_)=>{
                 if(code == 0) resolve();
                 else reject(new Error('Failed to extract package'));

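One wrinkle in the new checksum block: the failure message still interpolates `val`, a variable that only existed inside the removed per-algorithm loop, so an actual mismatch would now surface as a `ReferenceError` rather than the intended error. A self-contained sketch of the check as presumably intended, written against standard Node API names (the `crypto.create_hash` and `fss.read_file_sync` spellings in this codebase appear to come from a snake_case aliasing hook):

```js
const crypto = require('crypto');
const fs = require('fs');

// Verify a downloaded archive against the sha256 from the package index.
function verify_sha256(file_path, expected) {
    const actual = crypto.createHash('sha256')
        .update(fs.readFileSync(file_path))
        .digest('hex');

    if (actual !== expected)
        throw new Error(`Checksum miss-match want: ${expected} got: ${actual}`);
}
```
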
@@ -110,38 +77,12 @@ class Package {
             proc.once('error', reject);
         });

-        logger.debug('Ensuring binary files exist for package');
-        const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`);
-        try{
-            const pkgbin_stat = await fs.stat(pkgbin);
-            //eslint-disable-next-line snakecasejs/snakecasejs
-            if(!pkgbin_stat.isDirectory()) throw new Error();
-            // Throw a blank error here, so it will be caught by the following catch, and output the correct error message
-            // The catch is used to catch fs.stat
-        }catch(err){
-            throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`);
-        }
-
-        logger.debug('Symlinking into runtimes');
-
-        await fs.symlink(
-            pkgbin,
-            path.join(config.data_directory,
-                globals.data_directories.runtimes,
-                `${this.language}-${this.version.raw}`)
-        ).catch((err)=>err); //Ignore if we fail - probably means its already been installed and not cleaned up right
-
-
         logger.debug('Registering runtime');
-        const pkg_runtime = new runtime.Runtime(this.install_path);
+        new runtime.Runtime(this.install_path);


         logger.debug('Caching environment');
-        const required_pkgs = [pkg_runtime, ...pkg_runtime.get_all_dependencies()];
-        const get_env_command = [
-            ...required_pkgs.map(pkg=>`cd "${pkg.runtime_dir}"; source environment; `),
-            'env'
-        ].join(' ');
+        const get_env_command = `cd ${this.install_path}; source environment; env`;

         const envout = await new Promise((resolve, reject)=>{
             var stdout = '';

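Environment capture is simplified in the same way: instead of sourcing the `environment` file of the package and every resolved dependency, the new command sources only the package's own file. A standalone sketch of that step; the `{ shell: '/bin/bash' }` option is my assumption, since `source` is a bashism and `cp.exec` defaults to `/bin/sh`:

```js
const cp = require('child_process');

// Capture the variables a package's `environment` file exports by sourcing
// it and dumping `env`. install_path is a hypothetical package directory.
function get_env(install_path) {
    return new Promise((resolve, reject) => {
        cp.exec(
            `cd ${install_path}; source environment; env`,
            { shell: '/bin/bash' },
            (err, stdout) => err ? reject(err) : resolve(stdout)
        );
    });
}
```
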
@@ -1,65 +0,0 @@
-const logger = require('logplease').create('ppman/repo');
-const cache = require('../cache');
-const CACHE_CONTEXT = 'repo';
-
-const cp = require('child_process');
-const yaml = require('js-yaml');
-const { Package } = require('./package');
-const helpers = require('../helpers');
-
-class Repository {
-    constructor(slug, url){
-        this.slug = slug;
-        this.url = new URL(url);
-        this.keys = [];
-        this.packages = [];
-        this.base_u_r_l='';
-        logger.debug(`Created repo slug=${this.slug} url=${this.url}`);
-    }
-
-    get cache_key(){
-        return cache.cache_key(CACHE_CONTEXT, this.slug);
-    }
-
-    async load(){
-        try{
-            var index = await cache.get(this.cache_key,async ()=>{
-                return helpers.buffer_from_url(this.url);
-            });
-
-            var repo = yaml.load(index);
-            if(repo.schema != 'ppman-repo-1'){
-                throw new Error('YAML Schema unknown');
-            }
-
-            this.keys = repo.keys;
-            this.packages = repo.packages.map(pkg => new Package(this, pkg));
-            this.base_u_r_l = repo.baseurl;
-        }catch(err){
-            logger.error(`Failed to load repository ${this.slug}:`,err.message);
-        }
-    }
-
-
-    async import_keys(){
-        await this.load();
-        logger.info(`Importing keys for repo ${this.slug}`);
-        await new Promise((resolve,reject)=>{
-            const gpgspawn = cp.spawn('gpg', ['--receive-keys', ...this.keys], {
-                stdio: ['ignore', 'ignore', 'ignore']
-            });
-
-            gpgspawn.once('exit', (code, _) => {
-                if(code == 0) resolve();
-                else reject(new Error('Failed to import keys'));
-            });
-
-            gpgspawn.once('error', reject);
-
-        });
-
-    }
-
-}
-
-module.exports = {Repository};

@@ -1,150 +1,53 @@
-const repos = new Map();
-const state = require('../state');
 const logger = require('logplease').create('ppman/routes');
-const {Repository} = require('./repo');
 const semver = require('semver');
-const { body, param } = require('express-validator');
+const fetch = require('node-fetch');
+const config = require('../config');
+const { Package } = require('./package');

-async function get_or_construct_repo(slug){
-    if(repos.has(slug))return repos.get(slug);
-    if(state.state.get('repositories').has(slug)){
-        const repo_url = state.state.get('repositories').get(slug);
-        const repo = new Repository(slug, repo_url);
-        await repo.load();
-        repos.set(slug, repo);
-        return repo;
-    }
-    logger.warn(`Requested repo ${slug} does not exist`);
-    return null;
+async function get_package_list(){
+    const repo_content = await fetch(config.repo_url).then(x=>x.text());
+
+    const entries = repo_content.split('\n').filter(x=>x.length > 0);
+
+    return entries.map(line => {
+        const [language, version, checksum, download] = line.split(',',4);
+
+        return new Package({language, version, checksum, download});
+    })
 }

-async function get_package(repo, lang, version){
-    var candidates = repo.packages.filter(
+async function get_package(lang, version){
+    const packages = await get_package_list();
+    const candidates = packages.filter(
         pkg => pkg.language == lang && semver.satisfies(pkg.version, version)
     );
     return candidates.sort((a,b)=>semver.rcompare(a.version,b.version))[0] || null;
 }

 module.exports = {
-    async repo_list(req,res){
-        // GET /repos
+    async package_list(req, res){
+        // GET /packages
+        logger.debug('Request to list packages');

-        logger.debug('Request for repoList');
-        res.json_success({
-            repos: (await Promise.all(
-                [...state.state.get('repositories').keys()].map( async slug => await get_or_construct_repo(slug))
-            )).map(repo=>({
-                slug: repo.slug,
-                url: repo.url,
-                packages: repo.packages.length
-            }))
-        });
-    },
-    repo_add_validators: [
-        body('slug')
-            .notEmpty() // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-            .isSlug() // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-            .not()
-            .custom(value=>state.state.get('repositories').keys().includes(value))
-            .withMessage('slug is already in use'), // eslint-disable-line snakecasejs/snakecasejs
-        body('url')
-            .notEmpty() // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-            .isURL({require_host: false, require_protocol: true, protocols: ['http','https','file']}) // eslint-disable-line snakecasejs/snakecasejs
-
-    ],
-    async repo_add(req, res){
-        // POST /repos
-
-        logger.debug(`Request for repoAdd slug=${req.body.slug} url=${req.body.url}`);
-
-        const repo_state = state.state.get('repositories');
-
-        repo_state.set(req.body.slug, req.body.url);
-        logger.info(`Repository ${req.body.slug} added url=${req.body.url}`);
-
-        return res.json_success(req.body.slug);
-    },
-    repo_info_validators: [
-        param('repo_slug')
-            .isSlug() // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-            .custom(value=>state.state.get('repositories').has(value))
-            .withMessage('repository does not exist') // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-    ],
-    async repo_info(req, res){
-        // GET /repos/:slug
-
-        logger.debug(`Request for repoInfo for ${req.params.repo_slug}`);
-        const repo = await get_or_construct_repo(req.params.repo_slug);
-
-        res.json_success({
-            slug: repo.slug,
-            url: repo.url,
-            packages: repo.packages.length
-        });
-    },
-    repo_packages_validators: [
-        param('repo_slug')
-            .isSlug() // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-            .custom(value=>state.state.get('repositories').has(value))
-            .withMessage('repository does not exist') // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-    ],
-    async repo_packages(req, res){
-        // GET /repos/:slug/packages
-        logger.debug('Request to repoPackages');
-
-        const repo = await get_or_construct_repo(req.params.repo_slug);
-        if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404);
+        const packages = await get_package_list();

         res.json_success({
-            packages: repo.packages.map(pkg=>({
+            packages: packages.map(pkg=>({
                 language: pkg.language,
                 language_version: pkg.version.raw,
                 installed: pkg.installed
             }))
         });
-    },
-    package_info_validators: [
-        param('repo_slug')
-            .isSlug() // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-            .custom(value=>state.state.get('repositories').has(value))
-            .withMessage('repository does not exist') // eslint-disable-line snakecasejs/snakecasejs
-            .bail()
-    ],
-    async package_info(req, res){
-        // GET /repos/:slug/packages/:language/:version
-
-        logger.debug('Request to packageInfo');
-
-        const repo = await get_or_construct_repo(req.params.repo_slug);
-
-        const pkg = await get_package(repo, req.params.language, req.params.version);
-        if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404);
-
-        res.json_success({
-            language: pkg.language,
-            language_version: pkg.version.raw,
-            author: pkg.author,
-            buildfile: pkg.buildfile,
-            size: pkg.size,
-            dependencies: pkg.dependencies,
-            installed: pkg.installed
-        });
     },
     async package_install(req,res){
-        // POST /repos/:slug/packages/:language/:version
+        // POST /packages/:language/:version

-        logger.debug('Request to packageInstall');
+        logger.debug('Request to install package');

-        const repo = await get_or_construct_repo(req.params.repo_slug);
-        const pkg = await get_package(repo, req.params.language, req.params.version);
+        const pkg = await get_package(req.params.language, req.params.version);
         if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404);

         try{

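`get_package_list` replaces the old YAML `ppman-repo-1` schema with a bare text index: one package per line, four comma-separated fields. A hypothetical index and the same parsing logic, runnable standalone (URLs and checksums are invented placeholders):

```js
// Hypothetical contents of the file served at config.repo_url:
const sample_index =
    'node,15.10.0,<sha256-of-node-archive>,https://example.com/node-15.10.0.tar.gz\n' +
    'python,3.9.1,<sha256-of-python-archive>,https://example.com/python-3.9.1.tar.gz\n';

const entries = sample_index.split('\n').filter(x => x.length > 0);

const packages = entries.map(line => {
    const [language, version, checksum, download] = line.split(',', 4);
    return { language, version, checksum, download }; // stand-in for new Package(...)
});

console.log(packages[0].language); // 'node'
```
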
@@ -158,7 +61,7 @@ module.exports = {

     },
     async package_uninstall(req,res){
-        // DELETE /repos/:slug/packages/:language/:version
+        // DELETE /packages/:language/:version

         //res.json(req.body); //TODO
         res.json_error('not implemented', 500);

@@ -11,7 +11,7 @@ class Runtime {
     #env_vars
     #compiled
     constructor(package_dir){
-        const {language, version, author, dependencies, build_platform} = JSON.parse(
+        const {language, version, author, build_platform, aliases} = JSON.parse(
             fss.read_file_sync(path.join(package_dir, 'pkg-info.json'))
         );

|
||||||
this.language = language;
|
this.language = language;
|
||||||
this.version = semver.parse(version);
|
this.version = semver.parse(version);
|
||||||
this.author = author;
|
this.author = author;
|
||||||
this.dependencies = dependencies;
|
this.aliases = aliases;
|
||||||
|
|
||||||
if(build_platform != globals.platform){
|
if(build_platform != globals.platform){
|
||||||
logger.warn(`Package ${language}-${version} was built for platform ${build_platform}, but our platform is ${globals.platform}`);
|
logger.warn(`Package ${language}-${version} was built for platform ${build_platform}, but our platform is ${globals.platform}`);
|
||||||
|
@@ -30,22 +30,7 @@ class Runtime {
     }

     get env_file_path(){
-        return path.join(this.runtime_dir, 'environment');
-    }
-
-    get runtime_dir(){
-        return path.join(config.data_directory,globals.data_directories.runtimes, this.toString());
-    }
-
-    get_all_dependencies(){
-        const res = [];
-        Object.keys(this.dependencies).forEach(dep => {
-            const selector = this.dependencies[dep];
-            const lang = module.exports.get_latest_runtime_matching_language_version(dep, selector);
-            res.push(lang);
-            res.concat(lang.get_all_dependencies(lang));
-        });
-        return res;
+        return path.join(this.pkgdir, 'environment');
     }

     get compiled(){

@@ -77,7 +62,7 @@ class Runtime {
 module.exports = runtimes;
 module.exports.Runtime = Runtime;
 module.exports.get_runtimes_matching_language_version = function(lang, ver){
-    return runtimes.filter(rt => rt.language == lang && semver.satisfies(rt.version, ver));
+    return runtimes.filter(rt => (rt.language == lang || rt.aliases.includes(lang)) && semver.satisfies(rt.version, ver));
 };
 module.exports.get_latest_runtime_matching_language_version = function(lang, ver){
     return module.exports.get_runtimes_matching_language_version(lang, ver)

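The alias-aware matcher means a request for, say, `js` can resolve to a runtime whose `language` is `node`, as long as the runtime's `aliases` contains `js`. A reduced sketch of the predicate with a hypothetical runtime table:

```js
const semver = require('semver');

const runtimes = [
    { language: 'node', version: semver.parse('15.10.0'), aliases: ['js', 'javascript'] }
];

function get_runtimes_matching_language_version(lang, ver) {
    return runtimes.filter(rt =>
        (rt.language == lang || rt.aliases.includes(lang)) &&
        semver.satisfies(rt.version, ver)
    );
}

console.log(get_runtimes_matching_language_version('js', '15.x').length); // 1
```
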
@@ -1,45 +0,0 @@
-const fs = require('fs/promises');
-const fss = require('fs');
-
-const logger = require('logplease').create('state');
-const state = new Map();
-
-function replacer(key, value) {
-    if(value instanceof Map) {
-        return {
-            data_type: 'Map',
-            value: Array.from(value.entries()),
-        };
-    } else {
-        return value;
-    }
-}
-
-function reviver(key, value) {
-    if(typeof value === 'object' && value !== null) {
-        if (value.data_type === 'Map') {
-            return new Map(value.value);
-        }
-    }
-    return value;
-}
-
-
-module.exports = {
-    state,
-    async load(data_file){
-        if(fss.exists_sync(data_file)){
-            logger.info('Loading state from file');
-            var content = await fs.read_file(data_file);
-            var obj = JSON.parse(content.toString(), reviver);
-            [...obj.keys()].forEach(k => state.set(k, obj.get(k)));
-        }else{
-            logger.info('Creating new state file');
-            state.set('repositories', new Map());
-        }
-    },
-    async save(data_file){
-        logger.info('Saving state to disk');
-        await fs.write_file(data_file, JSON.stringify(state, replacer));
-    }
-};