diff --git a/ARCHITECTURE.TXT b/ARCHITECTURE.TXT new file mode 100644 index 0000000..edf52d3 --- /dev/null +++ b/ARCHITECTURE.TXT @@ -0,0 +1,71 @@ +== Brief == [ Piston ] + +This document covers the overall architecture of Piston v3, and not the +individual components and their implementations. + +In Piston v2 we saw 2 ways of using piston - through the CLI and the API. +These 2 methods would call the same load of bash scripts contained within +a LXC container which would then resolve your request. +There are a number of issues with this approach: + 1. This uses bash - which isn't the best language for performance + 2. It relied on calling through a `lxc-attach` command to access + inside the container + 3. This isn't easy to distribute + 4. It was difficult to add languages - having to edit 4 different files + in 4 different places to add a single language + +Piston v3 aims to tackle these 4 issues. +Firstly, v3 will be less reliant on bash, only using it as an option +for running different interpreters. +Secondly, v3 can run on bare-metal or in a container, but a core API will be +exposed from within the container, instead of running external to it. +Thirdly, v3 will provide a simple docker container, which will expose both the +piston API, and run all the runners within it. +Finally, v3 will provide a repository of precompiled language executors, so it's +1 command away from installing a language + + + +== Piston API == + +Piston v3 exposes a REST API, allowing the user to control the entire thing +over one simple JSON based protocol. This eliminates the need to connect into +the container to do maintenance such as adding new languages, or checking +usage statistics. + +See design/api.txt for more information. + + + +== Package Manager == + +Piston v3 includes a package manager right out of the box. The package manager +manages the different languages and versions that it can run. 
+The package manager is hooked directly into the API and addresses our point +of easy distribution, as users now can easily enable/disable different +components built into piston as they see fit. + +See design/ppman.txt for more information. + + + +== Runtime Environment == + +The new architecture moves to a more bare-metal approach, where the code can be +run without the overhead of a container manager such as LXC or Docker, making +piston much easier to manage this way + +It is still possible to run Piston v3 in a container, but now a container engine +is not required for usage, however it is still recommended. + + + +== Proxy API == + +The in-container API is more powerful than a simple execution API and thus +should be limited, however to keep the weight down, and speed up there is a +reference implementation of a proxy API included, which passes through +execution commands to many different piston instances and allows for +security with rate limiting and API keys. + +See design/proxy.txt \ No newline at end of file diff --git a/api-client/index.cjs b/api-client/index.cjs index 14723dd..c58f9b3 100644 --- a/api-client/index.cjs +++ b/api-client/index.cjs @@ -38,32 +38,77 @@ class APIWrapper { }) } + get_child_object(endpoint, class_type){ + return this.get(endpoint).then(x => new class_type(this, x)) + } + get url_base(){ return this.#base } } +class PistonEngineRepositoryPackage extends APIWrapper { + constructor(repo, {language, language_version, author, buildfile, size, dependencies, installed}){ + super(url_join(repo.url_base, `/packages/${language}/${language_version}`)) + + this.language = language; + this.language_version = language_version; + this.author = author; + this.buildfile = buildfile; + this.size = size; + this.dependencies = dependencies; + this.installed = installed; + } + + install(){ + return this.post('/', {}); + } + + uninstall(){ + return this.delete('/', {}); + } +} + +class PistonEngineRepository extends APIWrapper { + + 
constructor(engine, {slug, url, packages}){ + super(url_join(engine.url_base,`/repos/${slug}`)) + + this.slug = slug; + this.url = url; + this.package_count = packages + + } + + list_packages(){ + return this.get(`/packages`).then(x=>x.packages) + } + + get_package(language, language_version){ + return this.get_child_object(`/packages/${language}/${language_version}`, PistonEngineRepositoryPackage) + } +} class PistonEngine extends APIWrapper { constructor(base_url = 'http://127.0.0.1:6969'){ super(base_url); } + list_repos(){ + return this.get(`/repos`); + } + + add_repo(slug, url){ + return this.post(`/repos`, {slug, url}) + } + + get_repo(slug){ + return this.get_child_object(`/repos/${slug}`, PistonEngineRepository) + } + run_job({language, version, files, main, args, stdin, compile_timeout, run_timeout}){ return this.post(`/jobs`, {language, version, files, main, args, stdin, compile_timeout, run_timeout}) } - - list_packages(){ - return this.get('/packages').then(x=>x.packages) - } - - install_package({language, version}){ - return this.post(`/packages/${language}/${version}`); - } - - uninstall_package({language, version}){ - return this.post(`/packages/${language}/${version}`); - } } diff --git a/api/src/cache.js b/api/src/cache.js new file mode 100644 index 0000000..43727ab --- /dev/null +++ b/api/src/cache.js @@ -0,0 +1,65 @@ +const globals = require('./globals'); +const logger = require('logplease').create('cache'); +const fs = require('fs/promises'), + path = require('path'); + +const cache = new Map(); + +module.exports = { + cache_key: (context, key) => Buffer.from(`${context}-${key}`).toString('base64'), + has(key){ + return cache.has(key) && cache.get(key).expiry > Date.now(); + }, + async get(key, callback, ttl=globals.cache_ttl){ + logger.debug('get:', key); + + if(module.exports.has(key)){ + logger.debug('hit:',key); + return cache.get(key).data; + } + + logger.debug('miss:', key); + var data = await callback(); + cache.set(key, {data, expiry: 
Date.now() + ttl}); + + return data; + }, + async flush(cache_dir){ + logger.info('Flushing cache'); + + async function flush_single(value, key){ + const file_path = path.join(cache_dir, key); + + if(value.expiry < Date.now()){ + cache.delete(key); + try { + const stats = await fs.stat(file_path); + if(stats.is_file()) + await fs.rm(file_path); + }catch{ + // Ignore, file hasn't been flushed yet + } + }else{ + await fs.write_file(file_path, JSON.stringify(value)); + } + + } + + return Promise.all( + Array.from(cache).map(flush_single) + ); + + }, + async load(cache_dir){ + const files = await fs.readdir(cache_dir); + + async function load_single(file_name){ + const file_path = path.join(cache_dir,file_name); + const file_content = await fs.read_file(file_path).toString(); + cache.set(file_name, JSON.parse(file_content)); + } + + return Promise.all(files.map(load_single)); + } + +}; \ No newline at end of file diff --git a/api/src/config.js b/api/src/config.js index 2d841b0..6d73f54 100644 --- a/api/src/config.js +++ b/api/src/config.js @@ -54,6 +54,24 @@ const options = [ default: '/piston', validators: [x=> fss.exists_sync(x) || `Directory ${x} does not exist`] }, + { + key: 'cache_ttl', + desc: 'Time in milliseconds to keep data in cache for at a maximum', + default: 60 * 60 * 1000, + validators: [] + }, + { + key: 'cache_flush_time', + desc: 'Interval in milliseconds to flush cache to disk at', + default: 90 * 60 * 1000, //90 minutes + validators: [] + }, + { + key: 'state_flush_time', + desc: 'Interval in milliseconds to flush state to disk at', + default: 5000, // 5 seconds (file is tiny) + validators: [] + }, { key: 'runner_uid_min', desc: 'Minimum uid to use for runner', @@ -101,12 +119,6 @@ const options = [ desc: 'Max number of open files per job', default: 2048, validators: [] - }, - { - key: 'repo_url', - desc: 'URL of repo index', - default: 'https://github.com', - validators: [] } ]; @@ -114,7 +126,7 @@ function make_default_config(){ let content = 
header.split('\n'); options.forEach(option => { - content = content.concat(option.desc.split('\n').map(x=>`# ${x}`)); + content = content.concat(option.desc.split('\n').map(x=>`# ${x}`)); if(option.options) content.push('# Options: ' + option.options.join(', ')); diff --git a/api/src/globals.js b/api/src/globals.js index c9bd427..09ccb2e 100644 --- a/api/src/globals.js +++ b/api/src/globals.js @@ -11,9 +11,14 @@ const platform = `${is_docker() ? 'docker' : 'baremetal'}-${ module.exports = { data_directories: { + cache: 'cache', packages: 'packages', + runtimes: 'runtimes', jobs: 'jobs' }, + data_files:{ + state: 'state.json' + }, version: require('../package.json').version, platform, pkg_installed_file: '.ppman-installed' //Used as indication for if a package was installed diff --git a/api/src/helpers.js b/api/src/helpers.js new file mode 100644 index 0000000..ba3ef56 --- /dev/null +++ b/api/src/helpers.js @@ -0,0 +1,33 @@ +const fs = require('fs/promises'), + path= require('path'), + fetch = require('node-fetch'), + urlp = require('url'); + + + +module.exports = { + async buffer_from_url(url){ + if(!(url instanceof URL)) + url = new URL(url); + if(url.protocol == 'file:'){ + //eslint-disable-next-line snakecasejs/snakecasejs + return await fs.read_file(urlp.fileURLToPath(url)); + }else{ + const response = await fetch(url.toString()); + //eslint-disable-next-line snakecasejs/snakecasejs + return await response.buffer(); + } + }, + add_url_base_if_required(url, base){ + try{ + return new URL(url); + }catch{ + //Assume this is a file name + return new URL(url, base + '/'); + } + }, + url_basename(url){ + return path.basename(url.pathname); + }, + +}; \ No newline at end of file diff --git a/api/src/index.js b/api/src/index.js index 1325243..4d5e76c 100644 --- a/api/src/index.js +++ b/api/src/index.js @@ -4,6 +4,8 @@ const Logger = require('logplease'); const express = require('express'); const globals = require('./globals'); const config = require('./config'); +const cache = require('./cache'); +const state = require('./state'); const path = require('path'); 
const fs = require('fs/promises'); const fss = require('fs'); @@ -33,6 +35,13 @@ const app = express(); }); + + logger.info('Loading state'); + await state.load(path.join(config.data_directory,globals.data_files.state)); + + logger.info('Loading cache'); + await cache.load(path.join(config.data_directory,globals.data_directories.cache)); + logger.info('Loading packages'); const pkgdir = path.join(config.data_directory,globals.data_directories.packages); @@ -80,16 +89,44 @@ const app = express(); const ppman_routes = require('./ppman/routes'); const executor_routes = require('./executor/routes'); - - app.get('/packages', - ppman_routes.package_list + app.get('/repos', + validate, + ppman_routes.repo_list ); - app.post('/packages/:language/:version', + app.post('/repos', + ppman_routes.repo_add_validators, + validate, + ppman_routes.repo_add + ); + + app.get('/repos/:repo_slug', + ppman_routes.repo_info_validators, + validate, + ppman_routes.repo_info + ); + + app.get('/repos/:repo_slug/packages', + ppman_routes.repo_packages_validators, + validate, + ppman_routes.repo_packages + ); + + app.get('/repos/:repo_slug/packages/:language/:version', + ppman_routes.package_info_validators, + validate, + ppman_routes.package_info + ); + + app.post('/repos/:repo_slug/packages/:language/:version', + ppman_routes.package_info_validators, + validate, ppman_routes.package_install ); - app.delete('/packages/:language/:version', + app.delete('/repos/:repo_slug/packages/:language/:version', + ppman_routes.package_info_validators, + validate, ppman_routes.package_uninstall ); @@ -103,8 +140,7 @@ const app = express(); { language: rt.language, version: rt.version.raw, - author: rt.author, - aliases: rt.aliases + author: rt.author } )); @@ -122,4 +158,17 @@ const app = express(); logger.info('API server started on', config.bind_address); }); + logger.debug('Setting up flush timers'); + + setInterval( + cache.flush, + config.cache_flush_time, + 
path.join(config.data_directory,globals.data_directories.cache) + ); + + setInterval( + state.save, + config.state_flush_time, + path.join(config.data_directory,globals.data_files.state) + ); })(); \ No newline at end of file diff --git a/api/src/ppman/package.js b/api/src/ppman/package.js index 66a29b7..894376b 100644 --- a/api/src/ppman/package.js +++ b/api/src/ppman/package.js @@ -2,7 +2,7 @@ const logger = require('logplease').create('ppman/package'); const semver = require('semver'); const config = require('../config'); const globals = require('../globals'); -const fetch = require('node-fetch'); +const helpers = require('../helpers'); const path = require('path'); const fs = require('fs/promises'); const fss = require('fs'); @@ -11,11 +11,19 @@ const crypto = require('crypto'); const runtime = require('../runtime'); class Package { - constructor({language, version, download, checksum}){ + constructor(repo, {author, language, version, checksums, dependencies, size, buildfile, download, signature}){ + this.author = author; this.language = language; this.version = semver.parse(version); - this.checksum = checksum; + this.checksums = checksums; + this.dependencies = dependencies; + this.size = size; + this.buildfile = buildfile; this.download = download; + this.signature = signature; + + this.repo = repo; + } get installed(){ @@ -23,7 +31,7 @@ class Package { } get download_url(){ - return this.download; + return helpers.add_url_base_if_required(this.download, this.repo.base_u_r_l); } get install_path(){ @@ -47,26 +55,51 @@ class Package { logger.debug(`Downloading package from ${this.download_url} in to ${this.install_path}`); - const pkgpath = path.join(this.install_path, "pkg.tar.gz"); - const download = await fetch(this.download_url); - const file_stream = fss.create_write_stream(pkgpath); - await new Promise((resolve, reject) => { - download.body.pipe(file_stream) - download.body.on("error", reject) - file_stream.on("finish", resolve) - }); + const pkgfile = 
helpers.url_basename(this.download_url); + const pkgpath = path.join(this.install_path, pkgfile); + await helpers.buffer_from_url(this.download_url) + .then(buf=> fs.write_file(pkgpath, buf)); logger.debug('Validating checksums'); - logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`) - const cs = crypto.create_hash("sha256") - .update(fss.readFileSync(pkgpath)) - .digest('hex'); - if(cs != this.checksum) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`); + Object.keys(this.checksums).forEach(algo => { + var val = this.checksums[algo]; - logger.debug(`Extracting package files from archive ${pkgpath} in to ${this.install_path}`); + logger.debug(`Assert ${algo}(${pkgpath}) == ${val}`); + + var cs = crypto.create_hash(algo) + .update(fss.read_file_sync(pkgpath)) + .digest('hex'); + if(cs != val) throw new Error(`Checksum miss-match want: ${val} got: ${cs}`); + }); + + await this.repo.import_keys(); + + logger.debug('Validating signatures'); + + if(this.signature != '') + await new Promise((resolve,reject)=>{ + const gpgspawn = cp.spawn('gpg', ['--verify', '-', pkgpath], { + stdio: ['pipe', 'ignore', 'ignore'] + }); + + gpgspawn.once('exit', (code, _) => { + if(code == 0) resolve(); + else reject(new Error('Invalid signature')); + }); + + gpgspawn.once('error', reject); + + gpgspawn.stdin.write(this.signature); + gpgspawn.stdin.end(); + + }); + else + logger.warn('Package does not contain a signature - allowing install, but proceed with caution'); + + logger.debug(`Extracting package files from archive ${pkgfile} in to ${this.install_path}`); await new Promise((resolve, reject)=>{ - const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`); + const proc = cp.exec(`bash -c 'cd "${this.install_path}" && tar xzf ${pkgfile}'`); proc.once('exit', (code,_)=>{ if(code == 0) resolve(); else reject(new Error('Failed to extract package')); @@ -77,12 +110,38 @@ class Package { proc.once('error', reject); }); + logger.debug('Ensuring 
binary files exist for package'); + const pkgbin = path.join(this.install_path, `${this.language}-${this.version.raw}`); + try{ + const pkgbin_stat = await fs.stat(pkgbin); + //eslint-disable-next-line snakecasejs/snakecasejs + if(!pkgbin_stat.isDirectory()) throw new Error(); + // Throw a blank error here, so it will be caught by the following catch, and output the correct error message + // The catch is used to catch fs.stat + }catch(err){ + throw new Error(`Invalid package: could not find ${this.language}-${this.version.raw}/ contained within package files`); + } + + logger.debug('Symlinking into runtimes'); + + await fs.symlink( + pkgbin, + path.join(config.data_directory, + globals.data_directories.runtimes, + `${this.language}-${this.version.raw}`) + ).catch((err)=>err); //Ignore if we fail - probably means its already been installed and not cleaned up right + + logger.debug('Registering runtime'); - new runtime.Runtime(this.install_path); + const pkg_runtime = new runtime.Runtime(this.install_path); logger.debug('Caching environment'); - const get_env_command = `cd ${this.install_path}; source environment; env`; + const required_pkgs = [pkg_runtime, ...pkg_runtime.get_all_dependencies()]; + const get_env_command = [ + ...required_pkgs.map(pkg=>`cd "${pkg.runtime_dir}"; source environment; `), + 'env' + ].join(' '); const envout = await new Promise((resolve, reject)=>{ var stdout = ''; diff --git a/api/src/ppman/repo.js b/api/src/ppman/repo.js new file mode 100644 index 0000000..d05c1a9 --- /dev/null +++ b/api/src/ppman/repo.js @@ -0,0 +1,65 @@ +const logger = require('logplease').create('ppman/repo'); +const cache = require('../cache'); +const CACHE_CONTEXT = 'repo'; + +const cp = require('child_process'); +const yaml = require('js-yaml'); +const { Package } = require('./package'); +const helpers = require('../helpers'); + +class Repository { + constructor(slug, url){ + this.slug = slug; + this.url = new URL(url); + this.keys = []; + this.packages = []; + 
this.base_u_r_l=''; + logger.debug(`Created repo slug=${this.slug} url=${this.url}`); + } + + get cache_key(){ + return cache.cache_key(CACHE_CONTEXT, this.slug); + } + + async load(){ + try{ + var index = await cache.get(this.cache_key,async ()=>{ + return helpers.buffer_from_url(this.url); + }); + + var repo = yaml.load(index); + if(repo.schema != 'ppman-repo-1'){ + throw new Error('YAML Schema unknown'); + } + + this.keys = repo.keys; + this.packages = repo.packages.map(pkg => new Package(this, pkg)); + this.base_u_r_l = repo.baseurl; + }catch(err){ + logger.error(`Failed to load repository ${this.slug}:`,err.message); + } + } + + + async import_keys(){ + await this.load(); + logger.info(`Importing keys for repo ${this.slug}`); + await new Promise((resolve,reject)=>{ + const gpgspawn = cp.spawn('gpg', ['--receive-keys', ...this.keys], { + stdio: ['ignore', 'ignore', 'ignore'] + }); + + gpgspawn.once('exit', (code, _) => { + if(code == 0) resolve(); + else reject(new Error('Failed to import keys')); + }); + + gpgspawn.once('error', reject); + + }); + + } + +} + +module.exports = {Repository}; \ No newline at end of file diff --git a/api/src/ppman/routes.js b/api/src/ppman/routes.js index c4f0b8a..a14fcf0 100644 --- a/api/src/ppman/routes.js +++ b/api/src/ppman/routes.js @@ -1,53 +1,150 @@ +const repos = new Map(); +const state = require('../state'); const logger = require('logplease').create('ppman/routes'); +const {Repository} = require('./repo'); const semver = require('semver'); -const fetch = require('node-fetch'); -const config = require('../config'); -const { Package } = require('./package'); +const { body, param } = require('express-validator'); - -async function get_package_list(){ - const repo_content = await fetch(config.repo_url).then(x=>x.text()); - - const entries = repo_content.split('\n').filter(x=>x.length > 0); - - return entries.map(line => { - const [language, version, checksum, download] = line.split(',',4); - return new Package({language, 
version, checksum, download}); - }) +async function get_or_construct_repo(slug){ + if(repos.has(slug))return repos.get(slug); + if(state.state.get('repositories').has(slug)){ + const repo_url = state.state.get('repositories').get(slug); + const repo = new Repository(slug, repo_url); + await repo.load(); + repos.set(slug, repo); + return repo; + } + logger.warn(`Requested repo ${slug} does not exist`); + return null; } - -async function get_package(lang, version){ - const packages = await get_package_list(); - const candidates = packages.filter( +async function get_package(repo, lang, version){ + var candidates = repo.packages.filter( pkg => pkg.language == lang && semver.satisfies(pkg.version, version) ); return candidates.sort((a,b)=>semver.rcompare(a.version,b.version))[0] || null; } module.exports = { - - async package_list(req, res){ - // GET /packages - logger.debug('Request to list packages'); + async repo_list(req,res){ + // GET /repos - const packages = await get_package_list(); + logger.debug('Request for repoList'); + res.json_success({ + repos: (await Promise.all( + [...state.state.get('repositories').keys()].map( async slug => await get_or_construct_repo(slug)) + )).map(repo=>({ + slug: repo.slug, + url: repo.url, + packages: repo.packages.length + })) + }); + }, + repo_add_validators: [ + body('slug') + .notEmpty() // eslint-disable-line snakecasejs/snakecasejs + .bail() + .isSlug() // eslint-disable-line snakecasejs/snakecasejs + .bail() + .not() + .custom(value=>state.state.get('repositories').keys().includes(value)) + .withMessage('slug is already in use'), // eslint-disable-line snakecasejs/snakecasejs + body('url') + .notEmpty() // eslint-disable-line snakecasejs/snakecasejs + .bail() + .isURL({require_host: false, require_protocol: true, protocols: ['http','https','file']}) // eslint-disable-line snakecasejs/snakecasejs + + ], + async repo_add(req, res){ + // POST /repos + + logger.debug(`Request for repoAdd slug=${req.body.slug} 
url=${req.body.url}`); + + const repo_state = state.state.get('repositories'); + + repo_state.set(req.body.slug, req.body.url); + logger.info(`Repository ${req.body.slug} added url=${req.body.url}`); + + return res.json_success(req.body.slug); + }, + repo_info_validators: [ + param('repo_slug') + .isSlug() // eslint-disable-line snakecasejs/snakecasejs + .bail() + .custom(value=>state.state.get('repositories').has(value)) + .withMessage('repository does not exist') // eslint-disable-line snakecasejs/snakecasejs + .bail() + ], + async repo_info(req, res){ + // GET /repos/:slug + + logger.debug(`Request for repoInfo for ${req.params.repo_slug}`); + const repo = await get_or_construct_repo(req.params.repo_slug); + + res.json_success({ + slug: repo.slug, + url: repo.url, + packages: repo.packages.length + }); + }, + repo_packages_validators: [ + param('repo_slug') + .isSlug() // eslint-disable-line snakecasejs/snakecasejs + .bail() + .custom(value=>state.state.get('repositories').has(value)) + .withMessage('repository does not exist') // eslint-disable-line snakecasejs/snakecasejs + .bail() + ], + async repo_packages(req, res){ + // GET /repos/:slug/packages + logger.debug('Request to repoPackages'); + + const repo = await get_or_construct_repo(req.params.repo_slug); + if(repo == null) return res.json_error(`Requested repo ${req.params.repo_slug} does not exist`, 404); res.json_success({ - packages: packages.map(pkg=>({ + packages: repo.packages.map(pkg=>({ language: pkg.language, language_version: pkg.version.raw, installed: pkg.installed })) }); + }, + package_info_validators: [ + param('repo_slug') + .isSlug() // eslint-disable-line snakecasejs/snakecasejs + .bail() + .custom(value=>state.state.get('repositories').has(value)) + .withMessage('repository does not exist') // eslint-disable-line snakecasejs/snakecasejs + .bail() + ], + async package_info(req, res){ + // GET /repos/:slug/packages/:language/:version + logger.debug('Request to packageInfo'); + + const repo 
= await get_or_construct_repo(req.params.repo_slug); + + const pkg = await get_package(repo, req.params.language, req.params.version); + if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404); + + res.json_success({ + language: pkg.language, + language_version: pkg.version.raw, + author: pkg.author, + buildfile: pkg.buildfile, + size: pkg.size, + dependencies: pkg.dependencies, + installed: pkg.installed + }); }, async package_install(req,res){ - // POST /packages/:language/:version + // POST /repos/:slug/packages/:language/:version - logger.debug('Request to install package'); + logger.debug('Request to packageInstall'); - const pkg = await get_package(req.params.language, req.params.version); + const repo = await get_or_construct_repo(req.params.repo_slug); + const pkg = await get_package(repo, req.params.language, req.params.version); if(pkg == null) return res.json_error(`Requested package ${req.params.language}-${req.params.version} does not exist`, 404); try{ @@ -61,7 +158,7 @@ module.exports = { }, async package_uninstall(req,res){ - // DELETE /packages/:language/:version + // DELETE /repos/:slug/packages/:language/:version //res.json(req.body); //TODO res.json_error('not implemented', 500); diff --git a/api/src/runtime.js b/api/src/runtime.js index 5a01891..1658f81 100644 --- a/api/src/runtime.js +++ b/api/src/runtime.js @@ -11,7 +11,7 @@ class Runtime { #env_vars #compiled constructor(package_dir){ - const {language, version, author, build_platform, aliases} = JSON.parse( + const {language, version, author, dependencies, build_platform} = JSON.parse( fss.read_file_sync(path.join(package_dir, 'pkg-info.json')) ); @@ -19,7 +19,7 @@ class Runtime { this.language = language; this.version = semver.parse(version); this.author = author; - this.aliases = aliases; + this.dependencies = dependencies; if(build_platform != globals.platform){ logger.warn(`Package ${language}-${version} was built for 
platform ${build_platform}, but our platform is ${globals.platform}`); @@ -30,7 +30,22 @@ class Runtime { } get env_file_path(){ - return path.join(this.pkgdir, 'environment'); + return path.join(this.runtime_dir, 'environment'); + } + + get runtime_dir(){ + return path.join(config.data_directory,globals.data_directories.runtimes, this.toString()); + } + + get_all_dependencies(){ + const res = []; + Object.keys(this.dependencies).forEach(dep => { + const selector = this.dependencies[dep]; + const lang = module.exports.get_latest_runtime_matching_language_version(dep, selector); + res.push(lang); + res.concat(lang.get_all_dependencies(lang)); + }); + return res; } get compiled(){ @@ -62,7 +77,7 @@ class Runtime { module.exports = runtimes; module.exports.Runtime = Runtime; module.exports.get_runtimes_matching_language_version = function(lang, ver){ - return runtimes.filter(rt => (rt.language == lang || rt.aliases.includes(lang)) && semver.satisfies(rt.version, ver)); + return runtimes.filter(rt => rt.language == lang && semver.satisfies(rt.version, ver)); }; module.exports.get_latest_runtime_matching_language_version = function(lang, ver){ return module.exports.get_runtimes_matching_language_version(lang, ver) diff --git a/api/src/state.js b/api/src/state.js new file mode 100644 index 0000000..dd6f773 --- /dev/null +++ b/api/src/state.js @@ -0,0 +1,45 @@ +const fs = require('fs/promises'); +const fss = require('fs'); + +const logger = require('logplease').create('state'); +const state = new Map(); + +function replacer(key, value) { + if(value instanceof Map) { + return { + data_type: 'Map', + value: Array.from(value.entries()), + }; + } else { + return value; + } +} + +function reviver(key, value) { + if(typeof value === 'object' && value !== null) { + if (value.data_type === 'Map') { + return new Map(value.value); + } + } + return value; +} + + +module.exports = { + state, + async load(data_file){ + if(fss.exists_sync(data_file)){ + logger.info('Loading state from 
file'); + var content = await fs.read_file(data_file); + var obj = JSON.parse(content.toString(), reviver); + [...obj.keys()].forEach(k => state.set(k, obj.get(k))); + }else{ + logger.info('Creating new state file'); + state.set('repositories', new Map()); + } + }, + async save(data_file){ + logger.info('Saving state to disk'); + await fs.write_file(data_file, JSON.stringify(state, replacer)); + } +}; \ No newline at end of file diff --git a/cli/commands/ppman_commands/install.js b/cli/commands/ppman_commands/install.js index 39d0a28..1cab867 100644 --- a/cli/commands/ppman_commands/install.js +++ b/cli/commands/ppman_commands/install.js @@ -15,13 +15,23 @@ const msg_format = { exports.handler = async function(argv){ const api = new PistonEngine(argv['piston-url']); + + const repos = await api.list_repos(); + + const repos_obj = await Promise.all(repos.repos.map(({slug}) => api.get_repo(slug))); - const opts = { - language: argv['language'], - version: argv['language-version'] - }; + const repo_pkgs = await Promise.all(repos_obj.map( + async repo => ({ + repo: repo, + packages: await repo.list_packages().catch(_=>[]) + }) + )) - const install = await api.install_package(opts).catch(x=>x); + const repo = repo_pkgs.find(r => r.packages.find(p=>p.language == argv['language'] && p.language_version == argv['language-version'])) + if(!repo) throw Error("Package could not be located") + + const package = await repo.repo.get_package(argv['language'], argv['language-version']) + const install = await package.install().catch(x=>x) console.log(msg_format.color(install)); } \ No newline at end of file diff --git a/cli/commands/ppman_commands/list.js b/cli/commands/ppman_commands/list.js index 4a44dcf..6dd4bed 100644 --- a/cli/commands/ppman_commands/list.js +++ b/cli/commands/ppman_commands/list.js @@ -15,10 +15,17 @@ const msg_format = { exports.handler = async function(argv){ const api = new PistonEngine(argv['piston-url']); + + const repos = await api.list_repos(); - const 
packages = await api.list_packages(); + const repos_obj = await Promise.all(repos.repos.map(({slug}) => api.get_repo(slug))); - + const packages = await repos_obj.reduce(async (accumulator, repo) => [ + ...await accumulator, + ...await repo.list_packages() + .catch(x=>{console.log(x); return []}) + ], []); // Loops over repos, listing packages and flattening them into a single array + const pkg_msg = packages .map(msg_format.color) .join('\n'); diff --git a/design/api.txt b/design/api.txt new file mode 100644 index 0000000..a9e7e63 --- /dev/null +++ b/design/api.txt @@ -0,0 +1,75 @@ +== Piston API == [ Piston ] + +When we speak of piston, what we actually talk about is the Piston API. +This API provides unrestricted, unlimited access to managing piston and +thus shouldn't be publicly exposed. This API is comparable to that of the +docker engine, where everything regarding control of docker goes directly +through the api. + +The API is responsible for managing the execution lifecycle of any given +job, as well as managing the different languages which it can execute a +job in. + + + +== Job Execution == + +Piston v3 exposes an endpoint per package `/execute`, which when called takes +in both a string of code, and an array of arguments to pass into the program +as well as data to write to STDIN. The stdout and stderr from the process are +then both returned separately, along with the error code returned. + +All of this has no rate-limiting built in, making it lightning fast as a call +will directly start the runner process and get under way instantly. + +The 2 stages of this process - compile and run - are both run in sequence, with +different timeouts configurable in the runners config file located in the +data directory. + +Requests to this endpoint can have caching enabled at 3 different levels. +The first option is to have no caching, which is the default for all +interpreted languages. 
The second option is for the compiled binaries to be +cached, which is the default for all compiled languages. The final option is +for output to be cached, which isn't used by default but can be enabled per +package or per request. This is done for the reason that code may choose to +source data from /dev/(u)random or similar sources and as such may not be as +reliable when their outputs are cached. Caching is per package and is used as +an acceleration method to help boost performance of Piston. Cache entries are +automatically purged after the set time, or can be manually purged through the +API on a per package basis. + + +== Package Manager == + +Piston v3 has an inbuilt package manager which is responsible for +(un)installing different packages. Piston v3 by default has access to a single +official repository hosting various versions of various common languages. These +packages and repositories conform to the specifications set out in ppman.txt + +The Piston API service downloads the repository index whenever a `/packages` +request is issued to a repository with the `sync` flag set. This will cause +the service to download the latest repository index off the mirror. + +In piston there is no concept of a package being "outdated" as each package is +a specific version of a language, and different languages can be installed in +parallel and function without any issues. Each package should be considered the +final version of that language. If there is a new version of a language +available (i.e. Python 3.9.1 -> 3.9.2), a new package should be created for +this. + +Individual languages can be queried from the repo using the +`/repos/{repo}/packages/{package}/{package-version}` endpoint. This endpoint +allows for the metadata of the package to be accessed, such as the author, +size, checksums, dependencies, build file git url and download url. 
+ +To install packages, a request to `/install` can be made to the package +endpoint and it will download and install it, making it available on the +`/packages/{package}/{version}` endpoint. + +There is a meta-repository named `all` which can be used to access all +repositories. + +Internally the install process involves downloading and unpacking the package, +ensuring any dependencies are also downloaded and installed, mounting the +squashfs filesystem to a folder, then overlaying it with all its dependencies +in another folder. diff --git a/design/fs.txt b/design/fs.txt new file mode 100644 index 0000000..030d563 --- /dev/null +++ b/design/fs.txt @@ -0,0 +1,18 @@ +== File System Layout == [ Piston ] + +All of piston's files are installed in the `/piston` directory. This directory +contains all runtimes, config, packages and cache that piston uses. + +Each package gets its own directory, where its prebuilt binaries are +unpacked. This is contained within `/piston/packages` + +The binaries folder contained within this is then symlinked into the runtimes +directory. This is where all the different runtimes available are placed. This +is contained within the `/piston/runtimes` directory. + +The cache directory is a directory containing all the different cache files. It is +recommended to either sym-link this into a folder within /tmp/ or directly +mount it as a tmpfs filesystem. + +Configuration is stored in a single file - piston.yaml and contains all +configuration required for the piston API \ No newline at end of file diff --git a/design/index.txt b/design/index.txt new file mode 100644 index 0000000..685222a --- /dev/null +++ b/design/index.txt @@ -0,0 +1,13 @@ +== Index == [ Piston ] + +Design outlines the design of the different components and does not give a +concrete definition of the implementation or how to use it. 
+ +api.txt Design of Piston API +ppman.txt Design of the package manager's package and repository format + + +== Glossary == + +Execution Job A single code run with arguments resulting in an output +Package A version of a language bundled together into a tarball \ No newline at end of file diff --git a/design/ppman.txt b/design/ppman.txt new file mode 100644 index 0000000..b4920db --- /dev/null +++ b/design/ppman.txt @@ -0,0 +1,136 @@ +== Package Manager (ppman) == [ Piston ] + +The package manager is the part of the API responsible for managing different +versions of different languages, managing their installation, uninstallation +and their dependencies. The package manager talks over the piston api and is +built directly into piston, although it has parts which are not directly built +into the API (i.e. the repositories and the cli utility). + +The package manager is a complex part of piston, and requires 2 different file +specifications - the repository index file and the package file. + + + +== Repository Index File == + +The piston repository is the central place where packages are hosted and +downloaded from. This repository can either be a webserver or a local file +containing the right content - as long as it's accessible by a URL, it's +considered a valid repository by piston. A repository URL is simply a URL +pointing to a repository index file, as set out by the following information. + +A repository index file is a YAML file containing the keys: `schema`, +`baseurl`, `keys` and `packages`. + +The schema key simply should have a value of `ppman-repo-1`. This indicates the +version and file format for the client to receive. + +The baseurl key contains the base url that relative URLs should be based off, +this doesn't need to be related to the url that the repository index is hosted +at, only the downloadable files, which are possible to split over many domains +by using absolute paths. 
+ +The keys key contains a list of GPG key ids which should be used when +verifying. + +The packages key contains a list of packages, which contain the keys: `author`, +`language`, `version`, `checksums`, `dependencies`, `size`, `buildfile`, +`download` and `signature`. + +The author field is self-explanatory, it is simply the author's name and email, +formatted similar to git's default format: `Full Name <email@address>`. If the +repository index is automatically generated, it is best to use the commit +author's name in here. + +The language and version fields define the version and name of the compiler or +interpreter contained within. The language should not include a version at all. +In the case of python, use the name python for both python 2 and 3, using the +version field to differentiate between the 2. + +The checksums field is simply a map of hash types to hashes, hash types include +md5, sha1, sha256, sha512. The digests should simply be written as lowercase +hex characters. Only one checksum is required, but if more are supplied the +most secure one is picked, with sha512 as the highest possible. + +The dependencies field is simply a map of language names to versions, which should be +installed for the package to run correctly. An example of this would be +typescript requires node to run. + +The size field is the number of bytes the package file is in size, while +uncompressed. This is used to determine if there is enough room, and thus +should be accurate. + +The buildfile field is a URL pointing to the exact build script for this build. +This should always point to a URL either containing steps, a script or other +means of reproducing the build. This field is purely so people can understand +how the image was built, and to make sure you aren't packing any malicious +code into it. + +The download field is a URL pointing to a place of which the package file can +be obtained from. If this is a relative url, the baseurl will be appended to +it. 
This is particularly useful if everything is stored within 1 s3 bucket, or +you have a repository in a folder. + +The signature field is an armored signature. + + +== Package File == + +Within a repository, many package files are contained. These files are +downloaded and installed into `/piston`. They need to all follow the format +as listed below for the API to properly utilize them. + +A package file is a gzipped tar archive, containing 4/5 things - `run`, +`compile` (optional), `pkg-info.json`, `lang-ver` and `lang-ver/environment`. +Replacing lang-ver with the language name and the version respectively. + + +The `pkg-info.json` file contains 5 different keys: `language`, `version`, +`author`, `dependencies` and `build_platform`. + +The language field is simply the name of the language, all lowercase and not +containing any version number. This is important in the case of python +specifically as python3 and python2 come under the same language, just +different versions. + +The version field is a sem-ver compliant version number for the interpreter +contained within the package. It should be a string. + +The author field contains the author name, and should be formatted exactly like +shown previously in the repo index spec. + +The dependencies field is simply a map containing packages which this package +depends on. This should only be used when one language is a requirement for +another, like how typescript is dependent on node. The key should be the name +of the package, with the value being the version selector. + +The build_platform field is used for defining which type of system the package +was built on, this helps resolve system compatibility errors. It consists of 2 +parts - environment and distro. The environment is simply one of `baremetal`, +`docker`, `lxc` or any other type of environment you can run piston on. The +distro is the ID of the distro as contained in /etc/os-release on the system +which built the package. 
This is done to ensure system compatibility, +especially inside the officially supported Docker container. + + +The `run` and `compile` files are used in the execution of a job, being used +to both compile and run the source code provided. They are both treated the +same inputs-wise but can have timeouts independently set per stage. The +arguments fed to both these files are the same, with the first argument being a +path to the code file, and the rest being the arguments to be passed in. These +files are run in a temporary directory contained within the cache folder. +Depending on the cache control level the code and associated files will either +be disposed of or kept. By default only files named `binary` will be kept. +STDIN is only passed into the `run` file, but both files have their output +captured and returned along with their exit codes through the API. + +The `lang-ver` folder should contain any interpreter specific files, such as +the binary to execute and any other files that may be required to run the +interpreter/compiler contained within the package. + +The `environment` file contained within `lang-ver` should contain export +commands like a ~/.bashrc file should, as this is its intended purpose. Firstly +the language which is being executed has its environment file sourced, then it +walks down the dependency tree sourcing files. The environment variables are +eventually cached to speed up the execution process. 
+ diff --git a/docker-compose.yaml b/docker-compose.yaml index 8b5f62f..c74a814 100644 --- a/docker-compose.yaml +++ b/docker-compose.yaml @@ -11,11 +11,12 @@ services: - ./data/piston:/piston - ./repo:/repo tmpfs: + - /piston/cache - /piston/jobs piston_fs_repo: #Temporary solution until CI works build: repo - command: ['deno-1.7.5'] # Only build deno + command: ['python'] # Only build python volumes: - ./repo:/repo - ./packages:/packages \ No newline at end of file diff --git a/packages/.gitignore b/packages/.gitignore index 92bd97e..6cc2e5b 100644 --- a/packages/.gitignore +++ b/packages/.gitignore @@ -1,8 +1,4 @@ -*/* +*/build +*/output *.pkg.tar.gz -!*/metadata.json -!*/build.sh -!*/environment -!*/run -!*/compile -!*/test.* \ No newline at end of file +*/pkg-info.json \ No newline at end of file diff --git a/packages/CONTRIBUTING.MD b/packages/CONTRIBUTING.MD index 0c5381a..9ae44fd 100644 --- a/packages/CONTRIBUTING.MD +++ b/packages/CONTRIBUTING.MD @@ -16,7 +16,7 @@ See [deno-1.7.5/](deno-1.7.5/) or any other directory for examples. 3. Create a file named `build.sh`, adding a shebang for bash `#!/bin/bash` on the first line. In this file put any steps to compile the specified langauge. -This script should download sources, compile sources and output binaries. They should be dumped into the current working directory, removing any files which aren't required in the process. +It is allowed to use 2 directories, `output` and `build`, the former which contains files which should get packaged into the final output, and the latter containing intermediate build files. 4. Create a file named `run`, containing bash script to run the interpreter. The first argument given to this script (`$1`) is the name of the main file, with the remaining ones as program arguments. @@ -25,7 +25,7 @@ STDIN is piped directly into the run file, and as such nothing special is requir 5. Create a file named `compile`, containing bash script to compile sources into binaries. 
This is only required if the language requires a compling stage. The first argument is always the main file, followed the names of the other files as additional arguements. If the language does not require a compile stage, don't create a compile file. -6. Create a file named `environment`, containing `export` statements which edit the environment variables accordingly. The `$PWD` variable should be used, and is set inside the package directory when running on the target system. +6. Create a file named `environment`, containing `export` statements which edit the environment variables accordingly. The `$PWD` variable should be used, and is set inside the `output` directory, but when running on the target system. 7. Create a test script starting with test, with the file extension of the language. This script should simply output the phrase `OK`. For example, for mono we would create `test.cs` with the content: ```cs diff --git a/packages/Makefile b/packages/Makefile index 80360f5..a861705 100644 --- a/packages/Makefile +++ b/packages/Makefile @@ -8,15 +8,22 @@ help: @echo "Run $`make [language]-[version].pkg.tar.gz$` to build a specific language" build build-all: $(addsuffix .pkg.tar.gz, ${PACKAGES}) +clean clean-all: $(addprefix clean-, ${PACKAGES}) -%.pkg.tar.gz: %/ %/pkg-info.json - cd $< && chmod +x ./build.sh && ./build.sh +clean-%: %/ + rm -rf $ $@ +%/output: %/ %/build.sh + cd $< && chmod +x ./build.sh && ./build.sh + + diff --git a/packages/deno-1.7.5/build.sh b/packages/deno-1.7.5/build.sh index 0b0674c..b15f804 100755 --- a/packages/deno-1.7.5/build.sh +++ b/packages/deno-1.7.5/build.sh @@ -1,5 +1,11 @@ -curl -L https://github.com/denoland/deno/releases/download/v1.7.5/deno-x86_64-unknown-linux-gnu.zip --output deno.zip -unzip -o deno.zip -rm deno.zip +mkdir -p output build -chmod +x deno \ No newline at end of file +cd build +curl -L https://github.com/denoland/deno/releases/download/v1.7.5/deno-x86_64-unknown-linux-gnu.zip --output deno.zip +unzip deno.zip 
+ +cd .. + +mv build/deno output + +chmod +x output/deno \ No newline at end of file diff --git a/packages/deno-1.7.5/metadata.json b/packages/deno-1.7.5/metadata.json index a73d171..0ab8971 100644 --- a/packages/deno-1.7.5/metadata.json +++ b/packages/deno-1.7.5/metadata.json @@ -2,5 +2,6 @@ "language": "deno", "version": "1.7.5", "author": "Thomas Hobson ", + "dependencies": {}, "aliases": ["deno-ts", "deno-js"] } \ No newline at end of file diff --git a/packages/mono-6.12.0/build.sh b/packages/mono-6.12.0/build.sh index 59fb3ad..52ac46a 100755 --- a/packages/mono-6.12.0/build.sh +++ b/packages/mono-6.12.0/build.sh @@ -13,7 +13,4 @@ cd mono make -j$(nproc) DESTDIR=build/tmp make install -j$(nproc) -mv build/tmp/piston/packages/mono/6.12.0/mono-6.12.0 ../../mono-6.12.0 - -cd ../../ -rm -rf build +mv build/tmp/piston/packages/mono/6.12.0/mono-6.12.0 ../../output diff --git a/packages/mono-6.12.0/environment b/packages/mono-6.12.0/environment index 03bbb35..98fd770 100644 --- a/packages/mono-6.12.0/environment +++ b/packages/mono-6.12.0/environment @@ -1 +1 @@ -export PATH=$PWD/mono-6.12.0:$PATH \ No newline at end of file +export PATH=$PWD:$PATH \ No newline at end of file diff --git a/packages/mono-6.12.0/metadata.json b/packages/mono-6.12.0/metadata.json index a7c979b..24837d9 100644 --- a/packages/mono-6.12.0/metadata.json +++ b/packages/mono-6.12.0/metadata.json @@ -2,5 +2,6 @@ "language": "mono", "version": "6.12.0", "author": "Thomas Hobson ", + "dependencies": {}, "aliases": ["csharp", "cs"] } \ No newline at end of file diff --git a/packages/node-15.10.0/build.sh b/packages/node-15.10.0/build.sh index 25d419d..0c64523 100755 --- a/packages/node-15.10.0/build.sh +++ b/packages/node-15.10.0/build.sh @@ -1,4 +1,11 @@ #!/bin/bash + +mkdir -p build output + +cd build + curl "https://nodejs.org/dist/v15.10.0/node-v15.10.0-linux-x64.tar.xz" -o node.tar.xz tar xf node.tar.xz --strip-components=1 -rm node.tar.xz \ No newline at end of file + +cd .. 
+mv build/bin/node output \ No newline at end of file diff --git a/packages/node-15.10.0/metadata.json b/packages/node-15.10.0/metadata.json index 972f4f2..793142c 100644 --- a/packages/node-15.10.0/metadata.json +++ b/packages/node-15.10.0/metadata.json @@ -2,5 +2,6 @@ "language": "node", "version": "15.10.0", "author": "Martin Kos ", + "dependencies": {}, "aliases": ["node-javascript", "node-js", "javascript", "js"] } \ No newline at end of file diff --git a/packages/php-8.0.2/build.sh b/packages/php-8.0.2/build.sh index ba78728..9d5f5c7 100755 --- a/packages/php-8.0.2/build.sh +++ b/packages/php-8.0.2/build.sh @@ -15,6 +15,6 @@ make -j$(nproc) INSTALL_ROOT=build/tmp make install -j$(nproc) -mv build/tmp/piston/packages/php/8.0.2/php-8.0.2 ../../php-8.0.2 +mv build/tmp/piston/packages/php/8.0.2/php-8.0.2 ../../output diff --git a/packages/php-8.0.2/environment b/packages/php-8.0.2/environment index 153022d..bd0ff98 100644 --- a/packages/php-8.0.2/environment +++ b/packages/php-8.0.2/environment @@ -1 +1 @@ -export PATH=$PWD/php-8.0.2/bin:$PATH \ No newline at end of file +export PATH=$PWD/bin:$PATH \ No newline at end of file diff --git a/packages/php-8.0.2/metadata.json b/packages/php-8.0.2/metadata.json index 2da0ef9..35ff122 100644 --- a/packages/php-8.0.2/metadata.json +++ b/packages/php-8.0.2/metadata.json @@ -2,5 +2,6 @@ "language": "php", "version": "8.0.2", "author": "Martin Kos ", + "dependencies": {}, "aliases": ["php8","html"] } \ No newline at end of file diff --git a/packages/python-3.9.1/build.sh b/packages/python-3.9.1/build.sh index 1ee0843..83e0873 100755 --- a/packages/python-3.9.1/build.sh +++ b/packages/python-3.9.1/build.sh @@ -1,10 +1,19 @@ #!/bin/bash +mkdir -p build/tmp build/python +cd build + curl "https://www.python.org/ftp/python/3.9.1/Python-3.9.1.tgz" -o python.tar.gz -tar xzf python.tar.gz --strip-components=1 -rm python.tar.gz +tar xzf python.tar.gz --strip-components=1 -C python + +cd python + ./configure --prefix 
/piston/packages/python/3.9.1/python-3.9.1 make -j$(nproc) -ln -s python python3.9 +DESTDIR=build/tmp make altinstall -j$(nproc) + + +mv build/tmp/piston/packages/python/3.9.1/python-3.9.1 ../../output + diff --git a/packages/python-3.9.1/environment b/packages/python-3.9.1/environment index 98fd770..bd0ff98 100644 --- a/packages/python-3.9.1/environment +++ b/packages/python-3.9.1/environment @@ -1 +1 @@ -export PATH=$PWD:$PATH \ No newline at end of file +export PATH=$PWD/bin:$PATH \ No newline at end of file diff --git a/packages/python-3.9.1/metadata.json b/packages/python-3.9.1/metadata.json index 306c376..98c33ed 100644 --- a/packages/python-3.9.1/metadata.json +++ b/packages/python-3.9.1/metadata.json @@ -2,5 +2,6 @@ "language": "python", "version": "3.9.1", "author": "Thomas Hobson ", + "dependencies": {}, "aliases": ["py", "python3"] } \ No newline at end of file diff --git a/repo/.gitignore b/repo/.gitignore index c3607c0..fd572dc 100644 --- a/repo/.gitignore +++ b/repo/.gitignore @@ -1,2 +1,3 @@ *.pkg.tar.gz -index \ No newline at end of file +index.yaml +*.key \ No newline at end of file diff --git a/repo/entrypoint.sh b/repo/entrypoint.sh index 83e7c73..44a5d65 100755 --- a/repo/entrypoint.sh +++ b/repo/entrypoint.sh @@ -8,4 +8,4 @@ done cd /repo ./mkindex.sh -python3 -m http.server \ No newline at end of file +curl -s http://piston_api:6969/repos -XPOST -d "slug=local&url=file:///repo/index.yaml" || echo "WARNING: Could not add repository" diff --git a/repo/mkindex.sh b/repo/mkindex.sh index c3c72a0..a38caf6 100755 --- a/repo/mkindex.sh +++ b/repo/mkindex.sh @@ -1,22 +1,26 @@ -BASEURL=http://piston_fs_repo:8000/ +echo "schema: ppman-repo-1" > index.yaml +echo "baseurl: file://$PWD" >> index.yaml +echo "keys: []" >> index.yaml +echo "packages: []" >> index.yaml -i=0 +#yq -yi '.keys[0] = "0x107DA02C7AE97B084746564B9F1FD9D87950DB6F"' index.yaml -echo "" > index +i=-1 for pkg in $(find ../packages -type f -name "*.pkg.tar.gz") do - + ((i=i+1)) cp $pkg . 
- PKGFILE=$(basename $pkg) PKGFILENAME=$(echo $PKGFILE | sed 's/\.pkg\.tar\.gz//g') - PKGNAME=$(echo $PKGFILENAME | grep -oP '^\K.+(?=-)') PKGVERSION=$(echo $PKGFILENAME | grep -oP '^.+-\K.+') - PKGCHECKSUM=$(sha256sum $PKGFILE | awk '{print $1}') + BUILDFILE=https://github.com/engineer-man/piston/tree/v3/packages/ + SIZE=$(tar tzvf $PKGFILE | sed 's/ \+/ /g' | cut -f3 -d' ' | sed '2,$s/^/+ /' | paste -sd' ' | bc) - echo "$PKGNAME,$PKGVERSION,$PKGCHECKSUM,$BASEURL$PKGFILE" >> index + tar xzf $PKGFILE pkg-info.json + + yq -yi ".packages[$i] = {} | .packages[$i].signature = \"\" | .packages[$i].buildfile = \"$BUILDFILE\" | .packages[$i].size = $SIZE | .packages[$i].download = \"$PKGFILE\" | .packages[$i].dependencies = $(jq .dependencies -r pkg-info.json) | .packages[$i].author = $(jq .author pkg-info.json) | .packages[$i].language =\"$PKGNAME\" | .packages[$i].version = \"$PKGVERSION\" | .packages[$i].checksums = {} | .packages[$i].checksums.sha256 = \"$(sha256sum $PKGFILE | awk '{print $1}')\"" index.yaml - ((i=i+1)) + rm pkg-info.json done \ No newline at end of file