mirror of
https://github.com/engineer-man/piston.git
synced 2025-04-24 14:06:27 +02:00
Fix issues after merging upstream
Implements a simple container builder for runtime sets
This commit is contained in:
parent
e022e34a37
commit
83e4a1a136
7 changed files with 135 additions and 55 deletions
|
@ -5,108 +5,105 @@ const config = require('../config');
|
|||
const Logger = require('logplease');
const logger = Logger.create('test');
const cp = require('child_process');
const runtime = require('../runtime');
const { Job } = require('../job');

// Test driver: builds each requested runtime from the nix flake, runs its
// declared test programs through the Job pipeline, and exits non-zero if any
// test's stdout is not exactly "OK".
//
// Usage: node <this script> --all        # test every runtime in the flake
//        node <this script> <runtime>    # test a single named runtime
(async function () {
    logger.info('Setting loglevel to', config.log_level);
    Logger.setLogLevel(config.log_level);

    let runtimes_to_test;
    let failed = false;

    if (process.argv[2] === '--all') {
        // Enumerate every runtime attribute name exposed by the flake.
        // (Was `nix eval --json ... --json` — the flag was passed twice.)
        runtimes_to_test = JSON.parse(
            cp.execSync(
                `nix eval ${config.flake_path}#pistonRuntimes --json --apply builtins.attrNames`
            )
        );
    } else {
        runtimes_to_test = [process.argv[2]];
    }

    for (const runtime_name of runtimes_to_test) {
        const runtime_path = `${config.flake_path}#pistonRuntimes.${runtime_name}`;
        logger.info(`Testing runtime ${runtime_path}`);

        logger.debug(`Loading runtime metadata`);
        const metadata = JSON.parse(
            cp.execSync(`nix eval --json ${runtime_path}.metadata`)
        );

        logger.debug(`Loading runtime tests`);
        const tests = JSON.parse(
            cp.execSync(`nix eval --json ${runtime_path}.tests`)
        );

        logger.debug(`Loading runtime`);
        const testable_runtime = new runtime.Runtime({
            ...metadata,
            ...runtime.Runtime.compute_all_limits(
                metadata.language,
                metadata.limit_overrides
            ),
            flake_path: runtime_path,
        });

        // NOTE(review): the return value is discarded — if ensure_built() is
        // async the build may still be running when jobs start. Confirm its
        // signature and `await` it if so.
        testable_runtime.ensure_built();

        logger.info(`Running tests`);

        for (const test of tests) {
            const files = [];

            // The main file must be first in the list; everything else keeps
            // its enumeration order.
            for (const file_name of Object.keys(test.files)) {
                const file_content = test.files[file_name];
                const this_file = {
                    name: file_name,
                    content: file_content,
                };

                // Strict equality: both sides are strings (object key vs.
                // metadata field), so `==` gained nothing and risked coercion.
                if (file_name === test.main) files.unshift(this_file);
                else files.push(this_file);
            }

            const job = new Job({
                runtime: testable_runtime,
                args: test.args || [],
                stdin: test.stdin || '',
                files,
                timeouts: {
                    run: 3000,
                    compile: 10000,
                },
                memory_limits: {
                    run: config.run_memory_limit,
                    compile: config.compile_memory_limit,
                },
            });

            await job.prime();
            const result = await job.execute();
            await job.cleanup();

            // A test passes iff the program printed exactly "OK" (modulo
            // surrounding whitespace).
            if (result.run.stdout.trim() !== 'OK') {
                failed = true;

                logger.error('Test Failed:');
                console.log(job, result);
            } else {
                logger.info('Test Passed');
            }
        }
    }

    if (failed) {
        logger.error('One or more tests failed');
        process.exit(1);
    } else {
        logger.info('All tests passed');
        process.exit(0);
    }
})();
|
||||
|
|
|
@ -139,10 +139,6 @@ class Job {
|
|||
var output = '';
|
||||
|
||||
const proc = cp.spawn(proc_call[0], proc_call.splice(1), {
|
||||
env: {
|
||||
...this.runtime.env_vars,
|
||||
PISTON_LANGUAGE: this.runtime.language,
|
||||
},
|
||||
stdio: 'pipe',
|
||||
cwd: this.dir,
|
||||
uid: this.uid,
|
||||
|
@ -250,7 +246,7 @@ class Job {
|
|||
this.logger.debug('Running');
|
||||
|
||||
const run = await this.safe_call(
|
||||
path.join(this.runtime.pkgdir, 'run'),
|
||||
this.runtime.run,
|
||||
[code_files[0].name, ...this.args],
|
||||
this.timeouts.run,
|
||||
this.memory_limits.run
|
||||
|
|
Loading…
Add table
Add a link
Reference in a new issue