Merge pull request #12 from Endercheif/patch/quality

fix: added incremental typing to piston
This commit is contained in:
Endercheif 2023-03-08 18:34:10 -08:00 committed by GitHub
commit aef2611428
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
17 changed files with 1214 additions and 1490 deletions

1
api/.gitignore vendored
View File

@ -1 +1,2 @@
_piston _piston
api/dist

View File

@ -18,12 +18,19 @@ RUN apt-get update && \
RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen RUN sed -i '/en_US.UTF-8/s/^# //g' /etc/locale.gen && locale-gen
RUN npm install --global pnpm
WORKDIR /piston_api WORKDIR /piston_api
COPY ["package.json", "package-lock.json", "./"] COPY ["package.json", "pnpm-lock.yaml", "tsconfig.json", "./"]
RUN npm install
COPY ./src ./src COPY ./src ./src
RUN make -C ./src/nosocket/ all && make -C ./src/nosocket/ install RUN pnpm install
RUN pnpm build
CMD [ "node", "src"] COPY ./src/nosocket ./dist/nosocket
RUN make -C ./dist/nosocket/ all && make -C ./dist/nosocket/ install
CMD [ "pnpm", "api"]
EXPOSE 2000/tcp EXPOSE 2000/tcp

1117
api/package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -1,20 +1,29 @@
{ {
"name": "piston-api", "name": "piston-api",
"version": "3.1.1", "version": "3.2.0",
"description": "API for piston - a high performance code execution engine", "description": "API for piston - a high performance code execution engine",
"main": "src/index.js", "module": "./dist/index.js",
"type": "module",
"scripts": {
"api": "node ./dist/index.js",
"build": "npx tsc"
},
"dependencies": { "dependencies": {
"body-parser": "^1.19.0", "body-parser": "^1.19.0",
"chownr": "^2.0.0", "chownr": "^2.0.0",
"express": "^4.17.3", "express": "^4.17.3",
"express-ws": "^5.0.2", "express-ws": "^5.0.2",
"is-docker": "^2.1.1", "is-docker": "^2.1.1",
"logplease": "^1.2.15", "logplease": "github:Endercheif/logplease#feature/quality",
"nocamel": "HexF/nocamel#patch-1",
"node-fetch": "^2.6.7", "node-fetch": "^2.6.7",
"semver": "^7.3.4", "semver": "^7.3.4",
"uuid": "^8.3.2", "uuid": "^8.3.2",
"waitpid": "git+https://github.com/HexF/node-waitpid.git" "waitpid": "git+https://github.com/HexF/node-waitpid.git"
}, },
"license": "MIT" "license": "MIT",
"devDependencies": {
"@types/express": "^4.17.17",
"@types/node-fetch": "^2.6.2",
"typescript": "^4.9.5"
}
} }

655
api/pnpm-lock.yaml Normal file
View File

@ -0,0 +1,655 @@
lockfileVersion: 5.4
specifiers:
'@types/express': ^4.17.17
'@types/node-fetch': ^2.6.2
body-parser: ^1.19.0
chownr: ^2.0.0
express: ^4.17.3
express-ws: ^5.0.2
is-docker: ^2.1.1
logplease: github:Endercheif/logplease#feature/quality
node-fetch: ^2.6.7
semver: ^7.3.4
typescript: ^4.9.5
uuid: ^8.3.2
waitpid: git+https://github.com/HexF/node-waitpid.git
dependencies:
body-parser: 1.20.2
chownr: 2.0.0
express: 4.18.2
express-ws: 5.0.2_express@4.18.2
is-docker: 2.2.1
logplease: github.com/Endercheif/logplease/808583a0f24b2d6625d0d30da5b4164cc7bbf23a
node-fetch: 2.6.9
semver: 7.3.8
uuid: 8.3.2
waitpid: github.com/HexF/node-waitpid/a08d116a5d993a747624fe72ff890167be8c34aa
devDependencies:
'@types/express': 4.17.17
'@types/node-fetch': 2.6.2
typescript: 4.9.5
packages:
/@types/body-parser/1.19.2:
resolution: {integrity: sha512-ALYone6pm6QmwZoAgeyNksccT9Q4AWZQ6PvfwR37GT6r6FWUPguq6sUmNGSMV2Wr761oQoBxwGGa6DR5o1DC9g==}
dependencies:
'@types/connect': 3.4.35
'@types/node': 18.14.1
dev: true
/@types/connect/3.4.35:
resolution: {integrity: sha512-cdeYyv4KWoEgpBISTxWvqYsVy444DOqehiF3fM3ne10AmJ62RSyNkUnxMJXHQWRQQX2eR94m5y1IZyDwBjV9FQ==}
dependencies:
'@types/node': 18.14.1
dev: true
/@types/express-serve-static-core/4.17.33:
resolution: {integrity: sha512-TPBqmR/HRYI3eC2E5hmiivIzv+bidAfXofM+sbonAGvyDhySGw9/PQZFt2BLOrjUUR++4eJVpx6KnLQK1Fk9tA==}
dependencies:
'@types/node': 18.14.1
'@types/qs': 6.9.7
'@types/range-parser': 1.2.4
dev: true
/@types/express/4.17.17:
resolution: {integrity: sha512-Q4FmmuLGBG58btUnfS1c1r/NQdlp3DMfGDGig8WhfpA2YRUtEkxAjkZb0yvplJGYdF1fsQ81iMDcH24sSCNC/Q==}
dependencies:
'@types/body-parser': 1.19.2
'@types/express-serve-static-core': 4.17.33
'@types/qs': 6.9.7
'@types/serve-static': 1.15.1
dev: true
/@types/mime/3.0.1:
resolution: {integrity: sha512-Y4XFY5VJAuw0FgAqPNd6NNoV44jbq9Bz2L7Rh/J6jLTiHBSBJa9fxqQIvkIld4GsoDOcCbvzOUAbLPsSKKg+uA==}
dev: true
/@types/node-fetch/2.6.2:
resolution: {integrity: sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==}
dependencies:
'@types/node': 18.14.1
form-data: 3.0.1
dev: true
/@types/node/18.14.1:
resolution: {integrity: sha512-QH+37Qds3E0eDlReeboBxfHbX9omAcBCXEzswCu6jySP642jiM3cYSIkU/REqwhCUqXdonHFuBfJDiAJxMNhaQ==}
dev: true
/@types/qs/6.9.7:
resolution: {integrity: sha512-FGa1F62FT09qcrueBA6qYTrJPVDzah9a+493+o2PCXsesWHIn27G98TsSMs3WPNbZIEj4+VJf6saSFpvD+3Zsw==}
dev: true
/@types/range-parser/1.2.4:
resolution: {integrity: sha512-EEhsLsD6UsDM1yFhAvy0Cjr6VwmpMWqFBCb9w07wVugF7w9nfajxLuVmngTIpgS6svCnm6Vaw+MZhoDCKnOfsw==}
dev: true
/@types/serve-static/1.15.1:
resolution: {integrity: sha512-NUo5XNiAdULrJENtJXZZ3fHtfMolzZwczzBbnAeBbqBwG+LaG6YaJtuwzwGSQZ2wsCrxjEhNNjAkKigy3n8teQ==}
dependencies:
'@types/mime': 3.0.1
'@types/node': 18.14.1
dev: true
/accepts/1.3.8:
resolution: {integrity: sha512-PYAthTa2m2VKxuvSD3DPC/Gy+U+sOA1LAuT8mkmRuvw+NACSaeXEQ+NHcVF7rONl6qcaxV3Uuemwawk+7+SJLw==}
engines: {node: '>= 0.6'}
dependencies:
mime-types: 2.1.35
negotiator: 0.6.3
dev: false
/array-flatten/1.1.1:
resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==}
dev: false
/asynckit/0.4.0:
resolution: {integrity: sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==}
dev: true
/body-parser/1.20.1:
resolution: {integrity: sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==}
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
dependencies:
bytes: 3.1.2
content-type: 1.0.5
debug: 2.6.9
depd: 2.0.0
destroy: 1.2.0
http-errors: 2.0.0
iconv-lite: 0.4.24
on-finished: 2.4.1
qs: 6.11.0
raw-body: 2.5.1
type-is: 1.6.18
unpipe: 1.0.0
transitivePeerDependencies:
- supports-color
dev: false
/body-parser/1.20.2:
resolution: {integrity: sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==}
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
dependencies:
bytes: 3.1.2
content-type: 1.0.5
debug: 2.6.9
depd: 2.0.0
destroy: 1.2.0
http-errors: 2.0.0
iconv-lite: 0.4.24
on-finished: 2.4.1
qs: 6.11.0
raw-body: 2.5.2
type-is: 1.6.18
unpipe: 1.0.0
transitivePeerDependencies:
- supports-color
dev: false
/bytes/3.1.2:
resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==}
engines: {node: '>= 0.8'}
dev: false
/call-bind/1.0.2:
resolution: {integrity: sha512-7O+FbCihrB5WGbFYesctwmTKae6rOiIzmz1icreWJ+0aA7LJfuqhEso2T9ncpcFtzMQtzXf2QGGueWJGTYsqrA==}
dependencies:
function-bind: 1.1.1
get-intrinsic: 1.2.0
dev: false
/chownr/2.0.0:
resolution: {integrity: sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==}
engines: {node: '>=10'}
dev: false
/combined-stream/1.0.8:
resolution: {integrity: sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==}
engines: {node: '>= 0.8'}
dependencies:
delayed-stream: 1.0.0
dev: true
/content-disposition/0.5.4:
resolution: {integrity: sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==}
engines: {node: '>= 0.6'}
dependencies:
safe-buffer: 5.2.1
dev: false
/content-type/1.0.5:
resolution: {integrity: sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA==}
engines: {node: '>= 0.6'}
dev: false
/cookie-signature/1.0.6:
resolution: {integrity: sha512-QADzlaHc8icV8I7vbaJXJwod9HWYp8uCqf1xa4OfNu1T7JVxQIrUgOWtHdNDtPiywmFbiS12VjotIXLrKM3orQ==}
dev: false
/cookie/0.5.0:
resolution: {integrity: sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==}
engines: {node: '>= 0.6'}
dev: false
/debug/2.6.9:
resolution: {integrity: sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==}
peerDependencies:
supports-color: '*'
peerDependenciesMeta:
supports-color:
optional: true
dependencies:
ms: 2.0.0
dev: false
/delayed-stream/1.0.0:
resolution: {integrity: sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==}
engines: {node: '>=0.4.0'}
dev: true
/depd/2.0.0:
resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==}
engines: {node: '>= 0.8'}
dev: false
/destroy/1.2.0:
resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==}
engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16}
dev: false
/ee-first/1.1.1:
resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==}
dev: false
/encodeurl/1.0.2:
resolution: {integrity: sha512-TPJXq8JqFaVYm2CWmPvnP2Iyo4ZSM7/QKcSmuMLDObfpH5fi7RUGmd/rTDf+rut/saiDiQEeVTNgAmJEdAOx0w==}
engines: {node: '>= 0.8'}
dev: false
/escape-html/1.0.3:
resolution: {integrity: sha512-NiSupZ4OeuGwr68lGIeym/ksIZMJodUGOSCZ/FSnTxcrekbvqrgdUxlJOMpijaKZVjAJrWrGs/6Jy8OMuyj9ow==}
dev: false
/etag/1.8.1:
resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==}
engines: {node: '>= 0.6'}
dev: false
/express-ws/5.0.2_express@4.18.2:
resolution: {integrity: sha512-0uvmuk61O9HXgLhGl3QhNSEtRsQevtmbL94/eILaliEADZBHZOQUAiHFrGPrgsjikohyrmSG5g+sCfASTt0lkQ==}
engines: {node: '>=4.5.0'}
peerDependencies:
express: ^4.0.0 || ^5.0.0-alpha.1
dependencies:
express: 4.18.2
ws: 7.5.9
transitivePeerDependencies:
- bufferutil
- utf-8-validate
dev: false
/express/4.18.2:
resolution: {integrity: sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==}
engines: {node: '>= 0.10.0'}
dependencies:
accepts: 1.3.8
array-flatten: 1.1.1
body-parser: 1.20.1
content-disposition: 0.5.4
content-type: 1.0.5
cookie: 0.5.0
cookie-signature: 1.0.6
debug: 2.6.9
depd: 2.0.0
encodeurl: 1.0.2
escape-html: 1.0.3
etag: 1.8.1
finalhandler: 1.2.0
fresh: 0.5.2
http-errors: 2.0.0
merge-descriptors: 1.0.1
methods: 1.1.2
on-finished: 2.4.1
parseurl: 1.3.3
path-to-regexp: 0.1.7
proxy-addr: 2.0.7
qs: 6.11.0
range-parser: 1.2.1
safe-buffer: 5.2.1
send: 0.18.0
serve-static: 1.15.0
setprototypeof: 1.2.0
statuses: 2.0.1
type-is: 1.6.18
utils-merge: 1.0.1
vary: 1.1.2
transitivePeerDependencies:
- supports-color
dev: false
/finalhandler/1.2.0:
resolution: {integrity: sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==}
engines: {node: '>= 0.8'}
dependencies:
debug: 2.6.9
encodeurl: 1.0.2
escape-html: 1.0.3
on-finished: 2.4.1
parseurl: 1.3.3
statuses: 2.0.1
unpipe: 1.0.0
transitivePeerDependencies:
- supports-color
dev: false
/form-data/3.0.1:
resolution: {integrity: sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==}
engines: {node: '>= 6'}
dependencies:
asynckit: 0.4.0
combined-stream: 1.0.8
mime-types: 2.1.35
dev: true
/forwarded/0.2.0:
resolution: {integrity: sha512-buRG0fpBtRHSTCOASe6hD258tEubFoRLb4ZNA6NxMVHNw2gOcwHo9wyablzMzOA5z9xA9L1KNjk/Nt6MT9aYow==}
engines: {node: '>= 0.6'}
dev: false
/fresh/0.5.2:
resolution: {integrity: sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==}
engines: {node: '>= 0.6'}
dev: false
/function-bind/1.1.1:
resolution: {integrity: sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==}
dev: false
/get-intrinsic/1.2.0:
resolution: {integrity: sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==}
dependencies:
function-bind: 1.1.1
has: 1.0.3
has-symbols: 1.0.3
dev: false
/has-symbols/1.0.3:
resolution: {integrity: sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==}
engines: {node: '>= 0.4'}
dev: false
/has/1.0.3:
resolution: {integrity: sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==}
engines: {node: '>= 0.4.0'}
dependencies:
function-bind: 1.1.1
dev: false
/http-errors/2.0.0:
resolution: {integrity: sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==}
engines: {node: '>= 0.8'}
dependencies:
depd: 2.0.0
inherits: 2.0.4
setprototypeof: 1.2.0
statuses: 2.0.1
toidentifier: 1.0.1
dev: false
/iconv-lite/0.4.24:
resolution: {integrity: sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==}
engines: {node: '>=0.10.0'}
dependencies:
safer-buffer: 2.1.2
dev: false
/inherits/2.0.4:
resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==}
dev: false
/ipaddr.js/1.9.1:
resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==}
engines: {node: '>= 0.10'}
dev: false
/is-docker/2.2.1:
resolution: {integrity: sha512-F+i2BKsFrH66iaUFc0woD8sLy8getkwTwtOBjvs56Cx4CgJDeKQeqfz8wAYiSb8JOprWhHH5p77PbmYCvvUuXQ==}
engines: {node: '>=8'}
hasBin: true
dev: false
/lru-cache/6.0.0:
resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==}
engines: {node: '>=10'}
dependencies:
yallist: 4.0.0
dev: false
/media-typer/0.3.0:
resolution: {integrity: sha512-dq+qelQ9akHpcOl/gUVRTxVIOkAJ1wR3QAvb4RsVjS8oVoFjDGTc679wJYmUmknUF5HwMLOgb5O+a3KxfWapPQ==}
engines: {node: '>= 0.6'}
dev: false
/merge-descriptors/1.0.1:
resolution: {integrity: sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==}
dev: false
/methods/1.1.2:
resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==}
engines: {node: '>= 0.6'}
dev: false
/mime-db/1.52.0:
resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==}
engines: {node: '>= 0.6'}
/mime-types/2.1.35:
resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==}
engines: {node: '>= 0.6'}
dependencies:
mime-db: 1.52.0
/mime/1.6.0:
resolution: {integrity: sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==}
engines: {node: '>=4'}
hasBin: true
dev: false
/ms/2.0.0:
resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==}
dev: false
/ms/2.1.3:
resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==}
dev: false
/negotiator/0.6.3:
resolution: {integrity: sha512-+EUsqGPLsM+j/zdChZjsnX51g4XrHFOIXwfnCVPGlQk/k5giakcKsuxCObBRu6DSm9opw/O6slWbJdghQM4bBg==}
engines: {node: '>= 0.6'}
dev: false
/node-fetch/2.6.9:
resolution: {integrity: sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==}
engines: {node: 4.x || >=6.0.0}
peerDependencies:
encoding: ^0.1.0
peerDependenciesMeta:
encoding:
optional: true
dependencies:
whatwg-url: 5.0.0
dev: false
/object-inspect/1.12.3:
resolution: {integrity: sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==}
dev: false
/on-finished/2.4.1:
resolution: {integrity: sha512-oVlzkg3ENAhCk2zdv7IJwd/QUD4z2RxRwpkcGY8psCVcCYZNq4wYnVWALHM+brtuJjePWiYF/ClmuDr8Ch5+kg==}
engines: {node: '>= 0.8'}
dependencies:
ee-first: 1.1.1
dev: false
/parseurl/1.3.3:
resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==}
engines: {node: '>= 0.8'}
dev: false
/path-to-regexp/0.1.7:
resolution: {integrity: sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==}
dev: false
/proxy-addr/2.0.7:
resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==}
engines: {node: '>= 0.10'}
dependencies:
forwarded: 0.2.0
ipaddr.js: 1.9.1
dev: false
/qs/6.11.0:
resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==}
engines: {node: '>=0.6'}
dependencies:
side-channel: 1.0.4
dev: false
/range-parser/1.2.1:
resolution: {integrity: sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg==}
engines: {node: '>= 0.6'}
dev: false
/raw-body/2.5.1:
resolution: {integrity: sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==}
engines: {node: '>= 0.8'}
dependencies:
bytes: 3.1.2
http-errors: 2.0.0
iconv-lite: 0.4.24
unpipe: 1.0.0
dev: false
/raw-body/2.5.2:
resolution: {integrity: sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==}
engines: {node: '>= 0.8'}
dependencies:
bytes: 3.1.2
http-errors: 2.0.0
iconv-lite: 0.4.24
unpipe: 1.0.0
dev: false
/safe-buffer/5.2.1:
resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==}
dev: false
/safer-buffer/2.1.2:
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==}
dev: false
/semver/7.3.8:
resolution: {integrity: sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A==}
engines: {node: '>=10'}
hasBin: true
dependencies:
lru-cache: 6.0.0
dev: false
/send/0.18.0:
resolution: {integrity: sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==}
engines: {node: '>= 0.8.0'}
dependencies:
debug: 2.6.9
depd: 2.0.0
destroy: 1.2.0
encodeurl: 1.0.2
escape-html: 1.0.3
etag: 1.8.1
fresh: 0.5.2
http-errors: 2.0.0
mime: 1.6.0
ms: 2.1.3
on-finished: 2.4.1
range-parser: 1.2.1
statuses: 2.0.1
transitivePeerDependencies:
- supports-color
dev: false
/serve-static/1.15.0:
resolution: {integrity: sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==}
engines: {node: '>= 0.8.0'}
dependencies:
encodeurl: 1.0.2
escape-html: 1.0.3
parseurl: 1.3.3
send: 0.18.0
transitivePeerDependencies:
- supports-color
dev: false
/setprototypeof/1.2.0:
resolution: {integrity: sha512-E5LDX7Wrp85Kil5bhZv46j8jOeboKq5JMmYM3gVGdGH8xFpPWXUMsNrlODCrkoxMEeNi/XZIwuRvY4XNwYMJpw==}
dev: false
/side-channel/1.0.4:
resolution: {integrity: sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==}
dependencies:
call-bind: 1.0.2
get-intrinsic: 1.2.0
object-inspect: 1.12.3
dev: false
/statuses/2.0.1:
resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==}
engines: {node: '>= 0.8'}
dev: false
/toidentifier/1.0.1:
resolution: {integrity: sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==}
engines: {node: '>=0.6'}
dev: false
/tr46/0.0.3:
resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==}
dev: false
/type-is/1.6.18:
resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==}
engines: {node: '>= 0.6'}
dependencies:
media-typer: 0.3.0
mime-types: 2.1.35
dev: false
/typescript/4.9.5:
resolution: {integrity: sha512-1FXk9E2Hm+QzZQ7z+McJiHL4NW1F2EzMu9Nq9i3zAaGqibafqYwCVU6WyWAuyQRRzOlxou8xZSyXLEN8oKj24g==}
engines: {node: '>=4.2.0'}
hasBin: true
dev: true
/unpipe/1.0.0:
resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==}
engines: {node: '>= 0.8'}
dev: false
/utils-merge/1.0.1:
resolution: {integrity: sha512-pMZTvIkT1d+TFGvDOqodOclx0QWkkgi6Tdoa8gC8ffGAAqz9pzPTZWAybbsHHoED/ztMtkv/VoYTYyShUn81hA==}
engines: {node: '>= 0.4.0'}
dev: false
/uuid/8.3.2:
resolution: {integrity: sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==}
hasBin: true
dev: false
/vary/1.1.2:
resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==}
engines: {node: '>= 0.8'}
dev: false
/webidl-conversions/3.0.1:
resolution: {integrity: sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==}
dev: false
/whatwg-url/5.0.0:
resolution: {integrity: sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==}
dependencies:
tr46: 0.0.3
webidl-conversions: 3.0.1
dev: false
/ws/7.5.9:
resolution: {integrity: sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q==}
engines: {node: '>=8.3.0'}
peerDependencies:
bufferutil: ^4.0.1
utf-8-validate: ^5.0.2
peerDependenciesMeta:
bufferutil:
optional: true
utf-8-validate:
optional: true
dev: false
/yallist/4.0.0:
resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==}
dev: false
github.com/Endercheif/logplease/808583a0f24b2d6625d0d30da5b4164cc7bbf23a:
resolution: {tarball: https://codeload.github.com/Endercheif/logplease/tar.gz/808583a0f24b2d6625d0d30da5b4164cc7bbf23a}
name: logplease
version: 1.3.5
dev: false
github.com/HexF/node-waitpid/a08d116a5d993a747624fe72ff890167be8c34aa:
resolution: {tarball: https://codeload.github.com/HexF/node-waitpid/tar.gz/a08d116a5d993a747624fe72ff890167be8c34aa}
name: waitpid
version: 0.1.1
requiresBuild: true
dev: false

View File

@ -1,12 +1,18 @@
const express = require('express'); import { Router } from 'express';
const router = express.Router();
const events = require('events'); import { EventEmitter } from 'node:events';
const runtime = require('../runtime'); import {
const { Job } = require('../job'); get_latest_runtime_matching_language_version,
const package = require('../package'); runtimes as _runtimes,
const logger = require('logplease').create('api/v2'); } from '../runtime.js';
import Job from '../job.js';
import package_ from '../package.js';
import { create } from 'logplease';
import { RequestBody } from '../types.js';
const logger = create('api/v2', {});
const router = Router();
const SIGNALS = [ const SIGNALS = [
'SIGABRT', 'SIGABRT',
@ -47,10 +53,10 @@ const SIGNALS = [
'SIGXCPU', 'SIGXCPU',
'SIGXFSZ', 'SIGXFSZ',
'SIGWINCH', 'SIGWINCH',
]; ] as const;
// ref: https://man7.org/linux/man-pages/man7/signal.7.html // ref: https://man7.org/linux/man-pages/man7/signal.7.html
function get_job(body) { function get_job(body: RequestBody): Promise<Job> {
let { let {
language, language,
version, version,
@ -63,7 +69,7 @@ function get_job(body) {
compile_timeout, compile_timeout,
} = body; } = body;
return new Promise((resolve, reject) => { return new Promise<Job>((resolve, reject) => {
if (!language || typeof language !== 'string') { if (!language || typeof language !== 'string') {
return reject({ return reject({
message: 'language is required as a string', message: 'language is required as a string',
@ -87,7 +93,7 @@ function get_job(body) {
} }
} }
const rt = runtime.get_latest_runtime_matching_language_version( const rt = get_latest_runtime_matching_language_version(
language, language,
version version
); );
@ -171,10 +177,10 @@ router.use((req, res, next) => {
next(); next();
}); });
// @ts-ignore
router.ws('/connect', async (ws, req) => { router.ws('/connect', async (ws, req) => {
let job = null; let job = null;
let eventBus = new events.EventEmitter(); let eventBus = new EventEmitter();
eventBus.on('stdout', data => eventBus.on('stdout', data =>
ws.send( ws.send(
@ -203,7 +209,7 @@ router.ws('/connect', async (ws, req) => {
ws.on('message', async data => { ws.on('message', async data => {
try { try {
const msg = JSON.parse(data); const msg = JSON.parse(data.toString());
switch (msg.type) { switch (msg.type) {
case 'init': case 'init':
@ -286,7 +292,7 @@ router.post('/execute', async (req, res) => {
}); });
router.get('/runtimes', (req, res) => { router.get('/runtimes', (req, res) => {
const runtimes = runtime.map(rt => { const runtimes = _runtimes.map(rt => {
return { return {
language: rt.language, language: rt.language,
version: rt.version.raw, version: rt.version.raw,
@ -299,10 +305,12 @@ router.get('/runtimes', (req, res) => {
}); });
router.get('/packages', async (req, res) => { router.get('/packages', async (req, res) => {
console.log({req, res});
logger.debug('Request to list packages'); logger.debug('Request to list packages');
let packages = await package.get_package_list(); let packages = await package_.get_package_list();
packages = packages.map(pkg => { const pkgs = packages.map(pkg => {
return { return {
language: pkg.language, language: pkg.language,
language_version: pkg.version.raw, language_version: pkg.version.raw,
@ -310,7 +318,7 @@ router.get('/packages', async (req, res) => {
}; };
}); });
return res.status(200).send(packages); return res.status(200).send(pkgs);
}); });
router.post('/packages', async (req, res) => { router.post('/packages', async (req, res) => {
@ -318,7 +326,7 @@ router.post('/packages', async (req, res) => {
const { language, version } = req.body; const { language, version } = req.body;
const pkg = await package.get_package(language, version); const pkg = await package_.get_package(language, version);
if (pkg == null) { if (pkg == null) {
return res.status(404).send({ return res.status(404).send({
@ -347,7 +355,7 @@ router.delete('/packages', async (req, res) => {
const { language, version } = req.body; const { language, version } = req.body;
const pkg = await package.get_package(language, version); const pkg = await package_.get_package(language, version);
if (pkg == null) { if (pkg == null) {
return res.status(404).send({ return res.status(404).send({
@ -371,4 +379,4 @@ router.delete('/packages', async (req, res) => {
} }
}); });
module.exports = router; export default router;

View File

@ -1,14 +1,15 @@
const fss = require('fs'); import { existsSync } from 'fs';
const Logger = require('logplease'); import { create, type LogLevel, LogLevels } from 'logplease';
const logger = Logger.create('config'); import { Limit, Limits, ObjectType } from './types.js';
const logger = create('config', {});
const options = { const options = {
log_level: { log_level: {
desc: 'Level of data to log', desc: 'Level of data to log',
default: 'INFO', default: 'INFO' as LogLevel,
validators: [ validators: [
x => x =>
Object.values(Logger.LogLevels).includes(x) || Object.values(LogLevels).includes(x) ||
`Log level ${x} does not exist`, `Log level ${x} does not exist`,
], ],
}, },
@ -20,33 +21,31 @@ const options = {
data_directory: { data_directory: {
desc: 'Absolute path to store all piston related data at', desc: 'Absolute path to store all piston related data at',
default: '/piston', default: '/piston',
validators: [ validators: [x => existsSync(x) || `Directory ${x} does not exist`],
x => fss.exists_sync(x) || `Directory ${x} does not exist`,
],
}, },
runner_uid_min: { runner_uid_min: {
desc: 'Minimum uid to use for runner', desc: 'Minimum uid to use for runner',
default: 1001, default: 1001,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
runner_uid_max: { runner_uid_max: {
desc: 'Maximum uid to use for runner', desc: 'Maximum uid to use for runner',
default: 1500, default: 1500,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
runner_gid_min: { runner_gid_min: {
desc: 'Minimum gid to use for runner', desc: 'Minimum gid to use for runner',
default: 1001, default: 1001,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
runner_gid_max: { runner_gid_max: {
desc: 'Maximum gid to use for runner', desc: 'Maximum gid to use for runner',
default: 1500, default: 1500,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
disable_networking: { disable_networking: {
desc: 'Set to true to disable networking', desc: 'Set to true to disable networking',
@ -57,50 +56,50 @@ const options = {
output_max_size: { output_max_size: {
desc: 'Max size of each stdio buffer', desc: 'Max size of each stdio buffer',
default: 1024, default: 1024,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
max_process_count: { max_process_count: {
desc: 'Max number of processes per job', desc: 'Max number of processes per job',
default: 64, default: 64,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
max_open_files: { max_open_files: {
desc: 'Max number of open files per job', desc: 'Max number of open files per job',
default: 2048, default: 2048,
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
max_file_size: { max_file_size: {
desc: 'Max file size in bytes for a file', desc: 'Max file size in bytes for a file',
default: 10000000, //10MB default: 10000000, //10MB
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
compile_timeout: { compile_timeout: {
desc: 'Max time allowed for compile stage in milliseconds', desc: 'Max time allowed for compile stage in milliseconds',
default: 10000, // 10 seconds default: 10000, // 10 seconds
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
run_timeout: { run_timeout: {
desc: 'Max time allowed for run stage in milliseconds', desc: 'Max time allowed for run stage in milliseconds',
default: 3000, // 3 seconds default: 3000, // 3 seconds
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
compile_memory_limit: { compile_memory_limit: {
desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)', desc: 'Max memory usage for compile stage in bytes (set to -1 for no limit)',
default: -1, // no limit default: -1, // no limit
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
run_memory_limit: { run_memory_limit: {
desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)', desc: 'Max memory usage for run stage in bytes (set to -1 for no limit)',
default: -1, // no limit default: -1, // no limit
parser: parse_int, parser: parseInt,
validators: [(x, raw) => !is_nan(x) || `${raw} is not a number`], validators: [(x, raw) => !isNaN(x) || `${raw} is not a number`],
}, },
repo_url: { repo_url: {
desc: 'URL of repo index', desc: 'URL of repo index',
@ -111,14 +110,14 @@ const options = {
max_concurrent_jobs: { max_concurrent_jobs: {
desc: 'Maximum number of concurrent jobs to run at one time', desc: 'Maximum number of concurrent jobs to run at one time',
default: 64, default: 64,
parser: parse_int, parser: parseInt,
validators: [x => x > 0 || `${x} cannot be negative`], validators: [x => x > 0 || `${x} cannot be negative`],
}, },
limit_overrides: { limit_overrides: {
desc: 'Per-language exceptions in JSON format for each of:\ desc: 'Per-language exceptions in JSON format for each of:\
max_process_count, max_open_files, max_file_size, compile_memory_limit,\ max_process_count, max_open_files, max_file_size, compile_memory_limit,\
run_memory_limit, compile_timeout, run_timeout, output_max_size', run_memory_limit, compile_timeout, run_timeout, output_max_size',
default: {}, default: {} as Record<string, Limits>,
parser: parse_overrides, parser: parse_overrides,
validators: [ validators: [
x => !!x || `Failed to parse the overrides\n${x}`, x => !!x || `Failed to parse the overrides\n${x}`,
@ -129,7 +128,7 @@ const options = {
Object.freeze(options); Object.freeze(options);
function apply_validators(validators, validator_parameters) { function apply_validators(validators: Array<(...args: unknown[]) => true | string>, validator_parameters: unknown[]) {
for (const validator of validators) { for (const validator of validators) {
const validation_response = validator(...validator_parameters); const validation_response = validator(...validator_parameters);
if (validation_response !== true) { if (validation_response !== true) {
@ -139,8 +138,8 @@ function apply_validators(validators, validator_parameters) {
return true; return true;
} }
function parse_overrides(overrides_string) { function parse_overrides(overrides_string: string): Record<string, Limits> {
function get_parsed_json_or_null(overrides) { function get_parsed_json_or_null(overrides: string): Record<string, Partial<Limits>> | null {
try { try {
return JSON.parse(overrides); return JSON.parse(overrides);
} catch (e) { } catch (e) {
@ -152,7 +151,7 @@ function parse_overrides(overrides_string) {
if (overrides === null) { if (overrides === null) {
return null; return null;
} }
const parsed_overrides = {}; const parsed_overrides: Record<string, Partial<Limits>> = {};
for (const language in overrides) { for (const language in overrides) {
parsed_overrides[language] = {}; parsed_overrides[language] = {};
for (const key in overrides[language]) { for (const key in overrides[language]) {
@ -171,21 +170,22 @@ function parse_overrides(overrides_string) {
return null; return null;
} }
// Find the option for the override // Find the option for the override
const option = options[key]; const option = options[key as Limit];
const parser = option.parser; const parser = option.parser;
const raw_value = overrides[language][key]; const raw_value = overrides[language][key as Limit];
// @ts-ignore: lgtm
const parsed_value = parser(raw_value); const parsed_value = parser(raw_value);
parsed_overrides[language][key] = parsed_value; parsed_overrides[language][key] = parsed_value;
} }
} }
return parsed_overrides; return parsed_overrides as Record<string, Limits>;
} }
function validate_overrides(overrides) { function validate_overrides(overrides: Record<string, Limits>) {
for (const language in overrides) { for (const language in overrides) {
for (const key in overrides[language]) { for (const key in overrides[language]) {
const value = overrides[language][key]; const value = overrides[language][key as Limit];
const option = options[key]; const option = options[key as Limit];
const validators = option.validators; const validators = option.validators;
const validation_response = apply_validators(validators, [ const validation_response = apply_validators(validators, [
value, value,
@ -201,12 +201,12 @@ function validate_overrides(overrides) {
logger.info(`Loading Configuration from environment`); logger.info(`Loading Configuration from environment`);
let config = {}; let config = {} as ObjectType<typeof options, "default">;
for (const option_name in options) { for (const option_name in options) {
const env_key = 'PISTON_' + option_name.to_upper_case(); const env_key = 'PISTON_' + option_name.toUpperCase();
const option = options[option_name]; const option = options[option_name];
const parser = option.parser || (x => x); const parser = option.parser || ((x: unknown) => x);
const env_val = process.env[env_key]; const env_val = process.env[env_key];
const parsed_val = parser(env_val); const parsed_val = parser(env_val);
const value = env_val === undefined ? option.default : parsed_val; const value = env_val === undefined ? option.default : parsed_val;
@ -228,4 +228,4 @@ for (const option_name in options) {
logger.info('Configuration successfully loaded'); logger.info('Configuration successfully loaded');
module.exports = config; export default config;

View File

@ -1,20 +0,0 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for
const is_docker = require('is-docker');
const fs = require('fs');
const platform = `${is_docker() ? 'docker' : 'baremetal'}-${fs
.read_file_sync('/etc/os-release')
.toString()
.split('\n')
.find(x => x.startsWith('ID'))
.replace('ID=', '')}`;
module.exports = {
data_directories: {
packages: 'packages',
jobs: 'jobs',
},
version: require('../package.json').version,
platform,
pkg_installed_file: '.ppman-installed', //Used as indication for if a package was installed
clean_directories: ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'],
};

24
api/src/globals.ts Normal file
View File

@ -0,0 +1,24 @@
// Globals are things the user shouldn't change in config, but is good to not use inline constants for
import is_docker from 'is-docker';
import { readFileSync } from 'node:fs';
import { createRequire } from 'node:module';
const require = createRequire(import.meta.url);
export const platform = `${is_docker() ? 'docker' : 'baremetal'}-${readFileSync(
'/etc/os-release'
)
.toString()
.split('\n')
.find(x => x.startsWith('ID'))
.replace('ID=', '')}`;
export const data_directories = {
packages: 'packages',
jobs: 'jobs',
};
// @ts-ignore
export const version: string = require('../package.json').version;
export const pkg_installed_file = '.ppman-installed'; //Used as indication for if a package was installed
export const clean_directories = ['/dev/shm', '/run/lock', '/tmp', '/var/tmp'];

View File

@ -1,98 +0,0 @@
#!/usr/bin/env node
require('nocamel');
const Logger = require('logplease');
const express = require('express');
const expressWs = require('express-ws');
const globals = require('./globals');
const config = require('./config');
const path = require('path');
const fs = require('fs/promises');
const fss = require('fs');
const body_parser = require('body-parser');
const runtime = require('./runtime');
const logger = Logger.create('index');
const app = express();
expressWs(app);
(async () => {
logger.info('Setting loglevel to', config.log_level);
Logger.setLogLevel(config.log_level);
logger.debug('Ensuring data directories exist');
Object.values(globals.data_directories).for_each(dir => {
let data_path = path.join(config.data_directory, dir);
logger.debug(`Ensuring ${data_path} exists`);
if (!fss.exists_sync(data_path)) {
logger.info(`${data_path} does not exist.. Creating..`);
try {
fss.mkdir_sync(data_path);
} catch (e) {
logger.error(`Failed to create ${data_path}: `, e.message);
}
}
});
fss.chmodSync(path.join(config.data_directory, globals.data_directories.jobs), 0o711)
logger.info('Loading packages');
const pkgdir = path.join(
config.data_directory,
globals.data_directories.packages
);
const pkglist = await fs.readdir(pkgdir);
const languages = await Promise.all(
pkglist.map(lang => {
return fs.readdir(path.join(pkgdir, lang)).then(x => {
return x.map(y => path.join(pkgdir, lang, y));
});
})
);
const installed_languages = languages
.flat()
.filter(pkg =>
fss.exists_sync(path.join(pkg, globals.pkg_installed_file))
);
installed_languages.for_each(pkg => runtime.load_package(pkg));
logger.info('Starting API Server');
logger.debug('Constructing Express App');
logger.debug('Registering middleware');
app.use(body_parser.urlencoded({ extended: true }));
app.use(body_parser.json());
app.use((err, req, res, next) => {
return res.status(400).send({
stack: err.stack,
});
});
logger.debug('Registering Routes');
const api_v2 = require('./api/v2');
app.use('/api/v2', api_v2);
const { version } = require('../package.json');
app.get('/', (req, res, next) => {
return res.status(200).send({ message: `Piston v${version}` });
});
app.use((req, res, next) => {
return res.status(404).send({ message: 'Not Found' });
});
logger.debug('Calling app.listen');
const [address, port] = config.bind_address.split(':');
app.listen(port, address, () => {
logger.info('API server started on', config.bind_address);
});
})();

98
api/src/index.ts Normal file
View File

@ -0,0 +1,98 @@
#!/usr/bin/env node
import { create, setLogLevel } from 'logplease';
import express from 'express';
import expressWs from 'express-ws';
import * as globals from './globals.js';
import config from './config.js';
import { join } from 'path';
import { readdir } from 'node:fs/promises';
import { existsSync, mkdirSync, chmodSync } from 'node:fs';
import bodyParser from 'body-parser';
import { load_package } from './runtime.js';
const logger = create('index', {});
const app = express();
expressWs(app);
(async () => {
logger.info('Setting loglevel to');
setLogLevel(config.log_level);
logger.debug('Ensuring data directories exist');
Object.values(globals.data_directories).forEach(dir => {
let data_path = join(config.data_directory, dir);
logger.debug(`Ensuring ${data_path} exists`);
if (!existsSync(data_path)) {
logger.info(`${data_path} does not exist.. Creating..`);
try {
mkdirSync(data_path);
} catch (e) {
logger.error(`Failed to create ${data_path}: `, e.message);
}
}
});
chmodSync(
join(config.data_directory, globals.data_directories.jobs),
0o711
);
logger.info('Loading packages');
const pkgdir = join(
config.data_directory,
globals.data_directories.packages
);
const pkglist = await readdir(pkgdir);
const languages = await Promise.all(
pkglist.map(async lang => {
const x = await readdir(join(pkgdir, lang));
return x.map(y => join(pkgdir, lang, y));
})
);
const installed_languages = languages
.flat()
.filter(pkg => existsSync(join(pkg, globals.pkg_installed_file)));
installed_languages.forEach(pkg => load_package(pkg));
logger.info('Starting API Server');
logger.debug('Constructing Express App');
logger.debug('Registering middleware');
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
app.use(
(
err: Error,
req: express.Request,
res: express.Response,
next: express.NextFunction
) => {
return res.status(400).send({
stack: err.stack,
});
}
);
logger.debug('Registering Routes');
const api_v2 = (await import('./api/v2.js')).default;
app.use('/api/v2', api_v2);
app.use((req, res, next) => {
return res.status(404).send({ message: 'Not Found' });
});
logger.debug('Calling app.listen');
const [address, port] = config.bind_address.split(':');
app.listen(+port, address, () => {
logger.info('API server started on', config.bind_address);
});
})();

View File

@ -1,19 +1,30 @@
const logplease = require('logplease'); import { create, type Logger } from 'logplease';
const logger = logplease.create('job');
const { v4: uuidv4 } = require('uuid'); const logger = create('job');
const cp = require('child_process'); import { v4 as uuidv4 } from 'uuid';
const path = require('path'); import { spawn } from 'child_process';
const config = require('./config'); import { join, relative, dirname } from 'node:path';
const globals = require('./globals'); import config from './config.js';
const fs = require('fs/promises'); import * as globals from './globals.js';
const fss = require('fs'); import {
const wait_pid = require('waitpid'); mkdir,
chown,
writeFile,
readdir,
stat as _stat,
rm,
} from 'node:fs/promises';
import { readdirSync, readFileSync } from 'node:fs';
import wait_pid from 'waitpid';
import EventEmitter from 'events';
import { File, ResponseBody } from './types.js';
const job_states = { const job_states = {
READY: Symbol('Ready to be primed'), READY: Symbol('Ready to be primed'),
PRIMED: Symbol('Primed and ready for execution'), PRIMED: Symbol('Primed and ready for execution'),
EXECUTED: Symbol('Executed and ready for cleanup'), EXECUTED: Symbol('Executed and ready for cleanup'),
}; } as const;
let uid = 0; let uid = 0;
let gid = 0; let gid = 0;
@ -28,14 +39,40 @@ setInterval(() => {
} }
}, 10); }, 10);
class Job { export default class Job {
constructor({ runtime, files, args, stdin, timeouts, memory_limits }) { uuid: string;
logger: Logger;
runtime: any;
files: File[];
args: string[];
stdin: string;
timeouts: { compile: number; run: number };
memory_limits: { compile: number; run: number };
uid: number;
gid: number;
state: symbol;
dir: string;
constructor({
runtime,
files,
args,
stdin,
timeouts,
memory_limits,
}: {
runtime: unknown;
files: File[];
args: string[];
stdin: string;
timeouts: { compile: number; run: number };
memory_limits: { compile: number; run: number };
}) {
this.uuid = uuidv4(); this.uuid = uuidv4();
this.logger = logplease.create(`job/${this.uuid}`); this.logger = create(`job/${this.uuid}`, {});
this.runtime = runtime; this.runtime = runtime;
this.files = files.map((file, i) => ({ this.files = files.map((file: File, i: number) => ({
name: file.name || `file${i}.code`, name: file.name || `file${i}.code`,
content: file.content, content: file.content,
encoding: ['base64', 'hex', 'utf8'].includes(file.encoding) encoding: ['base64', 'hex', 'utf8'].includes(file.encoding)
@ -61,7 +98,7 @@ class Job {
this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`); this.logger.debug(`Assigned uid=${this.uid} gid=${this.gid}`);
this.state = job_states.READY; this.state = job_states.READY;
this.dir = path.join( this.dir = join(
config.data_directory, config.data_directory,
globals.data_directories.jobs, globals.data_directories.jobs,
this.uuid this.uuid
@ -82,12 +119,12 @@ class Job {
this.logger.debug(`Transfering ownership`); this.logger.debug(`Transfering ownership`);
await fs.mkdir(this.dir, { mode: 0o700 }); await mkdir(this.dir, { mode: 0o700 });
await fs.chown(this.dir, this.uid, this.gid); await chown(this.dir, this.uid, this.gid);
for (const file of this.files) { for (const file of this.files) {
const file_path = path.join(this.dir, file.name); const file_path = join(this.dir, file.name);
const rel = path.relative(this.dir, file_path); const rel = relative(this.dir, file_path);
const file_content = Buffer.from(file.content, file.encoding); const file_content = Buffer.from(file.content, file.encoding);
if (rel.startsWith('..')) if (rel.startsWith('..'))
@ -95,14 +132,14 @@ class Job {
`File path "${file.name}" tries to escape parent directory: ${rel}` `File path "${file.name}" tries to escape parent directory: ${rel}`
); );
await fs.mkdir(path.dirname(file_path), { await mkdir(dirname(file_path), {
recursive: true, recursive: true,
mode: 0o700, mode: 0o700,
}); });
await fs.chown(path.dirname(file_path), this.uid, this.gid); await chown(dirname(file_path), this.uid, this.gid);
await fs.write_file(file_path, file_content); await writeFile(file_path, file_content);
await fs.chown(file_path, this.uid, this.gid); await chown(file_path, this.uid, this.gid);
} }
this.state = job_states.PRIMED; this.state = job_states.PRIMED;
@ -110,7 +147,13 @@ class Job {
this.logger.debug('Primed job'); this.logger.debug('Primed job');
} }
async safe_call(file, args, timeout, memory_limit, eventBus = null) { async safe_call(
file: string,
args: string[],
timeout: number,
memory_limit: string | number,
eventBus: EventEmitter = null
): Promise<ResponseBody['run'] & { error?: Error }> {
return new Promise((resolve, reject) => { return new Promise((resolve, reject) => {
const nonetwork = config.disable_networking ? ['nosocket'] : []; const nonetwork = config.disable_networking ? ['nosocket'] : [];
@ -120,16 +163,19 @@ class Job {
'--nofile=' + this.runtime.max_open_files, '--nofile=' + this.runtime.max_open_files,
'--fsize=' + this.runtime.max_file_size, '--fsize=' + this.runtime.max_file_size,
]; ];
const timeout_call = [ const timeout_call = [
'timeout', '-s', '9', Math.ceil(timeout / 1000), 'timeout',
'-s',
'9',
Math.ceil(timeout / 1000),
]; ];
if (memory_limit >= 0) { if (memory_limit >= 0) {
prlimit.push('--as=' + memory_limit); prlimit.push('--as=' + memory_limit);
} }
const proc_call = [ const proc_call = [
'nice', 'nice',
...timeout_call, ...timeout_call,
...prlimit, ...prlimit,
@ -137,13 +183,13 @@ class Job {
'bash', 'bash',
file, file,
...args, ...args,
]; ] as Array<string>;
var stdout = ''; var stdout = '';
var stderr = ''; var stderr = '';
var output = ''; var output = '';
const proc = cp.spawn(proc_call[0], proc_call.splice(1), { const proc = spawn(proc_call[0], proc_call.splice(1), {
env: { env: {
...this.runtime.env_vars, ...this.runtime.env_vars,
PISTON_LANGUAGE: this.runtime.language, PISTON_LANGUAGE: this.runtime.language,
@ -160,18 +206,18 @@ class Job {
proc.stdin.end(); proc.stdin.end();
proc.stdin.destroy(); proc.stdin.destroy();
} else { } else {
eventBus.on('stdin', data => { eventBus.on('stdin', (data: any) => {
proc.stdin.write(data); proc.stdin.write(data);
}); });
eventBus.on('kill', signal => { eventBus.on('kill', (signal: NodeJS.Signals | number) => {
proc.kill(signal); proc.kill(signal);
}); });
} }
const kill_timeout = const kill_timeout =
(timeout >= 0 && (timeout >= 0 &&
set_timeout(async _ => { setTimeout(async _ => {
this.logger.info(`Timeout exceeded timeout=${timeout}`); this.logger.info(`Timeout exceeded timeout=${timeout}`);
process.kill(proc.pid, 'SIGKILL'); process.kill(proc.pid, 'SIGKILL');
}, timeout)) || }, timeout)) ||
@ -202,7 +248,7 @@ class Job {
}); });
const exit_cleanup = () => { const exit_cleanup = () => {
clear_timeout(kill_timeout); clearTimeout(kill_timeout);
proc.stderr.destroy(); proc.stderr.destroy();
proc.stdout.destroy(); proc.stdout.destroy();
@ -237,15 +283,15 @@ class Job {
const code_files = const code_files =
(this.runtime.language === 'file' && this.files) || (this.runtime.language === 'file' && this.files) ||
this.files.filter(file => file.encoding == 'utf8'); this.files.filter((file: File) => file.encoding == 'utf8');
this.logger.debug('Compiling'); this.logger.debug('Compiling');
let compile; let compile: unknown;
if (this.runtime.compiled) { if (this.runtime.compiled) {
compile = await this.safe_call( compile = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'), join(this.runtime.pkgdir, 'compile'),
code_files.map(x => x.name), code_files.map(x => x.name),
this.timeouts.compile, this.timeouts.compile,
this.memory_limits.compile this.memory_limits.compile
@ -255,7 +301,7 @@ class Job {
this.logger.debug('Running'); this.logger.debug('Running');
const run = await this.safe_call( const run = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'), join(this.runtime.pkgdir, 'run'),
[code_files[0].name, ...this.args], [code_files[0].name, ...this.args],
this.timeouts.run, this.timeouts.run,
this.memory_limits.run this.memory_limits.run
@ -271,7 +317,7 @@ class Job {
}; };
} }
async execute_interactive(eventBus) { async execute_interactive(eventBus: EventEmitter) {
if (this.state !== job_states.PRIMED) { if (this.state !== job_states.PRIMED) {
throw new Error( throw new Error(
'Job must be in primed state, current state: ' + 'Job must be in primed state, current state: ' +
@ -290,7 +336,7 @@ class Job {
if (this.runtime.compiled) { if (this.runtime.compiled) {
eventBus.emit('stage', 'compile'); eventBus.emit('stage', 'compile');
const { error, code, signal } = await this.safe_call( const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'compile'), join(this.runtime.pkgdir, 'compile'),
code_files.map(x => x.name), code_files.map(x => x.name),
this.timeouts.compile, this.timeouts.compile,
this.memory_limits.compile, this.memory_limits.compile,
@ -303,7 +349,7 @@ class Job {
this.logger.debug('Running'); this.logger.debug('Running');
eventBus.emit('stage', 'run'); eventBus.emit('stage', 'run');
const { error, code, signal } = await this.safe_call( const { error, code, signal } = await this.safe_call(
path.join(this.runtime.pkgdir, 'run'), join(this.runtime.pkgdir, 'run'),
[code_files[0].name, ...this.args], [code_files[0].name, ...this.args],
this.timeouts.run, this.timeouts.run,
this.memory_limits.run, this.memory_limits.run,
@ -316,42 +362,43 @@ class Job {
} }
cleanup_processes(dont_wait = []) { cleanup_processes(dont_wait = []) {
let processes = [1]; let processes: number[] = [1];
const to_wait = []; const to_wait = [];
this.logger.debug(`Cleaning up processes`); this.logger.debug(`Cleaning up processes`);
while (processes.length > 0) { while (processes.length > 0) {
processes = []; processes = [];
const proc_ids = fss.readdir_sync('/proc'); const proc_ids = readdirSync('/proc');
processes = proc_ids.map(proc_id => { processes = proc_ids.map(proc_id => {
if (isNaN(proc_id)) return -1; if (isNaN(+proc_id)) return -1;
try { try {
const proc_status = fss.read_file_sync( const proc_status = readFileSync(
path.join('/proc', proc_id, 'status') join('/proc', proc_id, 'status')
); );
const proc_lines = proc_status.to_string().split('\n'); const proc_lines = proc_status.toString().split('\n');
const state_line = proc_lines.find(line => const state_line = proc_lines.find(line =>
line.starts_with('State:') line.startsWith('State:')
); );
const uid_line = proc_lines.find(line => const uid_line = proc_lines.find(line =>
line.starts_with('Uid:') line.startsWith('Uid:')
); );
const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/); const [_, ruid, euid, suid, fuid] = uid_line.split(/\s+/);
const [_1, state, user_friendly] = state_line.split(/\s+/); const [_1, state, user_friendly] = state_line.split(/\s+/);
const proc_id_int = parse_int(proc_id);
// Skip over any processes that aren't ours.
if(ruid != this.uid && euid != this.uid) return -1;
if (state == 'Z'){ const proc_id_int = parseInt(proc_id);
// Skip over any processes that aren't ours.
// @ts-ignore: dont want to risk fixing this
if (ruid != this.uid && euid != this.uid) return -1;
if (state == 'Z') {
// Zombie process, just needs to be waited, regardless of the user id // Zombie process, just needs to be waited, regardless of the user id
if(!to_wait.includes(proc_id_int)) if (!to_wait.includes(proc_id_int))
to_wait.push(proc_id_int); to_wait.push(proc_id_int);
return -1; return -1;
} }
// We should kill in all other state (Sleep, Stopped & Running) // We should kill in all other state (Sleep, Stopped & Running)
@ -386,7 +433,7 @@ class Job {
// Then clear them out of the process tree // Then clear them out of the process tree
try { try {
process.kill(proc, 'SIGKILL'); process.kill(proc, 'SIGKILL');
} catch(e) { } catch (e) {
// Could already be dead and just needs to be waited on // Could already be dead and just needs to be waited on
this.logger.debug( this.logger.debug(
`Got error while SIGKILLing process ${proc}:`, `Got error while SIGKILLing process ${proc}:`,
@ -413,16 +460,16 @@ class Job {
async cleanup_filesystem() { async cleanup_filesystem() {
for (const clean_path of globals.clean_directories) { for (const clean_path of globals.clean_directories) {
const contents = await fs.readdir(clean_path); const contents = await readdir(clean_path);
for (const file of contents) { for (const file of contents) {
const file_path = path.join(clean_path, file); const file_path = join(clean_path, file);
try { try {
const stat = await fs.stat(file_path); const stat = await _stat(file_path);
if (stat.uid === this.uid) { if (stat.uid === this.uid) {
await fs.rm(file_path, { await rm(file_path, {
recursive: true, recursive: true,
force: true, force: true,
}); });
@ -434,7 +481,7 @@ class Job {
} }
} }
await fs.rm(this.dir, { recursive: true, force: true }); await rm(this.dir, { recursive: true, force: true });
} }
async cleanup() { async cleanup() {
@ -446,7 +493,3 @@ class Job {
remaining_job_spaces++; remaining_job_spaces++;
} }
} }
module.exports = {
Job,
};

View File

@ -1,33 +1,46 @@
const logger = require('logplease').create('package'); import { create } from 'logplease';
const semver = require('semver'); import { parse, satisfies, rcompare, type SemVer } from 'semver';
const config = require('./config'); import config from './config.js';
const globals = require('./globals'); import * as globals from './globals.js';
const fetch = require('node-fetch'); import fetch from 'node-fetch';
const path = require('path'); import { join } from 'path';
const fs = require('fs/promises'); import { rm, mkdir, writeFile, rmdir } from 'fs/promises';
const fss = require('fs'); import { existsSync, createWriteStream, createReadStream } from 'fs';
const cp = require('child_process'); import { exec, spawn } from 'child_process';
const crypto = require('crypto'); import { createHash } from 'crypto';
const runtime = require('./runtime'); import { load_package, get_runtime_by_name_and_version } from './runtime.js';
const chownr = require('chownr'); import chownr from 'chownr';
const util = require('util'); import { promisify } from 'util';
const logger = create('package', {});
class Package { class Package {
constructor({ language, version, download, checksum }) { language: string;
version: SemVer;
checksum: string;
download: string;
constructor({
language,
version,
download,
checksum,
}: {
language: string;
version: string;
checksum: string;
download: string;
}) {
this.language = language; this.language = language;
this.version = semver.parse(version); this.version = parse(version);
this.checksum = checksum; this.checksum = checksum;
this.download = download; this.download = download;
} }
get installed() { get installed() {
return fss.exists_sync( return existsSync(join(this.install_path, globals.pkg_installed_file));
path.join(this.install_path, globals.pkg_installed_file)
);
} }
get install_path() { get install_path() {
return path.join( return join(
config.data_directory, config.data_directory,
globals.data_directories.packages, globals.data_directories.packages,
this.language, this.language,
@ -42,23 +55,24 @@ class Package {
logger.info(`Installing ${this.language}-${this.version.raw}`); logger.info(`Installing ${this.language}-${this.version.raw}`);
if (fss.exists_sync(this.install_path)) { if (existsSync(this.install_path)) {
logger.warn( logger.warn(
`${this.language}-${this.version.raw} has residual files. Removing them.` `${this.language}-${this.version.raw} has residual files. Removing them.`
); );
await fs.rm(this.install_path, { recursive: true, force: true }); await rm(this.install_path, { recursive: true, force: true });
} }
logger.debug(`Making directory ${this.install_path}`); logger.debug(`Making directory ${this.install_path}`);
await fs.mkdir(this.install_path, { recursive: true }); await mkdir(this.install_path, { recursive: true });
logger.debug( logger.debug(
`Downloading package from ${this.download} in to ${this.install_path}` `Downloading package from ${this.download} in to ${this.install_path}`
); );
const pkgpath = path.join(this.install_path, 'pkg.tar.gz'); const pkgpath = join(this.install_path, 'pkg.tar.gz');
const download = await fetch(this.download); // @ts-ignore
const download = (await fetch(this.download)) as fetch.Response;
const file_stream = fss.create_write_stream(pkgpath); const file_stream = createWriteStream(pkgpath);
await new Promise((resolve, reject) => { await new Promise((resolve, reject) => {
download.body.pipe(file_stream); download.body.pipe(file_stream);
download.body.on('error', reject); download.body.on('error', reject);
@ -68,10 +82,10 @@ class Package {
logger.debug('Validating checksums'); logger.debug('Validating checksums');
logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`); logger.debug(`Assert sha256(pkg.tar.gz) == ${this.checksum}`);
const hash = crypto.create_hash('sha256'); const hash = createHash('sha256');
const read_stream = fss.create_read_stream(pkgpath); const read_stream = createReadStream(pkgpath);
await new Promise((resolve, reject) => { await new Promise<void>((resolve, reject) => {
read_stream.on('data', chunk => hash.update(chunk)); read_stream.on('data', chunk => hash.update(chunk));
read_stream.on('end', () => resolve()); read_stream.on('end', () => resolve());
read_stream.on('error', error => reject(error)); read_stream.on('error', error => reject(error));
@ -89,8 +103,8 @@ class Package {
`Extracting package files from archive ${pkgpath} in to ${this.install_path}` `Extracting package files from archive ${pkgpath} in to ${this.install_path}`
); );
await new Promise((resolve, reject) => { await new Promise<void>((resolve, reject) => {
const proc = cp.exec( const proc = exec(
`bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'` `bash -c 'cd "${this.install_path}" && tar xzf ${pkgpath}'`
); );
@ -105,15 +119,15 @@ class Package {
}); });
logger.debug('Registering runtime'); logger.debug('Registering runtime');
runtime.load_package(this.install_path); load_package(this.install_path);
logger.debug('Caching environment'); logger.debug('Caching environment');
const get_env_command = `cd ${this.install_path}; source environment; env`; const get_env_command = `cd ${this.install_path}; source environment; env`;
const envout = await new Promise((resolve, reject) => { const envout = await new Promise<string>((resolve, reject) => {
let stdout = ''; let stdout = '';
const proc = cp.spawn( const proc = spawn(
'env', 'env',
['-i', 'bash', '-c', `${get_env_command}`], ['-i', 'bash', '-c', `${get_env_command}`],
{ {
@ -142,14 +156,14 @@ class Package {
) )
.join('\n'); .join('\n');
await fs.write_file(path.join(this.install_path, '.env'), filtered_env); await writeFile(join(this.install_path, '.env'), filtered_env);
logger.debug('Changing Ownership of package directory'); logger.debug('Changing Ownership of package directory');
await util.promisify(chownr)(this.install_path, 0, 0); await promisify(chownr)(this.install_path, 0, 0);
logger.debug('Writing installed state to disk'); logger.debug('Writing installed state to disk');
await fs.write_file( await writeFile(
path.join(this.install_path, globals.pkg_installed_file), join(this.install_path, globals.pkg_installed_file),
Date.now().toString() Date.now().toString()
); );
@ -161,11 +175,11 @@ class Package {
}; };
} }
async uninstall() { async uninstall(): Promise<{ language: string; version: string }> {
logger.info(`Uninstalling ${this.language}-${this.version.raw}`); logger.info(`Uninstalling ${this.language}-${this.version.raw}`);
logger.debug('Finding runtime'); logger.debug('Finding runtime');
const found_runtime = runtime.get_runtime_by_name_and_version( const found_runtime = get_runtime_by_name_and_version(
this.language, this.language,
this.version.raw this.version.raw
); );
@ -183,7 +197,7 @@ class Package {
found_runtime.unregister(); found_runtime.unregister();
logger.debug('Cleaning files from disk'); logger.debug('Cleaning files from disk');
await fs.rmdir(this.install_path, { recursive: true }); await rmdir(this.install_path, { recursive: true });
logger.info(`Uninstalled ${this.language}-${this.version.raw}`); logger.info(`Uninstalled ${this.language}-${this.version.raw}`);
@ -194,7 +208,10 @@ class Package {
} }
static async get_package_list() { static async get_package_list() {
const repo_content = await fetch(config.repo_url).then(x => x.text()); // @ts-ignore
const repo_content: string = await fetch(config.repo_url).then(
(x: fetch.Response) => x.text()
);
const entries = repo_content.split('\n').filter(x => x.length > 0); const entries = repo_content.split('\n').filter(x => x.length > 0);
@ -210,19 +227,17 @@ class Package {
}); });
} }
static async get_package(lang, version) { static async get_package(lang: string, version: string) {
const packages = await Package.get_package_list(); const packages = await Package.get_package_list();
const candidates = packages.filter(pkg => { const candidates = packages.filter(pkg => {
return ( return pkg.language == lang && satisfies(pkg.version, version);
pkg.language == lang && semver.satisfies(pkg.version, version)
);
}); });
candidates.sort((a, b) => semver.rcompare(a.version, b.version)); candidates.sort((a, b) => rcompare(a.version, b.version));
return candidates[0] || null; return candidates[0] || null;
} }
} }
module.exports = Package; export default Package;

View File

@ -1,44 +1,60 @@
const logger = require('logplease').create('runtime'); import { create } from 'logplease';
const semver = require('semver'); import { parse, satisfies, rcompare, type SemVer } from 'semver';
const config = require('./config'); import config from './config.js';
const globals = require('./globals'); import { platform } from './globals.js';
const fss = require('fs'); import { readFileSync, existsSync } from 'fs';
const path = require('path'); import { join } from 'path';
import { Limit, Limits, PackageInfo } from './types.js';
const runtimes = []; const logger = create('runtime', {});
class Runtime { export const runtimes: Runtime[] = [];
constructor({
language, export class Runtime {
version, language: string;
aliases, version: SemVer;
pkgdir, aliases: string[];
runtime, pkgdir: string;
timeouts, runtime?: any;
memory_limits, timeouts: { run: number; compile: number };
max_process_count, memory_limits: { run: number; compile: number };
max_open_files, max_process_count: number;
max_file_size, max_open_files: number;
output_max_size, max_file_size: number;
output_max_size: number;
_compiled?: boolean;
_env_vars?: Record<string, any>;
constructor(o: {
language: string;
version: SemVer;
aliases: string[];
pkgdir: string;
runtime?: any;
timeouts: { run: number; compile: number };
memory_limits: { run: number; compile: number };
max_process_count: number;
max_open_files: number;
max_file_size: number;
output_max_size: number;
}) { }) {
this.language = language; this.language = o.language;
this.version = version; this.version = o.version;
this.aliases = aliases || []; this.aliases = o.aliases || [];
this.pkgdir = pkgdir; this.pkgdir = o.pkgdir;
this.runtime = runtime; this.runtime = o.runtime;
this.timeouts = timeouts; this.timeouts = o.timeouts;
this.memory_limits = memory_limits; this.memory_limits = o.memory_limits;
this.max_process_count = max_process_count; this.max_process_count = o.max_process_count;
this.max_open_files = max_open_files; this.max_open_files = o.max_open_files;
this.max_file_size = max_file_size; this.max_file_size = o.max_file_size;
this.output_max_size = output_max_size; this.output_max_size = o.output_max_size;
} }
static compute_single_limit( static compute_single_limit(
language_name, language_name: string,
limit_name, limit_name: Limit,
language_limit_overrides language_limit_overrides: Limits
) { ): number {
return ( return (
(config.limit_overrides[language_name] && (config.limit_overrides[language_name] &&
config.limit_overrides[language_name][limit_name]) || config.limit_overrides[language_name][limit_name]) ||
@ -48,7 +64,10 @@ class Runtime {
); );
} }
static compute_all_limits(language_name, language_limit_overrides) { static compute_all_limits(
language_name: string,
language_limit_overrides: Limits
) {
return { return {
timeouts: { timeouts: {
compile: this.compute_single_limit( compile: this.compute_single_limit(
@ -97,25 +116,25 @@ class Runtime {
}; };
} }
static load_package(package_dir) { static load_package(package_dir: string) {
let info = JSON.parse( let info = JSON.parse(
fss.read_file_sync(path.join(package_dir, 'pkg-info.json')) readFileSync(join(package_dir, 'pkg-info.json'), 'utf8')
); ) as PackageInfo;
let { let {
language, language,
version, version: _version,
build_platform, build_platform,
aliases, aliases,
provides, provides,
limit_overrides, limit_overrides,
} = info; } = info;
version = semver.parse(version); const version = parse(_version);
if (build_platform !== globals.platform) { if (build_platform !== platform) {
logger.warn( logger.warn(
`Package ${language}-${version} was built for platform ${build_platform}, ` + `Package ${language}-${version} was built for platform ${build_platform}, ` +
`but our platform is ${globals.platform}` `but our platform is ${platform}`
); );
} }
@ -153,7 +172,7 @@ class Runtime {
get compiled() { get compiled() {
if (this._compiled === undefined) { if (this._compiled === undefined) {
this._compiled = fss.exists_sync(path.join(this.pkgdir, 'compile')); this._compiled = existsSync(join(this.pkgdir, 'compile'));
} }
return this._compiled; return this._compiled;
@ -161,8 +180,8 @@ class Runtime {
get env_vars() { get env_vars() {
if (!this._env_vars) { if (!this._env_vars) {
const env_file = path.join(this.pkgdir, '.env'); const env_file = join(this.pkgdir, '.env');
const env_content = fss.read_file_sync(env_file).toString(); const env_content = readFileSync(env_file).toString();
this._env_vars = {}; this._env_vars = {};
@ -188,31 +207,32 @@ class Runtime {
} }
} }
module.exports = runtimes; export function get_runtimes_matching_language_version(
module.exports.Runtime = Runtime; lang: string,
module.exports.get_runtimes_matching_language_version = function (lang, ver) { ver: string | import('semver/classes/range.js')
) {
return runtimes.filter( return runtimes.filter(
rt => rt =>
(rt.language == lang || rt.aliases.includes(lang)) && (rt.language == lang || rt.aliases.includes(lang)) &&
semver.satisfies(rt.version, ver) satisfies(rt.version, ver)
); );
}; }
module.exports.get_latest_runtime_matching_language_version = function ( export function get_latest_runtime_matching_language_version(
lang, lang: string,
ver ver: string
) { ) {
return module.exports return get_runtimes_matching_language_version(lang, ver).sort((a, b) =>
.get_runtimes_matching_language_version(lang, ver) rcompare(a.version, b.version)
.sort((a, b) => semver.rcompare(a.version, b.version))[0]; )[0];
}; }
module.exports.get_runtime_by_name_and_version = function (runtime, ver) { export function get_runtime_by_name_and_version(runtime: string, ver: string) {
return runtimes.find( return runtimes.find(
rt => rt =>
(rt.runtime == runtime || (rt.runtime == runtime ||
(rt.runtime === undefined && rt.language == runtime)) && (rt.runtime === undefined && rt.language == runtime)) &&
semver.satisfies(rt.version, ver) satisfies(rt.version, ver)
); );
}; }
module.exports.load_package = Runtime.load_package; export const load_package = Runtime.load_package;

63
api/src/types.ts Normal file
View File

@ -0,0 +1,63 @@
/**
 * Descriptor for a language package as listed in the package repository
 * index (consumed by `Package.get_package_list` and `Runtime.load_package`).
 */
export interface Metadata {
    /** Primary language name; runtimes are matched by this name or an alias. */
    language: string;
    /** Version string; parsed with semver elsewhere (e.g. `parse(_version)`). */
    version: string;
    /** Alternative names the language may be referred to by. */
    aliases?: string[];
    /** Map of dependency package names to version specifiers, if any. */
    dependencies?: Record<string, string>;
    /**
     * Additional languages this package provides (e.g. a toolchain shipping
     * several front-ends), each with its own aliases and limit overrides.
     */
    provides: {
        language: string;
        aliases: string[];
        // NOTE(review): required here but optional on Metadata itself —
        // confirm repo entries always include it inside `provides`.
        limit_overrides: Limits;
    }[];
    /** Per-language resource-limit overrides for this package, if any. */
    limit_overrides?: Limits;
}
/**
 * Names of the configurable per-language resource limits, as looked up by
 * `Runtime.compute_single_limit` / `compute_all_limits`.
 */
export type Limit =
    | 'compile_timeout'
    | 'compile_memory_limit'
    | 'max_process_count'
    | 'max_open_files'
    | 'max_file_size'
    | 'output_max_size'
    | 'run_memory_limit'
    | 'run_timeout';
export type Limits = Record<Limit, number>;
/** Minimal identifying pair for a language runtime: name plus version. */
export type LanguageMetadata = {
    language: string;
    version: string;
};
export type PackageInfo = Metadata & { build_platform: string };
/** A single source file submitted for execution. */
export type File = {
    /** File contents, encoded according to `encoding`. */
    content: string;
    /** Optional file name — presumably defaulted by the server when omitted; TODO confirm. */
    name?: string;
    /** Transfer encoding of `content` — presumably utf8 when omitted; TODO confirm. */
    encoding?: 'base64' | 'hex' | 'utf8';
};
/**
 * Body of an execute request: which runtime to use, the files to run,
 * optional stdin/args, plus optional per-request resource-limit overrides
 * (any subset of `Limits` may be supplied inline).
 */
export type RequestBody = {
    /** Language name or alias selecting the runtime. */
    language: string;
    /** Semver selector for the runtime version. */
    version: string;
    /** Files to execute; order is presumably significant (entry file first) — TODO confirm. */
    files: Array<File>;
    /** Data piped to the process's standard input. */
    stdin?: string;
    /** Command-line arguments passed to the program. */
    args?: Array<string>;
} & Partial<Limits>;
/** Body of an execute response. */
export interface ResponseBody {
    /** Language the code was run as. */
    language: string;
    /** Concrete runtime version used. */
    version: string;
    /** Result of the run stage. */
    run: {
        stdout: string;
        stderr: string;
        /** Presumably interleaved stdout+stderr in emission order — TODO confirm against runner. */
        output: string;
        /** Process exit code. NOTE(review): may be null when killed by a signal — confirm. */
        code: number;
        /** Signal that terminated the process, if any. */
        signal?: NodeJS.Signals;
    };
}
/**
 * Given a record whose values are themselves records containing property
 * `Key`, project that property out of every value:
 * `ObjectType<T, K>[P]` is `T[P][K]`.
 *
 * The constraints use `PropertyKey`/`unknown` instead of `any`
 * (`keyof any` is exactly `PropertyKey`, and any property type is
 * assignable to `unknown`, so the accepted type arguments are unchanged).
 */
export type ObjectType<
    TObject extends Record<PropertyKey, Record<Key, unknown>>,
    Key extends string
> = {
    [K in keyof TObject]: TObject[K][Key];
};

16
api/tsconfig.json Normal file
View File

@ -0,0 +1,16 @@
{
    "compilerOptions": {
        "allowJs": true,
        "checkJs": true,
        "resolveJsonModule": true,
        "target": "ES2022",
        // "moduleResolution": "nodenext" requires "module": "NodeNext"
        // (tsc rejects the ES2022/nodenext combination with error TS5110).
        // NodeNext also matches the package.json "type": "module" setting
        // and the '.js'-suffixed ESM import specifiers used in the sources.
        "module": "NodeNext",
        "moduleResolution": "nodenext",
        "allowSyntheticDefaultImports": true,
        "outDir": "./dist",
        "rootDir": "src",
        // "declaration": true
    },
    "include": ["src"],
    "exclude": ["dist", "node_modules"]
}

View File

@ -27,7 +27,7 @@ const msg_format = {
exports.handler = async ({ axios, packages }) => { exports.handler = async ({ axios, packages }) => {
const requests = packages.map(package => split_package(package)); const requests = packages.map(package => split_package(package));
for (request of requests) { for (const request of requests) {
try { try {
const install = await axios.post(`/api/v2/packages`, request); const install = await axios.post(`/api/v2/packages`, request);