From f2cdf78a26305969181b394f5be4c45bd4101363 Mon Sep 17 00:00:00 2001 From: zwiterrion Date: Thu, 7 Dec 2023 19:40:53 +0100 Subject: [PATCH] fix tests - add postgres support - swap of logger - add pg queue --- .gitignore | 1 + server/configuration.js | 5 + server/datastores/api.js | 26 ++- server/datastores/index.js | 2 +- server/datastores/postgres.js | 125 +++++++++++--- server/datastores/s3.js | 65 +++---- server/index.js | 13 +- server/logger.js | 18 +- server/package.json | 2 +- server/routers/plugins.js | 29 +++- server/routers/public.js | 4 +- server/routers/wasm.js | 3 +- server/security/middlewares.js | 5 +- server/services/compiler/compiler.js | 27 +-- server/services/cron-job.js | 10 +- server/services/file-system.js | 14 +- server/services/queue.js | 22 ++- server/services/websocket.js | 6 +- .../tests/authentication/basic_auth.test.js | 14 +- .../authentication/otoroshi_auth.test.js | 21 ++- server/tests/functional/no_storage.test.js | 17 +- server/tests/functional/s3.test.js | 16 +- server/tests/functional/s3_pg.test.js | 37 ++-- server/yarn.lock | 158 ++++++++++++++++-- ui/src/ReleasesMenu.js | 14 +- ui/src/Terminal.js | 17 +- 26 files changed, 494 insertions(+), 177 deletions(-) diff --git a/.gitignore b/.gitignore index 86a4a74..9988cf3 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,4 @@ cli/go cli/rust cli/ts *.wasmo +*.log \ No newline at end of file diff --git a/server/configuration.js b/server/configuration.js index 81a63df..1b6dd79 100644 --- a/server/configuration.js +++ b/server/configuration.js @@ -54,6 +54,11 @@ module.exports = { POOL_SIZE: process.env.PG_POOL_SIZE || 20, PG_IDLE_TIMEOUT_MILLIS: process.env.PG_IDLE_TIMEOUT_MILLIS || 30000, CONNECTION_TIMEOUT_MILLIS: process.env.PG_CONNECTION_TIMEOUT_MILLIS || 2000, + }, + + LOGGER: { + FILE: process.env.LOGGER_FILE || false, + LEVEL: process.env.LOGGER_LEVEL || 'info' } } } \ No newline at end of file diff --git a/server/datastores/api.js b/server/datastores/api.js index cf94db3..8859ab0 100644 --- a/server/datastores/api.js +++ b/server/datastores/api.js @@ -3,7 +3,7 @@ module.exports = class Datastore { /** * Initialize the datastore */ - async initialize() { console.log('initialize') } + async initialize() { Promise.resolve() } /** * Get current user */ @@ -119,4 +119,28 @@ module.exports = class Datastore { * @param {string} newName */ patchPluginName = (email, pluginId, newName) => Promise.resolve() + + /** + * Check if a job with this id is running + * @param {string} pluginId + */ + isJobRunning = pluginId => Promise.resolve() + + /** + * Clean up all legacy tasks on startup + */ + cleanJobs = () => Promise.resolve() + + /** + * Remove specific job from datastore + * @param {string} pluginId + */ + removeJob = pluginId => Promise.resolve() + + /** + * Get the minimum of time to wait before running the plugin + * @param {string} pluginId + * @returns {Promise} + */ + waitingTimeBeforeNextRun = pluginId => Promise.resolve(-1) }; \ No newline at end of file diff --git a/server/datastores/index.js b/server/datastores/index.js index 2044497..5346048 100644 --- a/server/datastores/index.js +++ b/server/datastores/index.js @@ -9,7 +9,7 @@ let datastore; if ([STORAGE.S3, STORAGE.DOCKER_S3].includes(ENV.STORAGE)) { datastore = new S3Datastore(); -} else if (STORAGE.DOCKER_S3_POSTGRES, STORAGE.S3_POSTGRES) { +} else if ([STORAGE.DOCKER_S3_POSTGRES, STORAGE.S3_POSTGRES].includes(ENV.STORAGE)) { datastore = new PgDatastore(new S3Datastore()); } else { datastore = new Datastore() diff --git 
a/server/datastores/postgres.js b/server/datastores/postgres.js index e9e4ab6..80fa2ac 100644 --- a/server/datastores/postgres.js +++ b/server/datastores/postgres.js @@ -1,25 +1,14 @@ const crypto = require('crypto') +const cron = require('node-cron'); -const { format } = require('../utils'); const { ENV } = require("../configuration"); const Datastore = require('./api'); const S3Datastore = require('./s3'); const { Pool } = require('pg'); -const manager = require("../logger"); -const log = manager.createLogger('PG'); - -const configuration = { - host: ENV.PG.HOST, - port: ENV.PG.PORT, - database: ENV.PG.DATABASE, - user: ENV.PG.USER, - password: ENV.PG.PASSWORD, - max: ENV.PG.POOL_SIZE, - idleTimeoutMillis: 30000, - connectionTimeoutMillis: 2000, -} +const logger = require("../logger"); +const { isAString } = require('../utils'); /** * Class representing PG. @@ -42,7 +31,7 @@ module.exports = class PgDatastore extends Datastore { initialize = async () => { - log.info("Initialize pg client"); + logger.info("Initialize pg client"); await this.#sourcesDatastore.initialize(); @@ -56,12 +45,15 @@ module.exports = class PgDatastore extends Datastore { idleTimeoutMillis: 30000, connectionTimeoutMillis: 2000, }) - .on('error', err => log.error(err)); + .on('error', err => logger.error(err)); return this.#pool.connect() .then(client => { - client.query("CREATE TABLE IF NOT EXISTS users(id SERIAL, email VARCHAR, content JSONB)",) + Promise.all([ + client.query("CREATE TABLE IF NOT EXISTS users(id SERIAL, email VARCHAR, content JSONB)"), + client.query("CREATE TABLE IF NOT EXISTS jobs(id SERIAL, pluginId VARCHAR UNIQUE, created_at TIMESTAMP default current_timestamp)"), + ]) .then(() => client.release()) }) } @@ -79,7 +71,7 @@ module.exports = class PgDatastore extends Datastore { getUserPlugins = (email) => { return this.getUser(email) .then(user => user ? 
user.plugins : []) - .catch(err => log.error(err)) + .catch(err => logger.error(err)) } createUserIfNotExists = async (email) => { @@ -127,7 +119,46 @@ module.exports = class PgDatastore extends Datastore { } putWasmInformationsToS3 = (email, pluginId, newHash, generateWasmName) => { - return this.#sourcesDatastore.putWasmInformationsToS3(email, pluginId, newHash, generateWasmName); + return this.getUser(email) + .then(data => { + const newPlugins = data.plugins.map(plugin => { + if (plugin.pluginId !== pluginId) { + return plugin; + } + let versions = plugin.versions || []; + + // convert legacy array + if (versions.length > 0 && isAString(versions[0])) { + versions = versions.map(name => ({ name })) + } + + const index = versions.findIndex(item => item.name === generateWasmName); + if (index === -1) + versions.push({ + name: generateWasmName, + updated_at: Date.now(), + creator: email + }) + else { + versions[index] = { + ...versions[index], + updated_at: Date.now(), + creator: email + } + } + + return { + ...plugin, + last_hash: newHash, + wasm: generateWasmName, + versions + } + }); + return this.updateUser(email, { + ...data, + plugins: newPlugins + }) + }) } getWasm = (wasmId) => { @@ -158,8 +189,8 @@ module.exports = class PgDatastore extends Datastore { }) } - getConfigurations = (email, pluginId) => { - const plugin = this.#getPlugin(email, pluginId); + getConfigurations = async (email, pluginId) => { + const plugin = await this.#getPlugin(email, pluginId); const files = [{ ext: 'json', @@ -288,4 +319,56 @@ module.exports = class PgDatastore extends Datastore { }) })) } + + isJobRunning = pluginId => { + return this.#pool.connect() + .then(client => { + return client.query("INSERT INTO jobs(pluginId) VALUES($1) on conflict (pluginId) do nothing", [pluginId]) + .then(res => { + client.release() + + return res.rowCount === 0; + }) + }) + } + + cleanJobs = () => { + cron.schedule('0 */1 * * * *', this.cleanJobsRunner); + this.cleanJobsRunner() + } + + cleanJobsRunner = () => { + return this.#pool.connect() + .then(client => { + return client.query(`DELETE from jobs WHERE created_at < NOW() - make_interval(mins => 1)`) + .then(() => { + client.release() + }) + }) + } + + removeJob = pluginId => { + return this.#pool.connect() + .then(client => { + return client.query("DELETE FROM jobs WHERE pluginId = $1", [pluginId]) + .then(() => client.release()); + }); + } + + waitingTimeBeforeNextRun = pluginId => { + return this.#pool.connect() + .then(client => { + return client.query("SELECT created_at FROM jobs WHERE pluginId = $1", [pluginId]) + .then(res => { + client.release(); + + if (res.rowCount === 0) + return null + + const interval = 5 * 60 * 1000 - (new Date().getTime() - new Date(res.rows[0]?.created_at).getTime()); + + return interval > 0 ? interval : 0; + }); + }); + } }; \ No newline at end of file diff --git a/server/datastores/s3.js b/server/datastores/s3.js index 4f7db09..4a0b1b5 100644 --- a/server/datastores/s3.js +++ b/server/datastores/s3.js @@ -18,12 +18,10 @@ const fs = require('fs-extra'); const { format, isAString } = require('../utils'); const Datastore = require('./api'); const { ENV, STORAGE } = require("../configuration"); -const manager = require("../logger"); +const logger = require("../logger"); const consumers = require('node:stream/consumers'); const AdmZip = require("adm-zip"); -const log = manager.createLogger('S3'); - /** * Class representing S3.
* @extends Datastore @@ -39,21 +37,21 @@ module.exports = class S3Datastore extends Datastore { return this.#state.instance.send(new HeadBucketCommand(params)) .then(() => { - log.info("Using existing bucket") + logger.info("Using existing bucket") }) .catch(res => { if (res || res.$metadata.httpStatusCode === 404 || res.$metadata.httpStatusCode === 403 || res.$metadata.httpStatusCode === 400) { - log.info(`Bucket ${this.#state.Bucket} is missing.`) + logger.info(`Bucket ${this.#state.Bucket} is missing.`) return new Promise(resolve => { this.#state.instance.send(new CreateBucketCommand(params), err => { if (err) { - console.log("Failed to create missing bucket") - console.log(err) - // process.exit(1) + logger.error("Failed to create missing bucket") + logger.error(err) + process.exit(1) } else { - log.info(`Bucket ${this.#state.Bucket} created.`) + logger.info(`Bucket ${this.#state.Bucket} created.`) resolve() } }); @@ -66,17 +64,17 @@ module.exports = class S3Datastore extends Datastore { initialize = async () => { if (!ENV.S3_BUCKET) { - console.log("[S3 INITIALIZATION](failed): S3 Bucket is missing"); + logger.error("[S3 INITIALIZATION](failed): S3 Bucket is missing"); process.exit(1); } - log.info("Initialize s3 client"); + logger.info("Initialize s3 client"); if (ENV.STORAGE === STORAGE.DOCKER_S3 || ENV.STORAGE === STORAGE.DOCKER_S3_POSTGRES) { const URL = url.parse(ENV.S3_ENDPOINT); const ip = await new Promise(resolve => dns.lookup(URL.hostname, (_, ip) => resolve(ip))); - log.debug(`${URL.protocol}//${ip}:${URL.port}${URL.pathname}`) + logger.debug(`${URL.protocol}//${ip}:${URL.port}${URL.pathname}`) this.#state = { instance: new S3Client({ region: ENV.AWS_DEFAULT_REGION, @@ -95,7 +93,7 @@ module.exports = class S3Datastore extends Datastore { Bucket: ENV.S3_BUCKET } - log.info("Bucket initialized"); + logger.info("Bucket initialized"); } return this.#createBucketIfMissing(); @@ -188,7 +186,7 @@ module.exports = class S3Datastore extends Datastore { Key: 'users.json' })) .then(data => new fetch.Response(data.Body).json()) - .catch(err => log.error(err)) + .catch(err => logger.error(err)) } updateUser = (email, content) => { @@ -300,25 +298,32 @@ module.exports = class S3Datastore extends Datastore { }) } - getWasm = (wasmId) => { + #getWasm = async name => { const { instance, Bucket } = this.#state; - return new Promise(resolve => { - instance.send(new GetObjectCommand({ + try { + const data = await instance.send(new GetObjectCommand({ Bucket, - wasmId + Key: name })) - .then(data => new fetch.Response(data.Body).buffer()) - .then(data => { - resolve({ content: data }); - }) - .catch(err => { - resolve({ - error: err.Code, - status: err?.$metadata?.httpStatusCode || 404 - }) - }); - }); + const content = await new fetch.Response(data.Body).buffer(); + return { content }; + } catch (err) { + return { + error: err.Code, + status: err?.$metadata?.httpStatusCode || 404 + } + } + } + + getWasm = (wasmId) => { + return this.#getWasm(wasmId) + .then(out => { + if (out.error) { + return this.#getWasm(wasmId.replace('.wasm', '')); + } else + return out; + }) } run = (wasm, { input, functionName, wasi }) => { @@ -452,7 +457,7 @@ module.exports = class S3Datastore extends Datastore { } return instance.send(new DeleteObjectCommand(params)) - .catch(err => { log.error(err) }); + .catch(err => { logger.error(err) }); } deletePlugin = (email, pluginId) => { diff --git a/server/index.js b/server/index.js index 9f277ec..3fc3583 100644 --- a/server/index.js +++ b/server/index.js @@ -20,9 +20,8 @@ 
const { Security } = require('./security/middlewares'); const Datastore = require('./datastores'); -const manager = require('./logger'); +const logger = require('./logger'); const { Cron } = require('./services/cron-job'); -const log = manager.createLogger(''); if (ENV.AUTH_MODE === AUTHENTICATION.NO_AUTH) { console.log("###############################################################") @@ -56,10 +55,9 @@ function createServer(appVersion) { app.use(bodyParser.urlencoded({ extended: true })); app.use(bodyParser.text()); - // app.use('*', (req, res, next) => { - // console.log(req.headers) - // next() - // }) + app.use('/_/healthcheck', (_, res) => { + return res.status(200).json() + }); app.use('/', Security.extractUserFromQuery); app.use('/', publicRouter); @@ -89,6 +87,7 @@ Promise.all([Datastore.initialize(), getAppVersion()]) } FileSystem.cleanBuildsAndLogsFolders(); + Datastore.cleanJobs(); Cron.initialize(); @@ -96,5 +95,5 @@ Promise.all([Datastore.initialize(), getAppVersion()]) WebSocket.createLogsWebSocket(server); - server.listen(ENV.PORT, () => log.info(`Wasmo ${version}, listening on ${ENV.PORT}`)); + server.listen(ENV.PORT, () => logger.info(`Wasmo ${version}, listening on ${ENV.PORT}`)); }); diff --git a/server/logger.js b/server/logger.js index ea86bde..e3584c6 100644 --- a/server/logger.js +++ b/server/logger.js @@ -1,4 +1,16 @@ -const manager = require('simple-node-logger') - .createLogManager(); +const { createLogger, transports, format } = require("winston"); +const { ENV } = require("./configuration"); -module.exports = manager; \ No newline at end of file +const logger = createLogger({ + level: ENV.LOGGER.LEVEL, + format: format.combine( + format.splat(), + format.simple() + ), + transports: [ + new transports.Console(), + ENV.LOGGER.FILE ? 
new transports.File({ filename: 'combined.log' }) : undefined + ].filter(f => f) +}); + +module.exports = logger; \ No newline at end of file diff --git a/server/package.json b/server/package.json index 8ff2531..a997e9e 100644 --- a/server/package.json +++ b/server/package.json @@ -23,9 +23,9 @@ "node-fetch": "^2.6.9", "pako": "^2.1.0", "pg": "^8.11.3", - "simple-node-logger": "^21.8.12", "testcontainers": "^10.3.2", "toml": "^3.0.0", + "winston": "^3.11.0", "ws": "^8.14.2" }, "devDependencies": { diff --git a/server/routers/plugins.js b/server/routers/plugins.js index 1145881..6a7b238 100644 --- a/server/routers/plugins.js +++ b/server/routers/plugins.js @@ -62,6 +62,16 @@ router.get('/:id', (req, res) => { }) }) +router.get('/:id/state', (req, res) => { + Datastore.waitingTimeBeforeNextRun(req.params.id) + .then(msTime => { + if (msTime === null) { + res.status(404) + } + res.json(msTime) + }); +}) + router.get('/:id/configurations', (req, res) => { Datastore.getConfigurations(req.user.email, req.params.id) .then(data => res.json(data)) @@ -203,19 +213,23 @@ router.post('/build', async (req, res) => { res, "zipHashToTest", metadata.release, - saveInLocal + saveInLocal, + pluginId ); }); }); }) -function addPluginToBuildQueue(folder, plugin, req, res, zipHash, release, saveInLocal) { +function addPluginToBuildQueue(folder, plugin, req, res, zipHash, release, saveInLocal, pluginId) { FileSystem.checkIfInformationsFileExists(folder, plugin.type) .then(() => InformationsReader.extractInformations(folder, plugin.type)) .then(({ pluginName, pluginVersion, metadata, err }) => { if (err) { WebSocket.emitError(plugin.pluginId, release, err); - FileSystem.removeFolder('build', folder) + Promise.all([ + Datastore.removeJob(pluginId), + FileSystem.removeFolder('build', folder) + ]) .then(() => { res .status(400) @@ -230,7 +244,10 @@ function addPluginToBuildQueue(folder, plugin, req, res, zipHash, release, saveI Datastore.isWasmExists(wasmName, release) .then(exists => { if (exists) { - FileSystem.removeFolder('build', folder) + Promise.all([ + Datastore.removeJob(pluginId), + FileSystem.removeFolder('build', folder) + ]) .then(() => { res .status(400) @@ -293,7 +310,7 @@ router.post('/:id/build', async (req, res) => { const isRustBuild = plugin.type == 'rust'; - Queue.isBuildRunning(pluginId) + Queue.isJobRunning(pluginId) .then(async exists => { if (exists) { res.json({ queue_id: pluginId, alreadyExists: true }); @@ -324,7 +341,7 @@ router.post('/:id/build', async (req, res) => { .digest('hex'); if (release || plugin['last_hash'] !== zipHash) { - addPluginToBuildQueue(folder, plugin, req, res, zipHash, release) + addPluginToBuildQueue(folder, plugin, req, res, zipHash, release, undefined, pluginId) } else { FileSystem.removeFolder('build', folder) .then(() => { diff --git a/server/routers/public.js b/server/routers/public.js index 90cc3a5..d4b5729 100644 --- a/server/routers/public.js +++ b/server/routers/public.js @@ -15,9 +15,7 @@ const auth = (req, res, next) => { } const forbiddenAccess = (req, res, next) => { - console.log(DOMAINS.includes(req.headers.host)) - console.log(req.headers.host) - console.log(DOMAINS) + logger.debug(`Received host: ${req.headers.host} - available domains: ${JSON.stringify(DOMAINS, null, 2)}`) res .status(403) .json({ diff --git a/server/routers/wasm.js b/server/routers/wasm.js index 12d0bae..090f93e 100644 --- a/server/routers/wasm.js +++ b/server/routers/wasm.js @@ -8,9 +8,10 @@ router.get('/runtime', (_, res) => res.json(ENV.EXTISM_RUNTIME_ENVIRONMENT === ' 
router.get('/:id', (req, res) => { const Key = `${req.params.id}.wasm`; + Datastore.getWasm(Key) .then(({ content, error, status }) => { - if (error) { + if (error || status === 404) { res.status(status).json({ error, status }) } else { res.attachment(Key); diff --git a/server/security/middlewares.js b/server/security/middlewares.js index 0293224..29d5f53 100644 --- a/server/security/middlewares.js +++ b/server/security/middlewares.js @@ -1,5 +1,6 @@ const jwt = require('jsonwebtoken'); const { ENV, AUTHENTICATION } = require('../configuration'); +const logger = require('../logger'); const secret = ENV.OTOROSHI_TOKEN_SECRET || 'veryverysecret'; @@ -20,11 +21,11 @@ const otoroshiAuthentication = (req, res, next) => { } next() } catch (err) { - console.log(err) + logger.debug(err) missingCredentials(res) } } else { - console.log(`Missing jwt user`, jwtUser) + logger.debug('missing or wrong jwt') missingCredentials(res) } } diff --git a/server/services/compiler/compiler.js b/server/services/compiler/compiler.js index db2f657..ebcaf50 100644 --- a/server/services/compiler/compiler.js +++ b/server/services/compiler/compiler.js @@ -2,7 +2,7 @@ const { spawn } = require('child_process'); const path = require('path'); const fs = require('fs-extra'); -const manager = require('../../logger'); +const logger = require('../../logger'); const { WebSocket } = require('../../services/websocket'); const { FileSystem } = require('../file-system'); const { optimizeBinaryFile } = require('../wasmgc'); @@ -55,7 +55,6 @@ class BuildOptions { class Compiler { constructor({ name, commands, options, outputWasmFolder }) { - this.log = manager.createLogger(`[${name} BUILDER]`); this.options = options; this.commands = this.splitCommands(commands); @@ -113,17 +112,18 @@ class Compiler { }); child.on('error', (error) => { - console.log(error) + logger.error(`[${buildOptions.plugin?.id}] ${error}`) this.#websocketEmitMessage(buildOptions, error, true); stderrStream.write(`${error.stack}\n`); }); } - #handleCloseEvent = (buildOptions, closeCode, isLastCommand, { justToNext, onAllSuccess, onChildFailure }) => { + #handleCloseEvent = (buildOptions, closeCode, isLastCommand, { runNextCommand, onAllSuccess, onChildFailure }) => { const childProcessHasFailed = closeCode !== 0; if (childProcessHasFailed) { this.#handleChildFailure([buildOptions.buildFolder, buildOptions.logsFolder], closeCode, onChildFailure); + Datastore.removeJob(buildOptions.plugin.id) } else if (isLastCommand) { this.#websocketEmitMessage(buildOptions, "Build done."); this.#onSuccess(buildOptions, { @@ -131,7 +131,7 @@ class Compiler { handleFailure: onChildFailure }); } else { - justToNext(); + runNextCommand(); } } @@ -156,14 +156,19 @@ class Compiler { .then(() => this.#websocketEmitMessage(buildOptions, "Informations has been updated")) ])) .then(() => { - FileSystem.cleanFolders(buildOptions.buildFolder, buildOptions.logsFolder) - .then(callback) + return Promise.all([ + Datastore.removeJob(buildOptions.plugin.id), + FileSystem.cleanFolders(buildOptions.buildFolder, buildOptions.logsFolder) + .then(callback) + ]) }) }) .catch(err => { - this.log.error(`Build failed: ${err}`) + logger.error(`[${buildOptions.plugin.id}] ${err}`) this.#websocketEmitMessage(buildOptions, err, true); this.#handleChildFailure([buildOptions.buildFolder, buildOptions.logsFolder], -1, handleFailure) + + Datastore.removeJob(buildOptions.plugin.id); }); } @@ -174,7 +179,7 @@ class Compiler { build(rawBuildOptions) { const buildOptions = { ...rawBuildOptions }; - 
this.log.info(`Starting build ${buildOptions.folderPath}`) + logger.info(`[${rawBuildOptions.plugin.id}] Starting build`) const root = process.cwd(); @@ -189,7 +194,7 @@ class Compiler { this.#websocketEmitMessage(buildOptions, 'Starting build ...'); return this.commands - .reduce((promise, fn, index) => promise.then(() => new Promise(justToNext => { + .reduce((promise, fn, index) => promise.then(() => new Promise(runNextCommand => { const { executable, args } = fn; this.#websocketEmitMessage(buildOptions, `Running command ${executable} ${args.join(' ')} ...`); @@ -199,7 +204,7 @@ class Compiler { buildOptions, code, this.commands.length - 1 === index, - { justToNext, onAllSuccess, onChildFailure }, + { runNextCommand, onAllSuccess, onChildFailure }, )); this.#attachListeners(childProcess, buildOptions); diff --git a/server/services/cron-job.js b/server/services/cron-job.js index 48650ee..564adf2 100644 --- a/server/services/cron-job.js +++ b/server/services/cron-job.js @@ -1,14 +1,12 @@ const cron = require('node-cron'); -const manager = require('../logger'); +const logger = require('../logger'); const fs = require('fs-extra'); const path = require('path'); const { ENV } = require('../configuration'); -const log = manager.createLogger('CRON'); - const cleaningWasm = () => { - log.info("Start cleaning wasm folder"); + logger.debug("[CRON-JOB]: Start cleaning wasm folder"); const root = path.join(process.cwd(), 'wasm'); @@ -20,13 +18,13 @@ const cleaningWasm = () => { return fs.stat(filepath) .then(data => { if (Date.now() - data.birthtimeMs >= ENV.LOCAL_WASM_JOB_CLEANING) { - log.info(`Remove ${filepath}`) + logger.info(`[CRON-JOB]: Remove ${filepath}`) fs.unlink(filepath) } }) }))) .then(() => { - log.info("End cleaning"); + logger.debug("[CRON-JOB]: End cleaning"); }) } diff --git a/server/services/file-system.js b/server/services/file-system.js index 8dea1d5..1528cd2 100644 --- a/server/services/file-system.js +++ b/server/services/file-system.js @@ -2,6 +2,7 @@ const fs = require('fs-extra'); const path = require('path'); const { INFORMATIONS_FILENAME } = require('../utils'); const AdmZip = require('adm-zip'); +const logger = require('../logger'); const createBuildFolder = (type, name) => { if (['rust', 'js', 'ts'].includes(type)) { @@ -11,7 +12,7 @@ const createBuildFolder = (type, name) => { path.join(process.cwd(), 'build', name), err => { if (err) { - console.log('An error occured while copying the folder.') + logger.error('An error occured while copying the folder.') throw err } resolve(name) @@ -48,10 +49,17 @@ const cleanBuildsAndLogsFolders = async () => { path.join(process.cwd(), "build"), path.join(process.cwd(), "logs"), ].map((folder, i) => { - return fs.readdir(folder, (_, files) => { + return fs.readdir(folder, async (_, files) => { const deletedFiles = (files || []).filter(file => !file.startsWith('.')); - return cleanFolders(...deletedFiles.map(file => path.join(process.cwd(), i === 0 ? "build" : "logs", file))); + const out = await Promise.all(deletedFiles.map(async file => { + const filepath = path.join(process.cwd(), i === 0 ? 
"build" : "logs", file) + if (Date.now() - (await fs.stat(filepath)).birthtimeMs > 120000) + return filepath + return undefined + })).then(data => data.filter(f => f)) + + return cleanFolders(...out); }); })) } diff --git a/server/services/queue.js b/server/services/queue.js index 8b54cc5..effa2c7 100644 --- a/server/services/queue.js +++ b/server/services/queue.js @@ -1,5 +1,5 @@ const { WebSocket } = require('../services/websocket'); -const manager = require('../logger'); +const logger = require('../logger'); const { FileSystem } = require('./file-system'); @@ -9,7 +9,8 @@ const rustCompiler = require('./compiler/rust'); const opaCompiler = require('./compiler/opa'); const { BuildOptions, CompilerOptions } = require('./compiler/compiler'); -const { ENV } = require('../configuration'); +const { ENV, STORAGE } = require('../configuration'); +const Datastore = require('../datastores'); const COMPILERS = { 'js': JsCompiler, @@ -19,15 +20,15 @@ const COMPILERS = { 'opa': opaCompiler }; -const log = manager.createLogger('BUILDER'); - let running = 0; const queue = []; const MAX_JOBS = ENV.MANAGER_MAX_PARALLEL_JOBS || 2; const loop = () => { - log.info(`Running jobs: ${running} - BuildingJob size: ${queue.length}`) + logger.info(`Running jobs: ${running}`); + logger.info(`Queue length: ${queue.length}`); + if (running < MAX_JOBS && queue.length > 0) { running += 1; @@ -59,7 +60,7 @@ const loop = () => { loop() }) .catch(err => { - log.error(err) + logger.error(err) WebSocket.emitError(nextBuild.plugin, "JOB", err) running -= 1; loop() @@ -79,6 +80,13 @@ const addBuildToQueue = props => { module.exports = { Queue: { addBuildToQueue, - isBuildRunning: folder => FileSystem.buildFolderAlreadyExits('build', folder) + isJobRunning: pluginId => { + return FileSystem.buildFolderAlreadyExits('build', pluginId) + .then(() => { + if (ENV.STORAGE.includes("POSTGRES")) { + return Datastore.isJobRunning(pluginId) + } + }) + } } } \ No newline at end of file diff --git a/server/services/websocket.js b/server/services/websocket.js index 9ac733d..f04a622 100644 --- a/server/services/websocket.js +++ b/server/services/websocket.js @@ -13,9 +13,9 @@ const createLogsWebSocket = server => { clients[request.url.slice(1)] = ws; - ws.on('message', function message(data) { - console.log(`Received message ${data} from user ${client}`); - }); + // ws.on('message', function message(data) { + // console.log(`Received message ${data} from user ${client}`); + // }); ws.on('close', () => { delete clients[request.url.slice(1)]; diff --git a/server/tests/authentication/basic_auth.test.js b/server/tests/authentication/basic_auth.test.js index 1fa63da..1734000 100644 --- a/server/tests/authentication/basic_auth.test.js +++ b/server/tests/authentication/basic_auth.test.js @@ -1,4 +1,4 @@ -const { GenericContainer, Network } = require("testcontainers"); +const { GenericContainer, Network, Wait } = require("testcontainers"); let instance; let container; @@ -21,16 +21,18 @@ beforeAll(async () => { WASMO_CLIENT_ID, WASMO_CLIENT_SECRET }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 5003) + .forStatusCodeMatching(statusCode => statusCode === 200)) .start() - .catch(err => console.log(err)) instance = `http://localhost:${container.getFirstMappedPort()}`; - - await new Promise(resolve => { - setTimeout(resolve, 10000); - }) }, 60000); + +afterAll(() => { + container?.stop() +}) + test('/health', async () => { return fetch(`${instance}/health`) .then(r => expect(r.status).toBe(401)) diff --git 
a/server/tests/authentication/otoroshi_auth.test.js b/server/tests/authentication/otoroshi_auth.test.js index f545632..706bc81 100644 --- a/server/tests/authentication/otoroshi_auth.test.js +++ b/server/tests/authentication/otoroshi_auth.test.js @@ -1,4 +1,4 @@ -const { GenericContainer, Network } = require("testcontainers"); +const { GenericContainer, Network, Wait } = require("testcontainers"); const wasmo_route = require('./otoroshi_entities/route-wasm-manager-1701851138888.json'); const authentication_module = require('./otoroshi_entities/authentication-module-new-auth-module-1701851199519.json'); @@ -21,6 +21,8 @@ beforeAll(async () => { OTOROSHI_INITIAL_ADMIN_PASSWORD: 'password', OTOROSHI_INITIAL_ADMIN_LOGIN: 'admin@otoroshi.io' }) + .withWaitStrategy(Wait.forHttp("/live", 8080) + .forStatusCodeMatching(statusCode => statusCode === 404)) .start() container = await new GenericContainer("wasmo") @@ -30,18 +32,25 @@ beforeAll(async () => { MANAGER_PORT: 5003, AUTH_MODE: "OTOROSHI_AUTH", CHECK_DOMAINS: false, - STORAGE: 'test', WASMO_CLIENT_ID, WASMO_CLIENT_SECRET }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 5003) + .forStatusCodeMatching(statusCode => statusCode === 200)) .start() instance = `http://localhost:${container.getFirstMappedPort()}`; - await new Promise(resolve => { - setTimeout(resolve, 10000); - }) -}, 60000); + // await new Promise(resolve => { + // setTimeout(resolve, 10000); + // }) +}, 15000); + + +afterAll(() => { + otoroshi?.stop() + container?.stop() +}) test('setup otoroshi', async () => { const _createdRoute = await fetch(`http://otoroshi-api.oto.tools:${otoroshi.getFirstMappedPort()}/api/routes`, { diff --git a/server/tests/functional/no_storage.test.js b/server/tests/functional/no_storage.test.js index d13d3b0..b93366b 100644 --- a/server/tests/functional/no_storage.test.js +++ b/server/tests/functional/no_storage.test.js @@ -1,4 +1,4 @@ -const { GenericContainer, Network } = require("testcontainers"); +const { GenericContainer, Network, Wait } = require("testcontainers"); let instance; let container; @@ -7,9 +7,6 @@ beforeAll(async () => { const network = await new Network().start(); container = await new GenericContainer("wasmo") - // .withLogConsumer(stream => { - // stream.on('data', chunk => console.log(Buffer.from(chunk).toString().replace('\n', ''))) - // }) .withNetwork(network) .withNetworkAliases("foo") .withExposedPorts(5001) @@ -20,14 +17,16 @@ beforeAll(async () => { WASMO_CLIENT_ID: "id", WASMO_CLIENT_SECRET: "secret" }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 5001) + .forStatusCodeMatching(statusCode => statusCode === 200)) .start() - .catch(err => console.log(err)) instance = `http://localhost:${container.getFirstMappedPort()}`; - await new Promise(resolve => { - setTimeout(resolve, 10000); - }) -}, 60000); +}); + +afterAll(() => { + container?.stop() +}) test('/health', async () => { return fetch(`${instance}/health`) diff --git a/server/tests/functional/s3.test.js b/server/tests/functional/s3.test.js index 8828ab1..bcd98f1 100644 --- a/server/tests/functional/s3.test.js +++ b/server/tests/functional/s3.test.js @@ -1,4 +1,4 @@ -const { GenericContainer, Network } = require("testcontainers"); +const { GenericContainer, Network, Wait } = require("testcontainers"); let instance; let s3; @@ -9,12 +9,13 @@ beforeAll(async () => { s3 = await new GenericContainer("scality/s3server:latest") .withNetwork(network) - .withName("s3") .withExposedPorts(8000) .withEnvironment({ SCALITY_ACCESS_KEY_ID: 'access_key', 
SCALITY_SECRET_ACCESS_KEY: 'secret' }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 8000) + .forStatusCodeMatching(statusCode => statusCode === 403)) .start(); container = await new GenericContainer("wasmo") @@ -33,15 +34,18 @@ beforeAll(async () => { STORAGE: "DOCKER_S3", S3_BUCKET: "wasmo", }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 5002) + .forStatusCodeMatching(statusCode => statusCode === 200)) .start(); instance = `http://localhost:${container.getFirstMappedPort()}`; - - await new Promise(resolve => { - setTimeout(resolve, 10000); - }) }, 60000); +afterAll(() => { + s3?.stop() + container?.stop() +}) + test('/api/plugins', () => { return fetch(`${instance}/api/plugins`) .then(r => { diff --git a/server/tests/functional/s3_pg.test.js b/server/tests/functional/s3_pg.test.js index e669459..800f0e8 100644 --- a/server/tests/functional/s3_pg.test.js +++ b/server/tests/functional/s3_pg.test.js @@ -1,21 +1,25 @@ -const { GenericContainer, Network } = require("testcontainers"); +const { GenericContainer, Network, Wait } = require("testcontainers"); let instance; +let s3; +let pg; +let container; beforeAll(async () => { const network = await new Network().start(); - const s3 = await new GenericContainer("scality/s3server:latest") + s3 = await new GenericContainer("scality/s3server:latest") .withNetwork(network) - .withName("s3") .withExposedPorts(8000) .withEnvironment({ SCALITY_ACCESS_KEY_ID: 'access_key', SCALITY_SECRET_ACCESS_KEY: 'secret' }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 8000) + .forStatusCodeMatching(statusCode => statusCode === 403)) .start(); - const pg = await new GenericContainer("postgres:13") + pg = await new GenericContainer("postgres:13") .withNetwork(network) .withExposedPorts(5432) .withEnvironment({ @@ -24,25 +28,25 @@ beforeAll(async () => { }) .start() - const container = await new GenericContainer("wasmo") + container = await new GenericContainer("wasmo") .withNetwork(network) .withExposedPorts(5004) .withEnvironment({ MANAGER_PORT: 5004, AUTH_MODE: "NO_AUTH", STORAGE: "DOCKER_S3_POSTGRES", - + CHECK_DOMAINS: false, - + WASMO_CLIENT_ID: "id", WASMO_CLIENT_SECRET: "secret", - + AWS_ACCESS_KEY_ID: "access_key", AWS_SECRET_ACCESS_KEY: "secret", S3_ENDPOINT: `http://host.docker.internal:${s3.getFirstMappedPort()}`, S3_FORCE_PATH_STYLE: true, S3_BUCKET: "wasmo", - + PG_HOST: "host.docker.internal", PG_PORT: pg.getFirstMappedPort(), PG_DATABASE: "wasmo", @@ -52,15 +56,24 @@ beforeAll(async () => { PG_IDLE_TIMEOUT_MILLIS: 30000, PG_CONNECTION_TIMEOUT_MILLIS: 2000 }) + .withWaitStrategy(Wait.forHttp("/_/healthcheck", 5004) + .forStatusCodeMatching(statusCode => statusCode === 200)) .start(); instance = `http://localhost:${container.getFirstMappedPort()}`; - await new Promise(resolve => { - setTimeout(resolve, 10000); - }) + // await new Promise(resolve => { + // setTimeout(resolve, 10000); + // }) }, 60000); + +afterAll(() => { + s3?.stop() + pg?.stop() + container?.stop() +}) + test('/api/plugins', () => { return fetch(`${instance}/api/plugins`) .then(r => { diff --git a/server/yarn.lock b/server/yarn.lock index fbc461b..61abd43 100644 --- a/server/yarn.lock +++ b/server/yarn.lock @@ -881,6 +881,20 @@ resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== +"@colors/colors@1.6.0", "@colors/colors@^1.6.0": + version "1.6.0" + resolved 
"https://registry.yarnpkg.com/@colors/colors/-/colors-1.6.0.tgz#ec6cd237440700bc23ca23087f513c75508958b0" + integrity sha512-Ir+AOibqzrIsL6ajt3Rz3LskB7OiMVHqltZmspbW/TJuTVuyOMirVqAkjfY6JISiLHgyNqicAC8AyHHGzNd/dA== + +"@dabh/diagnostics@^2.0.2": + version "2.0.3" + resolved "https://registry.yarnpkg.com/@dabh/diagnostics/-/diagnostics-2.0.3.tgz#7f7e97ee9a725dffc7808d93668cc984e1dc477a" + integrity sha512-hrlQOIi7hAfzsMqlGSFyVucrx38O+j6wiGOf//H2ecvIEqYN4ADBSS2iLMh5UFyDunCNniUIPk/q3riFv45xRA== + dependencies: + colorspace "1.1.x" + enabled "2.0.x" + kuler "^2.0.0" + "@extism/extism@^0.2.0": version "0.2.0" resolved "https://registry.yarnpkg.com/@extism/extism/-/extism-0.2.0.tgz#2a5ffab708b2f9441c67d9ee9ef4aa1c5b887502" @@ -1720,6 +1734,11 @@ resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.3.tgz#6209321eb2c1712a7e7466422b8cb1fc0d9dd5d8" integrity sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw== +"@types/triple-beam@^1.3.2": + version "1.3.5" + resolved "https://registry.yarnpkg.com/@types/triple-beam/-/triple-beam-1.3.5.tgz#74fef9ffbaa198eb8b588be029f38b00299caa2c" + integrity sha512-6WaYesThRMCl19iryMYP7/x2OVgCtbIVflDGFpWnb9irXI3UjYE4AzmYuiUKY1AJstGijoY+MgUszMgRxIYTYw== + "@types/yargs-parser@*": version "21.0.3" resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15" @@ -1866,7 +1885,7 @@ async-lock@^1.4.0: resolved "https://registry.yarnpkg.com/async-lock/-/async-lock-1.4.0.tgz#c8b6630eff68fbbdd8a5b6eb763dac3bfbb8bf02" integrity sha512-coglx5yIWuetakm3/1dsX9hxCNox22h7+V80RQOu2XUUMidtArxKoZoOtHUPuR84SycKTXzgGzAUR5hJxujyJQ== -async@^3.2.4: +async@^3.2.3, async@^3.2.4: version "3.2.5" resolved "https://registry.yarnpkg.com/async/-/async-3.2.5.tgz#ebd52a8fdaf7a2289a24df399f8d8485c8a46b66" integrity sha512-baNZyqaaLhyLVKm/DlvdW051MSgO6b8eVfIezl9E5PqWxFgzLm/wQntEW4zOytVburDEr0JlALEpdOFwvErLsg== @@ -2194,7 +2213,7 @@ collect-v8-coverage@^1.0.0: resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.2.tgz#c0b29bcd33bcd0779a1344c2136051e6afd3d9e9" integrity sha512-lHl4d5/ONEbLlJvaJNtsF/Lz+WvB07u2ycqTYbdrq7UypDXailES4valYb2eWiJFxZlVmpGekfqoxQhzyFdT4Q== -color-convert@^1.9.0: +color-convert@^1.9.0, color-convert@^1.9.3: version "1.9.3" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== @@ -2213,11 +2232,35 @@ color-name@1.1.3: resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" integrity sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw== -color-name@~1.1.4: +color-name@^1.0.0, color-name@~1.1.4: version "1.1.4" resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== +color-string@^1.6.0: + version "1.9.1" + resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.9.1.tgz#4467f9146f036f855b764dfb5bf8582bf342c7a4" + integrity sha512-shrVawQFojnZv6xM40anx4CkoDP+fZsw/ZerEMsW/pyzsRbElpsL/DBVW7q3ExxwusdNXI3lXpuhEZkzs8p5Eg== + dependencies: + color-name "^1.0.0" + simple-swizzle "^0.2.2" + +color@^3.1.3: + version "3.2.1" + resolved 
"https://registry.yarnpkg.com/color/-/color-3.2.1.tgz#3544dc198caf4490c3ecc9a790b54fe9ff45e164" + integrity sha512-aBl7dZI9ENN6fUGC7mWpMTPNHmWUSNan9tuWN6ahh5ZLNk9baLJOnSMlrQkHcrfFgz2/RigjUVAjdx36VcemKA== + dependencies: + color-convert "^1.9.3" + color-string "^1.6.0" + +colorspace@1.1.x: + version "1.1.4" + resolved "https://registry.yarnpkg.com/colorspace/-/colorspace-1.1.4.tgz#8d442d1186152f60453bf8070cd66eb364e59243" + integrity sha512-BgvKJiuVu1igBUF2kEjRCZXol6wiiGbY5ipL/oVPwm0BL9sIpMIzM8IK7vwuxIIzOXMV3Ey5w+vxhm0rR/TN8w== + dependencies: + color "^3.1.3" + text-hex "1.0.x" + compress-commons@^4.1.2: version "4.1.2" resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-4.1.2.tgz#6542e59cb63e1f46a8b21b0e06f9a32e4c8b06df" @@ -2445,6 +2488,11 @@ emoji-regex@^8.0.0: resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== +enabled@2.0.x: + version "2.0.0" + resolved "https://registry.yarnpkg.com/enabled/-/enabled-2.0.0.tgz#f9dd92ec2d6f4bbc0d5d1e64e21d61cd4665e7c2" + integrity sha512-AKrN98kuwOzMIdAizXGI86UFBoo26CL21UM763y1h/GMSJ4/OHU9k2YlsmBpyScFo/wbLzWQJBMCW4+IO3/+OQ== + encodeurl@~1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" @@ -2619,6 +2667,11 @@ fb-watchman@^2.0.0: dependencies: bser "2.1.1" +fecha@^4.2.0: + version "4.2.3" + resolved "https://registry.yarnpkg.com/fecha/-/fecha-4.2.3.tgz#4d9ccdbc61e8629b259fdca67e65891448d569fd" + integrity sha512-OP2IUU6HeYKJi3i0z4A19kHMQoLVs4Hc+DPqqxI2h/DPZHTm/vjsfC6P0b4jCMy14XizLBqvndQ+UilD7707Jw== + ffi-napi@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/ffi-napi/-/ffi-napi-4.0.3.tgz#27a8d42a8ea938457154895c59761fbf1a10f441" @@ -2659,6 +2712,11 @@ find-up@^4.0.0, find-up@^4.1.0: locate-path "^5.0.0" path-exists "^4.0.0" +fn.name@1.x.x: + version "1.1.0" + resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc" + integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw== + forwarded@0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811" @@ -2880,6 +2938,11 @@ is-arrayish@^0.2.1: resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" integrity sha512-zz06S8t0ozoDXMG+ube26zeCTNXcKIPJZJi8hBrF4idCLms4CG9QtK7qBl1boi5ODzFpjswb5JPmHCbMpjaYzg== +is-arrayish@^0.3.1: + version "0.3.2" + resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" + integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== + is-binary-path@~2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" @@ -3422,6 +3485,11 @@ kleur@^3.0.3: resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== +kuler@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/kuler/-/kuler-2.0.0.tgz#e2c570a3800388fb44407e851531c1d670b061b3" + integrity 
sha512-Xq9nH7KlWZmXAtodXDDRE7vs6DU1gTU8zYDHDiWLSip45Egwq3plLHzPn27NgvzL2r1LMPC1vdqh98sQxtqj4A== + lazystream@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/lazystream/-/lazystream-1.0.1.tgz#494c831062f1f9408251ec44db1cba29242a2638" @@ -3501,10 +3569,17 @@ lodash.union@^4.6.0: resolved "https://registry.yarnpkg.com/lodash.union/-/lodash.union-4.6.0.tgz#48bb5088409f16f1821666641c44dd1aaae3cd88" integrity sha512-c4pB2CdGrGdjMKYLA+XiRDO7Y0PRQbm/Gzg8qMj+QH+pFVAoTp5sBpO0odL3FjoPCGjK96p6qsP+yQoiLoOBcw== -lodash@^4.17.12: - version "4.17.21" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" - integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== +logform@^2.3.2, logform@^2.4.0: + version "2.6.0" + resolved "https://registry.yarnpkg.com/logform/-/logform-2.6.0.tgz#8c82a983f05d6eaeb2d75e3decae7a768b2bf9b5" + integrity sha512-1ulHeNPp6k/LD8H91o7VYFBng5i1BDE7HoKxVbZiGFidS1Rj65qcywLxX+pVfAPoQJEjRdvKcusKwOupHCVOVQ== + dependencies: + "@colors/colors" "1.6.0" + "@types/triple-beam" "^1.3.2" + fecha "^4.2.0" + ms "^2.1.1" + safe-stable-stringify "^2.3.1" + triple-beam "^1.3.0" lru-cache@^5.1.1: version "5.1.1" @@ -3608,11 +3683,6 @@ mkdirp@^1.0.4: resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-1.0.4.tgz#3eb5ed62622756d79a5f0e2a221dfebad75c2f7e" integrity sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw== -moment@^2.20.1: - version "2.29.4" - resolved "https://registry.yarnpkg.com/moment/-/moment-2.29.4.tgz#3dbe052889fe7c1b2ed966fcb3a77328964ef108" - integrity sha512-5LC9SOxjSc2HF6vO2CyuTDNivEdoz2IvyJJGj6X8DJ0eFyfszE0QiEd+iXmBvUP3WHxSjFH/vIsA0EN00cgr8w== - ms@2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" @@ -3741,6 +3811,13 @@ once@^1.3.0, once@^1.3.1, once@^1.4.0: dependencies: wrappy "1" +one-time@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/one-time/-/one-time-1.0.0.tgz#e06bc174aed214ed58edede573b433bbf827cb45" + integrity sha512-5DXOiRKwuSEcQ/l0kGCF6Q3jcADFv5tSmRaJck/OqkVFcOzutB134KRSfF0xDrL39MNnqxbHBbUUcjZIhTgb2g== + dependencies: + fn.name "1.x.x" + onetime@^5.1.2: version "5.1.2" resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" @@ -4153,6 +4230,11 @@ safe-buffer@5.2.1, safe-buffer@^5.0.1, safe-buffer@~5.2.0: resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== +safe-stable-stringify@^2.3.1: + version "2.4.3" + resolved "https://registry.yarnpkg.com/safe-stable-stringify/-/safe-stable-stringify-2.4.3.tgz#138c84b6f6edb3db5f8ef3ef7115b8f55ccbf886" + integrity sha512-e2bDA2WJT0wxseVd4lsDP4+3ONX6HpMXQa1ZhFQ7SU+GjvORCmShbCMltrtIDfkYhVHrOcPtj+KhmDBdPdZD1g== + "safer-buffer@>= 2.1.2 < 3", safer-buffer@~2.1.0: version "2.1.2" resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" @@ -4240,13 +4322,12 @@ signal-exit@^3.0.2, signal-exit@^3.0.3, signal-exit@^3.0.7: resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.7.tgz#a9a1767f8af84155114eaabd73f99273c8f59ad9" integrity sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ== -simple-node-logger@^21.8.12: - version "21.8.12" - resolved 
"https://registry.yarnpkg.com/simple-node-logger/-/simple-node-logger-21.8.12.tgz#51cea08e17ed0139a78f1f1a7397fa5e756767e8" - integrity sha512-RPImnYDq3jdUjaTvYLghaF1n65Dd0LV8hdZtlT0X1NZBAkw+lx0ZJtFydcUyYKjg0Yxd27AW9IAIc3OLhTjBzA== +simple-swizzle@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" + integrity sha512-JA//kQgZtbuY83m+xT+tXJkmJncGMTFT+C+g2h2R9uxkYIrE2yy9sgmcLhCnw57/WSD+Eh3J97FPEDFnbXnDUg== dependencies: - lodash "^4.17.12" - moment "^2.20.1" + is-arrayish "^0.3.1" simple-update-notifier@^1.0.7: version "1.1.0" @@ -4312,6 +4393,11 @@ ssh2@^1.11.0, ssh2@^1.4.0: cpu-features "~0.0.8" nan "^2.17.0" +stack-trace@0.0.x: + version "0.0.10" + resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" + integrity sha512-KGzahc7puUKkzyMt+IqAep+TVNbKP+k2Lmwhub39m1AsTSkaDutx56aDCo+HLDzf/D26BIHTJWNiTG1KAJiQCg== + stack-utils@^2.0.3: version "2.0.6" resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f" @@ -4485,6 +4571,11 @@ testcontainers@^10.3.2: tar-fs "^3.0.4" tmp "^0.2.1" +text-hex@1.0.x: + version "1.0.0" + resolved "https://registry.yarnpkg.com/text-hex/-/text-hex-1.0.0.tgz#69dc9c1b17446ee79a92bf5b884bb4b9127506f5" + integrity sha512-uuVGNWzgJ4yhRaNSiubPY7OjISw4sw4E5Uv0wbjp+OzcbmVU/rsT8ujgcXJhn9ypzsgr5vlzpPqP+MBBKcGvbg== + tmp@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.2.1.tgz#8457fc3037dcf4719c251367a1af6500ee1ccf14" @@ -4531,6 +4622,11 @@ tr46@~0.0.3: resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== +triple-beam@^1.3.0: + version "1.4.1" + resolved "https://registry.yarnpkg.com/triple-beam/-/triple-beam-1.4.1.tgz#6fde70271dc6e5d73ca0c3b24e2d92afb7441984" + integrity sha512-aZbgViZrg1QNcG+LULa7nhZpJTZSLm/mXnHXnbAbjmN5aSa0y7V+wvv6+4WaBtpISJzThKy+PIPxc1Nq1EJ9mg== + tslib@^1.11.1: version "1.14.1" resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" @@ -4663,6 +4759,32 @@ which@^2.0.1: dependencies: isexe "^2.0.0" +winston-transport@^4.5.0: + version "4.6.0" + resolved "https://registry.yarnpkg.com/winston-transport/-/winston-transport-4.6.0.tgz#f1c1a665ad1b366df72199e27892721832a19e1b" + integrity sha512-wbBA9PbPAHxKiygo7ub7BYRiKxms0tpfU2ljtWzb3SjRjv5yl6Ozuy/TkXf00HTAt+Uylo3gSkNwzc4ME0wiIg== + dependencies: + logform "^2.3.2" + readable-stream "^3.6.0" + triple-beam "^1.3.0" + +winston@^3.11.0: + version "3.11.0" + resolved "https://registry.yarnpkg.com/winston/-/winston-3.11.0.tgz#2d50b0a695a2758bb1c95279f0a88e858163ed91" + integrity sha512-L3yR6/MzZAOl0DsysUXHVjOwv8mKZ71TrA/41EIduGpOOV5LQVodqN+QdQ6BS6PJ/RdIshZhq84P/fStEZkk7g== + dependencies: + "@colors/colors" "^1.6.0" + "@dabh/diagnostics" "^2.0.2" + async "^3.2.3" + is-stream "^2.0.0" + logform "^2.4.0" + one-time "^1.0.0" + readable-stream "^3.4.0" + safe-stable-stringify "^2.3.1" + stack-trace "0.0.x" + triple-beam "^1.3.0" + winston-transport "^4.5.0" + wrap-ansi@^7.0.0: version "7.0.0" resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-7.0.0.tgz#67e145cff510a6a6984bdf1152911d69d2eb9e43" diff --git a/ui/src/ReleasesMenu.js b/ui/src/ReleasesMenu.js index 0a40e5b..56109a4 100644 --- a/ui/src/ReleasesMenu.js +++ b/ui/src/ReleasesMenu.js @@ -26,11 +26,21 @@ 
export default function ReleasesMenu({ files }) { style={{ marginLeft: 12, cursor: 'pointer' }} key={key} onClick={() => { + const anchorElement = document.createElement('a'); + document.body.appendChild(anchorElement); + anchorElement.style.display = 'none'; + Service.getWasmRelease(key) .then(res => res.blob()) .then(blob => { - const file = URL.createObjectURL(blob); - window.location.assign(file); + const url = window.URL.createObjectURL(blob); + + anchorElement.href = url; + anchorElement.download = `${key}.wasm`; + anchorElement.click(); + + window.URL.revokeObjectURL(url); + document.body.removeChild(anchorElement); }) }}>
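Note on the ReleasesMenu.js hunk above: replacing window.location.assign(objectUrl) with a hidden anchor element lets the browser save the release under a readable `${key}.wasm` filename instead of navigating to an opaque blob: URL, and the object URL is revoked once the click has fired. A minimal sketch of the same pattern as a standalone helper; the name downloadBlob is illustrative and not part of this patch:

// Illustrative helper (not in the patch): trigger a browser download for a Blob.
function downloadBlob(blob, filename) {
  const url = window.URL.createObjectURL(blob);
  const anchor = document.createElement('a');
  anchor.style.display = 'none';
  anchor.href = url;
  anchor.download = filename;
  document.body.appendChild(anchor);
  anchor.click();
  window.URL.revokeObjectURL(url);
  document.body.removeChild(anchor);
}

// Usage mirroring the onClick handler above:
// Service.getWasmRelease(key).then(res => res.blob()).then(blob => downloadBlob(blob, `${key}.wasm`));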
diff --git a/ui/src/Terminal.js b/ui/src/Terminal.js index c77f056..afc18cf 100644 --- a/ui/src/Terminal.js +++ b/ui/src/Terminal.js @@ -17,9 +17,8 @@ function Terminal({ sizeTerminal, toggleResizingTerminal, changeTerminalSize, se } }, [loadConfigurationFile]); - const connect = (isDevelopment, attempts) => { + const connect = (isDevelopment) => { let socket; - let internalAttempts = attempts; if (isDevelopment) { socket = new WebSocket(`ws://${window.location.hostname}:5001/${selectedPlugin.pluginId}`); @@ -28,7 +27,6 @@ function Terminal({ sizeTerminal, toggleResizingTerminal, changeTerminalSize, se } socket.onopen = () => { - internalAttempts = 1; } socket.onmessage = event => { @@ -53,14 +51,9 @@ function Terminal({ sizeTerminal, toggleResizingTerminal, changeTerminalSize, se socket.onclose = function (e) { console.log('Socket is closed. Reconnect will be attempted in 1 second.', e.reason); - if (internalAttempts <= 5) { - console.log(1000 * (internalAttempts)) - setTimeout(function () { - connect(isDevelopment, internalAttempts + 1); - }, 1000 * (internalAttempts)); - } else { - console.log('Reconnect attempts have failed. You need to reload the page'); - } + setTimeout(function () { + connect(isDevelopment); + }, 5000); }; socket.onerror = function (err) { @@ -73,7 +66,7 @@ function Terminal({ sizeTerminal, toggleResizingTerminal, changeTerminalSize, se if (selectedPlugin) { isDevelopmentMode() .then(isDevelopment => { - connect(isDevelopment, 1) + connect(isDevelopment) }) } }, [selectedPlugin?.pluginId]);
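End-of-patch note on the new jobs table: the Postgres datastore uses it as a cross-instance build lock. isJobRunning issues INSERT ... ON CONFLICT (pluginId) DO NOTHING and reports "already running" when no row was inserted, removeJob deletes the row when a build finishes or fails, the cleanJobs cron purges rows older than one minute as a safety net against crashed builders, and waitingTimeBeforeNextRun exposes the remaining share of a five-minute window through the new /:id/state route in server/routers/plugins.js. Below is a minimal sketch of the acquire/release round trip against that table; it assumes a node-postgres Pool picked up from the standard PG* environment variables, and the helper names acquireBuildLock and releaseBuildLock are illustrative, not part of the patch:

const { Pool } = require('pg');

// Assumed connection setup: with no arguments, pg reads PGHOST/PGPORT/PGDATABASE/PGUSER/PGPASSWORD.
// The server itself builds its pool from ENV.PG in server/datastores/postgres.js.
const pool = new Pool();

// Try to take the build lock for a plugin. The patch's isJobRunning returns the opposite
// boolean (true when the row already existed); here true means "lock acquired".
async function acquireBuildLock(pluginId) {
  const res = await pool.query(
    "INSERT INTO jobs(pluginId) VALUES($1) ON CONFLICT (pluginId) DO NOTHING",
    [pluginId]
  );
  return res.rowCount === 1;
}

// Release the lock once the build has completed or failed, as removeJob does.
async function releaseBuildLock(pluginId) {
  return pool.query("DELETE FROM jobs WHERE pluginId = $1", [pluginId]);
}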