diff --git a/hosting/proxy/nginx.prod.conf b/hosting/proxy/nginx.prod.conf index dce1a71918..001a08a9a6 100644 --- a/hosting/proxy/nginx.prod.conf +++ b/hosting/proxy/nginx.prod.conf @@ -55,7 +55,7 @@ http { set $csp_style "style-src 'self' 'unsafe-inline' https://cdn.jsdelivr.net https://fonts.googleapis.com https://rsms.me https://maxcdn.bootstrapcdn.com"; set $csp_object "object-src 'none'"; set $csp_base_uri "base-uri 'self'"; - set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com"; + set $csp_connect "connect-src 'self' https://*.budibase.net https://api-iam.intercom.io 
https://api-iam.intercom.io https://api-ping.intercom.io https://app.posthog.com wss://nexus-websocket-a.intercom.io wss://nexus-websocket-b.intercom.io https://nexus-websocket-a.intercom.io https://nexus-websocket-b.intercom.io https://uploads.intercomcdn.com https://uploads.intercomusercontent.com https://*.amazonaws.com https://*.s3.amazonaws.com https://*.s3.us-east-2.amazonaws.com https://*.s3.us-east-1.amazonaws.com https://*.s3.us-west-1.amazonaws.com https://*.s3.us-west-2.amazonaws.com https://*.s3.af-south-1.amazonaws.com https://*.s3.ap-east-1.amazonaws.com https://*.s3.ap-southeast-3.amazonaws.com https://*.s3.ap-south-1.amazonaws.com https://*.s3.ap-northeast-3.amazonaws.com https://*.s3.ap-northeast-2.amazonaws.com https://*.s3.ap-southeast-1.amazonaws.com https://*.s3.ap-southeast-2.amazonaws.com https://*.s3.ap-northeast-1.amazonaws.com https://*.s3.ca-central-1.amazonaws.com https://*.s3.cn-north-1.amazonaws.com https://*.s3.cn-northwest-1.amazonaws.com https://*.s3.eu-central-1.amazonaws.com https://*.s3.eu-west-1.amazonaws.com https://*.s3.eu-west-2.amazonaws.com https://*.s3.eu-south-1.amazonaws.com https://*.s3.eu-west-3.amazonaws.com https://*.s3.eu-north-1.amazonaws.com https://*.s3.sa-east-1.amazonaws.com https://*.s3.me-south-1.amazonaws.com https://*.s3.us-gov-east-1.amazonaws.com https://*.s3.us-gov-west-1.amazonaws.com https://api.github.com"; set $csp_font "font-src 'self' data: https://cdn.jsdelivr.net https://fonts.gstatic.com https://rsms.me https://maxcdn.bootstrapcdn.com https://js.intercomcdn.com https://fonts.intercomcdn.com"; set $csp_frame "frame-src 'self' https:"; set $csp_img "img-src http: https: data: blob:"; @@ -82,6 +82,12 @@ http { set $couchdb ${COUCHDB_UPSTREAM_URL}; set $watchtower ${WATCHTOWER_UPSTREAM_URL}; + location /health { + access_log off; + add_header 'Content-Type' 'application/json'; + return 200 '{ "status": "OK" }'; + } + location /app { proxy_pass $apps; } diff --git a/lerna.json b/lerna.json index 
76dc01dc26..c6f78ea0a5 100644 --- a/lerna.json +++ b/lerna.json @@ -1,5 +1,5 @@ { - "version": "2.6.8-alpha.14", + "version": "2.6.19-alpha.0", "npmClient": "yarn", "packages": [ "packages/backend-core", diff --git a/packages/backend-core/src/context/mainContext.ts b/packages/backend-core/src/context/mainContext.ts index 861777b679..61d96bb4b0 100644 --- a/packages/backend-core/src/context/mainContext.ts +++ b/packages/backend-core/src/context/mainContext.ts @@ -104,6 +104,22 @@ async function newContext(updates: ContextMap, task: any) { return Context.run(context, task) } +export async function doInAutomationContext(params: { + appId: string + automationId: string + task: any +}): Promise { + const tenantId = getTenantIDFromAppID(params.appId) + return newContext( + { + tenantId, + appId: params.appId, + automationId: params.automationId, + }, + params.task + ) +} + export async function doInContext(appId: string, task: any): Promise { const tenantId = getTenantIDFromAppID(appId) return newContext( @@ -187,6 +203,11 @@ export function getTenantId(): string { return tenantId } +export function getAutomationId(): string | undefined { + const context = Context.get() + return context?.automationId +} + export function getAppId(): string | undefined { const context = Context.get() const foundId = context?.appId diff --git a/packages/backend-core/src/context/types.ts b/packages/backend-core/src/context/types.ts index 727dad80bc..d687a93594 100644 --- a/packages/backend-core/src/context/types.ts +++ b/packages/backend-core/src/context/types.ts @@ -7,4 +7,5 @@ export type ContextMap = { identity?: IdentityContext environmentVariables?: Record isScim?: boolean + automationId?: string } diff --git a/packages/backend-core/src/db/couch/DatabaseImpl.ts b/packages/backend-core/src/db/couch/DatabaseImpl.ts index 94d78e94ff..29ca4123f5 100644 --- a/packages/backend-core/src/db/couch/DatabaseImpl.ts +++ b/packages/backend-core/src/db/couch/DatabaseImpl.ts @@ -12,7 +12,7 @@ import 
{ isDocument, } from "@budibase/types" import { getCouchInfo } from "./connections" -import { directCouchCall } from "./utils" +import { directCouchUrlCall } from "./utils" import { getPouchDB } from "./pouchDB" import { WriteStream, ReadStream } from "fs" import { newid } from "../../docIds/newid" @@ -46,6 +46,8 @@ export class DatabaseImpl implements Database { private readonly instanceNano?: Nano.ServerScope private readonly pouchOpts: DatabaseOpts + private readonly couchInfo = getCouchInfo() + constructor(dbName?: string, opts?: DatabaseOpts, connection?: string) { if (dbName == null) { throw new Error("Database name cannot be undefined.") @@ -53,8 +55,8 @@ export class DatabaseImpl implements Database { this.name = dbName this.pouchOpts = opts || {} if (connection) { - const couchInfo = getCouchInfo(connection) - this.instanceNano = buildNano(couchInfo) + this.couchInfo = getCouchInfo(connection) + this.instanceNano = buildNano(this.couchInfo) } if (!DatabaseImpl.nano) { DatabaseImpl.init() @@ -67,7 +69,11 @@ export class DatabaseImpl implements Database { } async exists() { - let response = await directCouchCall(`/${this.name}`, "HEAD") + const response = await directCouchUrlCall({ + url: `${this.couchInfo.url}/${this.name}`, + method: "HEAD", + cookie: this.couchInfo.cookie, + }) return response.status === 200 } diff --git a/packages/backend-core/src/db/couch/connections.ts b/packages/backend-core/src/db/couch/connections.ts index 06c661f350..4214c7cdc6 100644 --- a/packages/backend-core/src/db/couch/connections.ts +++ b/packages/backend-core/src/db/couch/connections.ts @@ -4,21 +4,21 @@ export const getCouchInfo = (connection?: string) => { const urlInfo = getUrlInfo(connection) let username let password - if (env.COUCH_DB_USERNAME) { - // set from env - username = env.COUCH_DB_USERNAME - } else if (urlInfo.auth.username) { + if (urlInfo.auth?.username) { // set from url username = urlInfo.auth.username + } else if (env.COUCH_DB_USERNAME) { + // set from 
env + username = env.COUCH_DB_USERNAME } else if (!env.isTest()) { throw new Error("CouchDB username not set") } - if (env.COUCH_DB_PASSWORD) { - // set from env - password = env.COUCH_DB_PASSWORD - } else if (urlInfo.auth.password) { + if (urlInfo.auth?.password) { // set from url password = urlInfo.auth.password + } else if (env.COUCH_DB_PASSWORD) { + // set from env + password = env.COUCH_DB_PASSWORD } else if (!env.isTest()) { throw new Error("CouchDB password not set") } diff --git a/packages/backend-core/src/db/couch/utils.ts b/packages/backend-core/src/db/couch/utils.ts index 426bf92158..51b2a38998 100644 --- a/packages/backend-core/src/db/couch/utils.ts +++ b/packages/backend-core/src/db/couch/utils.ts @@ -9,6 +9,20 @@ export async function directCouchCall( ) { let { url, cookie } = getCouchInfo() const couchUrl = `${url}/${path}` + return await directCouchUrlCall({ url: couchUrl, cookie, method, body }) +} + +export async function directCouchUrlCall({ + url, + cookie, + method, + body, +}: { + url: string + cookie: string + method: string + body?: any +}) { const params: any = { method: method, headers: { @@ -19,7 +33,7 @@ export async function directCouchCall( params.body = JSON.stringify(body) params.headers["Content-Type"] = "application/json" } - return await fetch(checkSlashesInUrl(encodeURI(couchUrl)), params) + return await fetch(checkSlashesInUrl(encodeURI(url)), params) } export async function directCouchQuery( diff --git a/packages/backend-core/src/environment.ts b/packages/backend-core/src/environment.ts index 155f09e6d9..9163dfeba6 100644 --- a/packages/backend-core/src/environment.ts +++ b/packages/backend-core/src/environment.ts @@ -69,10 +69,10 @@ function findVersion() { try { const packageJsonFile = findFileInAncestors("package.json", process.cwd()) const content = readFileSync(packageJsonFile!, "utf-8") - const version = JSON.parse(content).version - return version + return JSON.parse(content).version } catch { - throw new Error("Cannot 
find a valid version in its package.json") + // throwing an error here is confusing/causes backend-core to be hard to import + return undefined } } @@ -95,7 +95,7 @@ const environment = { GOOGLE_CLIENT_SECRET: process.env.GOOGLE_CLIENT_SECRET, SALT_ROUNDS: process.env.SALT_ROUNDS, REDIS_URL: process.env.REDIS_URL || "localhost:6379", - REDIS_PASSWORD: process.env.REDIS_PASSWORD || "budibase", + REDIS_PASSWORD: process.env.REDIS_PASSWORD, REDIS_CLUSTERED: process.env.REDIS_CLUSTERED, MOCK_REDIS: process.env.MOCK_REDIS, MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY, diff --git a/packages/backend-core/src/logging/pino/logger.ts b/packages/backend-core/src/logging/pino/logger.ts index 276377eb00..cebc78ffc7 100644 --- a/packages/backend-core/src/logging/pino/logger.ts +++ b/packages/backend-core/src/logging/pino/logger.ts @@ -39,6 +39,7 @@ if (!env.DISABLE_PINO_LOGGER) { objects?: any[] tenantId?: string appId?: string + automationId?: string identityId?: string identityType?: IdentityType correlationId?: string @@ -86,18 +87,44 @@ if (!env.DISABLE_PINO_LOGGER) { contextObject = { tenantId: getTenantId(), appId: getAppId(), + automationId: getAutomationId(), identityId: identity?._id, identityType: identity?.type, correlationId: correlation.getId(), } } - const mergingObject = { - objects: objects.length ? objects : undefined, + const mergingObject: any = { err: error, ...contextObject, } + if (objects.length) { + // init generic data object for params supplied that don't have a + // '_logKey' field. This prints an object using argument index as the key + // e.g. 
{ 0: {}, 1: {} } + const data: any = {} + let dataIndex = 0 + + for (let i = 0; i < objects.length; i++) { + const object = objects[i] + // the object has specified a log key + // use this instead of generic key + const logKey = object._logKey + if (logKey) { + delete object._logKey + mergingObject[logKey] = object + } else { + data[dataIndex] = object + dataIndex++ + } + } + + if (Object.keys(data).length) { + mergingObject.data = data + } + } + return [mergingObject, message] } @@ -159,6 +186,16 @@ if (!env.DISABLE_PINO_LOGGER) { return appId } + const getAutomationId = () => { + let appId + try { + appId = context.getAutomationId() + } catch (e) { + // do nothing + } + return appId + } + const getIdentity = () => { let identity try { diff --git a/packages/backend-core/src/queue/inMemoryQueue.ts b/packages/backend-core/src/queue/inMemoryQueue.ts index b80aece418..ec1d9d4a90 100644 --- a/packages/backend-core/src/queue/inMemoryQueue.ts +++ b/packages/backend-core/src/queue/inMemoryQueue.ts @@ -128,6 +128,7 @@ class InMemoryQueue { on() { // do nothing + return this } async waitForCompletion() { diff --git a/packages/backend-core/src/queue/listeners.ts b/packages/backend-core/src/queue/listeners.ts index 331b690fe9..42e3172364 100644 --- a/packages/backend-core/src/queue/listeners.ts +++ b/packages/backend-core/src/queue/listeners.ts @@ -1,5 +1,6 @@ import { Job, JobId, Queue } from "bull" import { JobQueue } from "./constants" +import * as context from "../context" export type StalledFn = (job: Job) => Promise @@ -31,77 +32,164 @@ function handleStalled(queue: Queue, removeStalledCb?: StalledFn) { }) } -function logging(queue: Queue, jobQueue: JobQueue) { - let eventType: string - switch (jobQueue) { - case JobQueue.AUTOMATION: - eventType = "automation-event" - break - case JobQueue.APP_BACKUP: - eventType = "app-backup-event" - break - case JobQueue.AUDIT_LOG: - eventType = "audit-log-event" - break - case JobQueue.SYSTEM_EVENT_QUEUE: - eventType = 
"system-event" - break +function getLogParams( + eventType: QueueEventType, + event: BullEvent, + opts: { + job?: Job + jobId?: JobId + error?: Error + } = {}, + extra: any = {} +) { + const message = `[BULL] ${eventType}=${event}` + const err = opts.error + + const bullLog = { + _logKey: "bull", + eventType, + event, + job: opts.job, + jobId: opts.jobId || opts.job?.id, + ...extra, } + + let automationLog + if (opts.job?.data?.automation) { + automationLog = { + _logKey: "automation", + trigger: opts.job + ? opts.job.data.automation.definition.trigger.event + : undefined, + } + } + + return [message, err, bullLog, automationLog] +} + +enum BullEvent { + ERROR = "error", + WAITING = "waiting", + ACTIVE = "active", + STALLED = "stalled", + PROGRESS = "progress", + COMPLETED = "completed", + FAILED = "failed", + PAUSED = "paused", + RESUMED = "resumed", + CLEANED = "cleaned", + DRAINED = "drained", + REMOVED = "removed", +} + +enum QueueEventType { + AUTOMATION_EVENT = "automation-event", + APP_BACKUP_EVENT = "app-backup-event", + AUDIT_LOG_EVENT = "audit-log-event", + SYSTEM_EVENT = "system-event", +} + +const EventTypeMap: { [key in JobQueue]: QueueEventType } = { + [JobQueue.AUTOMATION]: QueueEventType.AUTOMATION_EVENT, + [JobQueue.APP_BACKUP]: QueueEventType.APP_BACKUP_EVENT, + [JobQueue.AUDIT_LOG]: QueueEventType.AUDIT_LOG_EVENT, + [JobQueue.SYSTEM_EVENT_QUEUE]: QueueEventType.SYSTEM_EVENT, +} + +function logging(queue: Queue, jobQueue: JobQueue) { + const eventType = EventTypeMap[jobQueue] + + function doInJobContext(job: Job, task: any) { + // if this is an automation job try to get the app id + const appId = job.data.event?.appId + if (appId) { + return context.doInContext(appId, task) + } else { + task() + } + } + + queue + .on(BullEvent.STALLED, async (job: Job) => { + // A job has been marked as stalled. This is useful for debugging job + // workers that crash or pause the event loop. 
+ await doInJobContext(job, () => { + console.error(...getLogParams(eventType, BullEvent.STALLED, { job })) + }) + }) + .on(BullEvent.ERROR, (error: any) => { + // An error occurred. + console.error(...getLogParams(eventType, BullEvent.ERROR, { error })) + }) + if (process.env.NODE_DEBUG?.includes("bull")) { queue - .on("error", (error: any) => { - // An error occurred. - console.error(`${eventType}=error error=${JSON.stringify(error)}`) - }) - .on("waiting", (jobId: JobId) => { + .on(BullEvent.WAITING, (jobId: JobId) => { // A Job is waiting to be processed as soon as a worker is idling. - console.log(`${eventType}=waiting jobId=${jobId}`) + console.info(...getLogParams(eventType, BullEvent.WAITING, { jobId })) }) - .on("active", (job: Job, jobPromise: any) => { + .on(BullEvent.ACTIVE, async (job: Job, jobPromise: any) => { // A job has started. You can use `jobPromise.cancel()`` to abort it. - console.log(`${eventType}=active jobId=${job.id}`) + await doInJobContext(job, () => { + console.info(...getLogParams(eventType, BullEvent.ACTIVE, { job })) + }) }) - .on("stalled", (job: Job) => { - // A job has been marked as stalled. This is useful for debugging job - // workers that crash or pause the event loop. - console.error( - `${eventType}=stalled jobId=${job.id} job=${JSON.stringify(job)}` - ) + .on(BullEvent.PROGRESS, async (job: Job, progress: any) => { + // A job's progress was updated + await doInJobContext(job, () => { + console.info( + ...getLogParams( + eventType, + BullEvent.PROGRESS, + { job }, + { progress } + ) + ) + }) }) - .on("progress", (job: Job, progress: any) => { - // A job's progress was updated! - console.log( - `${eventType}=progress jobId=${job.id} progress=${progress}` - ) - }) - .on("completed", (job: Job, result) => { + .on(BullEvent.COMPLETED, async (job: Job, result) => { // A job successfully completed with a `result`. 
- console.log(`${eventType}=completed jobId=${job.id} result=${result}`) + await doInJobContext(job, () => { + console.info( + ...getLogParams(eventType, BullEvent.COMPLETED, { job }, { result }) + ) + }) }) - .on("failed", (job, err: any) => { + .on(BullEvent.FAILED, async (job: Job, error: any) => { // A job failed with reason `err`! - console.log(`${eventType}=failed jobId=${job.id} error=${err}`) + await doInJobContext(job, () => { + console.error( + ...getLogParams(eventType, BullEvent.FAILED, { job, error }) + ) + }) }) - .on("paused", () => { + .on(BullEvent.PAUSED, () => { // The queue has been paused. - console.log(`${eventType}=paused`) + console.info(...getLogParams(eventType, BullEvent.PAUSED)) }) - .on("resumed", (job: Job) => { + .on(BullEvent.RESUMED, () => { // The queue has been resumed. - console.log(`${eventType}=paused jobId=${job.id}`) + console.info(...getLogParams(eventType, BullEvent.RESUMED)) }) - .on("cleaned", (jobs: Job[], type: string) => { + .on(BullEvent.CLEANED, (jobs: Job[], type: string) => { // Old jobs have been cleaned from the queue. `jobs` is an array of cleaned // jobs, and `type` is the type of jobs cleaned. - console.log(`${eventType}=cleaned length=${jobs.length} type=${type}`) + console.info( + ...getLogParams( + eventType, + BullEvent.CLEANED, + {}, + { length: jobs.length, type } + ) + ) }) - .on("drained", () => { + .on(BullEvent.DRAINED, () => { // Emitted every time the queue has processed all the waiting jobs (even if there can be some delayed jobs not yet processed) - console.log(`${eventType}=drained`) + console.info(...getLogParams(eventType, BullEvent.DRAINED)) }) - .on("removed", (job: Job) => { + .on(BullEvent.REMOVED, (job: Job) => { // A job successfully removed. 
- console.log(`${eventType}=removed jobId=${job.id}`) + console.info(...getLogParams(eventType, BullEvent.REMOVED, { job })) }) } } diff --git a/packages/bbui/package.json b/packages/bbui/package.json index f313dd04c0..de1fc0db5e 100644 --- a/packages/bbui/package.json +++ b/packages/bbui/package.json @@ -84,7 +84,7 @@ "@spectrum-css/vars": "3.0.1", "dayjs": "^1.10.4", "easymde": "^2.16.1", - "svelte-flatpickr": "^3.3.2", + "svelte-flatpickr": "3.2.3", "svelte-portal": "^1.0.0" }, "resolutions": { diff --git a/packages/bbui/src/Button/Button.svelte b/packages/bbui/src/Button/Button.svelte index f8a6004f8f..efd5f33bd2 100644 --- a/packages/bbui/src/Button/Button.svelte +++ b/packages/bbui/src/Button/Button.svelte @@ -2,6 +2,7 @@ import "@spectrum-css/button/dist/index-vars.css" import Tooltip from "../Tooltip/Tooltip.svelte" + export let type export let disabled = false export let size = "M" export let cta = false @@ -21,6 +22,7 @@ - + + diff --git a/packages/builder/src/pages/builder/apps/index.svelte b/packages/builder/src/pages/builder/apps/index.svelte index 23f4df5bb5..4b77671345 100644 --- a/packages/builder/src/pages/builder/apps/index.svelte +++ b/packages/builder/src/pages/builder/apps/index.svelte @@ -133,7 +133,7 @@ - {#if $licensing.usageMetrics?.dayPasses >= 100} + {#if $licensing.usageMetrics?.dayPasses >= 100 || $licensing.errUserLimit}
spaceman diff --git a/packages/builder/src/pages/builder/portal/apps/index.svelte b/packages/builder/src/pages/builder/portal/apps/index.svelte index 783cac49d7..ce1c249087 100644 --- a/packages/builder/src/pages/builder/portal/apps/index.svelte +++ b/packages/builder/src/pages/builder/portal/apps/index.svelte @@ -14,6 +14,7 @@ import Spinner from "components/common/Spinner.svelte" import CreateAppModal from "components/start/CreateAppModal.svelte" import AppLimitModal from "components/portal/licensing/AppLimitModal.svelte" + import AccountLockedModal from "components/portal/licensing/AccountLockedModal.svelte" import { store, automationStore } from "builderStore" import { API } from "api" @@ -28,6 +29,7 @@ let template let creationModal let appLimitModal + let accountLockedModal let creatingApp = false let searchTerm = "" let creatingFromTemplate = false @@ -48,6 +50,11 @@ : true) ) $: automationErrors = getAutomationErrors(enrichedApps) + $: isOwner = $auth.accountPortalAccess && $admin.cloud + + const usersLimitLockAction = $licensing?.errUserLimit + ? () => accountLockedModal.show() + : null const enrichApps = (apps, user, sortBy) => { const enrichedApps = apps.map(app => ({ @@ -189,6 +196,9 @@ creatingFromTemplate = true createAppFromTemplateUrl(initInfo.init_template) } + if (usersLimitLockAction) { + usersLimitLockAction() + } } catch (error) { notifications.error("Error getting init info") } @@ -230,20 +240,30 @@
- {#if $apps?.length > 0} {/if} {#if !$apps?.length} - {/if} @@ -267,7 +287,7 @@
{#each filteredApps as app (app.appId)} - + {/each}
@@ -294,6 +314,11 @@ + + isOwner ? $licensing.goToUpgradePage() : $licensing.goToPricingPage()} +/> diff --git a/packages/frontend-core/src/components/TestimonialPage.svelte b/packages/frontend-core/src/components/TestimonialPage.svelte index bbb61d2276..94983384a8 100644 --- a/packages/frontend-core/src/components/TestimonialPage.svelte +++ b/packages/frontend-core/src/components/TestimonialPage.svelte @@ -1,58 +1,15 @@
{#if enabled} -
- - a-happy-budibase-user -
- "{testimonial.text}" -
-
-
{testimonial.name}
-
{testimonial.role}
-
-
-
+ {/if}
@@ -64,20 +21,4 @@ display: grid; place-items: center; } - .testimonial { - width: 380px; - padding: 40px; - } - .text { - font-size: var(--font-size-l); - font-style: italic; - } - .name { - font-weight: bold; - color: var(--spectrum-global-color-gray-900); - font-size: var(--font-size-l); - } - .company { - color: var(--spectrum-global-color-gray-700); - } diff --git a/packages/frontend-core/src/components/grid/cells/DateCell.svelte b/packages/frontend-core/src/components/grid/cells/DateCell.svelte index f5b1acb1c8..53b159ee30 100644 --- a/packages/frontend-core/src/components/grid/cells/DateCell.svelte +++ b/packages/frontend-core/src/components/grid/cells/DateCell.svelte @@ -13,10 +13,10 @@ let flatpickr let isOpen - // adding the 0- will turn a string like 00:00:00 into a valid ISO + // Adding the 0- will turn a string like 00:00:00 into a valid ISO // date, but will make actual ISO dates invalid - $: time = new Date(`0-${value}`) - $: timeOnly = !isNaN(time) || schema?.timeOnly + $: isTimeValue = !isNaN(new Date(`0-${value}`)) + $: timeOnly = isTimeValue || schema?.timeOnly $: dateOnly = schema?.dateOnly $: format = timeOnly ? "HH:mm:ss" @@ -24,6 +24,19 @@ ? "MMM D YYYY" : "MMM D YYYY, HH:mm" $: editable = focused && !readonly + $: displayValue = getDisplayValue(value, format, timeOnly, isTimeValue) + + const getDisplayValue = (value, format, timeOnly, isTimeValue) => { + if (!value) { + return "" + } + // Parse full date strings + if (!timeOnly || !isTimeValue) { + return dayjs(value).format(format) + } + // Otherwise must be a time string + return dayjs(`0-${value}`).format(format) + } // Ensure we close flatpickr when unselected $: { @@ -49,7 +62,7 @@
{#if value} - {dayjs(timeOnly ? time : value).format(format)} + {displayValue} {/if}
{#if editable} diff --git a/packages/frontend-core/src/components/index.js b/packages/frontend-core/src/components/index.js index 2b7033ad8f..88107da535 100644 --- a/packages/frontend-core/src/components/index.js +++ b/packages/frontend-core/src/components/index.js @@ -1,3 +1,4 @@ export { default as SplitPage } from "./SplitPage.svelte" export { default as TestimonialPage } from "./TestimonialPage.svelte" +export { default as Testimonial } from "./Testimonial.svelte" export { Grid } from "./grid" diff --git a/packages/server/package.json b/packages/server/package.json index f0ece87bed..c65469f5cb 100644 --- a/packages/server/package.json +++ b/packages/server/package.json @@ -99,7 +99,7 @@ "mysql2": "2.3.3", "node-fetch": "2.6.7", "open": "8.4.0", - "pg": "8.5.1", + "pg": "8.10.0", "posthog-node": "1.3.0", "pouchdb": "7.3.0", "pouchdb-adapter-memory": "7.2.2", @@ -141,6 +141,7 @@ "@types/node": "14.18.20", "@types/node-fetch": "2.6.1", "@types/oracledb": "5.2.2", + "@types/pg": "8.6.6", "@types/pouchdb": "6.4.0", "@types/redis": "4.0.11", "@types/server-destroy": "1.0.1", diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts index b61b168980..8f13e0e618 100644 --- a/packages/server/src/api/controllers/datasource.ts +++ b/packages/server/src/api/controllers/datasource.ts @@ -18,11 +18,71 @@ import { Row, CreateDatasourceResponse, UpdateDatasourceResponse, - UpdateDatasourceRequest, CreateDatasourceRequest, + VerifyDatasourceRequest, + VerifyDatasourceResponse, + IntegrationBase, + DatasourcePlus, } from "@budibase/types" import sdk from "../../sdk" +function getErrorTables(errors: any, errorType: string) { + return Object.entries(errors) + .filter(entry => entry[1] === errorType) + .map(([name]) => name) +} + +function updateError(error: any, newError: any, tables: string[]) { + if (!error) { + error = "" + } + if (error.length > 0) { + error += "\n" + } + error += `${newError} ${tables.join(", ")}` + 
return error +} + +async function getConnector( + datasource: Datasource +): Promise { + const Connector = await getIntegration(datasource.source) + // can't enrich if it doesn't have an ID yet + if (datasource._id) { + datasource = await sdk.datasources.enrich(datasource) + } + // Connect to the DB and build the schema + return new Connector(datasource.config) +} + +async function buildSchemaHelper(datasource: Datasource) { + const connector = (await getConnector(datasource)) as DatasourcePlus + await connector.buildSchema(datasource._id!, datasource.entities!) + + const errors = connector.schemaErrors + let error = null + if (errors && Object.keys(errors).length > 0) { + const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY) + const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN) + if (noKey.length) { + error = updateError( + error, + "No primary key constraint found for the following:", + noKey + ) + } + if (invalidCol.length) { + const invalidCols = Object.values(InvalidColumns).join(", ") + error = updateError( + error, + `Cannot use columns ${invalidCols} found in following:`, + invalidCol + ) + } + } + return { tables: connector.tables, error } +} + export async function fetch(ctx: UserCtx) { // Get internal tables const db = context.getAppDB() @@ -66,6 +126,33 @@ export async function fetch(ctx: UserCtx) { ctx.body = [bbInternalDb, ...datasources] } +export async function verify( + ctx: UserCtx +) { + const { datasource } = ctx.request.body + let existingDatasource: undefined | Datasource + if (datasource._id) { + existingDatasource = await sdk.datasources.get(datasource._id) + } + let enrichedDatasource = datasource + if (existingDatasource) { + enrichedDatasource = sdk.datasources.mergeConfigs( + datasource, + existingDatasource + ) + } + const connector = await getConnector(enrichedDatasource) + if (!connector.testConnection) { + ctx.throw(400, "Connection information verification not supported") + } + const response = await 
connector.testConnection() + + ctx.body = { + connected: response.connected, + error: response.error, + } +} + export async function buildSchemaFromDb(ctx: UserCtx) { const db = context.getAppDB() const datasource = await sdk.datasources.get(ctx.params.datasourceId) @@ -311,51 +398,3 @@ export async function query(ctx: UserCtx) { ctx.throw(400, err) } } - -function getErrorTables(errors: any, errorType: string) { - return Object.entries(errors) - .filter(entry => entry[1] === errorType) - .map(([name]) => name) -} - -function updateError(error: any, newError: any, tables: string[]) { - if (!error) { - error = "" - } - if (error.length > 0) { - error += "\n" - } - error += `${newError} ${tables.join(", ")}` - return error -} - -async function buildSchemaHelper(datasource: Datasource) { - const Connector = await getIntegration(datasource.source) - datasource = await sdk.datasources.enrich(datasource) - // Connect to the DB and build the schema - const connector = new Connector(datasource.config) - await connector.buildSchema(datasource._id, datasource.entities) - - const errors = connector.schemaErrors - let error = null - if (errors && Object.keys(errors).length > 0) { - const noKey = getErrorTables(errors, BuildSchemaErrors.NO_KEY) - const invalidCol = getErrorTables(errors, BuildSchemaErrors.INVALID_COLUMN) - if (noKey.length) { - error = updateError( - error, - "No primary key constraint found for the following:", - noKey - ) - } - if (invalidCol.length) { - const invalidCols = Object.values(InvalidColumns).join(", ") - error = updateError( - error, - `Cannot use columns ${invalidCols} found in following:`, - invalidCol - ) - } - } - return { tables: connector.tables, error } -} diff --git a/packages/server/src/api/controllers/integration.ts b/packages/server/src/api/controllers/integration.ts index 743d216da7..23defac831 100644 --- a/packages/server/src/api/controllers/integration.ts +++ b/packages/server/src/api/controllers/integration.ts @@ -1,4 +1,4 @@ 
-import { getDefinitions } from "../../integrations" +import { getDefinition, getDefinitions } from "../../integrations" import { BBContext } from "@budibase/types" export async function fetch(ctx: BBContext) { @@ -7,7 +7,7 @@ export async function fetch(ctx: BBContext) { } export async function find(ctx: BBContext) { - const defs = await getDefinitions() + const def = await getDefinition(ctx.params.type) + ctx.body = def ctx.status = 200 - ctx.body = defs[ctx.params.type] } diff --git a/packages/server/src/api/controllers/row/internal.ts b/packages/server/src/api/controllers/row/internal.ts index cc1450060c..e3cb419236 100644 --- a/packages/server/src/api/controllers/row/internal.ts +++ b/packages/server/src/api/controllers/row/internal.ts @@ -118,8 +118,11 @@ export async function patch(ctx: UserCtx) { combinedRow[key] = inputs[key] } + // need to copy the table so it can be differenced on way out + const tableClone = cloneDeep(dbTable) + // this returns the table and row incase they have been updated - let { table, row } = inputProcessing(ctx.user, dbTable, combinedRow) + let { table, row } = inputProcessing(ctx.user, tableClone, combinedRow) const validateResult = await utils.validate({ row, table, @@ -163,7 +166,12 @@ export async function save(ctx: UserCtx) { // this returns the table and row incase they have been updated const dbTable = await db.get(inputs.tableId) - let { table, row } = inputProcessing(ctx.user, dbTable, inputs) + + // need to copy the table so it can be differenced on way out + const tableClone = cloneDeep(dbTable) + + let { table, row } = inputProcessing(ctx.user, tableClone, inputs) + const validateResult = await utils.validate({ row, table, diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts index bc967a90f4..cbbda7b930 100644 --- a/packages/server/src/api/controllers/table/index.ts +++ b/packages/server/src/api/controllers/table/index.ts @@ -97,6 +97,7 @@ export async 
function bulkImport(ctx: UserCtx) { // right now we don't trigger anything for bulk import because it // can only be done in the builder, but in the future we may need to // think about events for bulk items + ctx.status = 200 ctx.body = { message: `Bulk rows created.` } } diff --git a/packages/server/src/api/controllers/table/internal.ts b/packages/server/src/api/controllers/table/internal.ts index 628932bba1..d2a4de575e 100644 --- a/packages/server/src/api/controllers/table/internal.ts +++ b/packages/server/src/api/controllers/table/internal.ts @@ -184,8 +184,13 @@ export async function destroy(ctx: any) { } export async function bulkImport(ctx: any) { + const db = context.getAppDB() const table = await sdk.tables.getTable(ctx.params.tableId) const { rows } = ctx.request.body await handleDataImport(ctx.user, table, rows) + + // Ensure auto id and other table updates are persisted + await db.put(table) + return table } diff --git a/packages/server/src/api/controllers/table/utils.ts b/packages/server/src/api/controllers/table/utils.ts index 1a3eda683b..f088dbaa8e 100644 --- a/packages/server/src/api/controllers/table/utils.ts +++ b/packages/server/src/api/controllers/table/utils.ts @@ -129,17 +129,17 @@ export function importToRows( // the real schema of the table passed in, not the clone used for // incrementing auto IDs for (const [fieldName, schema] of Object.entries(originalTable.schema)) { + const rowVal = Array.isArray(row[fieldName]) + ? 
row[fieldName] + : [row[fieldName]] if ( (schema.type === FieldTypes.OPTIONS || schema.type === FieldTypes.ARRAY) && - row[fieldName] && - (!schema.constraints!.inclusion || - schema.constraints!.inclusion.indexOf(row[fieldName]) === -1) + row[fieldName] ) { - schema.constraints!.inclusion = [ - ...schema.constraints!.inclusion!, - row[fieldName], - ] + let merged = [...schema.constraints!.inclusion!, ...rowVal] + let superSet = new Set(merged) + schema.constraints!.inclusion = Array.from(superSet) schema.constraints!.inclusion.sort() } } diff --git a/packages/server/src/api/routes/datasource.ts b/packages/server/src/api/routes/datasource.ts index 85929d2180..654fb794e3 100644 --- a/packages/server/src/api/routes/datasource.ts +++ b/packages/server/src/api/routes/datasource.ts @@ -15,6 +15,11 @@ router authorized(permissions.BUILDER), datasourceController.fetch ) + .post( + "/api/datasources/verify", + authorized(permissions.BUILDER), + datasourceController.verify + ) .get( "/api/datasources/:datasourceId", authorized( diff --git a/packages/server/src/api/routes/public/index.ts b/packages/server/src/api/routes/public/index.ts index bed798f75b..6852778e68 100644 --- a/packages/server/src/api/routes/public/index.ts +++ b/packages/server/src/api/routes/public/index.ts @@ -42,13 +42,17 @@ if (!env.isTest()) { host: REDIS_OPTS.host, port: REDIS_OPTS.port, }, - password: - REDIS_OPTS.opts.password || REDIS_OPTS.opts.redisOptions.password, + } + + if (REDIS_OPTS.opts?.password || REDIS_OPTS.opts.redisOptions?.password) { + // @ts-ignore + options.password = + REDIS_OPTS.opts.password || REDIS_OPTS.opts.redisOptions.password } if (!env.REDIS_CLUSTERED) { - // Can't set direct redis db in clustered env // @ts-ignore + // Can't set direct redis db in clustered env options.database = 1 } } diff --git a/packages/server/src/api/routes/tests/misc.spec.js b/packages/server/src/api/routes/tests/misc.spec.js index 6dd82df496..21ebea637f 100644 --- 
a/packages/server/src/api/routes/tests/misc.spec.js +++ b/packages/server/src/api/routes/tests/misc.spec.js @@ -73,18 +73,97 @@ describe("run misc tests", () => { type: "string", }, }, + e: { + name: "Auto ID", + type: "number", + subtype: "autoID", + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: false, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + f: { + type: "array", + constraints: { + type: "array", + presence: { + "allowEmpty": true + }, + inclusion: [ + "One", + "Two", + "Three", + ] + }, + name: "Sample Tags", + sortable: false + }, + g: { + type: "options", + constraints: { + type: "string", + presence: false, + inclusion: [ + "Alpha", + "Beta", + "Gamma" + ] + }, + name: "Sample Opts" + } }, }) - + + // Shift specific row tests to the row spec await tableUtils.handleDataImport( { userId: "test" }, table, - [{ a: '1', b: '2', c: '3', d: '4'}] + [ + { a: '1', b: '2', c: '3', d: '4', f: "['One']", g: "Alpha" }, + { a: '5', b: '6', c: '7', d: '8', f: "[]", g: undefined}, + { a: '9', b: '10', c: '11', d: '12', f: "['Two','Four']", g: ""}, + { a: '13', b: '14', c: '15', d: '16', g: "Omega"} + ] ) + + // 4 rows imported, the auto ID starts at 1 + // We expect the handleDataImport function to update the lastID + expect(table.schema.e.lastID).toEqual(4); + + // Array/Multi - should have added a new value to the inclusion. 
+ expect(table.schema.f.constraints.inclusion).toEqual(['Four','One','Three','Two']); + + // Options - should have a new value in the inclusion + expect(table.schema.g.constraints.inclusion).toEqual(['Alpha','Beta','Gamma','Omega']); + const rows = await config.getRows() - expect(rows[0].a).toEqual("1") - expect(rows[0].b).toEqual("2") - expect(rows[0].c).toEqual("3") + expect(rows.length).toEqual(4); + + const rowOne = rows.find(row => row.e === 1) + expect(rowOne.a).toEqual("1") + expect(rowOne.f).toEqual(['One']) + expect(rowOne.g).toEqual('Alpha') + + const rowTwo = rows.find(row => row.e === 2) + expect(rowTwo.a).toEqual("5") + expect(rowTwo.f).toEqual([]) + expect(rowTwo.g).toEqual(undefined) + + const rowThree = rows.find(row => row.e === 3) + expect(rowThree.a).toEqual("9") + expect(rowThree.f).toEqual(['Two','Four']) + expect(rowThree.g).toEqual(null) + + const rowFour = rows.find(row => row.e === 4) + expect(rowFour.a).toEqual("13") + expect(rowFour.f).toEqual(undefined) + expect(rowFour.g).toEqual('Omega') }) }) }) diff --git a/packages/server/src/api/routes/tests/row.spec.js b/packages/server/src/api/routes/tests/row.spec.js index 4b835a1fb5..a8041dac3b 100644 --- a/packages/server/src/api/routes/tests/row.spec.js +++ b/packages/server/src/api/routes/tests/row.spec.js @@ -34,9 +34,9 @@ describe("/rows", () => { row = basicRow(table._id) }) - const loadRow = async (id, status = 200) => + const loadRow = async (id, tbl_Id, status = 200) => await request - .get(`/api/${table._id}/rows/${id}`) + .get(`/api/${tbl_Id}/rows/${id}`) .set(config.defaultHeaders()) .expect("Content-Type", /json/) .expect(status) @@ -79,6 +79,60 @@ describe("/rows", () => { await assertQueryUsage(queryUsage + 1) }) + it("Increment row autoId per create row request", async () => { + const rowUsage = await getRowUsage() + const queryUsage = await getQueryUsage() + + const newTable = await config.createTable({ + name: "TestTableAuto", + type: "table", + key: "name", + schema: { + 
...table.schema, + "Row ID": { + name: "Row ID", + type: "number", + subtype: "autoID", + icon: "ri-magic-line", + autocolumn: true, + constraints: { + type: "number", + presence: false, + numericality: { + greaterThanOrEqualTo: "", + lessThanOrEqualTo: "", + }, + }, + }, + } + }) + + const ids = [1,2,3] + + // Performing several create row requests should increment the autoID fields accordingly + const createRow = async (id) => { + const res = await request + .post(`/api/${newTable._id}/rows`) + .send({ + name: "row_" + id + }) + .set(config.defaultHeaders()) + .expect('Content-Type', /json/) + .expect(200) + expect(res.res.statusMessage).toEqual(`${newTable.name} saved successfully`) + expect(res.body.name).toEqual("row_" + id) + expect(res.body._rev).toBeDefined() + expect(res.body["Row ID"]).toEqual(id) + } + + for (let i=0; i { const existing = await config.createRow() const rowUsage = await getRowUsage() @@ -182,8 +236,32 @@ describe("/rows", () => { type: "string", presence: false, datetime: { earliest: "", latest: "" }, - }, + } } + const arrayField = { + type: "array", + constraints: { + type: "array", + presence: false, + inclusion: [ + "One", + "Two", + "Three", + ] + }, + name: "Sample Tags", + sortable: false + } + const optsField = { + fieldName: "Sample Opts", + name: "Sample Opts", + type: "options", + constraints: { + type: "string", + presence: false, + inclusion: [ "Alpha", "Beta", "Gamma" ] + }, + }, table = await config.createTable({ name: "TestTable2", @@ -212,7 +290,15 @@ describe("/rows", () => { attachmentNull: attachment, attachmentUndefined: attachment, attachmentEmpty: attachment, - attachmentEmptyArrayStr: attachment + attachmentEmptyArrayStr: attachment, + arrayFieldEmptyArrayStr: arrayField, + arrayFieldArrayStrKnown: arrayField, + arrayFieldNull: arrayField, + arrayFieldUndefined: arrayField, + optsFieldEmptyStr: optsField, + optsFieldUndefined: optsField, + optsFieldNull: optsField, + optsFieldStrKnown: optsField }, }) @@ -241,11 
+327,20 @@ describe("/rows", () => { attachmentUndefined: undefined, attachmentEmpty: "", attachmentEmptyArrayStr: "[]", + arrayFieldEmptyArrayStr: "[]", + arrayFieldUndefined: undefined, + arrayFieldNull: null, + arrayFieldArrayStrKnown: "['One']", + optsFieldEmptyStr: "", + optsFieldUndefined: undefined, + optsFieldNull: null, + optsFieldStrKnown: 'Alpha' } - const id = (await config.createRow(row))._id + const createdRow = await config.createRow(row); + const id = createdRow._id - const saved = (await loadRow(id)).body + const saved = (await loadRow(id, table._id)).body expect(saved.stringUndefined).toBe(undefined) expect(saved.stringNull).toBe("") @@ -270,7 +365,15 @@ describe("/rows", () => { expect(saved.attachmentNull).toEqual([]) expect(saved.attachmentUndefined).toBe(undefined) expect(saved.attachmentEmpty).toEqual([]) - expect(saved.attachmentEmptyArrayStr).toEqual([]) + expect(saved.attachmentEmptyArrayStr).toEqual([]) + expect(saved.arrayFieldEmptyArrayStr).toEqual([]) + expect(saved.arrayFieldNull).toEqual([]) + expect(saved.arrayFieldUndefined).toEqual(undefined) + expect(saved.optsFieldEmptyStr).toEqual(null) + expect(saved.optsFieldUndefined).toEqual(undefined) + expect(saved.optsFieldNull).toEqual(null) + expect(saved.arrayFieldArrayStrKnown).toEqual(['One']) + expect(saved.optsFieldStrKnown).toEqual('Alpha') }) }) @@ -299,7 +402,7 @@ describe("/rows", () => { expect(res.body.name).toEqual("Updated Name") expect(res.body.description).toEqual(existing.description) - const savedRow = await loadRow(res.body._id) + const savedRow = await loadRow(res.body._id, table._id) expect(savedRow.body.description).toEqual(existing.description) expect(savedRow.body.name).toEqual("Updated Name") @@ -401,7 +504,7 @@ describe("/rows", () => { .expect(200) expect(res.body.length).toEqual(2) - await loadRow(row1._id, 404) + await loadRow(row1._id, table._id, 404) await assertRowUsage(rowUsage - 2) await assertQueryUsage(queryUsage + 1) }) diff --git 
a/packages/server/src/api/routes/tests/table.spec.js b/packages/server/src/api/routes/tests/table.spec.js index d28f2232ee..9c6980c1d7 100644 --- a/packages/server/src/api/routes/tests/table.spec.js +++ b/packages/server/src/api/routes/tests/table.spec.js @@ -167,7 +167,10 @@ describe("/tables", () => { expect(events.table.created).not.toHaveBeenCalled() expect(events.rows.imported).toBeCalledTimes(1) - expect(events.rows.imported).toBeCalledWith(table, 1) + expect(events.rows.imported).toBeCalledWith(expect.objectContaining({ + name: "TestTable", + _id: table._id + }), 1) }) }) diff --git a/packages/server/src/automations/triggers.ts b/packages/server/src/automations/triggers.ts index 772ffa4fd2..fe98f20d72 100644 --- a/packages/server/src/automations/triggers.ts +++ b/packages/server/src/automations/triggers.ts @@ -9,7 +9,7 @@ import { checkTestFlag } from "../utilities/redis" import * as utils from "./utils" import env from "../environment" import { context, db as dbCore } from "@budibase/backend-core" -import { Automation, Row } from "@budibase/types" +import { Automation, Row, AutomationData, AutomationJob } from "@budibase/types" export const TRIGGER_DEFINITIONS = definitions const JOB_OPTS = { @@ -109,14 +109,16 @@ export async function externalTrigger( } params.fields = coercedFields } - const data: Record = { automation, event: params } + + const data: AutomationData = { automation, event: params as any } if (getResponses) { data.event = { ...data.event, appId: context.getAppId(), automation, } - return utils.processEvent({ data }, { timeout: params.timeout }) + const job = { data } as AutomationJob + return utils.processEvent(job) } else { return automationQueue.add(data, JOB_OPTS) } diff --git a/packages/server/src/automations/utils.ts b/packages/server/src/automations/utils.ts index 82e3c941e8..dc2f02c821 100644 --- a/packages/server/src/automations/utils.ts +++ b/packages/server/src/automations/utils.ts @@ -8,38 +8,48 @@ import { db as dbCore, context 
} from "@budibase/backend-core" import { getAutomationMetadataParams } from "../db/utils" import { cloneDeep } from "lodash/fp" import { quotas } from "@budibase/pro" -import { Automation, WebhookActionType } from "@budibase/types" +import { Automation, AutomationJob, WebhookActionType } from "@budibase/types" import sdk from "../sdk" const REBOOT_CRON = "@reboot" const WH_STEP_ID = definitions.WEBHOOK.stepId const CRON_STEP_ID = definitions.CRON.stepId +const Runner = new Thread(ThreadType.AUTOMATION) -const jobMessage = (job: any, message: string) => { - return `app=${job.data.event.appId} automation=${job.data.automation._id} jobId=${job.id} trigger=${job.data.automation.definition.trigger.event} : ${message}` +function loggingArgs(job: AutomationJob) { + return [ + { + _logKey: "automation", + trigger: job.data.automation.definition.trigger.event, + }, + { + _logKey: "bull", + jobId: job.id, + }, + ] } -export async function processEvent(job: any, timeout?: { timeout?: number }) { - const Runner = new Thread(ThreadType.AUTOMATION, { - timeout: timeout || null, - }) +export async function processEvent(job: AutomationJob) { + const appId = job.data.event.appId! + const automationId = job.data.automation._id! 
+ const task = async () => { + try { + // need to actually await these so that an error can be captured properly + console.log("automation running", ...loggingArgs(job)) - try { - const automationId = job.data.automation._id - console.log(jobMessage(job, "running")) - // need to actually await these so that an error can be captured properly - return await context.doInContext(job.data.event.appId, async () => { const runFn = () => Runner.run(job) - return quotas.addAutomation(runFn, { + const result = await quotas.addAutomation(runFn, { automationId, }) - }) - } catch (err) { - const errJson = JSON.stringify(err) - console.error(jobMessage(job, `was unable to run - ${errJson}`)) - console.trace(err) - return { err } + console.log("automation completed", ...loggingArgs(job)) + return result + } catch (err) { + console.error(`automation was unable to run`, err, ...loggingArgs(job)) + return { err } + } } + + return await context.doInAutomationContext({ appId, automationId, task }) } export async function updateTestHistory( diff --git a/packages/server/src/db/dynamoClient.ts b/packages/server/src/db/dynamoClient.ts index cb045b7d6f..597be2b21e 100644 --- a/packages/server/src/db/dynamoClient.ts +++ b/packages/server/src/db/dynamoClient.ts @@ -140,7 +140,7 @@ export function init(endpoint: string) { docClient = new AWS.DynamoDB.DocumentClient(docClientParams) } -if (!env.isProd()) { +if (!env.isProd() && !env.isJest()) { env._set("AWS_ACCESS_KEY_ID", "KEY_ID") env._set("AWS_SECRET_ACCESS_KEY", "SECRET_KEY") init("http://localhost:8333") diff --git a/packages/server/src/definitions/automations.ts b/packages/server/src/definitions/automations.ts index 79cbe6e4ac..7e86608bf3 100644 --- a/packages/server/src/definitions/automations.ts +++ b/packages/server/src/definitions/automations.ts @@ -1,4 +1,4 @@ -import { AutomationResults, AutomationStep, Document } from "@budibase/types" +import { AutomationResults, AutomationStep } from "@budibase/types" export enum LoopStepType { 
ARRAY = "Array", @@ -27,7 +27,3 @@ export interface AutomationContext extends AutomationResults { env?: Record trigger: any } - -export interface AutomationMetadata extends Document { - errorCount?: number -} diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts index 78075b4e54..79f6db5cd1 100644 --- a/packages/server/src/integration-test/postgres.spec.ts +++ b/packages/server/src/integration-test/postgres.spec.ts @@ -19,7 +19,6 @@ import _ from "lodash" import { generator } from "@budibase/backend-core/tests" import { utils } from "@budibase/backend-core" import { GenericContainer } from "testcontainers" -import { generateRowIdField } from "../integrations/utils" const config = setup.getConfig()! diff --git a/packages/server/src/integrations/airtable.ts b/packages/server/src/integrations/airtable.ts index 1f56f0619b..a102caab76 100644 --- a/packages/server/src/integrations/airtable.ts +++ b/packages/server/src/integrations/airtable.ts @@ -1,11 +1,13 @@ import { - Integration, + ConnectionInfo, + DatasourceFeature, DatasourceFieldType, - QueryType, + Integration, IntegrationBase, + QueryType, } from "@budibase/types" -const Airtable = require("airtable") +import Airtable from "airtable" interface AirtableConfig { apiKey: string @@ -18,6 +20,7 @@ const SCHEMA: Integration = { "Airtable is a spreadsheet-database hybrid, with the features of a database but applied to a spreadsheet.", friendlyName: "Airtable", type: "Spreadsheet", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { apiKey: { type: DatasourceFieldType.PASSWORD, @@ -81,13 +84,37 @@ const SCHEMA: Integration = { class AirtableIntegration implements IntegrationBase { private config: AirtableConfig - private client: any + private client constructor(config: AirtableConfig) { this.config = config this.client = new Airtable(config).base(config.base) } + async testConnection(): Promise { + const mockTable = 
Date.now().toString() + try { + await this.client.makeRequest({ + path: `/${mockTable}`, + }) + + return { connected: true } + } catch (e: any) { + if ( + e.message === + `Could not find table ${mockTable} in application ${this.config.base}` + ) { + // The request managed to check the application, so the credentials are valid + return { connected: true } + } + + return { + connected: false, + error: e.message as string, + } + } + } + async create(query: { table: any; json: any }) { const { table, json } = query diff --git a/packages/server/src/integrations/arangodb.ts b/packages/server/src/integrations/arangodb.ts index e28940f36e..b486748a68 100644 --- a/packages/server/src/integrations/arangodb.ts +++ b/packages/server/src/integrations/arangodb.ts @@ -3,9 +3,11 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" -const { Database, aql } = require("arangojs") +import { Database, aql } from "arangojs" interface ArangodbConfig { url: string @@ -21,6 +23,7 @@ const SCHEMA: Integration = { type: "Non-relational", description: "ArangoDB is a scalable open-source multi-model database natively supporting graph, document and search. All supported data models & access patterns can be combined in queries allowing for maximal flexibility. 
", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { url: { type: DatasourceFieldType.STRING, @@ -58,7 +61,7 @@ const SCHEMA: Integration = { class ArangoDBIntegration implements IntegrationBase { private config: ArangodbConfig - private client: any + private client constructor(config: ArangodbConfig) { const newConfig = { @@ -74,6 +77,19 @@ class ArangoDBIntegration implements IntegrationBase { this.client = new Database(newConfig) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.client.get() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async read(query: { sql: any }) { try { const result = await this.client.query(query.sql) diff --git a/packages/server/src/integrations/couchdb.ts b/packages/server/src/integrations/couchdb.ts index 257b84ca13..4ccbd5456d 100644 --- a/packages/server/src/integrations/couchdb.ts +++ b/packages/server/src/integrations/couchdb.ts @@ -1,4 +1,6 @@ import { + ConnectionInfo, + DatasourceFeature, DatasourceFieldType, Document, Integration, @@ -18,6 +20,7 @@ const SCHEMA: Integration = { type: "Non-relational", description: "Apache CouchDB is an open-source document-oriented NoSQL database, implemented in Erlang.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { url: { type: DatasourceFieldType.STRING, @@ -61,21 +64,32 @@ const SCHEMA: Integration = { } class CouchDBIntegration implements IntegrationBase { - private config: CouchDBConfig - private readonly client: any + private readonly client: dbCore.DatabaseImpl constructor(config: CouchDBConfig) { - this.config = config this.client = dbCore.DatabaseWithConnection(config.database, config.url) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + const result = await this.query("exists", "validation error", {}) + response.connected = result === true + } catch (e: any) { + 
response.error = e.message as string + } + return response + } + async query( command: string, errorMsg: string, query: { json?: object; id?: string } ) { try { - return await this.client[command](query.id || query.json) + return await (this.client as any)[command](query.id || query.json) } catch (err) { console.error(errorMsg, err) throw err diff --git a/packages/server/src/integrations/dynamodb.ts b/packages/server/src/integrations/dynamodb.ts index 28c1c7b52b..28b42c7a54 100644 --- a/packages/server/src/integrations/dynamodb.ts +++ b/packages/server/src/integrations/dynamodb.ts @@ -3,10 +3,13 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import AWS from "aws-sdk" import { AWS_REGION } from "../db/dynamoClient" +import { DocumentClient } from "aws-sdk/clients/dynamodb" interface DynamoDBConfig { region: string @@ -22,6 +25,7 @@ const SCHEMA: Integration = { "Amazon DynamoDB is a key-value and document database that delivers single-digit millisecond performance at any scale.", friendlyName: "DynamoDB", type: "Non-relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { region: { type: DatasourceFieldType.STRING, @@ -128,7 +132,7 @@ const SCHEMA: Integration = { class DynamoDBIntegration implements IntegrationBase { private config: DynamoDBConfig - private client: any + private client constructor(config: DynamoDBConfig) { this.config = config @@ -148,7 +152,23 @@ class DynamoDBIntegration implements IntegrationBase { this.client = new AWS.DynamoDB.DocumentClient(this.config) } - async create(query: { table: string; json: object }) { + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + const scanRes = await new AWS.DynamoDB(this.config).listTables().promise() + response.connected = !!scanRes.$response + } catch (e: any) { + response.error = e.message as string + } + return response + } + + async create(query: { + table: 
string + json: Omit + }) { const params = { TableName: query.table, ...query.json, @@ -189,7 +209,10 @@ class DynamoDBIntegration implements IntegrationBase { return new AWS.DynamoDB(this.config).describeTable(params).promise() } - async get(query: { table: string; json: object }) { + async get(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, @@ -197,7 +220,10 @@ class DynamoDBIntegration implements IntegrationBase { return this.client.get(params).promise() } - async update(query: { table: string; json: object }) { + async update(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, @@ -205,7 +231,10 @@ class DynamoDBIntegration implements IntegrationBase { return this.client.update(params).promise() } - async delete(query: { table: string; json: object }) { + async delete(query: { + table: string + json: Omit + }) { const params = { TableName: query.table, ...query.json, diff --git a/packages/server/src/integrations/elasticsearch.ts b/packages/server/src/integrations/elasticsearch.ts index aeba628d30..af52799c51 100644 --- a/packages/server/src/integrations/elasticsearch.ts +++ b/packages/server/src/integrations/elasticsearch.ts @@ -3,6 +3,8 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { Client, ClientOptions } from "@elastic/elasticsearch" @@ -20,6 +22,7 @@ const SCHEMA: Integration = { "Elasticsearch is a search engine based on the Lucene library. 
It provides a distributed, multitenant-capable full-text search engine with an HTTP web interface and schema-free JSON documents.", friendlyName: "ElasticSearch", type: "Non-relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { url: { type: DatasourceFieldType.STRING, @@ -95,7 +98,7 @@ const SCHEMA: Integration = { class ElasticSearchIntegration implements IntegrationBase { private config: ElasticsearchConfig - private client: any + private client constructor(config: ElasticsearchConfig) { this.config = config @@ -114,6 +117,18 @@ class ElasticSearchIntegration implements IntegrationBase { this.client = new Client(clientConfig) } + async testConnection(): Promise { + try { + await this.client.info() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + async create(query: { index: string; json: object }) { const { index, json } = query diff --git a/packages/server/src/integrations/firebase.ts b/packages/server/src/integrations/firebase.ts index a82b3be782..3907275f41 100644 --- a/packages/server/src/integrations/firebase.ts +++ b/packages/server/src/integrations/firebase.ts @@ -3,6 +3,8 @@ import { Integration, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { Firestore, WhereFilterOp } from "@google-cloud/firestore" @@ -18,6 +20,7 @@ const SCHEMA: Integration = { type: "Non-relational", description: "Cloud Firestore is a flexible, scalable database for mobile, web, and server development from Firebase and Google Cloud.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { email: { type: DatasourceFieldType.STRING, @@ -99,6 +102,18 @@ class FirebaseIntegration implements IntegrationBase { }) } + async testConnection(): Promise { + try { + await this.client.listCollections() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + async 
create(query: { json: object; extra: { [key: string]: string } }) { try { const documentReference = this.client diff --git a/packages/server/src/integrations/googlesheets.ts b/packages/server/src/integrations/googlesheets.ts index 644f650ea3..eea9cc4176 100644 --- a/packages/server/src/integrations/googlesheets.ts +++ b/packages/server/src/integrations/googlesheets.ts @@ -1,4 +1,6 @@ import { + ConnectionInfo, + DatasourceFeature, DatasourceFieldType, DatasourcePlus, FieldType, @@ -64,6 +66,7 @@ const SCHEMA: Integration = { "Create and collaborate on online spreadsheets in real-time and from any device. ", friendlyName: "Google Sheets", type: "Spreadsheet", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { spreadsheetId: { display: "Google Sheet URL", @@ -139,6 +142,19 @@ class GoogleSheetsIntegration implements DatasourcePlus { this.client = new GoogleSpreadsheet(spreadsheetId) } + async testConnection(): Promise { + try { + await this.connect() + await this.client.loadInfo() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + getBindingIdentifier() { return "" } diff --git a/packages/server/src/integrations/index.ts b/packages/server/src/integrations/index.ts index f3285e441f..90dd7cfcd6 100644 --- a/packages/server/src/integrations/index.ts +++ b/packages/server/src/integrations/index.ts @@ -20,7 +20,7 @@ import env from "../environment" import { cloneDeep } from "lodash" import sdk from "../sdk" -const DEFINITIONS: { [key: string]: Integration } = { +const DEFINITIONS: Record = { [SourceName.POSTGRES]: postgres.schema, [SourceName.DYNAMODB]: dynamodb.schema, [SourceName.MONGODB]: mongodb.schema, @@ -36,9 +36,10 @@ const DEFINITIONS: { [key: string]: Integration } = { [SourceName.GOOGLE_SHEETS]: googlesheets.schema, [SourceName.REDIS]: redis.schema, [SourceName.SNOWFLAKE]: snowflake.schema, + [SourceName.ORACLE]: undefined, } -const INTEGRATIONS: { [key: string]: any } = { 
+const INTEGRATIONS: Record = { [SourceName.POSTGRES]: postgres.integration, [SourceName.DYNAMODB]: dynamodb.integration, [SourceName.MONGODB]: mongodb.integration, @@ -55,6 +56,7 @@ const INTEGRATIONS: { [key: string]: any } = { [SourceName.REDIS]: redis.integration, [SourceName.FIRESTORE]: firebase.integration, [SourceName.SNOWFLAKE]: snowflake.integration, + [SourceName.ORACLE]: undefined, } // optionally add oracle integration if the oracle binary can be installed @@ -67,10 +69,13 @@ if ( INTEGRATIONS[SourceName.ORACLE] = oracle.integration } -export async function getDefinition(source: SourceName): Promise { +export async function getDefinition( + source: SourceName +): Promise { // check if its integrated, faster - if (DEFINITIONS[source]) { - return DEFINITIONS[source] + const definition = DEFINITIONS[source] + if (definition) { + return definition } const allDefinitions = await getDefinitions() return allDefinitions[source] @@ -98,7 +103,7 @@ export async function getDefinitions() { } } -export async function getIntegration(integration: string) { +export async function getIntegration(integration: SourceName) { if (INTEGRATIONS[integration]) { return INTEGRATIONS[integration] } @@ -107,7 +112,7 @@ export async function getIntegration(integration: string) { for (let plugin of plugins) { if (plugin.name === integration) { // need to use commonJS require due to its dynamic runtime nature - const retrieved: any = await getDatasourcePlugin(plugin) + const retrieved = await getDatasourcePlugin(plugin) if (retrieved.integration) { return retrieved.integration } else { diff --git a/packages/server/src/integrations/microsoftSqlServer.ts b/packages/server/src/integrations/microsoftSqlServer.ts index eb87c1ccf1..47f36f60e9 100644 --- a/packages/server/src/integrations/microsoftSqlServer.ts +++ b/packages/server/src/integrations/microsoftSqlServer.ts @@ -8,6 +8,8 @@ import { QueryType, SqlQuery, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from 
"@budibase/types" import { getSqlQuery, @@ -39,6 +41,7 @@ const SCHEMA: Integration = { "Microsoft SQL Server is a relational database management system developed by Microsoft. ", friendlyName: "MS SQL Server", type: "Relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { user: { type: DatasourceFieldType.STRING, @@ -121,6 +124,19 @@ class SqlServerIntegration extends Sql implements DatasourcePlus { } } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.connect() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + getBindingIdentifier(): string { return `@p${this.index++}` } diff --git a/packages/server/src/integrations/mongodb.ts b/packages/server/src/integrations/mongodb.ts index 38b3891fe4..ee7302c501 100644 --- a/packages/server/src/integrations/mongodb.ts +++ b/packages/server/src/integrations/mongodb.ts @@ -3,6 +3,8 @@ import { DatasourceFieldType, QueryType, IntegrationBase, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { MongoClient, @@ -38,6 +40,7 @@ const getSchema = () => { type: "Non-relational", description: "MongoDB is a general purpose, document-based, distributed database built for modern application developers and for the cloud era.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { connectionString: { type: DatasourceFieldType.STRING, @@ -358,6 +361,19 @@ class MongoIntegration implements IntegrationBase { this.client = new MongoClient(config.connectionString, options) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.connect() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async connect() { return this.client.connect() } diff --git a/packages/server/src/integrations/mysql.ts b/packages/server/src/integrations/mysql.ts index 
8d984ed402..eb721a6e0f 100644 --- a/packages/server/src/integrations/mysql.ts +++ b/packages/server/src/integrations/mysql.ts @@ -7,6 +7,8 @@ import { Table, TableSchema, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { getSqlQuery, @@ -20,18 +22,11 @@ import { NUMBER_REGEX } from "../utilities" import Sql from "./base/sql" import { MySQLColumn } from "./base/types" -const mysql = require("mysql2/promise") +import mysql from "mysql2/promise" -interface MySQLConfig { - host: string - port: number - user: string - password: string +interface MySQLConfig extends mysql.ConnectionOptions { database: string - ssl?: { [key: string]: any } rejectUnauthorized: boolean - typeCast: Function - multipleStatements: boolean } const SCHEMA: Integration = { @@ -41,6 +36,7 @@ const SCHEMA: Integration = { type: "Relational", description: "MySQL Database Service is a fully managed database service to deploy cloud-native applications. ", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: DatasourceFieldType.STRING, @@ -92,8 +88,6 @@ const SCHEMA: Integration = { }, } -const TimezoneAwareDateTypes = ["timestamp"] - function bindingTypeCoerce(bindings: any[]) { for (let i = 0; i < bindings.length; i++) { const binding = bindings[i] @@ -120,7 +114,7 @@ function bindingTypeCoerce(bindings: any[]) { class MySQLIntegration extends Sql implements DatasourcePlus { private config: MySQLConfig - private client: any + private client?: mysql.Connection public tables: Record = {} public schemaErrors: Record = {} @@ -134,7 +128,8 @@ class MySQLIntegration extends Sql implements DatasourcePlus { if ( config.rejectUnauthorized != null && !config.rejectUnauthorized && - config.ssl + config.ssl && + typeof config.ssl !== "string" ) { config.ssl.rejectUnauthorized = config.rejectUnauthorized } @@ -160,6 +155,22 @@ class MySQLIntegration extends Sql implements DatasourcePlus { } } + async testConnection() { + const response: 
ConnectionInfo = { + connected: false, + } + try { + const [result] = await this.internalQuery( + { sql: "SELECT 1+1 AS checkRes" }, + { connect: true } + ) + response.connected = result?.checkRes == 2 + } catch (e: any) { + response.error = e.message as string + } + return response + } + getBindingIdentifier(): string { return "?" } @@ -173,7 +184,7 @@ class MySQLIntegration extends Sql implements DatasourcePlus { } async disconnect() { - await this.client.end() + await this.client!.end() } async internalQuery( @@ -192,10 +203,10 @@ class MySQLIntegration extends Sql implements DatasourcePlus { ? baseBindings : bindingTypeCoerce(baseBindings) // Node MySQL is callback based, so we must wrap our call in a promise - const response = await this.client.query(query.sql, bindings) + const response = await this.client!.query(query.sql, bindings) return response[0] } finally { - if (opts?.connect) { + if (opts?.connect && this.client) { await this.disconnect() } } diff --git a/packages/server/src/integrations/oracle.ts b/packages/server/src/integrations/oracle.ts index 65e0829905..f8ec6e8bae 100644 --- a/packages/server/src/integrations/oracle.ts +++ b/packages/server/src/integrations/oracle.ts @@ -7,6 +7,8 @@ import { SqlQuery, Table, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { buildExternalTableId, @@ -24,12 +26,7 @@ import { ExecuteOptions, Result, } from "oracledb" -import { - OracleTable, - OracleColumn, - OracleColumnsResponse, - OracleConstraint, -} from "./base/types" +import { OracleTable, OracleColumn, OracleColumnsResponse } from "./base/types" let oracledb: any try { oracledb = require("oracledb") @@ -53,6 +50,7 @@ const SCHEMA: Integration = { type: "Relational", description: "Oracle Database is an object-relational database management system developed by Oracle Corporation", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: DatasourceFieldType.STRING, @@ -325,6 +323,30 @@ class 
OracleIntegration extends Sql implements DatasourcePlus { this.schemaErrors = final.errors } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + let connection + try { + connection = await this.getConnection() + response.connected = true + } catch (err: any) { + response.connected = false + response.error = err.message + } finally { + if (connection) { + try { + await connection.close() + } catch (err: any) { + response.connected = false + response.error = err.message + } + } + } + return response + } + private async internalQuery(query: SqlQuery): Promise> { let connection try { diff --git a/packages/server/src/integrations/postgres.ts b/packages/server/src/integrations/postgres.ts index c981c3acc5..bf77ec08c6 100644 --- a/packages/server/src/integrations/postgres.ts +++ b/packages/server/src/integrations/postgres.ts @@ -6,6 +6,8 @@ import { SqlQuery, Table, DatasourcePlus, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" import { getSqlQuery, @@ -18,7 +20,7 @@ import Sql from "./base/sql" import { PostgresColumn } from "./base/types" import { escapeDangerousCharacters } from "../utilities" -const { Client, types } = require("pg") +import { Client, types } from "pg" // Return "date" and "timestamp" types as plain strings. // This lets us reference the original stored timezone. 
@@ -50,6 +52,7 @@ const SCHEMA: Integration = { type: "Relational", description: "PostgreSQL, also known as Postgres, is a free and open-source relational database management system emphasizing extensibility and SQL compliance.", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: DatasourceFieldType.STRING, @@ -114,7 +117,7 @@ const SCHEMA: Integration = { } class PostgresIntegration extends Sql implements DatasourcePlus { - private readonly client: any + private readonly client: Client private readonly config: PostgresConfig private index: number = 1 private open: boolean @@ -150,6 +153,21 @@ class PostgresIntegration extends Sql implements DatasourcePlus { this.open = false } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.openConnection() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } finally { + await this.closeConnection() + } + return response + } + getBindingIdentifier(): string { return `$${this.index++}` } @@ -163,7 +181,7 @@ class PostgresIntegration extends Sql implements DatasourcePlus { if (!this.config.schema) { this.config.schema = "public" } - this.client.query(`SET search_path TO ${this.config.schema}`) + await this.client.query(`SET search_path TO ${this.config.schema}`) this.COLUMNS_SQL = `select * from information_schema.columns where table_schema = '${this.config.schema}'` this.open = true } diff --git a/packages/server/src/integrations/redis.ts b/packages/server/src/integrations/redis.ts index 73ef2bb55c..d71f66edc1 100644 --- a/packages/server/src/integrations/redis.ts +++ b/packages/server/src/integrations/redis.ts @@ -1,4 +1,10 @@ -import { DatasourceFieldType, Integration, QueryType } from "@budibase/types" +import { + ConnectionInfo, + DatasourceFeature, + DatasourceFieldType, + Integration, + QueryType, +} from "@budibase/types" import Redis from "ioredis" interface RedisConfig { @@ -11,9 +17,11 @@ 
interface RedisConfig { const SCHEMA: Integration = { docs: "https://redis.io/docs/", - description: "", + description: + "Redis is a caching tool, providing powerful key-value store capabilities.", friendlyName: "Redis", type: "Non-relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { host: { type: "string", @@ -86,7 +94,7 @@ const SCHEMA: Integration = { class RedisIntegration { private readonly config: RedisConfig - private client: any + private client constructor(config: RedisConfig) { this.config = config @@ -99,6 +107,21 @@ class RedisIntegration { }) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.client.ping() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } finally { + await this.disconnect() + } + return response + } + async disconnect() { return this.client.quit() } diff --git a/packages/server/src/integrations/s3.ts b/packages/server/src/integrations/s3.ts index ad3bb09109..0f9848ed59 100644 --- a/packages/server/src/integrations/s3.ts +++ b/packages/server/src/integrations/s3.ts @@ -3,10 +3,12 @@ import { QueryType, IntegrationBase, DatasourceFieldType, + DatasourceFeature, + ConnectionInfo, } from "@budibase/types" -const AWS = require("aws-sdk") -const csv = require("csvtojson") +import AWS from "aws-sdk" +import csv from "csvtojson" interface S3Config { region: string @@ -22,6 +24,7 @@ const SCHEMA: Integration = { "Amazon Simple Storage Service (Amazon S3) is an object storage service that offers industry-leading scalability, data availability, security, and performance.", friendlyName: "Amazon S3", type: "Object store", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { region: { type: "string", @@ -152,7 +155,7 @@ const SCHEMA: Integration = { class S3Integration implements IntegrationBase { private readonly config: S3Config - private client: any + private client constructor(config: S3Config) { 
this.config = config @@ -165,6 +168,19 @@ class S3Integration implements IntegrationBase { this.client = new AWS.S3(this.config) } + async testConnection() { + const response: ConnectionInfo = { + connected: false, + } + try { + await this.client.listBuckets().promise() + response.connected = true + } catch (e: any) { + response.error = e.message as string + } + return response + } + async create(query: { bucket: string location: string diff --git a/packages/server/src/integrations/snowflake.ts b/packages/server/src/integrations/snowflake.ts index db702520f9..9b743131ae 100644 --- a/packages/server/src/integrations/snowflake.ts +++ b/packages/server/src/integrations/snowflake.ts @@ -1,4 +1,10 @@ -import { Integration, QueryType, SqlQuery } from "@budibase/types" +import { + ConnectionInfo, + DatasourceFeature, + Integration, + QueryType, + SqlQuery, +} from "@budibase/types" import { Snowflake } from "snowflake-promise" interface SnowflakeConfig { @@ -16,6 +22,7 @@ const SCHEMA: Integration = { "Snowflake is a solution for data warehousing, data lakes, data engineering, data science, data application development, and securely sharing and consuming shared data.", friendlyName: "Snowflake", type: "Relational", + features: [DatasourceFeature.CONNECTION_CHECKING], datasource: { account: { type: "string", @@ -65,6 +72,18 @@ class SnowflakeIntegration { this.client = new Snowflake(config) } + async testConnection(): Promise { + try { + await this.client.connect() + return { connected: true } + } catch (e: any) { + return { + connected: false, + error: e.message as string, + } + } + } + async internalQuery(query: SqlQuery) { await this.client.connect() try { diff --git a/packages/server/src/migrations/functions/backfill/app/queries.ts b/packages/server/src/migrations/functions/backfill/app/queries.ts index e66c7af841..e028721bce 100644 --- a/packages/server/src/migrations/functions/backfill/app/queries.ts +++ 
b/packages/server/src/migrations/functions/backfill/app/queries.ts @@ -33,7 +33,7 @@ export const backfill = async (appDb: any, timestamp: string | number) => { datasource = { type: "unknown", _id: query.datasourceId, - source: SourceName.UNKNOWN, + source: "unknown" as SourceName, } } else { throw e diff --git a/packages/server/src/sdk/app/datasources/datasources.ts b/packages/server/src/sdk/app/datasources/datasources.ts index b3fe5bcdf1..c886e6a15f 100644 --- a/packages/server/src/sdk/app/datasources/datasources.ts +++ b/packages/server/src/sdk/app/datasources/datasources.ts @@ -13,6 +13,7 @@ import { import { cloneDeep } from "lodash/fp" import { getEnvironmentVariables } from "../../utils" import { getDefinitions, getDefinition } from "../../../integrations" +import _ from "lodash" const ENV_VAR_PREFIX = "env." @@ -41,7 +42,7 @@ async function enrichDatasourceWithValues(datasource: Datasource) { { onlyFound: true } ) as Datasource const definition = await getDefinition(processed.source) - processed.config = checkDatasourceTypes(definition, processed.config) + processed.config = checkDatasourceTypes(definition!, processed.config) return { datasource: processed, envVars: env as Record, @@ -147,6 +148,11 @@ export function mergeConfigs(update: Datasource, old: Datasource) { } } } + + if (old.config?.auth) { + update.config = _.merge(old.config, update.config) + } + // update back to actual passwords for everything else for (let [key, value] of Object.entries(update.config)) { if (value !== PASSWORD_REPLACEMENT) { diff --git a/packages/server/src/threads/automation.ts b/packages/server/src/threads/automation.ts index adc1a83af0..e271ebba0b 100644 --- a/packages/server/src/threads/automation.ts +++ b/packages/server/src/threads/automation.ts @@ -13,13 +13,18 @@ import { generateAutomationMetadataID, isProdAppID } from "../db/utils" import { definitions as triggerDefs } from "../automations/triggerInfo" import { AutomationErrors, MAX_AUTOMATION_RECURRING_ERRORS } 
from "../constants" import { storeLog } from "../automations/logging" -import { Automation, AutomationStep, AutomationStatus } from "@budibase/types" +import { + Automation, + AutomationStep, + AutomationStatus, + AutomationMetadata, + AutomationJob, +} from "@budibase/types" import { LoopStep, LoopInput, TriggerOutput, AutomationContext, - AutomationMetadata, } from "../definitions/automations" import { WorkerCallback } from "./definitions" import { context, logging } from "@budibase/backend-core" @@ -60,11 +65,11 @@ class Orchestrator { _job: Job executionOutput: AutomationContext - constructor(job: Job) { - let automation = job.data.automation, - triggerOutput = job.data.event + constructor(job: AutomationJob) { + let automation = job.data.automation + let triggerOutput = job.data.event const metadata = triggerOutput.metadata - this._chainCount = metadata ? metadata.automationChainCount : 0 + this._chainCount = metadata ? metadata.automationChainCount! : 0 this._appId = triggerOutput.appId as string this._job = job const triggerStepId = automation.definition.trigger.stepId diff --git a/packages/server/src/threads/definitions.ts b/packages/server/src/threads/definitions.ts index 2cf5d8066c..21e7ce0b69 100644 --- a/packages/server/src/threads/definitions.ts +++ b/packages/server/src/threads/definitions.ts @@ -1,5 +1,3 @@ -import { EnvironmentVariablesDecrypted } from "@budibase/types" - export type WorkerCallback = (error: any, response?: any) => void export interface QueryEvent { diff --git a/packages/server/src/threads/index.ts b/packages/server/src/threads/index.ts index 6876f1a07c..9b6bffa867 100644 --- a/packages/server/src/threads/index.ts +++ b/packages/server/src/threads/index.ts @@ -1,5 +1,7 @@ import workerFarm from "worker-farm" import env from "../environment" +import { AutomationJob } from "@budibase/types" +import { QueryEvent } from "./definitions" export const ThreadType = { QUERY: "query", @@ -64,11 +66,11 @@ export class Thread { ) } - run(data: 
any) { + run(job: AutomationJob | QueryEvent) { const timeout = this.timeoutMs return new Promise((resolve, reject) => { function fire(worker: any) { - worker.execute(data, (err: any, response: any) => { + worker.execute(job, (err: any, response: any) => { if (err && err.type === "TimeoutError") { reject( new Error(`Query response time exceeded ${timeout}ms timeout.`) diff --git a/packages/server/src/utilities/rowProcessor/index.ts b/packages/server/src/utilities/rowProcessor/index.ts index a5bb352eeb..44cab4d18b 100644 --- a/packages/server/src/utilities/rowProcessor/index.ts +++ b/packages/server/src/utilities/rowProcessor/index.ts @@ -137,8 +137,7 @@ export function inputProcessing( opts?: AutoColumnProcessingOpts ) { let clonedRow = cloneDeep(row) - // need to copy the table so it can be differenced on way out - const copiedTable = cloneDeep(table) + const dontCleanseKeys = ["type", "_id", "_rev", "tableId"] for (let [key, value] of Object.entries(clonedRow)) { const field = table.schema[key] @@ -175,7 +174,7 @@ export function inputProcessing( } // handle auto columns - this returns an object like {table, row} - return processAutoColumn(user, copiedTable, clonedRow, opts) + return processAutoColumn(user, table, clonedRow, opts) } /** diff --git a/packages/server/src/utilities/rowProcessor/map.ts b/packages/server/src/utilities/rowProcessor/map.ts index 808b16178d..cf6823856c 100644 --- a/packages/server/src/utilities/rowProcessor/map.ts +++ b/packages/server/src/utilities/rowProcessor/map.ts @@ -2,6 +2,22 @@ import { FieldTypes } from "../../constants" import { logging } from "@budibase/backend-core" +const parseArrayString = value => { + if (typeof value === "string") { + if (value === "") { + return [] + } + let result + try { + result = JSON.parse(value.replace(/'/g, '"')) + return result + } catch (e) { + logging.logAlert("Could not parse row value", e) + } + } + return value +} + /** * A map of how we convert various properties in rows to each other based 
on the row type. */ @@ -26,9 +42,9 @@ export const TYPE_TRANSFORM_MAP: any = { [undefined]: undefined, }, [FieldTypes.ARRAY]: { - "": [], [null]: [], [undefined]: undefined, + parse: parseArrayString, }, [FieldTypes.STRING]: { "": "", @@ -70,21 +86,7 @@ export const TYPE_TRANSFORM_MAP: any = { [FieldTypes.ATTACHMENT]: { [null]: [], [undefined]: undefined, - parse: attachments => { - if (typeof attachments === "string") { - if (attachments === "") { - return [] - } - let result - try { - result = JSON.parse(attachments) - } catch (e) { - logging.logAlert("Could not parse attachments", e) - } - return result - } - return attachments - }, + parse: parseArrayString, }, [FieldTypes.BOOLEAN]: { "": null, diff --git a/packages/types/src/api/web/app/datasource.ts b/packages/types/src/api/web/app/datasource.ts index d54259eab5..983fd45b92 100644 --- a/packages/types/src/api/web/app/datasource.ts +++ b/packages/types/src/api/web/app/datasource.ts @@ -14,6 +14,15 @@ export interface CreateDatasourceRequest { fetchSchema?: boolean } +export interface VerifyDatasourceRequest { + datasource: Datasource +} + +export interface VerifyDatasourceResponse { + connected: boolean + error?: string +} + export interface UpdateDatasourceRequest extends Datasource { datasource: Datasource } diff --git a/packages/types/src/documents/app/automation.ts b/packages/types/src/documents/app/automation.ts index 946e852a7b..70cefbb0d9 100644 --- a/packages/types/src/documents/app/automation.ts +++ b/packages/types/src/documents/app/automation.ts @@ -180,3 +180,8 @@ export type AutomationStepInput = { appId: string apiKey?: string } + +export interface AutomationMetadata extends Document { + errorCount?: number + automationChainCount?: number +} diff --git a/packages/types/src/sdk/automations/index.ts b/packages/types/src/sdk/automations/index.ts new file mode 100644 index 0000000000..f1e3d38101 --- /dev/null +++ b/packages/types/src/sdk/automations/index.ts @@ -0,0 +1,15 @@ +import { Automation, 
AutomationMetadata } from "../../documents" +import { Job } from "bull" + +export interface AutomationDataEvent { + appId?: string + metadata?: AutomationMetadata + automation?: Automation +} + +export interface AutomationData { + event: AutomationDataEvent + automation: Automation +} + +export type AutomationJob = Job diff --git a/packages/types/src/sdk/datasources.ts b/packages/types/src/sdk/datasources.ts index 605b431d9e..9df9670877 100644 --- a/packages/types/src/sdk/datasources.ts +++ b/packages/types/src/sdk/datasources.ts @@ -55,7 +55,6 @@ export enum SourceName { FIRESTORE = "FIRESTORE", REDIS = "REDIS", SNOWFLAKE = "SNOWFLAKE", - UNKNOWN = "unknown", } export enum IncludeRelationship { @@ -74,6 +73,10 @@ export enum FilterType { ONE_OF = "oneOf", } +export enum DatasourceFeature { + CONNECTION_CHECKING = "connection", +} + export interface StepDefinition { key: string template: string @@ -112,6 +115,7 @@ export interface Integration { docs: string plus?: boolean auth?: { type: string } + features?: DatasourceFeature[] relationships?: boolean description: string friendlyName: string @@ -124,11 +128,17 @@ export interface Integration { extra?: ExtraQueryConfig } +export type ConnectionInfo = { + connected: boolean + error?: string +} + export interface IntegrationBase { create?(query: any): Promise read?(query: any): Promise update?(query: any): Promise delete?(query: any): Promise + testConnection?(): Promise } export interface DatasourcePlus extends IntegrationBase { diff --git a/packages/types/src/sdk/index.ts b/packages/types/src/sdk/index.ts index 8fc5fb287e..ed44c13667 100644 --- a/packages/types/src/sdk/index.ts +++ b/packages/types/src/sdk/index.ts @@ -1,3 +1,4 @@ +export * from "./automations" export * from "./hosting" export * from "./context" export * from "./events" diff --git a/packages/worker/src/utilities/redis.ts b/packages/worker/src/utilities/redis.ts index d77e44cd9f..6b82df3b57 100644 --- a/packages/worker/src/utilities/redis.ts +++ 
b/packages/worker/src/utilities/redis.ts @@ -7,7 +7,7 @@ function getExpirySecondsForDB(db: string) { // a hour return 3600 case redis.utils.Databases.INVITATIONS: - // a day + // a week return 604800 } } diff --git a/qa-core/package.json b/qa-core/package.json index 73fd59cab2..987fe36d7c 100644 --- a/qa-core/package.json +++ b/qa-core/package.json @@ -14,12 +14,14 @@ "test:watch": "yarn run test --watch", "test:debug": "DEBUG=1 yarn run test", "test:notify": "node scripts/testResultsWebhook", - "test:smoke": "yarn run test --testPathIgnorePatterns=\\\"\\/dataSources\\/\\\"", + "test:smoke": "yarn run test --testPathIgnorePatterns=/.+\\.integration\\.spec\\.ts", "test:ci": "start-server-and-test dev:built http://localhost:4001/health test:smoke", + "serve": "start-server-and-test dev:built http://localhost:4001/health", "dev:built": "cd ../ && yarn dev:built" }, "devDependencies": { "@budibase/types": "^2.3.17", + "@trendyol/jest-testcontainers": "^2.1.1", "@types/jest": "29.0.0", "@types/node-fetch": "2.6.2", "chance": "1.1.8", diff --git a/qa-core/src/integrations/validators/arango.integration.spec.ts b/qa-core/src/integrations/validators/arango.integration.spec.ts new file mode 100644 index 0000000000..7c0faafd61 --- /dev/null +++ b/qa-core/src/integrations/validators/arango.integration.spec.ts @@ -0,0 +1,77 @@ +import { GenericContainer, Wait } from "testcontainers" +import arangodb from "../../../../packages/server/src/integrations/arangodb" +import { generator } from "../../shared" + +jest.unmock("arangojs") + +describe("datasource validators", () => { + describe("arangodb", () => { + let connectionSettings: { + user: string + password: string + url: string + } + + beforeAll(async () => { + const user = "root" + const password = generator.hash() + const container = await new GenericContainer("arangodb") + .withExposedPorts(8529) + .withEnv("ARANGO_ROOT_PASSWORD", password) + .withWaitStrategy( + Wait.forLogMessage("is ready for business. 
Have fun!") + ) + .start() + + connectionSettings = { + user, + password, + url: `http://${container.getContainerIpAddress()}:${container.getMappedPort( + 8529 + )}`, + } + }) + + it("test valid connection string", async () => { + const integration = new arangodb.integration({ + url: connectionSettings.url, + username: connectionSettings.user, + password: connectionSettings.password, + databaseName: "", + collection: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong password", async () => { + const integration = new arangodb.integration({ + url: connectionSettings.url, + username: connectionSettings.user, + password: "wrong", + databaseName: "", + collection: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "not authorized to execute this request", + }) + }) + + it("test wrong url", async () => { + const integration = new arangodb.integration({ + url: "http://not.here", + username: connectionSettings.user, + password: connectionSettings.password, + databaseName: "", + collection: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "getaddrinfo ENOTFOUND not.here", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/couch.integration.spec.ts b/qa-core/src/integrations/validators/couch.integration.spec.ts new file mode 100644 index 0000000000..b0f4254610 --- /dev/null +++ b/qa-core/src/integrations/validators/couch.integration.spec.ts @@ -0,0 +1,67 @@ +import { GenericContainer } from "testcontainers" + +import couchdb from "../../../../packages/server/src/integrations/couchdb" +import { generator } from "../../shared" + +describe("datasource validators", () => { + describe("couchdb", () => { + let url: string + + beforeAll(async () => { + const user = generator.first() + const password = generator.hash() + + const container = await new 
GenericContainer("budibase/couchdb") + .withExposedPorts(5984) + .withEnv("COUCHDB_USER", user) + .withEnv("COUCHDB_PASSWORD", password) + .start() + + const host = container.getContainerIpAddress() + const port = container.getMappedPort(5984) + + await container.exec([ + `curl`, + `-u`, + `${user}:${password}`, + `-X`, + `PUT`, + `localhost:5984/db`, + ]) + url = `http://${user}:${password}@${host}:${port}` + }) + + it("test valid connection string", async () => { + const integration = new couchdb.integration({ + url, + database: "db", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid database", async () => { + const integration = new couchdb.integration({ + url, + database: "random_db", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + }) + }) + + it("test invalid url", async () => { + const integration = new couchdb.integration({ + url: "http://invalid:123", + database: "any", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "request to http://invalid:123/any failed, reason: getaddrinfo ENOTFOUND invalid", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/dynamodb.integration.spec.ts b/qa-core/src/integrations/validators/dynamodb.integration.spec.ts new file mode 100644 index 0000000000..c885f64213 --- /dev/null +++ b/qa-core/src/integrations/validators/dynamodb.integration.spec.ts @@ -0,0 +1,63 @@ +import { GenericContainer } from "testcontainers" +import { env } from "@budibase/backend-core" + +import dynamodb from "../../../../packages/server/src/integrations/dynamodb" +import { generator } from "../../shared" + +jest.unmock("aws-sdk") + +describe("datasource validators", () => { + describe("dynamodb", () => { + let connectionSettings: { + user: string + password: string + url: string + } + + beforeAll(async () => { + const user = "root" + 
const password = generator.hash() + const container = await new GenericContainer("amazon/dynamodb-local") + .withExposedPorts(8000) + .start() + + connectionSettings = { + user, + password, + url: `http://${container.getContainerIpAddress()}:${container.getMappedPort( + 8000 + )}`, + } + env._set("AWS_ACCESS_KEY_ID", "mocked_key") + env._set("AWS_SECRET_ACCESS_KEY", "mocked_secret") + }) + + it("test valid connection string", async () => { + const integration = new dynamodb.integration({ + endpoint: connectionSettings.url, + region: "", + accessKeyId: "", + secretAccessKey: "", + }) + + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong endpoint", async () => { + const integration = new dynamodb.integration({ + endpoint: "http://wrong.url:2880", + region: "", + accessKeyId: "", + secretAccessKey: "", + }) + + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "Inaccessible host: `wrong.url' at port `undefined'. 
This service may not be available in the `eu-west-1' region.", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/elastic.integration.spec.ts b/qa-core/src/integrations/validators/elastic.integration.spec.ts new file mode 100644 index 0000000000..39fd732744 --- /dev/null +++ b/qa-core/src/integrations/validators/elastic.integration.spec.ts @@ -0,0 +1,34 @@ +import { ElasticsearchContainer } from "testcontainers" +import elastic from "../../../../packages/server/src/integrations/elasticsearch" + +jest.unmock("@elastic/elasticsearch") + +describe("datasource validators", () => { + describe("elastic search", () => { + let url: string + + beforeAll(async () => { + const container = await new ElasticsearchContainer().start() + url = container.getHttpUrl() + }) + + it("test valid connection string", async () => { + const integration = new elastic.integration({ + url, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong connection string", async () => { + const integration = new elastic.integration({ + url: `http://localhost:5656`, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "connect ECONNREFUSED 127.0.0.1:5656", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/mongo.integration.spec.ts b/qa-core/src/integrations/validators/mongo.integration.spec.ts new file mode 100644 index 0000000000..a20b7cd7fa --- /dev/null +++ b/qa-core/src/integrations/validators/mongo.integration.spec.ts @@ -0,0 +1,100 @@ +import { GenericContainer } from "testcontainers" +import mongo from "../../../../packages/server/src/integrations/mongodb" +import { generator } from "../../shared" + +jest.unmock("mongodb") + +describe("datasource validators", () => { + describe("mongo", () => { + let connectionSettings: { + user: string + password: string + host: string + port: number + } + + function getConnectionString( + settings: 
Partial = {} + ) { + const { user, password, host, port } = { + ...connectionSettings, + ...settings, + } + return `mongodb://${user}:${password}@${host}:${port}` + } + + beforeAll(async () => { + const user = generator.name() + const password = generator.hash() + const container = await new GenericContainer("mongo") + .withExposedPorts(27017) + .withEnv("MONGO_INITDB_ROOT_USERNAME", user) + .withEnv("MONGO_INITDB_ROOT_PASSWORD", password) + .start() + + connectionSettings = { + user, + password, + host: container.getContainerIpAddress(), + port: container.getMappedPort(27017), + } + }) + + it("test valid connection string", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString(), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid password", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString({ password: "wrong" }), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Authentication failed.", + }) + }) + + it("test invalid username", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString({ user: "wrong" }), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Authentication failed.", + }) + }) + + it("test invalid connection", async () => { + const integration = new mongo.integration({ + connectionString: getConnectionString({ host: "http://nothinghere" }), + db: "", + tlsCertificateFile: "", + tlsCertificateKeyFile: "", + tlsCAFile: "", + }) + const result = await 
integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Error: getaddrinfo ENOTFOUND http", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/mssql.integration.spec.ts b/qa-core/src/integrations/validators/mssql.integration.spec.ts new file mode 100644 index 0000000000..17f79d86ec --- /dev/null +++ b/qa-core/src/integrations/validators/mssql.integration.spec.ts @@ -0,0 +1,65 @@ +import { GenericContainer, Wait } from "testcontainers" +import { Duration, TemporalUnit } from "node-duration" + +import mssql from "../../../../packages/server/src/integrations/microsoftSqlServer" + +jest.unmock("mssql") + +describe("datasource validators", () => { + describe("mssql", () => { + let host: string, port: number + + const password = "Str0Ng_p@ssW0rd!" + + beforeAll(async () => { + const container = await new GenericContainer( + "mcr.microsoft.com/mssql/server" + ) + .withExposedPorts(1433) + .withEnv("ACCEPT_EULA", "Y") + .withEnv("MSSQL_SA_PASSWORD", password) + .withEnv("MSSQL_PID", "Developer") + .withWaitStrategy(Wait.forHealthCheck()) + .withHealthCheck({ + test: `/opt/mssql-tools/bin/sqlcmd -S localhost -U sa -P "${password}" -Q "SELECT 1" -b -o /dev/null`, + interval: new Duration(1000, TemporalUnit.MILLISECONDS), + timeout: new Duration(3, TemporalUnit.SECONDS), + retries: 20, + startPeriod: new Duration(100, TemporalUnit.MILLISECONDS), + }) + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(1433) + }) + + it("test valid connection string", async () => { + const integration = new mssql.integration({ + user: "sa", + password, + server: host, + port: port, + database: "master", + schema: "dbo", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid password", async () => { + const integration = new mssql.integration({ + user: "sa", + password: "wrong_pwd", + server: host, + port: port, + database: "master", + 
schema: "dbo", + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "ConnectionError: Login failed for user 'sa'.", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/mysql.integration.spec.ts b/qa-core/src/integrations/validators/mysql.integration.spec.ts new file mode 100644 index 0000000000..6ee39731fa --- /dev/null +++ b/qa-core/src/integrations/validators/mysql.integration.spec.ts @@ -0,0 +1,70 @@ +import { GenericContainer } from "testcontainers" +import mysql from "../../../../packages/server/src/integrations/mysql" + +jest.unmock("mysql2/promise") + +describe("datasource validators", () => { + describe("mysql", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("mysql") + .withExposedPorts(3306) + .withEnv("MYSQL_ROOT_PASSWORD", "admin") + .withEnv("MYSQL_DATABASE", "db") + .withEnv("MYSQL_USER", "user") + .withEnv("MYSQL_PASSWORD", "password") + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(3306) + }) + + it("test valid connection string", async () => { + const integration = new mysql.integration({ + host, + port, + user: "user", + database: "db", + password: "password", + rejectUnauthorized: true, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid database", async () => { + const integration = new mysql.integration({ + host, + port, + user: "user", + database: "test", + password: "password", + rejectUnauthorized: true, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: "Access denied for user 'user'@'%' to database 'test'", + }) + }) + + it("test invalid password", async () => { + const integration = new mysql.integration({ + host, + port, + user: "root", + database: "test", + password: "wrong", + rejectUnauthorized: true, + }) + 
const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "Access denied for user 'root'@'172.17.0.1' (using password: YES)", + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/postgres.integration.spec.ts b/qa-core/src/integrations/validators/postgres.integration.spec.ts new file mode 100644 index 0000000000..029d929df0 --- /dev/null +++ b/qa-core/src/integrations/validators/postgres.integration.spec.ts @@ -0,0 +1,54 @@ +import { GenericContainer } from "testcontainers" +import postgres from "../../../../packages/server/src/integrations/postgres" + +jest.unmock("pg") + +describe("datasource validators", () => { + describe("postgres", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("postgres") + .withExposedPorts(5432) + .withEnv("POSTGRES_PASSWORD", "password") + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(5432) + }) + + it("test valid connection string", async () => { + const integration = new postgres.integration({ + host, + port, + database: "postgres", + user: "postgres", + password: "password", + schema: "public", + ssl: false, + rejectUnauthorized: false, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid connection string", async () => { + const integration = new postgres.integration({ + host, + port, + database: "postgres", + user: "wrong", + password: "password", + schema: "public", + ssl: false, + rejectUnauthorized: false, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: 'password authentication failed for user "wrong"', + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/redis.integration.spec.ts b/qa-core/src/integrations/validators/redis.integration.spec.ts new file mode 100644 index 0000000000..89ada2fe2d --- /dev/null +++ b/qa-core/src/integrations/validators/redis.integration.spec.ts @@ 
-0,0 +1,72 @@ +import redis from "../../../../packages/server/src/integrations/redis" +import { GenericContainer } from "testcontainers" +import { generator } from "../../shared" + +describe("datasource validators", () => { + describe("redis", () => { + describe("unsecured", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("redis") + .withExposedPorts(6379) + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(6379) + }) + + it("test valid connection", async () => { + const integration = new redis.integration({ + host, + port, + username: "", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test invalid connection even with wrong user/password", async () => { + const integration = new redis.integration({ + host, + port, + username: generator.name(), + password: generator.hash(), + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "WRONGPASS invalid username-password pair or user is disabled.", + }) + }) + }) + + describe("secured", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("redis") + .withExposedPorts(6379) + .withCmd(["redis-server", "--requirepass", "P@ssW0rd!"]) + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(6379) + }) + + it("test valid connection", async () => { + const integration = new redis.integration({ + host, + port, + username: "", + password: "P@ssW0rd!", + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + }) + }) +}) diff --git a/qa-core/src/integrations/validators/s3.integration.spec.ts b/qa-core/src/integrations/validators/s3.integration.spec.ts new file mode 100644 index 0000000000..7bb415ee3d --- /dev/null +++ 
b/qa-core/src/integrations/validators/s3.integration.spec.ts @@ -0,0 +1,52 @@ +import s3 from "../../../../packages/server/src/integrations/s3" +import { GenericContainer } from "testcontainers" + +jest.unmock("aws-sdk") + +describe("datasource validators", () => { + describe("s3", () => { + let host: string + let port: number + + beforeAll(async () => { + const container = await new GenericContainer("localstack/localstack") + .withExposedPorts(4566) + .withEnv("SERVICES", "s3") + .withEnv("DEFAULT_REGION", "eu-west-1") + .withEnv("AWS_ACCESS_KEY_ID", "testkey") + .withEnv("AWS_SECRET_ACCESS_KEY", "testsecret") + .start() + + host = container.getContainerIpAddress() + port = container.getMappedPort(4566) + }) + + it("test valid connection", async () => { + const integration = new s3.integration({ + region: "eu-west-1", + accessKeyId: "testkey", + secretAccessKey: "testsecret", + s3ForcePathStyle: false, + endpoint: `http://${host}:${port}`, + }) + const result = await integration.testConnection() + expect(result).toEqual({ connected: true }) + }) + + it("test wrong endpoint", async () => { + const integration = new s3.integration({ + region: "eu-west-2", + accessKeyId: "testkey", + secretAccessKey: "testsecret", + s3ForcePathStyle: false, + endpoint: `http://wrong:123`, + }) + const result = await integration.testConnection() + expect(result).toEqual({ + connected: false, + error: + "Inaccessible host: `wrong' at port `undefined'. 
This service may not be available in the `eu-west-2' region.", + }) + }) + }) +}) diff --git a/qa-core/src/internal-api/tests/dataSources/mariaDB.spec.ts b/qa-core/src/internal-api/tests/dataSources/mariaDB.integration.spec.ts similarity index 100% rename from qa-core/src/internal-api/tests/dataSources/mariaDB.spec.ts rename to qa-core/src/internal-api/tests/dataSources/mariaDB.integration.spec.ts diff --git a/qa-core/src/internal-api/tests/dataSources/mongoDB.spec.ts b/qa-core/src/internal-api/tests/dataSources/mongoDB.integration.spec.ts similarity index 100% rename from qa-core/src/internal-api/tests/dataSources/mongoDB.spec.ts rename to qa-core/src/internal-api/tests/dataSources/mongoDB.integration.spec.ts diff --git a/qa-core/src/internal-api/tests/dataSources/postgresSQL.spec.ts b/qa-core/src/internal-api/tests/dataSources/postgresSQL.integration.spec.ts similarity index 100% rename from qa-core/src/internal-api/tests/dataSources/postgresSQL.spec.ts rename to qa-core/src/internal-api/tests/dataSources/postgresSQL.integration.spec.ts diff --git a/qa-core/src/internal-api/tests/dataSources/restAPI.spec.ts b/qa-core/src/internal-api/tests/dataSources/restAPI.integration.spec.ts similarity index 100% rename from qa-core/src/internal-api/tests/dataSources/restAPI.spec.ts rename to qa-core/src/internal-api/tests/dataSources/restAPI.integration.spec.ts diff --git a/qa-core/src/shared/generator.ts b/qa-core/src/shared/generator.ts index c9395f7e47..1789fc0f75 100644 --- a/qa-core/src/shared/generator.ts +++ b/qa-core/src/shared/generator.ts @@ -1,3 +1,3 @@ -const Chance = require("chance") +import Chance from "chance" export default new Chance() diff --git a/qa-core/yarn.lock b/qa-core/yarn.lock index 42beb07108..272b4cc03a 100644 --- a/qa-core/yarn.lock +++ b/qa-core/yarn.lock @@ -304,6 +304,11 @@ "@babel/helper-validator-identifier" "^7.18.6" to-fast-properties "^2.0.0" +"@balena/dockerignore@^1.0.2": + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/@balena/dockerignore/-/dockerignore-1.0.2.tgz#9ffe4726915251e8eb69f44ef3547e0da2c03e0d" + integrity sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q== + "@bcoe/v8-coverage@^0.2.3": version "0.2.3" resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" @@ -779,6 +784,15 @@ request "^2.88.0" webfinger "^0.4.2" +"@trendyol/jest-testcontainers@^2.1.1": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@trendyol/jest-testcontainers/-/jest-testcontainers-2.1.1.tgz#dced95cf9c37b75efe0a65db9b75ae8912f2f14a" + integrity sha512-4iAc2pMsev4BTUzoA7jO1VvbTOU2N3juQUYa8TwiSPXPuQtxKwV9WB9ZEP+JQ+Pj15YqfGOXp5H0WNMPtapjiA== + dependencies: + cwd "^0.10.0" + node-duration "^1.0.4" + testcontainers "4.7.0" + "@tsconfig/node10@^1.0.7": version "1.0.9" resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2" @@ -832,6 +846,13 @@ dependencies: "@babel/types" "^7.3.0" +"@types/dockerode@^2.5.34": + version "2.5.34" + resolved "https://registry.yarnpkg.com/@types/dockerode/-/dockerode-2.5.34.tgz#9adb884f7cc6c012a6eb4b2ad794cc5d01439959" + integrity sha512-LcbLGcvcBwBAvjH9UrUI+4qotY+A5WCer5r43DR5XHv2ZIEByNXFdPLo1XxR+v/BjkGjlggW8qUiXuVEhqfkpA== + dependencies: + "@types/node" "*" + "@types/graceful-fs@^4.1.3": version "4.1.5" resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.5.tgz#21ffba0d98da4350db64891f92a9e5db3cdb4e15" @@ -1006,6 +1027,11 @@ ansi-styles@^5.0.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b" integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA== +any-promise@^1.1.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/any-promise/-/any-promise-1.3.0.tgz#abc6afeedcea52e809cdc0376aed3ce39635d17f" + integrity 
sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A== + anymatch@^3.0.3: version "3.1.2" resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" @@ -1044,7 +1070,7 @@ argsarray@0.0.1: resolved "https://registry.yarnpkg.com/argsarray/-/argsarray-0.0.1.tgz#6e7207b4ecdb39b0af88303fa5ae22bda8df61cb" integrity sha512-u96dg2GcAKtpTrBdDoFIM7PjcBA+6rSP0OR94MOReNRyUECL6MtQt5XXmRr4qrftYaef9+l5hcpO5te7sML1Cg== -asn1@~0.2.3: +asn1@^0.2.6, asn1@~0.2.3: version "0.2.6" resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.6.tgz#0d3a7bb6e64e02a90c0303b31f292868ea09a08d" integrity sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ== @@ -1199,7 +1225,7 @@ base64url@3.x.x, base64url@^3.0.1: resolved "https://registry.yarnpkg.com/base64url/-/base64url-3.0.1.tgz#6399d572e2bc3f90a9a8b22d5dbb0a32d33f788d" integrity sha512-ir1UPr3dkwexU7FdV8qBBbNDRUhMmIekYMFZfi+C/sLNnRESKPl23nB9b2pltqfOQNnGzsDdId90AEtG5tCx4A== -bcrypt-pbkdf@^1.0.0: +bcrypt-pbkdf@^1.0.0, bcrypt-pbkdf@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" integrity sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w== @@ -1304,6 +1330,11 @@ buffer@^5.5.0, buffer@^5.6.0: base64-js "^1.3.1" ieee754 "^1.1.13" +buildcheck@~0.0.6: + version "0.0.6" + resolved "https://registry.yarnpkg.com/buildcheck/-/buildcheck-0.0.6.tgz#89aa6e417cfd1e2196e3f8fe915eb709d2fe4238" + integrity sha512-8f9ZJCUXyT1M35Jx7MkBgmBMo3oHTTBIPLiY9xyL0pl3T5RwcPEY8cUHr5LBNfu/fk6c2T4DJZuVM/8ZZT2D2A== + bull@4.10.1: version "4.10.1" resolved "https://registry.yarnpkg.com/bull/-/bull-4.10.1.tgz#f14974b6089358b62b495a2cbf838aadc098e43f" @@ -1319,6 +1350,11 @@ bull@4.10.1: semver "^7.3.2" uuid "^8.3.0" +byline@^5.0.0: + version "5.0.0" + resolved 
"https://registry.yarnpkg.com/byline/-/byline-5.0.0.tgz#741c5216468eadc457b03410118ad77de8c1ddb1" + integrity sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q== + cache-content-type@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/cache-content-type/-/cache-content-type-1.0.1.tgz#035cde2b08ee2129f4a8315ea8f00a00dba1453c" @@ -1546,6 +1582,14 @@ correlation-id@4.0.0: dependencies: uuid "^8.3.1" +cpu-features@~0.0.7: + version "0.0.7" + resolved "https://registry.yarnpkg.com/cpu-features/-/cpu-features-0.0.7.tgz#81ba93e1d0a729fd25132a54c3ff689c37b542f7" + integrity sha512-fjzFmsUKKCrC9GrM1eQTvQx18e+kjXFzjRLvJPNEDjk31+bJ6ZiV6uchv/hzbzXVIgbWdrEyyX1IFKwse65+8w== + dependencies: + buildcheck "~0.0.6" + nan "^2.17.0" + create-require@^1.1.0: version "1.1.1" resolved "https://registry.yarnpkg.com/create-require/-/create-require-1.1.1.tgz#c1d7e8f1e5f6cfc9ff65f9cd352d37348756c333" @@ -1572,6 +1616,14 @@ crypt@0.0.2: resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b" integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow== +cwd@^0.10.0: + version "0.10.0" + resolved "https://registry.yarnpkg.com/cwd/-/cwd-0.10.0.tgz#172400694057c22a13b0cf16162c7e4b7a7fe567" + integrity sha512-YGZxdTTL9lmLkCUTpg4j0zQ7IhRB5ZmqNBbGCl3Tg6MP/d5/6sY7L5mmTjzbc6JKgVZYiqTQTNhPFsbXNGlRaA== + dependencies: + find-pkg "^0.1.2" + fs-exists-sync "^0.1.0" + dashdash@^1.12.0: version "1.14.1" resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" @@ -1676,6 +1728,32 @@ diff@^4.0.1: resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d" integrity sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A== +docker-compose@^0.23.5: + version "0.23.19" + resolved 
"https://registry.yarnpkg.com/docker-compose/-/docker-compose-0.23.19.tgz#9947726e2fe67bdfa9e8efe1ff15aa0de2e10eb8" + integrity sha512-v5vNLIdUqwj4my80wxFDkNH+4S85zsRuH29SO7dCWVWPCMt/ohZBsGN6g6KXWifT0pzQ7uOxqEKCYCDPJ8Vz4g== + dependencies: + yaml "^1.10.2" + +docker-modem@^3.0.0: + version "3.0.8" + resolved "https://registry.yarnpkg.com/docker-modem/-/docker-modem-3.0.8.tgz#ef62c8bdff6e8a7d12f0160988c295ea8705e77a" + integrity sha512-f0ReSURdM3pcKPNS30mxOHSbaFLcknGmQjwSfmbcdOw1XWKXVhukM3NJHhr7NpY9BIyyWQb0EBo3KQvvuU5egQ== + dependencies: + debug "^4.1.1" + readable-stream "^3.5.0" + split-ca "^1.0.1" + ssh2 "^1.11.0" + +dockerode@^3.2.1: + version "3.3.5" + resolved "https://registry.yarnpkg.com/dockerode/-/dockerode-3.3.5.tgz#7ae3f40f2bec53ae5e9a741ce655fff459745629" + integrity sha512-/0YNa3ZDNeLr/tSckmD69+Gq+qVNhvKfAHNeZJBnp7EOP6RGKV8ORrJHkUn20So5wU+xxT7+1n5u8PjHbfjbSA== + dependencies: + "@balena/dockerignore" "^1.0.2" + docker-modem "^3.0.0" + tar-fs "~2.0.1" + dotenv@16.0.1: version "16.0.1" resolved "https://registry.yarnpkg.com/dotenv/-/dotenv-16.0.1.tgz#8f8f9d94876c35dac989876a5d3a82a267fdce1d" @@ -1844,6 +1922,13 @@ exit@^0.1.2: resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" integrity sha512-Zk/eNKV2zbjpKzrsQ+n1G6poVbErQxJ0LBOJXaKZ1EViLzH+hrLu9cdXI4zw9dBQJslwBEpbQ2P1oS7nDxs6jQ== +expand-tilde@^1.2.2: + version "1.2.2" + resolved "https://registry.yarnpkg.com/expand-tilde/-/expand-tilde-1.2.2.tgz#0b81eba897e5a3d31d1c3d102f8f01441e559449" + integrity sha512-rtmc+cjLZqnu9dSYosX9EWmSJhTwpACgJQTfj4hgg2JjOD/6SIQalZrt4a3aQeh++oNxkazcaxrhPUj6+g5G/Q== + dependencies: + os-homedir "^1.0.1" + expect@^29.0.0: version "29.0.2" resolved "https://registry.yarnpkg.com/expect/-/expect-29.0.2.tgz#22c7132400f60444b427211f1d6bb604a9ab2420" @@ -1919,6 +2004,21 @@ fill-range@^7.0.1: dependencies: to-regex-range "^5.0.1" +find-file-up@^0.1.2: + version "0.1.3" + resolved 
"https://registry.yarnpkg.com/find-file-up/-/find-file-up-0.1.3.tgz#cf68091bcf9f300a40da411b37da5cce5a2fbea0" + integrity sha512-mBxmNbVyjg1LQIIpgO8hN+ybWBgDQK8qjht+EbrTCGmmPV/sc7RF1i9stPTD6bpvXZywBdrwRYxhSdJv867L6A== + dependencies: + fs-exists-sync "^0.1.0" + resolve-dir "^0.1.0" + +find-pkg@^0.1.2: + version "0.1.2" + resolved "https://registry.yarnpkg.com/find-pkg/-/find-pkg-0.1.2.tgz#1bdc22c06e36365532e2a248046854b9788da557" + integrity sha512-0rnQWcFwZr7eO0513HahrWafsc3CTFioEB7DRiEYCUM/70QXSY8f3mCST17HXLcPvEhzH/Ty/Bxd72ZZsr/yvw== + dependencies: + find-file-up "^0.1.2" + find-up@^4.0.0, find-up@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" @@ -1984,6 +2084,11 @@ fs-constants@^1.0.0: resolved "https://registry.yarnpkg.com/fs-constants/-/fs-constants-1.0.0.tgz#6be0de9be998ce16af8afc24497b9ee9b7ccd9ad" integrity sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow== +fs-exists-sync@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/fs-exists-sync/-/fs-exists-sync-0.1.0.tgz#982d6893af918e72d08dec9e8673ff2b5a8d6add" + integrity sha512-cR/vflFyPZtrN6b38ZyWxpWdhlXrzZEBawlpBQMq7033xVY7/kg0GDMBK5jg8lDYQckdJ5x/YC88lM3C7VMsLg== + fs-minipass@^2.0.0: version "2.1.0" resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb" @@ -2062,7 +2167,7 @@ getpass@^0.1.1: dependencies: assert-plus "^1.0.0" -glob@^7.1.3, glob@^7.1.4: +glob@^7.1.3, glob@^7.1.4, glob@^7.1.6: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -2074,6 +2179,24 @@ glob@^7.1.3, glob@^7.1.4: once "^1.3.0" path-is-absolute "^1.0.0" +global-modules@^0.2.3: + version "0.2.3" + resolved 
"https://registry.yarnpkg.com/global-modules/-/global-modules-0.2.3.tgz#ea5a3bed42c6d6ce995a4f8a1269b5dae223828d" + integrity sha512-JeXuCbvYzYXcwE6acL9V2bAOeSIGl4dD+iwLY9iUx2VBJJ80R18HCn+JCwHM9Oegdfya3lEkGCdaRkSyc10hDA== + dependencies: + global-prefix "^0.1.4" + is-windows "^0.2.0" + +global-prefix@^0.1.4: + version "0.1.5" + resolved "https://registry.yarnpkg.com/global-prefix/-/global-prefix-0.1.5.tgz#8d3bc6b8da3ca8112a160d8d496ff0462bfef78f" + integrity sha512-gOPiyxcD9dJGCEArAhF4Hd0BAqvAe/JzERP7tYumE4yIkmIedPUVXcJFWbV3/p/ovIIvKjkrTk+f1UVkq7vvbw== + dependencies: + homedir-polyfill "^1.0.0" + ini "^1.3.4" + is-windows "^0.2.0" + which "^1.2.12" + globals@^11.1.0: version "11.12.0" resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" @@ -2131,6 +2254,13 @@ has@^1.0.3: dependencies: function-bind "^1.1.1" +homedir-polyfill@^1.0.0: + version "1.0.3" + resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" + integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== + dependencies: + parse-passwd "^1.0.0" + html-escaper@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" @@ -2230,6 +2360,11 @@ inherits@2, inherits@2.0.4, inherits@^2.0.1, inherits@^2.0.3, inherits@^2.0.4, i resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== +ini@^1.3.4: + version "1.3.8" + resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.8.tgz#a29da425b48806f34767a4efce397269af28432c" + integrity sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew== + ioredis@4.28.0: version "4.28.0" resolved 
"https://registry.yarnpkg.com/ioredis/-/ioredis-4.28.0.tgz#5a2be3f37ff2075e2332f280eaeb02ab4d9ff0d3" @@ -2318,6 +2453,11 @@ is-typedarray@~1.0.0: resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" integrity sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA== +is-windows@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c" + integrity sha512-n67eJYmXbniZB7RF4I/FTjK1s6RPOCTxhYrVYLRaCt3lF0mpWZPKr3T2LSZAqyjQsxR2qMmGYXXzK0YWwcPM1Q== + isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" @@ -3332,6 +3472,11 @@ msgpackr@^1.5.2: optionalDependencies: msgpackr-extract "^3.0.0" +nan@^2.17.0: + version "2.17.0" + resolved "https://registry.yarnpkg.com/nan/-/nan-2.17.0.tgz#c0150a2368a182f033e9aa5195ec76ea41a199cb" + integrity sha512-2ZTgtl0nJsO0KQCjEpxcIr5D+Yv90plTitZt9JBfQvVJDS5seMl3FOvsh3+9CoYWXf/1l5OaZzzF6nDm4cagaQ== + napi-macros@~2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/napi-macros/-/napi-macros-2.0.0.tgz#2b6bae421e7b96eb687aa6c77a7858640670001b" @@ -3367,6 +3512,11 @@ node-addon-api@^3.1.0: resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-3.2.1.tgz#81325e0a2117789c0128dab65e7e38f07ceba161" integrity sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A== +node-duration@^1.0.4: + version "1.0.4" + resolved "https://registry.yarnpkg.com/node-duration/-/node-duration-1.0.4.tgz#3e94ecc0e473691c89c4560074503362071cecac" + integrity sha512-eUXYNSY7DL53vqfTosggWkvyIW3bhAcqBDIlolgNYlZhianXTrCL50rlUJWD1eRqkIxMppXTfiFbp+9SjpPrgA== + node-fetch@2, node-fetch@2.6.7, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" @@ 
-3490,6 +3640,11 @@ only@~0.0.2: resolved "https://registry.yarnpkg.com/only/-/only-0.0.2.tgz#2afde84d03e50b9a8edc444e30610a70295edfb4" integrity sha512-Fvw+Jemq5fjjyWz6CpKx6w9s7xxqo3+JCyM0WXWeCSOboZ8ABkyvP8ID4CZuChA/wxSx+XSJmdOm8rGVyJ1hdQ== +os-homedir@^1.0.1: + version "1.0.2" + resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3" + integrity sha512-B5JU3cabzk8c67mRRd3ECmROafjYMXbuzlwtqdM8IbS8ktlTix8aFGb2bAGKrSRIlnfKwovGUUr72JUPyOb6kQ== + p-finally@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" @@ -3543,6 +3698,11 @@ parse-json@^5.2.0: json-parse-even-better-errors "^2.3.0" lines-and-columns "^1.1.6" +parse-passwd@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" + integrity sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q== + parseurl@^1.3.2, parseurl@^1.3.3: version "1.3.3" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" @@ -3982,6 +4142,15 @@ readable-stream@1.1.14, readable-stream@^1.0.27-1: string_decoder "^1.1.1" util-deprecate "^1.0.1" +readable-stream@^3.5.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-3.6.2.tgz#56a9b36ea965c00c5a93ef31eb111a0f11056967" + integrity sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA== + dependencies: + inherits "^2.0.3" + string_decoder "^1.1.1" + util-deprecate "^1.0.1" + readable-stream@~0.0.2: version "0.0.4" resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-0.0.4.tgz#f32d76e3fb863344a548d79923007173665b3b8d" @@ -4077,6 +4246,14 @@ resolve-cwd@^3.0.0: dependencies: resolve-from "^5.0.0" +resolve-dir@^0.1.0: + version "0.1.1" + resolved 
"https://registry.yarnpkg.com/resolve-dir/-/resolve-dir-0.1.1.tgz#b219259a5602fac5c5c496ad894a6e8cc430261e" + integrity sha512-QxMPqI6le2u0dCLyiGzgy92kjkkL6zO0XyvHzjdTNH3zM6e5Hz3BwG6+aEyNgiQ5Xz6PwTwgQEj3U50dByPKIA== + dependencies: + expand-tilde "^1.2.2" + global-modules "^0.2.3" + resolve-from@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" @@ -4238,6 +4415,11 @@ spark-md5@3.0.2: resolved "https://registry.yarnpkg.com/spark-md5/-/spark-md5-3.0.2.tgz#7952c4a30784347abcee73268e473b9c0167e3fc" integrity sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw== +split-ca@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/split-ca/-/split-ca-1.0.1.tgz#6c83aff3692fa61256e0cd197e05e9de157691a6" + integrity sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ== + split2@^2.1.0: version "2.2.0" resolved "https://registry.yarnpkg.com/split2/-/split2-2.2.0.tgz#186b2575bcf83e85b7d18465756238ee4ee42493" @@ -4257,6 +4439,17 @@ sprintf-js@~1.0.2: resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" integrity sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g== +ssh2@^1.11.0: + version "1.13.0" + resolved "https://registry.yarnpkg.com/ssh2/-/ssh2-1.13.0.tgz#9b53a07534fa72283ada471b82395a3b3c875934" + integrity sha512-CIZBFRRY1y9mAZSqBGFE4EB4dNJad2ysT2PqO8OpkiI3UTB/gUZwE5EaN16qVyQ6s/M7EgC/iaV/MnjdlvnuzA== + dependencies: + asn1 "^0.2.6" + bcrypt-pbkdf "^1.0.2" + optionalDependencies: + cpu-features "~0.0.7" + nan "^2.17.0" + sshpk@^1.7.0: version "1.17.0" resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.17.0.tgz#578082d92d4fe612b13007496e543fa0fbcbe4c5" @@ -4314,6 +4507,13 @@ stream-combiner@~0.0.4: dependencies: duplexer "~0.1.1" +stream-to-array@^2.3.0: + version "2.3.0" + resolved 
"https://registry.yarnpkg.com/stream-to-array/-/stream-to-array-2.3.0.tgz#bbf6b39f5f43ec30bc71babcb37557acecf34353" + integrity sha512-UsZtOYEn4tWU2RGLOXr/o/xjRBftZRlG3dEWoaHr8j4GuypJ3isitGbVyjQKAuMu+xbiop8q224TjiZWc4XTZA== + dependencies: + any-promise "^1.1.0" + string-length@^4.0.1: version "4.0.2" resolved "https://registry.yarnpkg.com/string-length/-/string-length-4.0.2.tgz#a8a8dc7bd5c1a82b9b3c8b87e125f66871b6e57a" @@ -4403,7 +4603,7 @@ supports-preserve-symlinks-flag@^1.0.0: resolved "https://registry.yarnpkg.com/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz#6eda4bd344a3c94aea376d4cc31bc77311039e09" integrity sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w== -tar-fs@2.1.1: +tar-fs@2.1.1, tar-fs@^2.1.0: version "2.1.1" resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.1.1.tgz#489a15ab85f1f0befabb370b7de4f9eb5cbe8784" integrity sha512-V0r2Y9scmbDRLCNex/+hYzvp/zyYjvFbHPNgVTKfQvVrb6guiE/fxP+XblDNR011utopbkex2nM4dHNV6GDsng== @@ -4413,7 +4613,17 @@ tar-fs@2.1.1: pump "^3.0.0" tar-stream "^2.1.4" -tar-stream@^2.1.4: +tar-fs@~2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/tar-fs/-/tar-fs-2.0.1.tgz#e44086c1c60d31a4f0cf893b1c4e155dabfae9e2" + integrity sha512-6tzWDMeroL87uF/+lin46k+Q+46rAJ0SyPGz7OW7wTgblI273hsBqk2C1j0/xNadNLKDTUL9BukSjB7cwgmlPA== + dependencies: + chownr "^1.1.1" + mkdirp-classic "^0.5.2" + pump "^3.0.0" + tar-stream "^2.0.0" + +tar-stream@^2.0.0, tar-stream@^2.1.4: version "2.2.0" resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-2.2.0.tgz#acad84c284136b060dc3faa64474aa9aebd77287" integrity sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ== @@ -4445,6 +4655,23 @@ test-exclude@^6.0.0: glob "^7.1.4" minimatch "^3.0.4" +testcontainers@4.7.0: + version "4.7.0" + resolved "https://registry.yarnpkg.com/testcontainers/-/testcontainers-4.7.0.tgz#5a9a864b1b0cc86984086dcc737c2f5e73490cf3" + integrity 
sha512-5SrG9RMfDRRZig34fDZeMcGD5i3lHCOJzn0kjouyK4TiEWjZB3h7kCk8524lwNRHROFE1j6DGjceonv/5hl5ag== + dependencies: + "@types/dockerode" "^2.5.34" + byline "^5.0.0" + debug "^4.1.1" + docker-compose "^0.23.5" + dockerode "^3.2.1" + get-port "^5.1.1" + glob "^7.1.6" + node-duration "^1.0.4" + slash "^3.0.0" + stream-to-array "^2.3.0" + tar-fs "^2.1.0" + through2@3.0.2: version "3.0.2" resolved "https://registry.yarnpkg.com/through2/-/through2-3.0.2.tgz#99f88931cfc761ec7678b41d5d7336b5b6a07bf4" @@ -4746,6 +4973,13 @@ whatwg-url@^5.0.0: tr46 "~0.0.3" webidl-conversions "^3.0.0" +which@^1.2.12: + version "1.3.1" + resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" + integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== + dependencies: + isexe "^2.0.0" + which@^2.0.1: version "2.0.2" resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" @@ -4824,6 +5058,11 @@ yallist@^4.0.0: resolved "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A== +yaml@^1.10.2: + version "1.10.2" + resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.10.2.tgz#2301c5ffbf12b467de8da2333a459e29e7920e4b" + integrity sha512-r3vXyErRCYJ7wg28yvBY5VSoAF8ZvlcW9/BwUzEtUsjvX/DKs24dIkuwjtuprwJJHsbyUbLApepYTR1BN4uHrg== + yargs-parser@^21.0.0, yargs-parser@^21.0.1: version "21.1.1" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" diff --git a/yarn.lock b/yarn.lock index 04d9a7d140..f8357eeeda 100644 --- a/yarn.lock +++ b/yarn.lock @@ -5211,6 +5211,15 @@ resolved "https://registry.yarnpkg.com/@types/parse-json/-/parse-json-4.0.0.tgz#2f8bb441434d163b35fb8ffdccd7138927ffb8c0" integrity 
sha512-//oorEZjL6sbPcKUaCdIGlIUeH26mgzimjBB77G6XRgnDl/L5wOnpyBGRe/Mmf5CVW3PwEBE1NjiMZ/ssFh4wA== +"@types/pg@8.6.6": + version "8.6.6" + resolved "https://registry.yarnpkg.com/@types/pg/-/pg-8.6.6.tgz#21cdf873a3e345a6e78f394677e3b3b1b543cb80" + integrity sha512-O2xNmXebtwVekJDD+02udOncjVcMZQuTEQEMpKJ0ZRf5E7/9JJX3izhKUcUifBkyKpljyUM6BTgy2trmviKlpw== + dependencies: + "@types/node" "*" + pg-protocol "*" + pg-types "^2.2.0" + "@types/pouchdb-adapter-cordova-sqlite@*": version "1.0.1" resolved "https://registry.yarnpkg.com/@types/pouchdb-adapter-cordova-sqlite/-/pouchdb-adapter-cordova-sqlite-1.0.1.tgz#49e5ee6df7cc0c23196fcb340f43a560e74eb1d6" @@ -19085,7 +19094,7 @@ performance-now@^2.1.0: resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" integrity sha512-7EAHlyLHI56VEIdK57uwHdHKIaAGbnXPiw0yWbarQZOKaKpvUIgW0jWRVLiatnM+XXlSwsanIBH/hzGMJulMow== -pg-connection-string@2.5.0, pg-connection-string@^2.4.0: +pg-connection-string@2.5.0, pg-connection-string@^2.5.0: version "2.5.0" resolved "https://registry.yarnpkg.com/pg-connection-string/-/pg-connection-string-2.5.0.tgz#538cadd0f7e603fc09a12590f3b8a452c2c0cf34" integrity sha512-r5o/V/ORTA6TmUnyWZR9nCj1klXCO2CEKNRlVuJptZe85QuhFayC7WeMic7ndayT5IRIR0S0xFxFi2ousartlQ== @@ -19095,17 +19104,17 @@ pg-int8@1.0.1: resolved "https://registry.yarnpkg.com/pg-int8/-/pg-int8-1.0.1.tgz#943bd463bf5b71b4170115f80f8efc9a0c0eb78c" integrity sha512-WCtabS6t3c8SkpDBUlb1kjOs7l66xsGdKpIPZsg4wR+B3+u9UAum2odSsF9tnvxg80h4ZxLWMy4pRjOsFIqQpw== -pg-pool@^3.2.2: +pg-pool@^3.6.0: version "3.6.0" resolved "https://registry.yarnpkg.com/pg-pool/-/pg-pool-3.6.0.tgz#3190df3e4747a0d23e5e9e8045bcd99bda0a712e" integrity sha512-clFRf2ksqd+F497kWFyM21tMjeikn60oGDmqMT8UBrynEwVEX/5R5xd2sdvdo1cZCFlguORNpVuqxIj+aK4cfQ== -pg-protocol@^1.4.0: +pg-protocol@*, pg-protocol@^1.6.0: version "1.6.0" resolved 
"https://registry.yarnpkg.com/pg-protocol/-/pg-protocol-1.6.0.tgz#4c91613c0315349363af2084608db843502f8833" integrity sha512-M+PDm637OY5WM307051+bsDia5Xej6d9IR4GwJse1qA1DIhiKlksvrneZOYQq42OM+spubpcNYEo2FcKQrDk+Q== -pg-types@^2.1.0: +pg-types@^2.1.0, pg-types@^2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/pg-types/-/pg-types-2.2.0.tgz#2d0250d636454f7cfa3b6ae0382fdfa8063254a3" integrity sha512-qTAAlrEsl8s4OiEQY69wDvcMIdQN6wdz5ojQiOy6YRMuynxenON0O5oCpJI6lshc6scgAY8qvJ2On/p+CXY0GA== @@ -19116,16 +19125,16 @@ pg-types@^2.1.0: postgres-date "~1.0.4" postgres-interval "^1.1.0" -pg@8.5.1: - version "8.5.1" - resolved "https://registry.yarnpkg.com/pg/-/pg-8.5.1.tgz#34dcb15f6db4a29c702bf5031ef2e1e25a06a120" - integrity sha512-9wm3yX9lCfjvA98ybCyw2pADUivyNWT/yIP4ZcDVpMN0og70BUWYEGXPCTAQdGTAqnytfRADb7NERrY1qxhIqw== +pg@8.10.0: + version "8.10.0" + resolved "https://registry.yarnpkg.com/pg/-/pg-8.10.0.tgz#5b8379c9b4a36451d110fc8cd98fc325fe62ad24" + integrity sha512-ke7o7qSTMb47iwzOSaZMfeR7xToFdkE71ifIipOAAaLIM0DYzfOAXlgFFmYUIE2BcJtvnVlGCID84ZzCegE8CQ== dependencies: buffer-writer "2.0.0" packet-reader "1.0.0" - pg-connection-string "^2.4.0" - pg-pool "^3.2.2" - pg-protocol "^1.4.0" + pg-connection-string "^2.5.0" + pg-pool "^3.6.0" + pg-protocol "^1.6.0" pg-types "^2.1.0" pgpass "1.x" @@ -22951,7 +22960,14 @@ svelte-dnd-action@^0.9.8: resolved "https://registry.yarnpkg.com/svelte-dnd-action/-/svelte-dnd-action-0.9.22.tgz#003eee9dddb31d8c782f6832aec8b1507fff194d" integrity sha512-lOQJsNLM1QWv5mdxIkCVtk6k4lHCtLgfE59y8rs7iOM6erchbLC9hMEFYSveZz7biJV0mpg7yDSs4bj/RT/YkA== -svelte-flatpickr@^3.1.0, svelte-flatpickr@^3.2.3, svelte-flatpickr@^3.3.2: +svelte-flatpickr@3.2.3: + version "3.2.3" + resolved "https://registry.yarnpkg.com/svelte-flatpickr/-/svelte-flatpickr-3.2.3.tgz#db5dd7ad832ef83262b45e09737955ad3d591fc8" + integrity sha512-PNkqK4Napx8nTvCwkaUXdnKo8dISThaxEOK+szTUXcY6H0dQM0TSyuoMaVWY2yX7pM+PN5cpCQCcVe8YvTRFSw== + dependencies: + flatpickr "^4.5.2" + 
+svelte-flatpickr@^3.1.0, svelte-flatpickr@^3.2.3: version "3.3.2" resolved "https://registry.yarnpkg.com/svelte-flatpickr/-/svelte-flatpickr-3.3.2.tgz#f08bcde83d439cb30df6fd07b974d87371f130c1" integrity sha512-VNJLYyLRDplI63oWX5hJylzAJc2VhTh3z9SNecfjtuPZmP6FZPpg9Fw7rXpkEV2DPovIWj2PtaVxB6Kp9r423w==