diff --git a/lerna.json b/lerna.json
index 7186c0ca17..74b10c07ea 100644
--- a/lerna.json
+++ b/lerna.json
@@ -1,5 +1,5 @@
{
- "version": "2.22.16",
+ "version": "2.22.18",
"npmClient": "yarn",
"packages": [
"packages/*",
diff --git a/packages/account-portal b/packages/account-portal
index 532c4db35c..a0ee9cad8c 160000
--- a/packages/account-portal
+++ b/packages/account-portal
@@ -1 +1 @@
-Subproject commit 532c4db35cecd346b5c24f0b89ab7b397a122a36
+Subproject commit a0ee9cad8cefb8f9f40228705711be174f018fa9
diff --git a/packages/backend-core/src/users/db.ts b/packages/backend-core/src/users/db.ts
index 04d3264e6f..6165a68e57 100644
--- a/packages/backend-core/src/users/db.ts
+++ b/packages/backend-core/src/users/db.ts
@@ -45,6 +45,7 @@ type GroupFns = {
getGroupBuilderAppIds: GroupBuildersFn
}
type CreateAdminUserOpts = {
+ password?: string
ssoId?: string
hashPassword?: boolean
requirePassword?: boolean
@@ -501,9 +502,9 @@ export class UserDB {
static async createAdminUser(
email: string,
tenantId: string,
- password?: string,
opts?: CreateAdminUserOpts
) {
+ const password = opts?.password
const user: User = {
email: email,
password,
diff --git a/packages/builder/package.json b/packages/builder/package.json
index 253f5a0c14..f29ae3f7f2 100644
--- a/packages/builder/package.json
+++ b/packages/builder/package.json
@@ -72,7 +72,7 @@
"fast-json-patch": "^3.1.1",
"json-format-highlight": "^1.0.4",
"lodash": "4.17.21",
- "posthog-js": "^1.116.6",
+ "posthog-js": "^1.118.0",
"remixicon": "2.5.0",
"sanitize-html": "^2.7.0",
"shortid": "2.2.15",
diff --git a/packages/builder/src/analytics/index.js b/packages/builder/src/analytics/index.js
index 3a80a05d7f..a0ddfe1d42 100644
--- a/packages/builder/src/analytics/index.js
+++ b/packages/builder/src/analytics/index.js
@@ -38,6 +38,10 @@ class AnalyticsHub {
intercom.show(user)
}
+ initPosthog() {
+ posthog.init()
+ }
+
async logout() {
posthog.logout()
intercom.logout()
diff --git a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
index eb1e7bc7ff..efbfd26565 100644
--- a/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
+++ b/packages/builder/src/components/backend/TableNavigator/ExistingTableDataImport.svelte
@@ -49,7 +49,10 @@
label: "Long Form Text",
value: FIELDS.LONGFORM.type,
},
-
+ {
+ label: "Attachment",
+ value: FIELDS.ATTACHMENT.type,
+ },
{
label: "User",
value: `${FIELDS.USER.type}${FIELDS.USER.subtype}`,
diff --git a/packages/builder/src/components/deploy/AppActions.svelte b/packages/builder/src/components/deploy/AppActions.svelte
index 1d7bb4f65e..30b95d639e 100644
--- a/packages/builder/src/components/deploy/AppActions.svelte
+++ b/packages/builder/src/components/deploy/AppActions.svelte
@@ -33,13 +33,10 @@
import { TOUR_STEP_KEYS } from "components/portal/onboarding/tours.js"
import { goto } from "@roxi/routify"
import { onMount } from "svelte"
- import PosthogClient from "../../analytics/PosthogClient"
export let application
export let loaded
- const posthog = new PosthogClient(process.env.POSTHOG_TOKEN)
-
let unpublishModal
let updateAppModal
let revertModal
@@ -156,7 +153,7 @@
}
onMount(() => {
- posthog.init()
+ analytics.initPosthog()
})
diff --git a/packages/builder/src/components/design/settings/componentSettings.js b/packages/builder/src/components/design/settings/componentSettings.js
index f2d1520878..3b26031ca1 100644
--- a/packages/builder/src/components/design/settings/componentSettings.js
+++ b/packages/builder/src/components/design/settings/componentSettings.js
@@ -1,4 +1,6 @@
import { Checkbox, Select, RadioGroup, Stepper, Input } from "@budibase/bbui"
+import { licensing } from "stores/portal"
+import { get } from "svelte/store"
import DataSourceSelect from "./controls/DataSourceSelect/DataSourceSelect.svelte"
import S3DataSourceSelect from "./controls/S3DataSourceSelect.svelte"
import DataProviderSelect from "./controls/DataProviderSelect.svelte"
@@ -26,7 +28,8 @@ import FieldConfiguration from "./controls/FieldConfiguration/FieldConfiguration
import ButtonConfiguration from "./controls/ButtonConfiguration/ButtonConfiguration.svelte"
import RelationshipFilterEditor from "./controls/RelationshipFilterEditor.svelte"
import FormStepConfiguration from "./controls/FormStepConfiguration.svelte"
-import FormStepControls from "components/design/settings/controls/FormStepControls.svelte"
+import FormStepControls from "./controls/FormStepControls.svelte"
+import PaywalledSetting from "./controls/PaywalledSetting.svelte"
const componentMap = {
text: DrawerBindableInput,
@@ -86,11 +89,16 @@ const componentMap = {
}
export const getComponentForSetting = setting => {
- const { type, showInBar, barStyle } = setting || {}
+ const { type, showInBar, barStyle, license } = setting || {}
if (!type) {
return null
}
+ // Check for paywalled settings
+ if (license && get(licensing).isFreePlan) {
+ return PaywalledSetting
+ }
+
// We can show a clone of the bar settings for certain select settings
if (showInBar && type === "select" && barStyle === "buttons") {
return BarButtonList
diff --git a/packages/builder/src/components/design/settings/controls/PaywalledSetting.svelte b/packages/builder/src/components/design/settings/controls/PaywalledSetting.svelte
new file mode 100644
index 0000000000..c28bc49e3d
--- /dev/null
+++ b/packages/builder/src/components/design/settings/controls/PaywalledSetting.svelte
@@ -0,0 +1,20 @@
+<script>
+  import { Tag, Tags } from "@budibase/bbui"
+  import { getFormattedPlanName } from "helpers/planTitle"
+
+  export let license
+
+  $: title = getFormattedPlanName(license)
+</script>
+
+<div class="tag">
+  <Tags>
+    <Tag icon="LockClosed">{title}</Tag>
+  </Tags>
+</div>
+
+<style>
+  .tag {
+    margin-top: var(--spacing-m);
+  }
+</style>
diff --git a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte
index c7f8094084..9ff2a764b7 100644
--- a/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte
+++ b/packages/builder/src/pages/builder/app/[application]/design/[screenId]/[componentId]/_components/Component/ComponentSettingsSection.svelte
@@ -183,6 +183,7 @@
props={{
// Generic settings
placeholder: setting.placeholder || null,
+ license: setting.license,
// Select settings
options: setting.options || [],
diff --git a/packages/client/manifest.json b/packages/client/manifest.json
index 08d614391b..a056f59cc7 100644
--- a/packages/client/manifest.json
+++ b/packages/client/manifest.json
@@ -4610,6 +4610,35 @@
"key": "dataSource",
"required": true
},
+ {
+ "type": "select",
+ "label": "Auto-refresh",
+ "key": "autoRefresh",
+ "license": "premium",
+ "placeholder": "Never",
+ "options": [
+ {
+ "label": "10 seconds",
+ "value": 10
+ },
+ {
+ "label": "30 seconds",
+ "value": 30
+ },
+ {
+ "label": "1 minute",
+ "value": 60
+ },
+ {
+ "label": "5 minutes",
+ "value": 300
+ },
+ {
+ "label": "10 minutes",
+ "value": 600
+ }
+ ]
+ },
{
"type": "filter",
"label": "Filtering",
@@ -4977,6 +5006,35 @@
"key": "dataSource",
"required": true
},
+ {
+ "type": "select",
+ "label": "Auto-refresh",
+ "key": "autoRefresh",
+ "license": "premium",
+ "placeholder": "Never",
+ "options": [
+ {
+ "label": "10 seconds",
+ "value": 10
+ },
+ {
+ "label": "30 seconds",
+ "value": 30
+ },
+ {
+ "label": "1 minute",
+ "value": 60
+ },
+ {
+ "label": "5 minutes",
+ "value": 300
+ },
+ {
+ "label": "10 minutes",
+ "value": 600
+ }
+ ]
+ },
{
"type": "text",
"label": "Title",
@@ -5445,6 +5503,35 @@
"key": "dataSource",
"required": true
},
+ {
+ "type": "select",
+ "label": "Auto-refresh",
+ "key": "autoRefresh",
+ "license": "premium",
+ "placeholder": "Never",
+ "options": [
+ {
+ "label": "10 seconds",
+ "value": 10
+ },
+ {
+ "label": "30 seconds",
+ "value": 30
+ },
+ {
+ "label": "1 minute",
+ "value": 60
+ },
+ {
+ "label": "5 minutes",
+ "value": 300
+ },
+ {
+ "label": "10 minutes",
+ "value": 600
+ }
+ ]
+ },
{
"type": "columns",
"label": "Columns",
@@ -5731,6 +5818,35 @@
"key": "dataSource",
"required": true
},
+ {
+ "type": "select",
+ "label": "Auto-refresh",
+ "key": "autoRefresh",
+ "license": "premium",
+ "placeholder": "Never",
+ "options": [
+ {
+ "label": "10 seconds",
+ "value": 10
+ },
+ {
+ "label": "30 seconds",
+ "value": 30
+ },
+ {
+ "label": "1 minute",
+ "value": 60
+ },
+ {
+ "label": "5 minutes",
+ "value": 300
+ },
+ {
+ "label": "10 minutes",
+ "value": 600
+ }
+ ]
+ },
{
"type": "searchfield",
"label": "Search columns",
@@ -5908,6 +6024,35 @@
"key": "dataSource",
"required": true
},
+ {
+ "type": "select",
+ "label": "Auto-refresh",
+ "key": "autoRefresh",
+ "license": "premium",
+ "placeholder": "Never",
+ "options": [
+ {
+ "label": "10 seconds",
+ "value": 10
+ },
+ {
+ "label": "30 seconds",
+ "value": 30
+ },
+ {
+ "label": "1 minute",
+ "value": 60
+ },
+ {
+ "label": "5 minutes",
+ "value": 300
+ },
+ {
+ "label": "10 minutes",
+ "value": 600
+ }
+ ]
+ },
{
"type": "filter",
"label": "Filtering",
@@ -6504,6 +6649,35 @@
"key": "dataSource",
"required": true
},
+ {
+ "type": "select",
+ "label": "Auto-refresh",
+ "key": "autoRefresh",
+ "license": "premium",
+ "placeholder": "Never",
+ "options": [
+ {
+ "label": "10 seconds",
+ "value": 10
+ },
+ {
+ "label": "30 seconds",
+ "value": 30
+ },
+ {
+ "label": "1 minute",
+ "value": 60
+ },
+ {
+ "label": "5 minutes",
+ "value": 300
+ },
+ {
+ "label": "10 minutes",
+ "value": 600
+ }
+ ]
+ },
{
"type": "text",
"label": "Height",
diff --git a/packages/client/src/components/Screen.svelte b/packages/client/src/components/Screen.svelte
index 4b3acb2019..ac0af9f3b2 100644
--- a/packages/client/src/components/Screen.svelte
+++ b/packages/client/src/components/Screen.svelte
@@ -5,29 +5,29 @@
import Provider from "./context/Provider.svelte"
import { onMount, getContext } from "svelte"
import { enrichButtonActions } from "../utils/buttonActions.js"
+ import { memo } from "@budibase/frontend-core"
export let params = {}
const context = getContext("context")
+ const onLoadActions = memo()
// Get the screen definition for the current route
$: screenDefinition = $screenStore.activeScreen?.props
-
- $: runOnLoadActions(params)
+ $: onLoadActions.set($screenStore.activeScreen?.onLoad)
+ $: runOnLoadActions($onLoadActions, params)
// Enrich and execute any on load actions.
// We manually construct the full context here as this component is the
// one that provides the url context, so it is not available in $context yet
- const runOnLoadActions = params => {
- const screenState = get(screenStore)
-
- if (screenState.activeScreen?.onLoad && !get(builderStore).inBuilder) {
- const actions = enrichButtonActions(screenState.activeScreen.onLoad, {
+ const runOnLoadActions = (actions, params) => {
+ if (actions?.length && !get(builderStore).inBuilder) {
+ const enrichedActions = enrichButtonActions(actions, {
...get(context),
url: params,
})
- if (actions != null) {
- actions()
+ if (enrichedActions != null) {
+ enrichedActions()
}
}
}
diff --git a/packages/client/src/components/app/DataProvider.svelte b/packages/client/src/components/app/DataProvider.svelte
index 1c6dfb32b7..dc0c40f55c 100644
--- a/packages/client/src/components/app/DataProvider.svelte
+++ b/packages/client/src/components/app/DataProvider.svelte
@@ -9,17 +9,18 @@
export let sortOrder
export let limit
export let paginate
+ export let autoRefresh
const { styleable, Provider, ActionTypes, API } = getContext("sdk")
const component = getContext("component")
+ let interval
+ let queryExtensions = {}
+
// We need to manage our lucene query manually as we want to allow components
// to extend it
- let queryExtensions = {}
$: defaultQuery = LuceneUtils.buildLuceneQuery(filter)
$: query = extendQuery(defaultQuery, queryExtensions)
-
- // Fetch data and refresh when needed
$: fetch = createFetch(dataSource)
$: fetch.update({
query,
@@ -28,11 +29,8 @@
limit,
paginate,
})
-
- // Sanitize schema to remove hidden fields
$: schema = sanitizeSchema($fetch.schema)
-
- // Build our action context
+ $: setUpAutoRefresh(autoRefresh)
$: actions = [
{
type: ActionTypes.RefreshDatasource,
@@ -63,8 +61,6 @@
},
},
]
-
- // Build our data context
$: dataContext = {
rows: $fetch.rows,
info: $fetch.info,
@@ -140,6 +136,13 @@
})
return extendedQuery
}
+
+ const setUpAutoRefresh = autoRefresh => {
+ clearInterval(interval)
+ if (autoRefresh) {
+ interval = setInterval(fetch.refresh, Math.max(10000, autoRefresh * 1000))
+ }
+ }
diff --git a/packages/client/src/components/app/GridBlock.svelte b/packages/client/src/components/app/GridBlock.svelte
index 46a507387d..085449a5b0 100644
--- a/packages/client/src/components/app/GridBlock.svelte
+++ b/packages/client/src/components/app/GridBlock.svelte
@@ -18,6 +18,7 @@
export let columns = null
export let onRowClick = null
export let buttons = null
+ export let repeat = null
const context = getContext("context")
const component = getContext("component")
@@ -122,6 +123,7 @@
{fixedRowHeight}
{columnWhitelist}
{schemaOverrides}
+ {repeat}
canAddRows={allowAddRows}
canEditRows={allowEditRows}
canDeleteRows={allowDeleteRows}
diff --git a/packages/client/src/components/app/blocks/CardsBlock.svelte b/packages/client/src/components/app/blocks/CardsBlock.svelte
index bd2b69d352..0368a27e93 100644
--- a/packages/client/src/components/app/blocks/CardsBlock.svelte
+++ b/packages/client/src/components/app/blocks/CardsBlock.svelte
@@ -31,6 +31,7 @@
export let cardButtonOnClick
export let linkColumn
export let noRowsMessage
+ export let autoRefresh
const context = getContext("context")
const { fetchDatasourceSchema, generateGoldenSample } = getContext("sdk")
@@ -184,6 +185,7 @@
sortOrder,
paginate,
limit,
+ autoRefresh,
}}
order={1}
>
diff --git a/packages/client/src/components/app/blocks/ChartBlock.svelte b/packages/client/src/components/app/blocks/ChartBlock.svelte
index 1bbc69ce63..2767c44b8e 100644
--- a/packages/client/src/components/app/blocks/ChartBlock.svelte
+++ b/packages/client/src/components/app/blocks/ChartBlock.svelte
@@ -8,6 +8,7 @@
export let sortColumn
export let sortOrder
export let limit
+ export let autoRefresh
// Block
export let chartTitle
@@ -65,6 +66,7 @@
sortColumn,
sortOrder,
limit,
+ autoRefresh,
}}
>
{#if dataProviderId && chartType}
diff --git a/packages/client/src/components/app/blocks/RepeaterBlock.svelte b/packages/client/src/components/app/blocks/RepeaterBlock.svelte
index 878b827c78..d8816b6c24 100644
--- a/packages/client/src/components/app/blocks/RepeaterBlock.svelte
+++ b/packages/client/src/components/app/blocks/RepeaterBlock.svelte
@@ -17,6 +17,7 @@
export let hAlign
export let vAlign
export let gap
+ export let autoRefresh
const component = getContext("component")
const context = getContext("context")
@@ -47,6 +48,7 @@
sortOrder,
limit,
paginate,
+ autoRefresh,
}}
>
{#if $component.empty}
diff --git a/packages/client/src/components/app/blocks/RowExplorer.svelte b/packages/client/src/components/app/blocks/RowExplorer.svelte
index 1e2357713a..ebddaa83a7 100644
--- a/packages/client/src/components/app/blocks/RowExplorer.svelte
+++ b/packages/client/src/components/app/blocks/RowExplorer.svelte
@@ -16,6 +16,7 @@
export let detailFields
export let detailTitle
export let noRowsMessage
+ export let autoRefresh
const stateKey = generate()
const context = getContext("context")
@@ -66,6 +67,7 @@
noValue: false,
},
],
+ autoRefresh,
}}
styles={{
custom: `
diff --git a/packages/client/src/components/app/deprecated/TableBlock.svelte b/packages/client/src/components/app/deprecated/TableBlock.svelte
index 5fd197bc02..e67dc5b061 100644
--- a/packages/client/src/components/app/deprecated/TableBlock.svelte
+++ b/packages/client/src/components/app/deprecated/TableBlock.svelte
@@ -33,6 +33,7 @@
export let sidePanelSaveLabel
export let sidePanelDeleteLabel
export let notificationOverride
+ export let autoRefresh
const { fetchDatasourceSchema, API, generateGoldenSample } = getContext("sdk")
const component = getContext("component")
@@ -243,6 +244,7 @@
sortOrder,
paginate,
limit: rowCount,
+ autoRefresh,
}}
context="provider"
order={1}
diff --git a/packages/pro b/packages/pro
index 6b62505be0..f8e8f87bd5 160000
--- a/packages/pro
+++ b/packages/pro
@@ -1 +1 @@
-Subproject commit 6b62505be0c0b50a57b4f4980d86541ebdc86428
+Subproject commit f8e8f87bd52081e1303a5ae92c432ea5b38f3bb4
diff --git a/packages/server/__mocks__/pg.ts b/packages/server/__mocks__/pg.ts
deleted file mode 100644
index 50a7c7349e..0000000000
--- a/packages/server/__mocks__/pg.ts
+++ /dev/null
@@ -1,25 +0,0 @@
-const query = jest.fn(() => ({
- rows: [
- {
- a: "string",
- b: 1,
- },
- ],
-}))
-
-class Client {
- query = query
- end = jest.fn(cb => {
- if (cb) cb()
- })
- connect = jest.fn()
- release = jest.fn()
-}
-
-const on = jest.fn()
-
-module.exports = {
- Client,
- queryMock: query,
- on,
-}
diff --git a/packages/server/jest.config.ts b/packages/server/jest.config.ts
index 85c75f9039..6341c8e5bd 100644
--- a/packages/server/jest.config.ts
+++ b/packages/server/jest.config.ts
@@ -42,12 +42,6 @@ if (fs.existsSync("../pro/src")) {
const config: Config.InitialOptions = {
projects: [
- {
- ...baseConfig,
- displayName: "sequential test",
-      testMatch: ["<rootDir>/**/*.seq.spec.[jt]s"],
- runner: "jest-serial-runner",
- },
{
...baseConfig,
      testMatch: ["<rootDir>/**/!(*.seq).spec.[jt]s"],
@@ -60,6 +54,9 @@ const config: Config.InitialOptions = {
"!src/db/views/staticViews.*",
"!src/**/*.spec.{js,ts}",
"!src/tests/**/*.{js,ts}",
+ // The use of coverage in the JS runner breaks tests by inserting
+ // coverage functions into code that will run inside of the isolate.
+ "!src/jsRunner/**/*.{js,ts}",
],
coverageReporters: ["lcov", "json", "clover"],
}
diff --git a/packages/server/package.json b/packages/server/package.json
index da99ff6dea..4d1df4d734 100644
--- a/packages/server/package.json
+++ b/packages/server/package.json
@@ -143,7 +143,7 @@
"jest": "29.7.0",
"jest-openapi": "0.14.2",
"jest-runner": "29.7.0",
- "jest-serial-runner": "1.2.1",
+ "nock": "13.5.4",
"nodemon": "2.0.15",
"openapi-typescript": "5.2.0",
"path-to-regexp": "6.2.0",
diff --git a/packages/server/scripts/test.sh b/packages/server/scripts/test.sh
index 48766026aa..4b456e4731 100644
--- a/packages/server/scripts/test.sh
+++ b/packages/server/scripts/test.sh
@@ -4,11 +4,9 @@ set -e
if [[ -n $CI ]]
then
export NODE_OPTIONS="--max-old-space-size=4096 --no-node-snapshot $NODE_OPTIONS"
- echo "jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@"
jest --coverage --maxWorkers=4 --forceExit --workerIdleMemoryLimit=2000MB --bail $@
else
# --maxWorkers performs better in development
export NODE_OPTIONS="--no-node-snapshot $NODE_OPTIONS"
- echo "jest --coverage --maxWorkers=2 --forceExit $@"
jest --coverage --maxWorkers=2 --forceExit $@
fi
\ No newline at end of file
diff --git a/packages/server/src/api/controllers/datasource.ts b/packages/server/src/api/controllers/datasource.ts
index 0f17c5a2f5..243d0a17a0 100644
--- a/packages/server/src/api/controllers/datasource.ts
+++ b/packages/server/src/api/controllers/datasource.ts
@@ -1,6 +1,6 @@
import { getQueryParams, getTableParams } from "../../db/utils"
import { getIntegration } from "../../integrations"
-import { invalidateDynamicVariables } from "../../threads/utils"
+import { invalidateCachedVariable } from "../../threads/utils"
import { context, db as dbCore, events } from "@budibase/backend-core"
import {
BuildSchemaFromSourceRequest,
@@ -121,7 +121,7 @@ async function invalidateVariables(
}
})
}
- await invalidateDynamicVariables(toInvalidate)
+ await invalidateCachedVariable(toInvalidate)
}
export async function update(
diff --git a/packages/server/src/api/controllers/query/index.ts b/packages/server/src/api/controllers/query/index.ts
index 055f3bd888..b52cea553f 100644
--- a/packages/server/src/api/controllers/query/index.ts
+++ b/packages/server/src/api/controllers/query/index.ts
@@ -2,7 +2,7 @@ import { generateQueryID } from "../../../db/utils"
import { Thread, ThreadType } from "../../../threads"
import { save as saveDatasource } from "../datasource"
import { RestImporter } from "./import"
-import { invalidateDynamicVariables } from "../../../threads/utils"
+import { invalidateCachedVariable } from "../../../threads/utils"
import env from "../../../environment"
import { events, context, utils, constants } from "@budibase/backend-core"
import sdk from "../../../sdk"
@@ -281,49 +281,52 @@ export async function preview(
return { previewSchema, nestedSchemaFields }
}
+ const inputs: QueryEvent = {
+ appId: ctx.appId,
+ queryVerb: query.queryVerb,
+ fields: query.fields,
+ parameters: enrichParameters(query),
+ transformer: query.transformer,
+ schema: query.schema,
+ nullDefaultSupport: query.nullDefaultSupport,
+ queryId,
+ datasource,
+ // have to pass down to the thread runner - can't put into context now
+ environmentVariables: envVars,
+ ctx: {
+ user: ctx.user,
+ auth: { ...authConfigCtx },
+ },
+ }
+
+ let queryResponse: QueryResponse
try {
- const inputs: QueryEvent = {
- appId: ctx.appId,
- queryVerb: query.queryVerb,
- fields: query.fields,
- parameters: enrichParameters(query),
- transformer: query.transformer,
- schema: query.schema,
- nullDefaultSupport: query.nullDefaultSupport,
- queryId,
- datasource,
- // have to pass down to the thread runner - can't put into context now
- environmentVariables: envVars,
- ctx: {
- user: ctx.user,
- auth: { ...authConfigCtx },
- },
- }
-
- const { rows, keys, info, extra } = await Runner.run(inputs)
- const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
-
- // if existing schema, update to include any previous schema keys
- if (existingSchema) {
- for (let key of Object.keys(previewSchema)) {
- if (existingSchema[key]) {
- previewSchema[key] = existingSchema[key]
- }
- }
- }
- // remove configuration before sending event
- delete datasource.config
- await events.query.previewed(datasource, ctx.request.body)
- ctx.body = {
- rows,
- nestedSchemaFields,
- schema: previewSchema,
- info,
- extra,
- }
+ queryResponse = await Runner.run(inputs)
} catch (err: any) {
ctx.throw(400, err)
}
+
+ const { rows, keys, info, extra } = queryResponse
+ const { previewSchema, nestedSchemaFields } = getSchemaFields(rows, keys)
+
+ // if existing schema, update to include any previous schema keys
+ if (existingSchema) {
+ for (let key of Object.keys(previewSchema)) {
+ if (existingSchema[key]) {
+ previewSchema[key] = existingSchema[key]
+ }
+ }
+ }
+ // remove configuration before sending event
+ delete datasource.config
+ await events.query.previewed(datasource, ctx.request.body)
+ ctx.body = {
+ rows,
+ nestedSchemaFields,
+ schema: previewSchema,
+ info,
+ extra,
+ }
}
async function execute(
@@ -416,7 +419,7 @@ const removeDynamicVariables = async (queryId: string) => {
const variablesToDelete = dynamicVariables!.filter(
(dv: any) => dv.queryId === queryId
)
- await invalidateDynamicVariables(variablesToDelete)
+ await invalidateCachedVariable(variablesToDelete)
}
}
diff --git a/packages/server/src/api/controllers/table/index.ts b/packages/server/src/api/controllers/table/index.ts
index 69305c461e..f799113333 100644
--- a/packages/server/src/api/controllers/table/index.ts
+++ b/packages/server/src/api/controllers/table/index.ts
@@ -84,8 +84,8 @@ export async function save(ctx: UserCtx) {
}
let savedTable = await api.save(ctx, renaming)
if (!table._id) {
- await events.table.created(savedTable)
savedTable = sdk.tables.enrichViewSchemas(savedTable)
+ await events.table.created(savedTable)
} else {
await events.table.updated(savedTable)
}
diff --git a/packages/server/src/api/controllers/view/viewsV2.ts b/packages/server/src/api/controllers/view/viewsV2.ts
index a386ac303f..eb28883e15 100644
--- a/packages/server/src/api/controllers/view/viewsV2.ts
+++ b/packages/server/src/api/controllers/view/viewsV2.ts
@@ -6,6 +6,7 @@ import {
UIFieldMetadata,
UpdateViewRequest,
ViewResponse,
+ ViewResponseEnriched,
ViewV2,
} from "@budibase/types"
import { builderSocket, gridSocket } from "../../../websockets"
@@ -39,9 +40,9 @@ async function parseSchema(view: CreateViewRequest) {
return finalViewSchema
}
-export async function get(ctx: Ctx<void, ViewResponse>) {
+export async function get(ctx: Ctx<void, ViewResponseEnriched>) {
ctx.body = {
- data: await sdk.views.get(ctx.params.viewId, { enriched: true }),
+ data: await sdk.views.getEnriched(ctx.params.viewId),
}
}
diff --git a/packages/server/src/api/routes/tests/datasource.spec.ts b/packages/server/src/api/routes/tests/datasource.spec.ts
index cbd830aee5..0066be2a64 100644
--- a/packages/server/src/api/routes/tests/datasource.spec.ts
+++ b/packages/server/src/api/routes/tests/datasource.spec.ts
@@ -1,18 +1,16 @@
-jest.mock("pg")
import * as setup from "./utilities"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
-import { checkCacheForDynamicVariable } from "../../../threads/utils"
+import { getCachedVariable } from "../../../threads/utils"
import { context, events } from "@budibase/backend-core"
import sdk from "../../../sdk"
import tk from "timekeeper"
import { mocks } from "@budibase/backend-core/tests"
-import { QueryPreview } from "@budibase/types"
+import { QueryPreview, SourceName } from "@budibase/types"
tk.freeze(mocks.date.MOCK_DATE)
let { basicDatasource } = setup.structures
-const pg = require("pg")
describe("/datasources", () => {
let request = setup.getRequest()
@@ -42,6 +40,23 @@ describe("/datasources", () => {
expect(res.body.errors).toEqual({})
expect(events.datasource.created).toHaveBeenCalledTimes(1)
})
+
+ it("should fail if the datasource is invalid", async () => {
+ await config.api.datasource.create(
+ {
+ name: "Test",
+ type: "test",
+ source: "invalid" as SourceName,
+ config: {},
+ },
+ {
+ status: 500,
+ body: {
+ message: "No datasource implementation found.",
+ },
+ }
+ )
+ })
})
describe("update", () => {
@@ -74,7 +89,7 @@ describe("/datasources", () => {
schema: {},
readable: true,
}
- return config.api.query.previewQuery(queryPreview)
+ return config.api.query.preview(queryPreview)
}
it("should invalidate changed or removed variables", async () => {
@@ -85,10 +100,7 @@ describe("/datasources", () => {
queryString: "test={{ variable3 }}",
})
// check variables in cache
- let contents = await checkCacheForDynamicVariable(
- query._id!,
- "variable3"
- )
+ let contents = await getCachedVariable(query._id!, "variable3")
expect(contents.rows.length).toEqual(1)
// update the datasource to remove the variables
@@ -102,7 +114,7 @@ describe("/datasources", () => {
expect(res.body.errors).toBeUndefined()
// check variables no longer in cache
- contents = await checkCacheForDynamicVariable(query._id!, "variable3")
+ contents = await getCachedVariable(query._id!, "variable3")
expect(contents).toBe(null)
})
})
@@ -149,35 +161,6 @@ describe("/datasources", () => {
})
})
- describe("query", () => {
- it("should be able to query a pg datasource", async () => {
- const res = await request
- .post(`/api/datasources/query`)
- .send({
- endpoint: {
- datasourceId: datasource._id,
- operation: "READ",
- // table name below
- entityId: "users",
- },
- resource: {
- fields: ["users.name", "users.age"],
- },
- filters: {
- string: {
- name: "John",
- },
- },
- })
- .set(config.defaultHeaders())
- .expect(200)
- // this is mock data, can't test it
- expect(res.body).toBeDefined()
- const expSql = `select "users"."name" as "users.name", "users"."age" as "users.age" from (select * from "users" where "users"."name" ilike $1 limit $2) as "users"`
- expect(pg.queryMock).toHaveBeenCalledWith(expSql, ["John%", 5000])
- })
- })
-
describe("destroy", () => {
beforeAll(setupTest)
diff --git a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
index 585288bc43..7790f909e7 100644
--- a/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
+++ b/packages/server/src/api/routes/tests/queries/generic-sql.spec.ts
@@ -1,12 +1,18 @@
-import { Datasource, Query, SourceName } from "@budibase/types"
+import {
+ Datasource,
+ Operation,
+ Query,
+ QueryPreview,
+ SourceName,
+} from "@budibase/types"
import * as setup from "../utilities"
import {
DatabaseName,
getDatasource,
rawQuery,
} from "../../../../integrations/tests/utils"
-
-jest.unmock("pg")
+import { Expectations } from "src/tests/utilities/api/base"
+import { events } from "@budibase/backend-core"
const createTableSQL: Record<string, string> = {
[SourceName.POSTGRES]: `
@@ -47,7 +53,10 @@ describe.each(
let rawDatasource: Datasource
let datasource: Datasource
-  async function createQuery(query: Partial<Query>): Promise<Query> {
+  async function createQuery(
+    query: Partial<Query>,
+    expectations?: Expectations
+  ): Promise<Query> {
const defaultQuery: Query = {
datasourceId: datasource._id!,
name: "New Query",
@@ -58,104 +67,350 @@ describe.each(
transformer: "return data",
readable: true,
}
- return await config.api.query.save({ ...defaultQuery, ...query })
+ return await config.api.query.save(
+ { ...defaultQuery, ...query },
+ expectations
+ )
}
beforeAll(async () => {
await config.init()
- rawDatasource = await dsProvider
- datasource = await config.api.datasource.create(rawDatasource)
})
beforeEach(async () => {
- await rawQuery(rawDatasource, createTableSQL[datasource.source])
- await rawQuery(rawDatasource, insertSQL)
+ rawDatasource = await dsProvider
+ datasource = await config.api.datasource.create(rawDatasource)
+
+ // The Datasource API does not return the password, but we need
+ // it later to connect to the underlying database, so we fill it
+ // back in here.
+ datasource.config!.password = rawDatasource.config!.password
+
+ await rawQuery(datasource, createTableSQL[datasource.source])
+ await rawQuery(datasource, insertSQL)
+
+ jest.clearAllMocks()
})
afterEach(async () => {
- await rawQuery(rawDatasource, dropTableSQL)
+ const ds = await config.api.datasource.get(datasource._id!)
+ config.api.datasource.delete(ds)
+ await rawQuery(datasource, dropTableSQL)
})
afterAll(async () => {
setup.afterAll()
})
- describe("create", () => {
- it("should be able to insert with bindings", async () => {
- const query = await createQuery({
- fields: {
- sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
- },
- parameters: [
- {
- name: "foo",
- default: "bar",
+ describe("query admin", () => {
+ describe("create", () => {
+ it("should be able to create a query", async () => {
+ const query = await createQuery({
+ name: "New Query",
+ fields: {
+ sql: "SELECT * FROM test_table",
},
- ],
- queryVerb: "create",
+ })
+
+ expect(query).toMatchObject({
+ datasourceId: datasource._id!,
+ name: "New Query",
+ parameters: [],
+ fields: {
+ sql: "SELECT * FROM test_table",
+ },
+ schema: {},
+ queryVerb: "read",
+ transformer: "return data",
+ readable: true,
+ createdAt: expect.any(String),
+ updatedAt: expect.any(String),
+ })
+
+ expect(events.query.created).toHaveBeenCalledTimes(1)
+ expect(events.query.updated).not.toHaveBeenCalled()
})
-
- const result = await config.api.query.execute(query._id!, {
- parameters: {
- foo: "baz",
- },
- })
-
- expect(result.data).toEqual([
- {
- created: true,
- },
- ])
-
- const rows = await rawQuery(
- rawDatasource,
- "SELECT * FROM test_table WHERE name = 'baz'"
- )
- expect(rows).toHaveLength(1)
})
- it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
- "should coerce %s into a date",
- async datetimeStr => {
- const date = new Date(datetimeStr)
+ describe("update", () => {
+ it("should be able to update a query", async () => {
const query = await createQuery({
fields: {
- sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`,
+ sql: "SELECT * FROM test_table",
},
- parameters: [
+ })
+
+ jest.clearAllMocks()
+
+ const updatedQuery = await config.api.query.save({
+ ...query,
+ name: "Updated Query",
+ fields: {
+ sql: "SELECT * FROM test_table WHERE id = 1",
+ },
+ })
+
+ expect(updatedQuery).toMatchObject({
+ datasourceId: datasource._id!,
+ name: "Updated Query",
+ parameters: [],
+ fields: {
+ sql: "SELECT * FROM test_table WHERE id = 1",
+ },
+ schema: {},
+ queryVerb: "read",
+ transformer: "return data",
+ readable: true,
+ })
+
+ expect(events.query.created).not.toHaveBeenCalled()
+ expect(events.query.updated).toHaveBeenCalledTimes(1)
+ })
+ })
+
+ describe("delete", () => {
+ it("should be able to delete a query", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "SELECT * FROM test_table",
+ },
+ })
+
+ await config.api.query.delete(query)
+ await config.api.query.get(query._id!, { status: 404 })
+
+ const queries = await config.api.query.fetch()
+ expect(queries).not.toContainEqual(query)
+
+ expect(events.query.deleted).toHaveBeenCalledTimes(1)
+ expect(events.query.deleted).toHaveBeenCalledWith(datasource, query)
+ })
+ })
+
+ describe("read", () => {
+ it("should be able to list queries", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "SELECT * FROM test_table",
+ },
+ })
+
+ const queries = await config.api.query.fetch()
+ expect(queries).toContainEqual(query)
+ })
+
+ it("should strip sensitive fields for prod apps", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "SELECT * FROM test_table",
+ },
+ })
+
+ await config.publish()
+ const prodQuery = await config.api.query.getProd(query._id!)
+
+ expect(prodQuery._id).toEqual(query._id)
+ expect(prodQuery.fields).toBeUndefined()
+ expect(prodQuery.parameters).toBeUndefined()
+ expect(prodQuery.schema).toBeDefined()
+ })
+ })
+ })
+
+ describe("preview", () => {
+ it("should be able to preview a query", async () => {
+ const request: QueryPreview = {
+ datasourceId: datasource._id!,
+ queryVerb: "read",
+ fields: {
+ sql: `SELECT * FROM test_table WHERE id = 1`,
+ },
+ parameters: [],
+ transformer: "return data",
+ name: datasource.name!,
+ schema: {},
+ readable: true,
+ }
+ const response = await config.api.query.preview(request)
+ expect(response.schema).toEqual({
+ birthday: {
+ name: "birthday",
+ type: "string",
+ },
+ id: {
+ name: "id",
+ type: "number",
+ },
+ name: {
+ name: "name",
+ type: "string",
+ },
+ number: {
+ name: "number",
+ type: "string",
+ },
+ })
+ expect(response.rows).toEqual([
+ {
+ birthday: null,
+ id: 1,
+ name: "one",
+ number: null,
+ },
+ ])
+ expect(events.query.previewed).toHaveBeenCalledTimes(1)
+ })
+
+ it("should work with static variables", async () => {
+ await config.api.datasource.update({
+ ...datasource,
+ config: {
+ ...datasource.config,
+ staticVariables: {
+ foo: "bar",
+ },
+ },
+ })
+
+ const request: QueryPreview = {
+ datasourceId: datasource._id!,
+ queryVerb: "read",
+ fields: {
+ sql: `SELECT '{{ foo }}' as foo`,
+ },
+ parameters: [],
+ transformer: "return data",
+ name: datasource.name!,
+ schema: {},
+ readable: true,
+ }
+
+ const response = await config.api.query.preview(request)
+
+ expect(response.schema).toEqual({
+ foo: {
+ name: "foo",
+ type: "string",
+ },
+ })
+
+ expect(response.rows).toEqual([
+ {
+ foo: "bar",
+ },
+ ])
+ })
+
+ it("should work with dynamic variables", async () => {
+ const basedOnQuery = await createQuery({
+ fields: {
+ sql: "SELECT name FROM test_table WHERE id = 1",
+ },
+ })
+
+ await config.api.datasource.update({
+ ...datasource,
+ config: {
+ ...datasource.config,
+ dynamicVariables: [
{
- name: "birthday",
- default: "",
+ queryId: basedOnQuery._id!,
+ name: "foo",
+ value: "{{ data[0].name }}",
},
],
- queryVerb: "create",
- })
+ },
+ })
- const result = await config.api.query.execute(query._id!, {
- parameters: { birthday: datetimeStr },
- })
+ const preview = await config.api.query.preview({
+ datasourceId: datasource._id!,
+ queryVerb: "read",
+ fields: {
+ sql: `SELECT '{{ foo }}' as foo`,
+ },
+ parameters: [],
+ transformer: "return data",
+ name: datasource.name!,
+ schema: {},
+ readable: true,
+ })
- expect(result.data).toEqual([{ created: true }])
+ expect(preview.schema).toEqual({
+ foo: {
+ name: "foo",
+ type: "string",
+ },
+ })
- const rows = await rawQuery(
- rawDatasource,
- `SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
- )
- expect(rows).toHaveLength(1)
- }
- )
+ expect(preview.rows).toEqual([
+ {
+ foo: "one",
+ },
+ ])
+ })
- it.each(["2021,02,05", "202205-1500"])(
- "should not coerce %s as a date",
- async notDateStr => {
+ it("should handle the dynamic base query being deleted", async () => {
+ const basedOnQuery = await createQuery({
+ fields: {
+ sql: "SELECT name FROM test_table WHERE id = 1",
+ },
+ })
+
+ await config.api.datasource.update({
+ ...datasource,
+ config: {
+ ...datasource.config,
+ dynamicVariables: [
+ {
+ queryId: basedOnQuery._id!,
+ name: "foo",
+ value: "{{ data[0].name }}",
+ },
+ ],
+ },
+ })
+
+ await config.api.query.delete(basedOnQuery)
+
+ const preview = await config.api.query.preview({
+ datasourceId: datasource._id!,
+ queryVerb: "read",
+ fields: {
+ sql: `SELECT '{{ foo }}' as foo`,
+ },
+ parameters: [],
+ transformer: "return data",
+ name: datasource.name!,
+ schema: {},
+ readable: true,
+ })
+
+ expect(preview.schema).toEqual({
+ foo: {
+ name: "foo",
+ type: "string",
+ },
+ })
+
+ expect(preview.rows).toEqual([
+ {
+ foo: datasource.source === SourceName.SQL_SERVER ? "" : null,
+ },
+ ])
+ })
+ })
+
+ describe("query verbs", () => {
+ describe("create", () => {
+ it("should be able to insert with bindings", async () => {
const query = await createQuery({
fields: {
- sql: "INSERT INTO test_table (name) VALUES ({{ name }})",
+ sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
},
parameters: [
{
- name: "name",
- default: "",
+ name: "foo",
+ default: "bar",
},
],
queryVerb: "create",
@@ -163,157 +418,349 @@ describe.each(
const result = await config.api.query.execute(query._id!, {
parameters: {
- name: notDateStr,
+ foo: "baz",
},
})
- expect(result.data).toEqual([{ created: true }])
+ expect(result.data).toEqual([
+ {
+ created: true,
+ },
+ ])
const rows = await rawQuery(
- rawDatasource,
- `SELECT * FROM test_table WHERE name = '${notDateStr}'`
+ datasource,
+ "SELECT * FROM test_table WHERE name = 'baz'"
)
expect(rows).toHaveLength(1)
- }
- )
- })
-
- describe("read", () => {
- it("should execute a query", async () => {
- const query = await createQuery({
- fields: {
- sql: "SELECT * FROM test_table ORDER BY id",
- },
})
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- {
- id: 1,
- name: "one",
- birthday: null,
- number: null,
- },
- {
- id: 2,
- name: "two",
- birthday: null,
- number: null,
- },
- {
- id: 3,
- name: "three",
- birthday: null,
- number: null,
- },
- {
- id: 4,
- name: "four",
- birthday: null,
- number: null,
- },
- {
- id: 5,
- name: "five",
- birthday: null,
- number: null,
- },
- ])
- })
-
- it("should be able to transform a query", async () => {
- const query = await createQuery({
- fields: {
- sql: "SELECT * FROM test_table WHERE id = 1",
- },
- transformer: `
- data[0].id = data[0].id + 1;
- return data;
- `,
- })
-
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- {
- id: 2,
- name: "one",
- birthday: null,
- number: null,
- },
- ])
- })
-
- it("should coerce numeric bindings", async () => {
- const query = await createQuery({
- fields: {
- sql: "SELECT * FROM test_table WHERE id = {{ id }}",
- },
- parameters: [
- {
- name: "id",
- default: "",
+ it("should not allow handlebars as parameters", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "INSERT INTO test_table (name) VALUES ({{ foo }})",
},
- ],
- })
+ parameters: [
+ {
+ name: "foo",
+ default: "bar",
+ },
+ ],
+ queryVerb: "create",
+ })
- const result = await config.api.query.execute(query._id!, {
- parameters: {
- id: "1",
- },
- })
-
- expect(result.data).toEqual([
- {
- id: 1,
- name: "one",
- birthday: null,
- number: null,
- },
- ])
- })
- })
-
- describe("update", () => {
- it("should be able to update rows", async () => {
- const query = await createQuery({
- fields: {
- sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
- },
- parameters: [
+ await config.api.query.execute(
+ query._id!,
{
- name: "id",
- default: "",
+ parameters: {
+ foo: "{{ 'test' }}",
+ },
},
{
- name: "name",
- default: "updated",
- },
- ],
- queryVerb: "update",
+ status: 400,
+ body: {
+ message:
+ "Parameter 'foo' input contains a handlebars binding - this is not allowed.",
+ },
+ }
+ )
})
- const result = await config.api.query.execute(query._id!, {
- parameters: {
- id: "1",
- name: "foo",
- },
- })
+ it.each(["2021-02-05T12:01:00.000Z", "2021-02-05"])(
+ "should coerce %s into a date",
+ async datetimeStr => {
+ const date = new Date(datetimeStr)
+ const query = await createQuery({
+ fields: {
+ sql: `INSERT INTO test_table (name, birthday) VALUES ('foo', {{ birthday }})`,
+ },
+ parameters: [
+ {
+ name: "birthday",
+ default: "",
+ },
+ ],
+ queryVerb: "create",
+ })
- expect(result.data).toEqual([
- {
- updated: true,
- },
- ])
+ const result = await config.api.query.execute(query._id!, {
+ parameters: { birthday: datetimeStr },
+ })
- const rows = await rawQuery(
- rawDatasource,
- "SELECT * FROM test_table WHERE id = 1"
+ expect(result.data).toEqual([{ created: true }])
+
+ const rows = await rawQuery(
+ datasource,
+ `SELECT * FROM test_table WHERE birthday = '${date.toISOString()}'`
+ )
+ expect(rows).toHaveLength(1)
+ }
)
- expect(rows).toEqual([
- { id: 1, name: "foo", birthday: null, number: null },
- ])
+
+ it.each(["2021,02,05", "202205-1500"])(
+ "should not coerce %s as a date",
+ async notDateStr => {
+ const query = await createQuery({
+ fields: {
+ sql: "INSERT INTO test_table (name) VALUES ({{ name }})",
+ },
+ parameters: [
+ {
+ name: "name",
+ default: "",
+ },
+ ],
+ queryVerb: "create",
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: {
+ name: notDateStr,
+ },
+ })
+
+ expect(result.data).toEqual([{ created: true }])
+
+ const rows = await rawQuery(
+ datasource,
+ `SELECT * FROM test_table WHERE name = '${notDateStr}'`
+ )
+ expect(rows).toHaveLength(1)
+ }
+ )
+ })
+
+ describe("read", () => {
+ it("should execute a query", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "SELECT * FROM test_table ORDER BY id",
+ },
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ id: 1,
+ name: "one",
+ birthday: null,
+ number: null,
+ },
+ {
+ id: 2,
+ name: "two",
+ birthday: null,
+ number: null,
+ },
+ {
+ id: 3,
+ name: "three",
+ birthday: null,
+ number: null,
+ },
+ {
+ id: 4,
+ name: "four",
+ birthday: null,
+ number: null,
+ },
+ {
+ id: 5,
+ name: "five",
+ birthday: null,
+ number: null,
+ },
+ ])
+ })
+
+ it("should be able to transform a query", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "SELECT * FROM test_table WHERE id = 1",
+ },
+ transformer: `
+ data[0].id = data[0].id + 1;
+ return data;
+ `,
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ id: 2,
+ name: "one",
+ birthday: null,
+ number: null,
+ },
+ ])
+ })
+
+ it("should coerce numeric bindings", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "SELECT * FROM test_table WHERE id = {{ id }}",
+ },
+ parameters: [
+ {
+ name: "id",
+ default: "",
+ },
+ ],
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: {
+ id: "1",
+ },
+ })
+
+ expect(result.data).toEqual([
+ {
+ id: 1,
+ name: "one",
+ birthday: null,
+ number: null,
+ },
+ ])
+ })
+ })
+
+ describe("update", () => {
+ it("should be able to update rows", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "UPDATE test_table SET name = {{ name }} WHERE id = {{ id }}",
+ },
+ parameters: [
+ {
+ name: "id",
+ default: "",
+ },
+ {
+ name: "name",
+ default: "updated",
+ },
+ ],
+ queryVerb: "update",
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: {
+ id: "1",
+ name: "foo",
+ },
+ })
+
+ expect(result.data).toEqual([
+ {
+ updated: true,
+ },
+ ])
+
+ const rows = await rawQuery(
+ datasource,
+ "SELECT * FROM test_table WHERE id = 1"
+ )
+ expect(rows).toEqual([
+ { id: 1, name: "foo", birthday: null, number: null },
+ ])
+ })
+
+ it("should be able to execute an update that updates no rows", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "UPDATE test_table SET name = 'updated' WHERE id = 100",
+ },
+ queryVerb: "update",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ updated: true,
+ },
+ ])
+ })
+
+ it("should be able to execute a delete that deletes no rows", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "DELETE FROM test_table WHERE id = 100",
+ },
+ queryVerb: "delete",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ deleted: true,
+ },
+ ])
+ })
+ })
+
+ describe("delete", () => {
+ it("should be able to delete rows", async () => {
+ const query = await createQuery({
+ fields: {
+ sql: "DELETE FROM test_table WHERE id = {{ id }}",
+ },
+ parameters: [
+ {
+ name: "id",
+ default: "",
+ },
+ ],
+ queryVerb: "delete",
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: {
+ id: "1",
+ },
+ })
+
+ expect(result.data).toEqual([
+ {
+ deleted: true,
+ },
+ ])
+
+ const rows = await rawQuery(
+ datasource,
+ "SELECT * FROM test_table WHERE id = 1"
+ )
+ expect(rows).toHaveLength(0)
+ })
+ })
+ })
+
+ describe("query through datasource", () => {
+ it("should be able to query a pg datasource", async () => {
+ const res = await config.api.datasource.query({
+ endpoint: {
+ datasourceId: datasource._id!,
+ operation: Operation.READ,
+ entityId: "test_table",
+ },
+ resource: {
+ fields: ["id", "name"],
+ },
+ filters: {
+ string: {
+ name: "two",
+ },
+ },
+ })
+ expect(res).toHaveLength(1)
+ expect(res[0]).toEqual({
+ id: 2,
+ name: "two",
+ })
})
it("should be able to execute an update that updates no rows", async () => {
@@ -324,7 +771,7 @@ describe.each(
queryVerb: "update",
})
- const result = await config.api.query.execute(query._id!)
+ const result = await config.api.query.execute(query._id!, {})
expect(result.data).toEqual([
{
@@ -332,58 +779,6 @@ describe.each(
},
])
})
-
- it("should be able to execute a delete that deletes no rows", async () => {
- const query = await createQuery({
- fields: {
- sql: "DELETE FROM test_table WHERE id = 100",
- },
- queryVerb: "delete",
- })
-
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- {
- deleted: true,
- },
- ])
- })
- })
-
- describe("delete", () => {
- it("should be able to delete rows", async () => {
- const query = await createQuery({
- fields: {
- sql: "DELETE FROM test_table WHERE id = {{ id }}",
- },
- parameters: [
- {
- name: "id",
- default: "",
- },
- ],
- queryVerb: "delete",
- })
-
- const result = await config.api.query.execute(query._id!, {
- parameters: {
- id: "1",
- },
- })
-
- expect(result.data).toEqual([
- {
- deleted: true,
- },
- ])
-
- const rows = await rawQuery(
- rawDatasource,
- "SELECT * FROM test_table WHERE id = 1"
- )
- expect(rows).toHaveLength(0)
- })
})
// this parameter really only impacts SQL queries
@@ -418,7 +813,7 @@ describe.each(
} catch (err: any) {
error = err.message
}
- if (dbName === DatabaseName.SQL_SERVER) {
+ if (dbName === "mssql") {
expect(error).toBeUndefined()
} else {
expect(error).toBeDefined()
diff --git a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts
index bdcfd85437..c79ae68a36 100644
--- a/packages/server/src/api/routes/tests/queries/mongodb.spec.ts
+++ b/packages/server/src/api/routes/tests/queries/mongodb.spec.ts
@@ -88,345 +88,491 @@ describe("/queries", () => {
})
afterEach(async () => {
- await withCollection(async collection => {
- await collection.drop()
- })
+ await withCollection(collection => collection.drop())
})
- it("should execute a count query", async () => {
- const query = await createQuery({
- fields: {
- json: {},
- extra: {
- actionType: "count",
+ describe("preview", () => {
+ it("should generate a nested schema with an empty array", async () => {
+ const name = generator.guid()
+ await withCollection(
+ async collection => await collection.insertOne({ name, nested: [] })
+ )
+
+ const preview = await config.api.query.preview({
+ name: "New Query",
+ datasourceId: datasource._id!,
+ fields: {
+ json: {
+ name: { $eq: name },
+ },
+ extra: {
+ collection,
+ actionType: "findOne",
+ },
},
- },
+ schema: {},
+ queryVerb: "read",
+ parameters: [],
+ transformer: "return data",
+ readable: true,
+ })
+
+ expect(preview).toEqual({
+ nestedSchemaFields: {},
+ rows: [{ _id: expect.any(String), name, nested: [] }],
+ schema: {
+ _id: {
+ type: "string",
+ name: "_id",
+ },
+ name: {
+ type: "string",
+ name: "name",
+ },
+ nested: {
+ type: "array",
+ name: "nested",
+ },
+ },
+ })
})
- const result = await config.api.query.execute(query._id!)
+ it("should generate a nested schema based on all of the nested items", async () => {
+ const name = generator.guid()
+ const item = {
+ name,
+ contacts: [
+ {
+ address: "123 Lane",
+ },
+ {
+ address: "456 Drive",
+ },
+ {
+ postcode: "BT1 12N",
+ lat: 54.59,
+ long: -5.92,
+ },
+ {
+ city: "Belfast",
+ },
+ {
+ address: "789 Avenue",
+ phoneNumber: "0800-999-5555",
+ },
+ {
+ name: "Name",
+ isActive: false,
+ },
+ ],
+ }
- expect(result.data).toEqual([{ value: 5 }])
- })
+ await withCollection(collection => collection.insertOne(item))
- it("should execute a count query with a transformer", async () => {
- const query = await createQuery({
- fields: {
- json: {},
- extra: {
- actionType: "count",
+ const preview = await config.api.query.preview({
+ name: "New Query",
+ datasourceId: datasource._id!,
+ fields: {
+ json: {
+ name: { $eq: name },
+ },
+ extra: {
+ collection,
+ actionType: "findOne",
+ },
},
- },
- transformer: "return data + 1",
- })
+ schema: {},
+ queryVerb: "read",
+ parameters: [],
+ transformer: "return data",
+ readable: true,
+ })
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([{ value: 6 }])
- })
-
- it("should execute a find query", async () => {
- const query = await createQuery({
- fields: {
- json: {},
- extra: {
- actionType: "find",
+ expect(preview).toEqual({
+ nestedSchemaFields: {
+ contacts: {
+ address: {
+ type: "string",
+ name: "address",
+ },
+ postcode: {
+ type: "string",
+ name: "postcode",
+ },
+ lat: {
+ type: "number",
+ name: "lat",
+ },
+ long: {
+ type: "number",
+ name: "long",
+ },
+ city: {
+ type: "string",
+ name: "city",
+ },
+ phoneNumber: {
+ type: "string",
+ name: "phoneNumber",
+ },
+ name: {
+ type: "string",
+ name: "name",
+ },
+ isActive: {
+ type: "boolean",
+ name: "isActive",
+ },
+ },
},
- },
- })
-
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- { _id: expectValidId, name: "one" },
- { _id: expectValidId, name: "two" },
- { _id: expectValidId, name: "three" },
- { _id: expectValidId, name: "four" },
- { _id: expectValidId, name: "five" },
- ])
- })
-
- it("should execute a findOne query", async () => {
- const query = await createQuery({
- fields: {
- json: {},
- extra: {
- actionType: "findOne",
+ rows: [{ ...item, _id: expect.any(String) }],
+ schema: {
+ _id: { type: "string", name: "_id" },
+ name: { type: "string", name: "name" },
+ contacts: { type: "json", name: "contacts", subtype: "array" },
},
- },
- })
-
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
- })
-
- it("should execute a findOneAndUpdate query", async () => {
- const query = await createQuery({
- fields: {
- json: {
- filter: { name: { $eq: "one" } },
- update: { $set: { name: "newName" } },
- },
- extra: {
- actionType: "findOneAndUpdate",
- },
- },
- })
-
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- {
- lastErrorObject: { n: 1, updatedExisting: true },
- ok: 1,
- value: { _id: expectValidId, name: "one" },
- },
- ])
-
- await withCollection(async collection => {
- expect(await collection.countDocuments()).toBe(5)
-
- const doc = await collection.findOne({ name: { $eq: "newName" } })
- expect(doc).toEqual({
- _id: expectValidBsonObjectId,
- name: "newName",
})
})
})
- it("should execute a distinct query", async () => {
- const query = await createQuery({
- fields: {
- json: "name",
- extra: {
- actionType: "distinct",
+ describe("execute", () => {
+ it("a count query", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {},
+ extra: {
+ actionType: "count",
+ },
},
- },
- })
-
- const result = await config.api.query.execute(query._id!)
- const values = result.data.map(o => o.value).sort()
- expect(values).toEqual(["five", "four", "one", "three", "two"])
- })
-
- it("should execute a create query with parameters", async () => {
- const query = await createQuery({
- fields: {
- json: { foo: "{{ foo }}" },
- extra: {
- actionType: "insertOne",
- },
- },
- queryVerb: "create",
- parameters: [
- {
- name: "foo",
- default: "default",
- },
- ],
- })
-
- const result = await config.api.query.execute(query._id!, {
- parameters: { foo: "bar" },
- })
-
- expect(result.data).toEqual([
- {
- acknowledged: true,
- insertedId: expectValidId,
- },
- ])
-
- await withCollection(async collection => {
- const doc = await collection.findOne({ foo: { $eq: "bar" } })
- expect(doc).toEqual({
- _id: expectValidBsonObjectId,
- foo: "bar",
})
- })
- })
- it("should execute a delete query with parameters", async () => {
- const query = await createQuery({
- fields: {
- json: { name: { $eq: "{{ name }}" } },
- extra: {
- actionType: "deleteOne",
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([{ value: 5 }])
+ })
+
+ it("should be able to updateOne by ObjectId", async () => {
+ const insertResult = await withCollection(c =>
+ c.insertOne({ name: "one" })
+ )
+ const query = await createQuery({
+ fields: {
+ json: {
+ filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
+ update: { $set: { name: "newName" } },
+ },
+ extra: {
+ actionType: "updateOne",
+ },
},
- },
- queryVerb: "delete",
- parameters: [
+ queryVerb: "update",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
{
- name: "name",
- default: "",
+ acknowledged: true,
+ matchedCount: 1,
+ modifiedCount: 1,
+ upsertedCount: 0,
+ upsertedId: null,
},
- ],
- })
+ ])
- const result = await config.api.query.execute(query._id!, {
- parameters: { name: "one" },
- })
-
- expect(result.data).toEqual([
- {
- acknowledged: true,
- deletedCount: 1,
- },
- ])
-
- await withCollection(async collection => {
- const doc = await collection.findOne({ name: { $eq: "one" } })
- expect(doc).toBeNull()
- })
- })
-
- it("should execute an update query with parameters", async () => {
- const query = await createQuery({
- fields: {
- json: {
- filter: { name: { $eq: "{{ name }}" } },
- update: { $set: { name: "{{ newName }}" } },
- },
- extra: {
- actionType: "updateOne",
- },
- },
- queryVerb: "update",
- parameters: [
- {
- name: "name",
- default: "",
- },
- {
+ await withCollection(async collection => {
+ const doc = await collection.findOne({ name: { $eq: "newName" } })
+ expect(doc).toEqual({
+ _id: insertResult.insertedId,
name: "newName",
- default: "",
- },
- ],
- })
-
- const result = await config.api.query.execute(query._id!, {
- parameters: { name: "one", newName: "newOne" },
- })
-
- expect(result.data).toEqual([
- {
- acknowledged: true,
- matchedCount: 1,
- modifiedCount: 1,
- upsertedCount: 0,
- upsertedId: null,
- },
- ])
-
- await withCollection(async collection => {
- const doc = await collection.findOne({ name: { $eq: "newOne" } })
- expect(doc).toEqual({
- _id: expectValidBsonObjectId,
- name: "newOne",
- })
-
- const oldDoc = await collection.findOne({ name: { $eq: "one" } })
- expect(oldDoc).toBeNull()
- })
- })
-
- it("should be able to updateOne by ObjectId", async () => {
- const insertResult = await withCollection(c => c.insertOne({ name: "one" }))
- const query = await createQuery({
- fields: {
- json: {
- filter: { _id: { $eq: `ObjectId("${insertResult.insertedId}")` } },
- update: { $set: { name: "newName" } },
- },
- extra: {
- actionType: "updateOne",
- },
- },
- queryVerb: "update",
- })
-
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- {
- acknowledged: true,
- matchedCount: 1,
- modifiedCount: 1,
- upsertedCount: 0,
- upsertedId: null,
- },
- ])
-
- await withCollection(async collection => {
- const doc = await collection.findOne({ name: { $eq: "newName" } })
- expect(doc).toEqual({
- _id: insertResult.insertedId,
- name: "newName",
+ })
})
})
- })
- it("should be able to delete all records", async () => {
- const query = await createQuery({
- fields: {
- json: {},
- extra: {
- actionType: "deleteMany",
+ it("a count query with a transformer", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {},
+ extra: {
+ actionType: "count",
+ },
},
- },
- queryVerb: "delete",
+ transformer: "return data + 1",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([{ value: 6 }])
})
- const result = await config.api.query.execute(query._id!)
-
- expect(result.data).toEqual([
- {
- acknowledged: true,
- deletedCount: 5,
- },
- ])
-
- await withCollection(async collection => {
- const docs = await collection.find().toArray()
- expect(docs).toHaveLength(0)
- })
- })
-
- it("should be able to update all documents", async () => {
- const query = await createQuery({
- fields: {
- json: {
- filter: {},
- update: { $set: { name: "newName" } },
+ it("a find query", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {},
+ extra: {
+ actionType: "find",
+ },
},
- extra: {
- actionType: "updateMany",
- },
- },
- queryVerb: "update",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ { _id: expectValidId, name: "one" },
+ { _id: expectValidId, name: "two" },
+ { _id: expectValidId, name: "three" },
+ { _id: expectValidId, name: "four" },
+ { _id: expectValidId, name: "five" },
+ ])
})
- const result = await config.api.query.execute(query._id!)
+ it("a findOne query", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {},
+ extra: {
+ actionType: "findOne",
+ },
+ },
+ })
- expect(result.data).toEqual([
- {
- acknowledged: true,
- matchedCount: 5,
- modifiedCount: 5,
- upsertedCount: 0,
- upsertedId: null,
- },
- ])
+ const result = await config.api.query.execute(query._id!)
- await withCollection(async collection => {
- const docs = await collection.find().toArray()
- expect(docs).toHaveLength(5)
- for (const doc of docs) {
+ expect(result.data).toEqual([{ _id: expectValidId, name: "one" }])
+ })
+
+ it("a findOneAndUpdate query", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {
+ filter: { name: { $eq: "one" } },
+ update: { $set: { name: "newName" } },
+ },
+ extra: {
+ actionType: "findOneAndUpdate",
+ },
+ },
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ lastErrorObject: { n: 1, updatedExisting: true },
+ ok: 1,
+ value: { _id: expectValidId, name: "one" },
+ },
+ ])
+
+ await withCollection(async collection => {
+ expect(await collection.countDocuments()).toBe(5)
+
+ const doc = await collection.findOne({ name: { $eq: "newName" } })
expect(doc).toEqual({
_id: expectValidBsonObjectId,
name: "newName",
})
- }
+ })
+ })
+
+ it("a distinct query", async () => {
+ const query = await createQuery({
+ fields: {
+ json: "name",
+ extra: {
+ actionType: "distinct",
+ },
+ },
+ })
+
+ const result = await config.api.query.execute(query._id!)
+ const values = result.data.map(o => o.value).sort()
+ expect(values).toEqual(["five", "four", "one", "three", "two"])
+ })
+
+ it("a create query with parameters", async () => {
+ const query = await createQuery({
+ fields: {
+ json: { foo: "{{ foo }}" },
+ extra: {
+ actionType: "insertOne",
+ },
+ },
+ queryVerb: "create",
+ parameters: [
+ {
+ name: "foo",
+ default: "default",
+ },
+ ],
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: { foo: "bar" },
+ })
+
+ expect(result.data).toEqual([
+ {
+ acknowledged: true,
+ insertedId: expectValidId,
+ },
+ ])
+
+ await withCollection(async collection => {
+ const doc = await collection.findOne({ foo: { $eq: "bar" } })
+ expect(doc).toEqual({
+ _id: expectValidBsonObjectId,
+ foo: "bar",
+ })
+ })
+ })
+
+ it("a delete query with parameters", async () => {
+ const query = await createQuery({
+ fields: {
+ json: { name: { $eq: "{{ name }}" } },
+ extra: {
+ actionType: "deleteOne",
+ },
+ },
+ queryVerb: "delete",
+ parameters: [
+ {
+ name: "name",
+ default: "",
+ },
+ ],
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: { name: "one" },
+ })
+
+ expect(result.data).toEqual([
+ {
+ acknowledged: true,
+ deletedCount: 1,
+ },
+ ])
+
+ await withCollection(async collection => {
+ const doc = await collection.findOne({ name: { $eq: "one" } })
+ expect(doc).toBeNull()
+ })
+ })
+
+ it("an update query with parameters", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {
+ filter: { name: { $eq: "{{ name }}" } },
+ update: { $set: { name: "{{ newName }}" } },
+ },
+ extra: {
+ actionType: "updateOne",
+ },
+ },
+ queryVerb: "update",
+ parameters: [
+ {
+ name: "name",
+ default: "",
+ },
+ {
+ name: "newName",
+ default: "",
+ },
+ ],
+ })
+
+ const result = await config.api.query.execute(query._id!, {
+ parameters: { name: "one", newName: "newOne" },
+ })
+
+ expect(result.data).toEqual([
+ {
+ acknowledged: true,
+ matchedCount: 1,
+ modifiedCount: 1,
+ upsertedCount: 0,
+ upsertedId: null,
+ },
+ ])
+
+ await withCollection(async collection => {
+ const doc = await collection.findOne({ name: { $eq: "newOne" } })
+ expect(doc).toEqual({
+ _id: expectValidBsonObjectId,
+ name: "newOne",
+ })
+
+ const oldDoc = await collection.findOne({ name: { $eq: "one" } })
+ expect(oldDoc).toBeNull()
+ })
+ })
+
+ it("should be able to delete all records", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {},
+ extra: {
+ actionType: "deleteMany",
+ },
+ },
+ queryVerb: "delete",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ acknowledged: true,
+ deletedCount: 5,
+ },
+ ])
+
+ await withCollection(async collection => {
+ const docs = await collection.find().toArray()
+ expect(docs).toHaveLength(0)
+ })
+ })
+
+ it("should be able to update all documents", async () => {
+ const query = await createQuery({
+ fields: {
+ json: {
+ filter: {},
+ update: { $set: { name: "newName" } },
+ },
+ extra: {
+ actionType: "updateMany",
+ },
+ },
+ queryVerb: "update",
+ })
+
+ const result = await config.api.query.execute(query._id!)
+
+ expect(result.data).toEqual([
+ {
+ acknowledged: true,
+ matchedCount: 5,
+ modifiedCount: 5,
+ upsertedCount: 0,
+ upsertedId: null,
+ },
+ ])
+
+ await withCollection(async collection => {
+ const docs = await collection.find().toArray()
+ expect(docs).toHaveLength(5)
+ for (const doc of docs) {
+ expect(doc).toEqual({
+ _id: expectValidBsonObjectId,
+ name: "newName",
+ })
+ }
+ })
})
})
diff --git a/packages/server/src/api/routes/tests/queries/permissions.spec.ts b/packages/server/src/api/routes/tests/queries/permissions.spec.ts
new file mode 100644
index 0000000000..a0b342e64d
--- /dev/null
+++ b/packages/server/src/api/routes/tests/queries/permissions.spec.ts
@@ -0,0 +1,47 @@
+import * as setup from "../utilities"
+import { checkBuilderEndpoint } from "../utilities/TestFunctions"
+import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
+import { Datasource, Query, SourceName } from "@budibase/types"
+
+describe("query permissions", () => {
+ let config: TestConfiguration
+ let datasource: Datasource
+ let query: Query
+
+ beforeAll(async () => {
+ config = setup.getConfig()
+ await config.init()
+ datasource = await config.api.datasource.create({
+ name: "test datasource",
+ type: "test",
+ source: SourceName.REST,
+ config: {},
+ })
+ query = await config.api.query.save({
+ name: "test query",
+ datasourceId: datasource._id!,
+ parameters: [],
+ fields: {},
+ transformer: "",
+ schema: {},
+ readable: true,
+ queryVerb: "read",
+ })
+ })
+
+ it("delete should require builder", async () => {
+ await checkBuilderEndpoint({
+ config,
+ method: "DELETE",
+ url: `/api/queries/${query._id}/${query._rev}`,
+ })
+ })
+
+ it("preview should require builder", async () => {
+ await checkBuilderEndpoint({
+ config,
+ method: "POST",
+ url: `/api/queries/preview`,
+ })
+ })
+})
diff --git a/packages/server/src/api/routes/tests/queries/query.seq.spec.ts b/packages/server/src/api/routes/tests/queries/query.seq.spec.ts
deleted file mode 100644
index 4c25a762b8..0000000000
--- a/packages/server/src/api/routes/tests/queries/query.seq.spec.ts
+++ /dev/null
@@ -1,774 +0,0 @@
-import tk from "timekeeper"
-
-const pg = require("pg")
-
-// Mock out postgres for this
-jest.mock("pg")
-jest.mock("node-fetch")
-
-// Mock isProdAppID to we can later mock the implementation and pretend we are
-// using prod app IDs
-jest.mock("@budibase/backend-core", () => {
- const core = jest.requireActual("@budibase/backend-core")
- return {
- ...core,
- db: {
- ...core.db,
- isProdAppID: jest.fn(),
- },
- }
-})
-import * as setup from "../utilities"
-import { checkBuilderEndpoint } from "../utilities/TestFunctions"
-import { checkCacheForDynamicVariable } from "../../../../threads/utils"
-
-const { basicQuery, basicDatasource } = setup.structures
-import { events, db as dbCore } from "@budibase/backend-core"
-import {
- Datasource,
- Query,
- SourceName,
- QueryPreview,
- QueryParameter,
-} from "@budibase/types"
-
-tk.freeze(Date.now())
-
-const mockIsProdAppID = dbCore.isProdAppID as jest.MockedFunction<
- typeof dbCore.isProdAppID
->
-
-describe("/queries", () => {
- let request = setup.getRequest()
- let config = setup.getConfig()
-  let datasource: Datasource & Required<Pick<Datasource, "_id">>, query: Query
-
- afterAll(setup.afterAll)
-
- const setupTest = async () => {
- await config.init()
- datasource = await config.createDatasource()
- query = await config.createQuery()
- }
-
- beforeAll(async () => {
- await setupTest()
- })
-
- const createQuery = async (query: Query) => {
- return request
- .post(`/api/queries`)
- .send(query)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- }
-
- describe("create", () => {
- it("should create a new query", async () => {
- const { _id } = await config.createDatasource()
- const query = basicQuery(_id)
- jest.clearAllMocks()
- const res = await createQuery(query)
-
- expect((res as any).res.statusMessage).toEqual(
- `Query ${query.name} saved successfully.`
- )
- expect(res.body).toEqual({
- _rev: res.body._rev,
- _id: res.body._id,
- ...query,
- nullDefaultSupport: true,
- createdAt: new Date().toISOString(),
- updatedAt: new Date().toISOString(),
- })
- expect(events.query.created).toHaveBeenCalledTimes(1)
- expect(events.query.updated).not.toHaveBeenCalled()
- })
- })
-
- describe("update", () => {
- it("should update query", async () => {
- const { _id } = await config.createDatasource()
- const query = basicQuery(_id)
- const res = await createQuery(query)
- jest.clearAllMocks()
- query._id = res.body._id
- query._rev = res.body._rev
- await createQuery(query)
-
- expect((res as any).res.statusMessage).toEqual(
- `Query ${query.name} saved successfully.`
- )
- expect(res.body).toEqual({
- _rev: res.body._rev,
- _id: res.body._id,
- ...query,
- nullDefaultSupport: true,
- createdAt: new Date().toISOString(),
- updatedAt: new Date().toISOString(),
- })
- expect(events.query.created).not.toHaveBeenCalled()
- expect(events.query.updated).toHaveBeenCalledTimes(1)
- })
- })
-
- describe("fetch", () => {
- beforeEach(async () => {
- await setupTest()
- })
-
- it("returns all the queries from the server", async () => {
- const res = await request
- .get(`/api/queries`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
-
- const queries = res.body
- expect(queries).toEqual([
- {
- _rev: query._rev,
- _id: query._id,
- createdAt: new Date().toISOString(),
- ...basicQuery(datasource._id),
- nullDefaultSupport: true,
- updatedAt: new Date().toISOString(),
- readable: true,
- },
- ])
- })
-
- it("should apply authorization to endpoint", async () => {
- await checkBuilderEndpoint({
- config,
- method: "GET",
- url: `/api/datasources`,
- })
- })
- })
-
- describe("find", () => {
- it("should find a query in builder", async () => {
- const query = await config.createQuery()
- const res = await request
- .get(`/api/queries/${query._id}`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body._id).toEqual(query._id)
- })
-
- it("should find a query in cloud", async () => {
- await config.withEnv({ SELF_HOSTED: "true" }, async () => {
- const query = await config.createQuery()
- const res = await request
- .get(`/api/queries/${query._id}`)
- .set(await config.defaultHeaders())
- .expect(200)
- .expect("Content-Type", /json/)
- expect(res.body.fields).toBeDefined()
- expect(res.body.parameters).toBeDefined()
- expect(res.body.schema).toBeDefined()
- })
- })
-
- it("should remove sensitive info for prod apps", async () => {
- // Mock isProdAppID to pretend we are using a prod app
- mockIsProdAppID.mockClear()
- mockIsProdAppID.mockImplementation(() => true)
-
- const query = await config.createQuery()
- const res = await request
- .get(`/api/queries/${query._id}`)
- .set(await config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body._id).toEqual(query._id)
- expect(res.body.fields).toBeUndefined()
- expect(res.body.parameters).toBeUndefined()
- expect(res.body.schema).toBeDefined()
-
- // Reset isProdAppID mock
- expect(dbCore.isProdAppID).toHaveBeenCalledTimes(1)
- mockIsProdAppID.mockImplementation(() => false)
- })
- })
-
- describe("destroy", () => {
- beforeEach(async () => {
- await setupTest()
- })
-
- it("deletes a query and returns a success message", async () => {
- await request
- .delete(`/api/queries/${query._id}/${query._rev}`)
- .set(config.defaultHeaders())
- .expect(200)
-
- const res = await request
- .get(`/api/queries`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
-
- expect(res.body).toEqual([])
- expect(events.query.deleted).toHaveBeenCalledTimes(1)
- expect(events.query.deleted).toHaveBeenCalledWith(datasource, query)
- })
-
- it("should apply authorization to endpoint", async () => {
- const query = await config.createQuery()
- await checkBuilderEndpoint({
- config,
- method: "DELETE",
- url: `/api/queries/${query._id}/${query._rev}`,
- })
- })
- })
-
- describe("preview", () => {
- it("should be able to preview the query", async () => {
- const queryPreview: QueryPreview = {
- datasourceId: datasource._id,
- queryVerb: "read",
- fields: {},
- parameters: [],
- transformer: "return data",
- name: datasource.name!,
- schema: {},
- readable: true,
- }
- const responseBody = await config.api.query.previewQuery(queryPreview)
- // these responses come from the mock
- expect(responseBody.schema).toEqual({
- a: { type: "string", name: "a" },
- b: { type: "number", name: "b" },
- })
- expect(responseBody.rows.length).toEqual(1)
- expect(events.query.previewed).toHaveBeenCalledTimes(1)
- delete datasource.config
- expect(events.query.previewed).toHaveBeenCalledWith(datasource, {
- ...queryPreview,
- nullDefaultSupport: true,
- })
- })
-
- it("should apply authorization to endpoint", async () => {
- await checkBuilderEndpoint({
- config,
- method: "POST",
- url: `/api/queries/preview`,
- })
- })
-
- it("should not error when trying to generate a nested schema for an empty array", async () => {
- const queryPreview: QueryPreview = {
- datasourceId: datasource._id,
- parameters: [],
- fields: {},
- queryVerb: "read",
- name: datasource.name!,
- transformer: "return data",
- schema: {},
- readable: true,
- }
- const rows = [
- {
- contacts: [],
- },
- ]
- pg.queryMock.mockImplementation(() => ({
- rows,
- }))
-
- const responseBody = await config.api.query.previewQuery(queryPreview)
- expect(responseBody).toEqual({
- nestedSchemaFields: {},
- rows,
- schema: {
- contacts: { type: "array", name: "contacts" },
- },
- })
- expect(responseBody.rows.length).toEqual(1)
- delete datasource.config
- })
-
- it("should generate a nested schema based on all the nested items", async () => {
- const queryPreview: QueryPreview = {
- datasourceId: datasource._id,
- parameters: [],
- fields: {},
- queryVerb: "read",
- name: datasource.name!,
- transformer: "return data",
- schema: {},
- readable: true,
- }
- const rows = [
- {
- contacts: [
- {
- address: "123 Lane",
- },
- {
- address: "456 Drive",
- },
- {
- postcode: "BT1 12N",
- lat: 54.59,
- long: -5.92,
- },
- {
- city: "Belfast",
- },
- {
- address: "789 Avenue",
- phoneNumber: "0800-999-5555",
- },
- {
- name: "Name",
- isActive: false,
- },
- ],
- },
- ]
-
- pg.queryMock.mockImplementation(() => ({
- rows,
- }))
-
- const responseBody = await config.api.query.previewQuery(queryPreview)
- expect(responseBody).toEqual({
- nestedSchemaFields: {
- contacts: {
- address: {
- type: "string",
- name: "address",
- },
- postcode: {
- type: "string",
- name: "postcode",
- },
- lat: {
- type: "number",
- name: "lat",
- },
- long: {
- type: "number",
- name: "long",
- },
- city: {
- type: "string",
- name: "city",
- },
- phoneNumber: {
- type: "string",
- name: "phoneNumber",
- },
- name: {
- type: "string",
- name: "name",
- },
- isActive: {
- type: "boolean",
- name: "isActive",
- },
- },
- },
- rows,
- schema: {
- contacts: { type: "json", name: "contacts", subtype: "array" },
- },
- })
- expect(responseBody.rows.length).toEqual(1)
- delete datasource.config
- })
- })
-
- describe("execute", () => {
- beforeEach(async () => {
- await setupTest()
- })
-
- it("should be able to execute the query", async () => {
- const res = await request
- .post(`/api/queries/${query._id}`)
- .send({
- parameters: {},
- })
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body.length).toEqual(1)
- })
-
- it("should fail with invalid integration type", async () => {
- const datasource: Datasource = {
- ...basicDatasource().datasource,
- source: "INVALID_INTEGRATION" as SourceName,
- }
- await config.api.datasource.create(datasource, {
- status: 500,
- body: {
- message: "No datasource implementation found.",
- },
- })
- })
-
- it("shouldn't allow handlebars to be passed as parameters", async () => {
- const res = await request
- .post(`/api/queries/${query._id}`)
- .send({
- parameters: {
- a: "{{ 'test' }}",
- },
- })
- .set(config.defaultHeaders())
- .expect(400)
- expect(res.body.message).toEqual(
- "Parameter 'a' input contains a handlebars binding - this is not allowed."
- )
- })
- })
-
- describe("variables", () => {
- async function preview(datasource: Datasource, fields: any) {
- const queryPreview: QueryPreview = {
- datasourceId: datasource._id!,
- parameters: [],
- fields,
- queryVerb: "read",
- name: datasource.name!,
- transformer: "return data",
- schema: {},
- readable: true,
- }
- return await config.api.query.previewQuery(queryPreview)
- }
-
- it("should work with static variables", async () => {
- const datasource = await config.restDatasource({
- staticVariables: {
- variable: "google",
- variable2: "1",
- },
- })
- const responseBody = await preview(datasource, {
- path: "www.{{ variable }}.com",
- queryString: "test={{ variable2 }}",
- })
- // these responses come from the mock
- expect(responseBody.schema).toEqual({
- opts: { type: "json", name: "opts" },
- url: { type: "string", name: "url" },
- value: { type: "string", name: "value" },
- })
- expect(responseBody.rows[0].url).toEqual("http://www.google.com?test=1")
- })
-
- it("should work with dynamic variables", async () => {
- const { datasource } = await config.dynamicVariableDatasource()
- const responseBody = await preview(datasource, {
- path: "www.google.com",
- queryString: "test={{ variable3 }}",
- })
- expect(responseBody.schema).toEqual({
- opts: { type: "json", name: "opts" },
- url: { type: "string", name: "url" },
- value: { type: "string", name: "value" },
- })
- expect(responseBody.rows[0].url).toContain("doctype%20html")
- })
-
- it("check that it automatically retries on fail with cached dynamics", async () => {
- const { datasource, query: base } =
- await config.dynamicVariableDatasource()
- // preview once to cache
- await preview(datasource, {
- path: "www.google.com",
- queryString: "test={{ variable3 }}",
- })
- // check its in cache
- const contents = await checkCacheForDynamicVariable(
- base._id!,
- "variable3"
- )
- expect(contents.rows.length).toEqual(1)
- const responseBody = await preview(datasource, {
- path: "www.failonce.com",
- queryString: "test={{ variable3 }}",
- })
- expect(responseBody.schema).toEqual({
- fails: { type: "number", name: "fails" },
- opts: { type: "json", name: "opts" },
- url: { type: "string", name: "url" },
- })
- expect(responseBody.rows[0].fails).toEqual(1)
- })
-
- it("deletes variables when linked query is deleted", async () => {
- const { datasource, query: base } =
- await config.dynamicVariableDatasource()
- // preview once to cache
- await preview(datasource, {
- path: "www.google.com",
- queryString: "test={{ variable3 }}",
- })
- // check its in cache
- let contents = await checkCacheForDynamicVariable(base._id!, "variable3")
- expect(contents.rows.length).toEqual(1)
-
- // delete the query
- await request
- .delete(`/api/queries/${base._id}/${base._rev}`)
- .set(config.defaultHeaders())
- .expect(200)
-
- // check variables no longer in cache
- contents = await checkCacheForDynamicVariable(base._id!, "variable3")
- expect(contents).toBe(null)
- })
- })
-
- describe("Current User Request Mapping", () => {
- async function previewGet(
- datasource: Datasource,
- fields: any,
- params: QueryParameter[]
- ) {
- const queryPreview: QueryPreview = {
- datasourceId: datasource._id!,
- parameters: params,
- fields,
- queryVerb: "read",
- name: datasource.name!,
- transformer: "return data",
- schema: {},
- readable: true,
- }
- return await config.api.query.previewQuery(queryPreview)
- }
-
- async function previewPost(
- datasource: Datasource,
- fields: any,
- params: QueryParameter[]
- ) {
- const queryPreview: QueryPreview = {
- datasourceId: datasource._id!,
- parameters: params,
- fields,
- queryVerb: "create",
- name: datasource.name!,
- transformer: null,
- schema: {},
- readable: false,
- }
- return await config.api.query.previewQuery(queryPreview)
- }
-
- it("should parse global and query level header mappings", async () => {
- const userDetails = config.getUserDetails()
-
- const datasource = await config.restDatasource({
- defaultHeaders: {
- test: "headerVal",
- emailHdr: "{{[user].[email]}}",
- },
- })
- const responseBody = await previewGet(
- datasource,
- {
- path: "www.google.com",
- queryString: "email={{[user].[email]}}",
- headers: {
- queryHdr: "{{[user].[firstName]}}",
- secondHdr: "1234",
- },
- },
- []
- )
-
- const parsedRequest = JSON.parse(responseBody.extra.raw)
- expect(parsedRequest.opts.headers).toEqual({
- test: "headerVal",
- emailHdr: userDetails.email,
- queryHdr: userDetails.firstName,
- secondHdr: "1234",
- })
- expect(responseBody.rows[0].url).toEqual(
- "http://www.google.com?email=" + userDetails.email.replace("@", "%40")
- )
- })
-
- it("should bind the current user to query parameters", async () => {
- const userDetails = config.getUserDetails()
-
- const datasource = await config.restDatasource()
-
- const responseBody = await previewGet(
- datasource,
- {
- path: "www.google.com",
- queryString:
- "test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
- },
- [
- { name: "myEmail", default: "{{[user].[email]}}" },
- { name: "myName", default: "{{[user].[firstName]}}" },
- { name: "testParam", default: "1234" },
- ]
- )
-
- expect(responseBody.rows[0].url).toEqual(
- "http://www.google.com?test=" +
- userDetails.email.replace("@", "%40") +
- "&testName=" +
- userDetails.firstName +
- "&testParam=1234"
- )
- })
-
- it("should bind the current user the request body - plain text", async () => {
- const userDetails = config.getUserDetails()
- const datasource = await config.restDatasource()
-
- const responseBody = await previewPost(
- datasource,
- {
- path: "www.google.com",
- queryString: "testParam={{testParam}}",
- requestBody:
- "This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
- bodyType: "text",
- },
- [{ name: "testParam", default: "1234" }]
- )
-
- const parsedRequest = JSON.parse(responseBody.extra.raw)
- expect(parsedRequest.opts.body).toEqual(
- `This is plain text and this is my email: ${userDetails.email}. This is a test param: 1234`
- )
- expect(responseBody.rows[0].url).toEqual(
- "http://www.google.com?testParam=1234"
- )
- })
-
- it("should bind the current user the request body - json", async () => {
- const userDetails = config.getUserDetails()
- const datasource = await config.restDatasource()
-
- const responseBody = await previewPost(
- datasource,
- {
- path: "www.google.com",
- queryString: "testParam={{testParam}}",
- requestBody:
- '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
- bodyType: "json",
- },
- [
- { name: "testParam", default: "1234" },
- { name: "userRef", default: "{{[user].[firstName]}}" },
- ]
- )
-
- const parsedRequest = JSON.parse(responseBody.extra.raw)
- const test = `{"email":"${userDetails.email}","queryCode":1234,"userRef":"${userDetails.firstName}"}`
- expect(parsedRequest.opts.body).toEqual(test)
- expect(responseBody.rows[0].url).toEqual(
- "http://www.google.com?testParam=1234"
- )
- })
-
- it("should bind the current user the request body - xml", async () => {
- const userDetails = config.getUserDetails()
- const datasource = await config.restDatasource()
-
- const responseBody = await previewPost(
- datasource,
- {
- path: "www.google.com",
- queryString: "testParam={{testParam}}",
- requestBody:
-            "<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
-            "<ref>[{{userId}}]</ref> <somestring>testing</somestring> </note>",
- bodyType: "xml",
- },
- [
- { name: "testParam", default: "1234" },
- { name: "userId", default: "{{[user].[firstName]}}" },
- ]
- )
-
- const parsedRequest = JSON.parse(responseBody.extra.raw)
-      const test = `<note> <email>${userDetails.email}</email> <code>1234</code> <ref>[${userDetails.firstName}]</ref> <somestring>testing</somestring> </note>`
-
- expect(parsedRequest.opts.body).toEqual(test)
- expect(responseBody.rows[0].url).toEqual(
- "http://www.google.com?testParam=1234"
- )
- })
-
- it("should bind the current user the request body - form-data", async () => {
- const userDetails = config.getUserDetails()
- const datasource = await config.restDatasource()
-
- const responseBody = await previewPost(
- datasource,
- {
- path: "www.google.com",
- queryString: "testParam={{testParam}}",
- requestBody:
- '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
- bodyType: "form",
- },
- [
- { name: "testParam", default: "1234" },
- { name: "userRef", default: "{{[user].[firstName]}}" },
- ]
- )
-
- const parsedRequest = JSON.parse(responseBody.extra.raw)
-
- const emailData = parsedRequest.opts.body._streams[1]
- expect(emailData).toEqual(userDetails.email)
-
- const queryCodeData = parsedRequest.opts.body._streams[4]
- expect(queryCodeData).toEqual("1234")
-
- const userRef = parsedRequest.opts.body._streams[7]
- expect(userRef).toEqual(userDetails.firstName)
-
- expect(responseBody.rows[0].url).toEqual(
- "http://www.google.com?testParam=1234"
- )
- })
-
- it("should bind the current user the request body - encoded", async () => {
- const userDetails = config.getUserDetails()
- const datasource = await config.restDatasource()
-
- const responseBody = await previewPost(
- datasource,
- {
- path: "www.google.com",
- queryString: "testParam={{testParam}}",
- requestBody:
- '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
- bodyType: "encoded",
- },
- [
- { name: "testParam", default: "1234" },
- { name: "userRef", default: "{{[user].[firstName]}}" },
- ]
- )
- const parsedRequest = JSON.parse(responseBody.extra.raw)
-
- expect(parsedRequest.opts.body.email).toEqual(userDetails.email)
- expect(parsedRequest.opts.body.queryCode).toEqual("1234")
- expect(parsedRequest.opts.body.userRef).toEqual(userDetails.firstName)
- })
- })
-})
diff --git a/packages/server/src/api/routes/tests/queries/rest.spec.ts b/packages/server/src/api/routes/tests/queries/rest.spec.ts
new file mode 100644
index 0000000000..5c41583244
--- /dev/null
+++ b/packages/server/src/api/routes/tests/queries/rest.spec.ts
@@ -0,0 +1,406 @@
+import * as setup from "../utilities"
+import TestConfiguration from "../../../../tests/utilities/TestConfiguration"
+import { Datasource, SourceName } from "@budibase/types"
+import { getCachedVariable } from "../../../../threads/utils"
+import nock from "nock"
+import { generator } from "@budibase/backend-core/tests"
+
+jest.unmock("node-fetch")
+
+describe("rest", () => {
+ let config: TestConfiguration
+ let datasource: Datasource
+
+ async function createQuery(fields: any) {
+ return await config.api.query.save({
+ name: "test query",
+ datasourceId: datasource._id!,
+ parameters: [],
+ fields,
+ transformer: "",
+ schema: {},
+ readable: true,
+ queryVerb: "read",
+ })
+ }
+
+ beforeAll(async () => {
+ config = setup.getConfig()
+ await config.init()
+ datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {},
+ })
+ })
+
+ afterEach(() => {
+ nock.cleanAll()
+ })
+
+ it("should automatically retry on fail with cached dynamics", async () => {
+ const basedOnQuery = await createQuery({
+ path: "one.example.com",
+ })
+
+ let cached = await getCachedVariable(basedOnQuery._id!, "foo")
+ expect(cached).toBeNull()
+
+ await config.api.datasource.update({
+ ...datasource,
+ config: {
+ ...datasource.config,
+ dynamicVariables: [
+ {
+ queryId: basedOnQuery._id!,
+ name: "foo",
+ value: "{{ data[0].name }}",
+ },
+ ],
+ },
+ })
+
+ cached = await getCachedVariable(basedOnQuery._id!, "foo")
+ expect(cached).toBeNull()
+
+ nock("http://one.example.com")
+ .get("/")
+ .reply(200, [{ name: "one" }])
+ nock("http://two.example.com").get("/?test=one").reply(500)
+ nock("http://two.example.com")
+ .get("/?test=one")
+ .reply(200, [{ name: "two" }])
+
+ const res = await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: "test query",
+ parameters: [],
+ queryVerb: "read",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "two.example.com",
+ queryString: "test={{ foo }}",
+ },
+ })
+ expect(res.schema).toEqual({
+ name: { type: "string", name: "name" },
+ })
+
+ cached = await getCachedVariable(basedOnQuery._id!, "foo")
+ expect(cached.rows.length).toEqual(1)
+ expect(cached.rows[0].name).toEqual("one")
+ })
+
+ it("should parse global and query level header mappings", async () => {
+ const datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {
+ defaultHeaders: {
+ test: "headerVal",
+ emailHdr: "{{[user].[email]}}",
+ },
+ },
+ })
+
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com", {
+ reqheaders: {
+ test: "headerVal",
+ emailhdr: user.email,
+ queryhdr: user.firstName!,
+ secondhdr: "1234",
+ },
+ })
+ .get("/?email=" + user.email.replace("@", "%40"))
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [],
+ queryVerb: "read",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ queryString: "email={{[user].[email]}}",
+ headers: {
+ queryHdr: "{{[user].[firstName]}}",
+ secondHdr: "1234",
+ },
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+
+ it("should bind the current user to query params", async () => {
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com")
+ .get(
+ "/?test=" +
+ user.email.replace("@", "%40") +
+ "&testName=" +
+ user.firstName +
+ "&testParam=1234"
+ )
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [
+ { name: "myEmail", default: "{{[user].[email]}}" },
+ { name: "myName", default: "{{[user].[firstName]}}" },
+ { name: "testParam", default: "1234" },
+ ],
+ queryVerb: "read",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ queryString:
+ "test={{myEmail}}&testName={{myName}}&testParam={{testParam}}",
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+
+ it("should bind the current user to the request body - plain text", async () => {
+ const datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {
+ method: "POST",
+ defaultHeaders: {
+ test: "headerVal",
+ emailHdr: "{{[user].[email]}}",
+ },
+ },
+ })
+
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com")
+ .post(
+ "/?testParam=1234",
+ "This is plain text and this is my email: " +
+ user.email +
+ ". This is a test param: 1234"
+ )
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [{ name: "testParam", default: "1234" }],
+ queryVerb: "create",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ bodyType: "text",
+ queryString: "&testParam={{testParam}}",
+ requestBody:
+ "This is plain text and this is my email: {{[user].[email]}}. This is a test param: {{testParam}}",
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+
+ it("should bind the current user to the request body - json", async () => {
+ const datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {
+ method: "POST",
+ defaultHeaders: {
+ test: "headerVal",
+ emailHdr: "{{[user].[email]}}",
+ },
+ },
+ })
+
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com")
+ .post("/?testParam=1234", {
+ email: user.email,
+ queryCode: 1234,
+ userRef: user.firstName,
+ })
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [
+ { name: "testParam", default: "1234" },
+ { name: "userRef", default: "{{[user].[firstName]}}" },
+ ],
+ queryVerb: "create",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ bodyType: "json",
+ queryString: "&testParam={{testParam}}",
+ requestBody:
+ '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+
+ it("should bind the current user to the request body - xml", async () => {
+ const datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {
+ method: "POST",
+ defaultHeaders: {
+ test: "headerVal",
+ emailHdr: "{{[user].[email]}}",
+ },
+ },
+ })
+
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com")
+ .post(
+ "/?testParam=1234",
+        `<note> <email>${user.email}</email> <code>1234</code> <ref>[${user.firstName}]</ref> <somestring>testing</somestring> </note>`
+ )
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [
+ { name: "testParam", default: "1234" },
+ { name: "userId", default: "{{[user].[firstName]}}" },
+ ],
+ queryVerb: "create",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ bodyType: "xml",
+ queryString: "&testParam={{testParam}}",
+ requestBody:
+          "<note> <email>{{[user].[email]}}</email> <code>{{testParam}}</code> " +
+          "<ref>[{{userId}}]</ref> <somestring>testing</somestring> </note>",
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+
+ it("should bind the current user to the request body - form-data", async () => {
+ const datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {
+ method: "POST",
+ defaultHeaders: {
+ test: "headerVal",
+ emailHdr: "{{[user].[email]}}",
+ },
+ },
+ })
+
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com")
+ .post("/?testParam=1234", body => {
+ return (
+ body.includes('name="email"\r\n\r\n' + user.email + "\r\n") &&
+ body.includes('name="queryCode"\r\n\r\n1234\r\n') &&
+ body.includes('name="userRef"\r\n\r\n' + user.firstName + "\r\n")
+ )
+ })
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [
+ { name: "testParam", default: "1234" },
+ { name: "userRef", default: "{{[user].[firstName]}}" },
+ ],
+ queryVerb: "create",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ bodyType: "form",
+ queryString: "&testParam={{testParam}}",
+ requestBody:
+ '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+
+ it("should bind the current user to the request body - encoded", async () => {
+ const datasource = await config.api.datasource.create({
+ name: generator.guid(),
+ type: "test",
+ source: SourceName.REST,
+ config: {
+ method: "POST",
+ defaultHeaders: {
+ test: "headerVal",
+ emailHdr: "{{[user].[email]}}",
+ },
+ },
+ })
+
+ const user = config.getUserDetails()
+ const mock = nock("http://www.example.com")
+ .post("/?testParam=1234", {
+ email: user.email,
+ queryCode: 1234,
+ userRef: user.firstName,
+ })
+ .reply(200, {})
+
+ await config.api.query.preview({
+ datasourceId: datasource._id!,
+ name: generator.guid(),
+ parameters: [
+ { name: "testParam", default: "1234" },
+ { name: "userRef", default: "{{[user].[firstName]}}" },
+ ],
+ queryVerb: "create",
+ transformer: "",
+ schema: {},
+ readable: true,
+ fields: {
+ path: "www.example.com",
+ bodyType: "encoded",
+ queryString: "&testParam={{testParam}}",
+ requestBody:
+ '{"email":"{{[user].[email]}}","queryCode":{{testParam}},"userRef":"{{userRef}}"}',
+ },
+ })
+
+ expect(mock.isDone()).toEqual(true)
+ })
+})
diff --git a/packages/server/src/api/routes/tests/row.spec.ts b/packages/server/src/api/routes/tests/row.spec.ts
index 8910522565..d9895466a5 100644
--- a/packages/server/src/api/routes/tests/row.spec.ts
+++ b/packages/server/src/api/routes/tests/row.spec.ts
@@ -30,7 +30,6 @@ const timestamp = new Date("2023-01-26T11:48:57.597Z").toISOString()
tk.freeze(timestamp)
jest.unmock("mssql")
-jest.unmock("pg")
describe.each([
["internal", undefined],
@@ -723,6 +722,39 @@ describe.each([
})
})
+ describe("bulkImport", () => {
+ isInternal &&
+ it("should update Auto ID field after bulk import", async () => {
+ const table = await config.api.table.save(
+ saveTableRequest({
+ primary: ["autoId"],
+ schema: {
+ autoId: {
+ name: "autoId",
+ type: FieldType.NUMBER,
+ subtype: AutoFieldSubType.AUTO_ID,
+ autocolumn: true,
+ constraints: {
+ type: "number",
+ presence: false,
+ },
+ },
+ },
+ })
+ )
+
+ let row = await config.api.row.save(table._id!, {})
+ expect(row.autoId).toEqual(1)
+
+ await config.api.row.bulkImport(table._id!, {
+ rows: [{ autoId: 2 }],
+ })
+
+ row = await config.api.row.save(table._id!, {})
+ expect(row.autoId).toEqual(3)
+ })
+ })
+
describe("enrich", () => {
beforeAll(async () => {
table = await config.api.table.save(defaultTable())
@@ -1296,7 +1328,7 @@ describe.each([
describe("Formula JS protection", () => {
it("should time out JS execution if a single cell takes too long", async () => {
- await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 20 }, async () => {
+ await config.withEnv({ JS_PER_INVOCATION_TIMEOUT_MS: 40 }, async () => {
const js = Buffer.from(
`
let i = 0;
@@ -1336,8 +1368,8 @@ describe.each([
it("should time out JS execution if a multiple cells take too long", async () => {
await config.withEnv(
{
- JS_PER_INVOCATION_TIMEOUT_MS: 20,
- JS_PER_REQUEST_TIMEOUT_MS: 40,
+ JS_PER_INVOCATION_TIMEOUT_MS: 40,
+ JS_PER_REQUEST_TIMEOUT_MS: 80,
},
async () => {
const js = Buffer.from(
diff --git a/packages/server/src/api/routes/tests/table.spec.ts b/packages/server/src/api/routes/tests/table.spec.ts
index 1038808fe1..7639b840dc 100644
--- a/packages/server/src/api/routes/tests/table.spec.ts
+++ b/packages/server/src/api/routes/tests/table.spec.ts
@@ -1,11 +1,11 @@
import { context, events } from "@budibase/backend-core"
import {
AutoFieldSubType,
+ Datasource,
FieldSubtype,
FieldType,
INTERNAL_TABLE_SOURCE_ID,
InternalTable,
- NumberFieldMetadata,
RelationshipType,
Row,
SaveTableRequest,
@@ -13,31 +13,41 @@ import {
TableSourceType,
User,
ViewCalculation,
+ ViewV2Enriched,
} from "@budibase/types"
import { checkBuilderEndpoint } from "./utilities/TestFunctions"
import * as setup from "./utilities"
-import sdk from "../../../sdk"
import * as uuid from "uuid"
-import tk from "timekeeper"
-import { generator, mocks } from "@budibase/backend-core/tests"
-import { TableToBuild } from "../../../tests/utilities/TestConfiguration"
-
-tk.freeze(mocks.date.MOCK_DATE)
+import { generator } from "@budibase/backend-core/tests"
+import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
+import { tableForDatasource } from "../../../tests/utilities/structures"
+import timekeeper from "timekeeper"
const { basicTable } = setup.structures
const ISO_REGEX_PATTERN = /^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}Z$/
-describe("/tables", () => {
- let request = setup.getRequest()
+describe.each([
+ ["internal", undefined],
+ [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
+ [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
+ [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
+ [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
+])("/tables (%s)", (_, dsProvider) => {
+ let isInternal: boolean
+ let datasource: Datasource | undefined
let config = setup.getConfig()
- let appId: string
afterAll(setup.afterAll)
beforeAll(async () => {
- const app = await config.init()
- appId = app.appId
+ await config.init()
+ if (dsProvider) {
+ datasource = await config.api.datasource.create(await dsProvider)
+ isInternal = false
+ } else {
+ isInternal = true
+ }
})
describe("create", () => {
@@ -45,102 +55,28 @@ describe("/tables", () => {
jest.clearAllMocks()
})
- const createTable = (table?: Table) => {
- if (!table) {
- table = basicTable()
- }
- return request
- .post(`/api/tables`)
- .send(table)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- }
-
- it("returns a success message when the table is successfully created", async () => {
- const res = await createTable()
-
- expect((res as any).res.statusMessage).toEqual(
- "Table TestTable saved successfully."
+ it("creates a table successfully", async () => {
+ const name = generator.guid()
+ const table = await config.api.table.save(
+ tableForDatasource(datasource, { name })
)
- expect(res.body.name).toEqual("TestTable")
+ expect(table.name).toEqual(name)
expect(events.table.created).toHaveBeenCalledTimes(1)
- expect(events.table.created).toHaveBeenCalledWith(res.body)
- })
-
- it("creates all the passed fields", async () => {
- const tableData: TableToBuild = {
- name: "TestTable",
- type: "table",
- schema: {
- autoId: {
- name: "id",
- type: FieldType.NUMBER,
- subtype: AutoFieldSubType.AUTO_ID,
- autocolumn: true,
- constraints: {
- type: "number",
- presence: false,
- },
- },
- },
- views: {
- "table view": {
- id: "viewId",
- version: 2,
- name: "table view",
- tableId: "tableId",
- },
- },
- }
- const testTable = await config.createTable(tableData)
-
- const expected: Table = {
- ...tableData,
- type: "table",
- views: {
- "table view": {
- ...tableData.views!["table view"],
- schema: {
- autoId: {
- autocolumn: true,
- constraints: {
- presence: false,
- type: "number",
- },
- name: "id",
- type: FieldType.NUMBER,
- subtype: AutoFieldSubType.AUTO_ID,
- visible: false,
- } as NumberFieldMetadata,
- },
- },
- },
- sourceType: TableSourceType.INTERNAL,
- sourceId: expect.any(String),
- _rev: expect.stringMatching(/^1-.+/),
- _id: expect.any(String),
- createdAt: mocks.date.MOCK_DATE.toISOString(),
- updatedAt: mocks.date.MOCK_DATE.toISOString(),
- }
- expect(testTable).toEqual(expected)
-
- const persistedTable = await config.api.table.get(testTable._id!)
- expect(persistedTable).toEqual(expected)
+ expect(events.table.created).toHaveBeenCalledWith(table)
})
it("creates a table via data import", async () => {
const table: SaveTableRequest = basicTable()
table.rows = [{ name: "test-name", description: "test-desc" }]
- const res = await createTable(table)
+ const res = await config.api.table.save(table)
expect(events.table.created).toHaveBeenCalledTimes(1)
- expect(events.table.created).toHaveBeenCalledWith(res.body)
+ expect(events.table.created).toHaveBeenCalledWith(res)
expect(events.table.imported).toHaveBeenCalledTimes(1)
- expect(events.table.imported).toHaveBeenCalledWith(res.body)
+ expect(events.table.imported).toHaveBeenCalledWith(res)
expect(events.rows.imported).toHaveBeenCalledTimes(1)
- expect(events.rows.imported).toHaveBeenCalledWith(res.body, 1)
+ expect(events.rows.imported).toHaveBeenCalledWith(res, 1)
})
it("should apply authorization to endpoint", async () => {
@@ -155,21 +91,31 @@ describe("/tables", () => {
describe("update", () => {
it("updates a table", async () => {
- const testTable = await config.createTable()
+ const table = await config.api.table.save(
+ tableForDatasource(datasource, {
+ schema: {
+ name: {
+ type: FieldType.STRING,
+ name: "name",
+ constraints: {
+ type: "string",
+ },
+ },
+ },
+ })
+ )
- const res = await request
- .post(`/api/tables`)
- .send(testTable)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
+ const updatedTable = await config.api.table.save({
+ ...table,
+ name: generator.guid(),
+ })
expect(events.table.updated).toHaveBeenCalledTimes(1)
- expect(events.table.updated).toHaveBeenCalledWith(res.body)
+ expect(events.table.updated).toHaveBeenCalledWith(updatedTable)
})
it("updates all the row fields for a table when a schema key is renamed", async () => {
- const testTable = await config.createTable()
+ const testTable = await config.api.table.save(basicTable(datasource))
await config.createLegacyView({
name: "TestView",
field: "Price",
@@ -179,112 +125,96 @@ describe("/tables", () => {
filters: [],
})
- const testRow = await request
- .post(`/api/${testTable._id}/rows`)
- .send({
- name: "test",
- })
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
+ const testRow = await config.api.row.save(testTable._id!, {
+ name: "test",
+ })
- const updatedTable = await request
- .post(`/api/tables`)
- .send({
- _id: testTable._id,
- _rev: testTable._rev,
- name: "TestTable",
- key: "name",
- _rename: {
- old: "name",
- updated: "updatedName",
+ const { name, ...otherColumns } = testTable.schema
+ const updatedTable = await config.api.table.save({
+ ...testTable,
+ _rename: {
+ old: "name",
+ updated: "updatedName",
+ },
+ schema: {
+ ...otherColumns,
+ updatedName: {
+ ...name,
+ name: "updatedName",
},
- schema: {
- updatedName: { type: "string" },
- },
- })
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect((updatedTable as any).res.statusMessage).toEqual(
- "Table TestTable saved successfully."
- )
- expect(updatedTable.body.name).toEqual("TestTable")
+ },
+ })
- const res = await request
- .get(`/api/${testTable._id}/rows/${testRow.body._id}`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
+ expect(updatedTable.name).toEqual(testTable.name)
- expect(res.body.updatedName).toEqual("test")
- expect(res.body.name).toBeUndefined()
+ const res = await config.api.row.get(testTable._id!, testRow._id!)
+ expect(res.updatedName).toEqual("test")
+ expect(res.name).toBeUndefined()
})
it("updates only the passed fields", async () => {
- const testTable = await config.createTable({
- name: "TestTable",
- type: "table",
- schema: {
- autoId: {
- name: "id",
- type: FieldType.NUMBER,
- subtype: AutoFieldSubType.AUTO_ID,
- autocolumn: true,
- constraints: {
- type: "number",
- presence: false,
+ await timekeeper.withFreeze(new Date(2021, 1, 1), async () => {
+ const table = await config.api.table.save(
+ tableForDatasource(datasource, {
+ schema: {
+ autoId: {
+ name: "id",
+ type: FieldType.NUMBER,
+ subtype: AutoFieldSubType.AUTO_ID,
+ autocolumn: true,
+ constraints: {
+ type: "number",
+ presence: false,
+ },
+ },
},
- },
- },
- views: {
- view1: {
- id: "viewId",
- version: 2,
- name: "table view",
- tableId: "tableId",
- },
- },
- })
+ })
+ )
- const response = await request
- .post(`/api/tables`)
- .send({
- ...testTable,
- name: "UpdatedName",
+ const newName = generator.guid()
+
+ const updatedTable = await config.api.table.save({
+ ...table,
+ name: newName,
})
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(response.body).toEqual({
- ...testTable,
- name: "UpdatedName",
- _rev: expect.stringMatching(/^2-.+/),
- })
+ let expected: Table = {
+ ...table,
+ name: newName,
+ _id: expect.any(String),
+ }
+ if (isInternal) {
+ expected._rev = expect.stringMatching(/^2-.+/)
+ }
- const persistedTable = await config.api.table.get(testTable._id!)
- expect(persistedTable).toEqual({
- ...testTable,
- name: "UpdatedName",
- _rev: expect.stringMatching(/^2-.+/),
+ expect(updatedTable).toEqual(expected)
+
+ const persistedTable = await config.api.table.get(updatedTable._id!)
+ expected = {
+ ...table,
+ name: newName,
+ _id: updatedTable._id,
+ }
+ if (datasource?.isSQL) {
+ expected.sql = true
+ }
+ if (isInternal) {
+ expected._rev = expect.stringMatching(/^2-.+/)
+ }
+ expect(persistedTable).toEqual(expected)
})
})
describe("user table", () => {
- it("should add roleId and email field when adjusting user table schema", async () => {
- const res = await request
- .post(`/api/tables`)
- .send({
- ...basicTable(),
+ isInternal &&
+ it("should add roleId and email field when adjusting user table schema", async () => {
+ const table = await config.api.table.save({
+ ...basicTable(datasource),
_id: "ta_users",
})
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body.schema.email).toBeDefined()
- expect(res.body.schema.roleId).toBeDefined()
- })
+ expect(table.schema.email).toBeDefined()
+ expect(table.schema.roleId).toBeDefined()
+ })
})
it("should add a new column for an internal DB table", async () => {
@@ -295,12 +225,7 @@ describe("/tables", () => {
...basicTable(),
}
- const response = await request
- .post(`/api/tables`)
- .send(saveTableRequest)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
+ const response = await config.api.table.save(saveTableRequest)
const expectedResponse = {
...saveTableRequest,
@@ -311,15 +236,16 @@ describe("/tables", () => {
views: {},
}
delete expectedResponse._add
-
- expect(response.status).toBe(200)
- expect(response.body).toEqual(expectedResponse)
+ expect(response).toEqual(expectedResponse)
})
})
describe("import", () => {
it("imports rows successfully", async () => {
- const table = await config.createTable()
+ const name = generator.guid()
+ const table = await config.api.table.save(
+ basicTable(datasource, { name })
+ )
const importRequest = {
schema: table.schema,
rows: [{ name: "test-name", description: "test-desc" }],
@@ -327,83 +253,36 @@ describe("/tables", () => {
jest.clearAllMocks()
- await request
- .post(`/api/tables/${table._id}/import`)
- .send(importRequest)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
+ await config.api.table.import(table._id!, importRequest)
expect(events.table.created).not.toHaveBeenCalled()
expect(events.rows.imported).toHaveBeenCalledTimes(1)
expect(events.rows.imported).toHaveBeenCalledWith(
expect.objectContaining({
- name: "TestTable",
+ name,
_id: table._id,
}),
1
)
})
-
- it("should update Auto ID field after bulk import", async () => {
- const table = await config.createTable({
- name: "TestTable",
- type: "table",
- schema: {
- autoId: {
- name: "id",
- type: FieldType.NUMBER,
- subtype: AutoFieldSubType.AUTO_ID,
- autocolumn: true,
- constraints: {
- type: "number",
- presence: false,
- },
- },
- },
- })
-
- let row = await config.api.row.save(table._id!, {})
- expect(row.autoId).toEqual(1)
-
- await config.api.row.bulkImport(table._id!, {
- rows: [{ autoId: 2 }],
- identifierFields: [],
- })
-
- row = await config.api.row.save(table._id!, {})
- expect(row.autoId).toEqual(3)
- })
})
describe("fetch", () => {
let testTable: Table
- const enrichViewSchemasMock = jest.spyOn(sdk.tables, "enrichViewSchemas")
beforeEach(async () => {
- testTable = await config.createTable(testTable)
+ testTable = await config.api.table.save(
+ basicTable(datasource, { name: generator.guid() })
+ )
})
- afterEach(() => {
- delete testTable._rev
- })
-
- afterAll(() => {
- enrichViewSchemasMock.mockRestore()
- })
-
- it("returns all the tables for that instance in the response body", async () => {
- const res = await request
- .get(`/api/tables`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
-
- const table = res.body.find((t: Table) => t._id === testTable._id)
+ it("returns all tables", async () => {
+ const res = await config.api.table.fetch()
+ const table = res.find(t => t._id === testTable._id)
expect(table).toBeDefined()
- expect(table.name).toEqual(testTable.name)
- expect(table.type).toEqual("table")
- expect(table.sourceType).toEqual("internal")
+ expect(table!.name).toEqual(testTable.name)
+ expect(table!.type).toEqual("table")
+ expect(table!.sourceType).toEqual(testTable.sourceType)
})
it("should apply authorization to endpoint", async () => {
@@ -414,99 +293,110 @@ describe("/tables", () => {
})
})
- it("should fetch views", async () => {
- const tableId = config.table!._id!
- const views = [
- await config.api.viewV2.create({ tableId, name: generator.guid() }),
- await config.api.viewV2.create({ tableId, name: generator.guid() }),
- ]
-
- const res = await request
- .get(`/api/tables`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
-
- expect(res.body).toEqual(
- expect.arrayContaining([
- expect.objectContaining({
- _id: tableId,
- views: views.reduce((p, c) => {
- p[c.name] = { ...c, schema: expect.anything() }
- return p
- }, {} as any),
- }),
- ])
- )
- })
-
- it("should enrich the view schemas for viewsV2", async () => {
- const tableId = config.table!._id!
- enrichViewSchemasMock.mockImplementation(t => ({
- ...t,
- views: {
- view1: {
- version: 2,
- name: "view1",
- schema: {},
- id: "new_view_id",
- tableId: t._id!,
- },
- },
- }))
-
- await config.api.viewV2.create({ tableId, name: generator.guid() })
- await config.createLegacyView()
+ it("should enrich the view schemas", async () => {
+ const viewV2 = await config.api.viewV2.create({
+ tableId: testTable._id!,
+ name: generator.guid(),
+ })
+ const legacyView = await config.api.legacyView.save({
+ tableId: testTable._id!,
+ name: generator.guid(),
+ filters: [],
+ schema: {},
+ })
const res = await config.api.table.fetch()
- expect(res).toEqual(
- expect.arrayContaining([
- expect.objectContaining({
- _id: tableId,
- views: {
- view1: {
- version: 2,
- name: "view1",
- schema: {},
- id: "new_view_id",
- tableId,
- },
+ const table = res.find(t => t._id === testTable._id)
+ expect(table).toBeDefined()
+ expect(table!.views![viewV2.name]).toBeDefined()
+
+ const expectedViewV2: ViewV2Enriched = {
+ ...viewV2,
+ schema: {
+ description: {
+ constraints: {
+ type: "string",
},
- }),
- ])
+ name: "description",
+ type: FieldType.STRING,
+ visible: false,
+ },
+ name: {
+ constraints: {
+ type: "string",
+ },
+ name: "name",
+ type: FieldType.STRING,
+ visible: false,
+ },
+ },
+ }
+
+ if (!isInternal) {
+ expectedViewV2.schema!.id = {
+ name: "id",
+ type: FieldType.NUMBER,
+ visible: false,
+ autocolumn: true,
+ }
+ }
+
+ expect(table!.views![viewV2.name!]).toEqual(expectedViewV2)
+
+ if (isInternal) {
+ expect(table!.views![legacyView.name!]).toBeDefined()
+ expect(table!.views![legacyView.name!]).toEqual({
+ ...legacyView,
+ schema: {
+ description: {
+ constraints: {
+ type: "string",
+ },
+ name: "description",
+ type: "string",
+ },
+ name: {
+ constraints: {
+ type: "string",
+ },
+ name: "name",
+ type: "string",
+ },
+ },
+ })
+ }
+ })
+ })
+
+ describe("get", () => {
+ it("returns a table", async () => {
+ const table = await config.api.table.save(
+ basicTable(datasource, { name: generator.guid() })
)
+ const res = await config.api.table.get(table._id!)
+ expect(res).toEqual(table)
})
})
describe("indexing", () => {
it("should be able to create a table with indexes", async () => {
- await context.doInAppContext(appId, async () => {
+ await context.doInAppContext(config.getAppId(), async () => {
const db = context.getAppDB()
const indexCount = (await db.getIndexes()).total_rows
const table = basicTable()
table.indexes = ["name"]
- const res = await request
- .post(`/api/tables`)
- .send(table)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body._id).toBeDefined()
- expect(res.body._rev).toBeDefined()
+ const res = await config.api.table.save(table)
+ expect(res._id).toBeDefined()
+ expect(res._rev).toBeDefined()
expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1)
// update index to see what happens
table.indexes = ["name", "description"]
- await request
- .post(`/api/tables`)
- .send({
- ...table,
- _id: res.body._id,
- _rev: res.body._rev,
- })
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
+ await config.api.table.save({
+ ...table,
+ _id: res._id,
+ _rev: res._rev,
+ })
// shouldn't have created a new index
expect((await db.getIndexes()).total_rows).toEqual(indexCount + 1)
})
@@ -521,12 +411,9 @@ describe("/tables", () => {
})
it("returns a success response when a table is deleted.", async () => {
- const res = await request
- .delete(`/api/tables/${testTable._id}/${testTable._rev}`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body.message).toEqual(`Table ${testTable._id} deleted.`)
+ await config.api.table.destroy(testTable._id!, testTable._rev!, {
+ body: { message: `Table ${testTable._id} deleted.` },
+ })
expect(events.table.deleted).toHaveBeenCalledTimes(1)
expect(events.table.deleted).toHaveBeenCalledWith({
...testTable,
@@ -559,12 +446,9 @@ describe("/tables", () => {
},
})
- const res = await request
- .delete(`/api/tables/${testTable._id}/${testTable._rev}`)
- .set(config.defaultHeaders())
- .expect("Content-Type", /json/)
- .expect(200)
- expect(res.body.message).toEqual(`Table ${testTable._id} deleted.`)
+ await config.api.table.destroy(testTable._id!, testTable._rev!, {
+ body: { message: `Table ${testTable._id} deleted.` },
+ })
const dependentTable = await config.api.table.get(linkedTable._id!)
expect(dependentTable.schema.TestTable).not.toBeDefined()
})
@@ -816,33 +700,31 @@ describe("/tables", () => {
describe("unhappy paths", () => {
let table: Table
beforeAll(async () => {
- table = await config.api.table.save({
- name: "table",
- type: "table",
- sourceId: INTERNAL_TABLE_SOURCE_ID,
- sourceType: TableSourceType.INTERNAL,
- schema: {
- "user relationship": {
- type: FieldType.LINK,
- fieldName: "test",
- name: "user relationship",
- constraints: {
- type: "array",
- presence: false,
+ table = await config.api.table.save(
+ tableForDatasource(datasource, {
+ schema: {
+ "user relationship": {
+ type: FieldType.LINK,
+ fieldName: "test",
+ name: "user relationship",
+ constraints: {
+ type: "array",
+ presence: false,
+ },
+ relationshipType: RelationshipType.MANY_TO_ONE,
+ tableId: InternalTable.USER_METADATA,
},
- relationshipType: RelationshipType.MANY_TO_ONE,
- tableId: InternalTable.USER_METADATA,
- },
- num: {
- type: FieldType.NUMBER,
- name: "num",
- constraints: {
- type: "number",
- presence: false,
+ num: {
+ type: FieldType.NUMBER,
+ name: "num",
+ constraints: {
+ type: "number",
+ presence: false,
+ },
},
},
- },
- })
+ })
+ )
})
it("should fail if the new column name is blank", async () => {
diff --git a/packages/server/src/api/routes/tests/viewV2.spec.ts b/packages/server/src/api/routes/tests/viewV2.spec.ts
index d3e38b0f23..a4ecd7c818 100644
--- a/packages/server/src/api/routes/tests/viewV2.spec.ts
+++ b/packages/server/src/api/routes/tests/viewV2.spec.ts
@@ -25,7 +25,6 @@ import { quotas } from "@budibase/pro"
import { roles } from "@budibase/backend-core"
jest.unmock("mssql")
-jest.unmock("pg")
describe.each([
["internal", undefined],
@@ -182,7 +181,7 @@ describe.each([
const createdView = await config.api.viewV2.create(newView)
- expect(await config.api.viewV2.get(createdView.id)).toEqual({
+ expect(createdView).toEqual({
...newView,
schema: {
Price: {
@@ -399,7 +398,7 @@ describe.each([
})
it("updates only UI schema overrides", async () => {
- await config.api.viewV2.update({
+ const updatedView = await config.api.viewV2.update({
...view,
schema: {
Price: {
@@ -418,7 +417,7 @@ describe.each([
} as Record,
})
- expect(await config.api.viewV2.get(view.id)).toEqual({
+ expect(updatedView).toEqual({
...view,
schema: {
Price: {
@@ -480,17 +479,17 @@ describe.each([
describe("fetch view (through table)", () => {
it("should be able to fetch a view V2", async () => {
- const newView: CreateViewRequest = {
+ const res = await config.api.viewV2.create({
name: generator.name(),
tableId: table._id!,
schema: {
Price: { visible: false },
Category: { visible: true },
},
- }
- const res = await config.api.viewV2.create(newView)
+ })
+ expect(res.schema?.Price).toBeUndefined()
+
const view = await config.api.viewV2.get(res.id)
- expect(view!.schema?.Price).toBeUndefined()
const updatedTable = await config.api.table.get(table._id!)
const viewSchema = updatedTable.views![view!.name!].schema as Record<
string,
diff --git a/packages/server/src/app.ts b/packages/server/src/app.ts
index aa96a30b00..75da54cbaf 100644
--- a/packages/server/src/app.ts
+++ b/packages/server/src/app.ts
@@ -21,7 +21,7 @@ async function start() {
app = koa.app
server = koa.server
// startup includes automation runner - if enabled
- await startup(app, server)
+ await startup({ app, server })
}
start().catch(err => {
diff --git a/packages/server/src/automations/tests/executeQuery.spec.js b/packages/server/src/automations/tests/executeQuery.spec.js
deleted file mode 100644
index 3b691f48ea..0000000000
--- a/packages/server/src/automations/tests/executeQuery.spec.js
+++ /dev/null
@@ -1,39 +0,0 @@
-const setup = require("./utilities")
-
-describe("test the execute query action", () => {
- let query
- let config = setup.getConfig()
-
- beforeAll(async () => {
- await config.init()
-
- await config.createDatasource()
- query = await config.createQuery()
- })
-
- afterAll(setup.afterAll)
-
- it("should be able to execute a query", async () => {
- let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
- query: { queryId: query._id },
- })
- expect(res.response).toEqual([{ a: "string", b: 1 }])
- expect(res.success).toEqual(true)
- })
-
- it("should handle a null query value", async () => {
- let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
- query: null,
- })
- expect(res.response.message).toEqual("Invalid inputs")
- expect(res.success).toEqual(false)
- })
-
- it("should handle an error executing a query", async () => {
- let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
- query: { queryId: "wrong_id" },
- })
- expect(res.response).toEqual("Error: missing")
- expect(res.success).toEqual(false)
- })
-})
diff --git a/packages/server/src/automations/tests/executeQuery.spec.ts b/packages/server/src/automations/tests/executeQuery.spec.ts
new file mode 100644
index 0000000000..996e44af79
--- /dev/null
+++ b/packages/server/src/automations/tests/executeQuery.spec.ts
@@ -0,0 +1,94 @@
+import { Datasource, Query, SourceName } from "@budibase/types"
+import * as setup from "./utilities"
+import { DatabaseName, getDatasource } from "../../integrations/tests/utils"
+import knex, { Knex } from "knex"
+import { generator } from "@budibase/backend-core/tests"
+
+function getKnexClientName(source: SourceName) {
+ switch (source) {
+ case SourceName.MYSQL:
+ return "mysql2"
+ case SourceName.SQL_SERVER:
+ return "mssql"
+ case SourceName.POSTGRES:
+ return "pg"
+ }
+ throw new Error(`Unsupported source: ${source}`)
+}
+
+describe.each(
+ [
+ DatabaseName.POSTGRES,
+ DatabaseName.MYSQL,
+ DatabaseName.SQL_SERVER,
+ DatabaseName.MARIADB,
+ ].map(name => [name, getDatasource(name)])
+)("execute query action (%s)", (_, dsProvider) => {
+ let tableName: string
+ let client: Knex
+ let datasource: Datasource
+ let query: Query
+ let config = setup.getConfig()
+
+ beforeAll(async () => {
+ await config.init()
+
+ const ds = await dsProvider
+ datasource = await config.api.datasource.create(ds)
+ client = knex({
+ client: getKnexClientName(ds.source),
+ connection: ds.config,
+ })
+ })
+
+ beforeEach(async () => {
+ tableName = generator.guid()
+ await client.schema.createTable(tableName, table => {
+ table.string("a")
+ table.integer("b")
+ })
+ await client(tableName).insert({ a: "string", b: 1 })
+ query = await config.api.query.save({
+ name: "test query",
+ datasourceId: datasource._id!,
+ parameters: [],
+ fields: {
+ sql: client(tableName).select("*").toSQL().toNative().sql,
+ },
+ transformer: "",
+ schema: {},
+ readable: true,
+ queryVerb: "read",
+ })
+ })
+
+ afterEach(async () => {
+ await client.schema.dropTable(tableName)
+ })
+
+ afterAll(setup.afterAll)
+
+ it("should be able to execute a query", async () => {
+ let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
+ query: { queryId: query._id },
+ })
+ expect(res.response).toEqual([{ a: "string", b: 1 }])
+ expect(res.success).toEqual(true)
+ })
+
+ it("should handle a null query value", async () => {
+ let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
+ query: null,
+ })
+ expect(res.response.message).toEqual("Invalid inputs")
+ expect(res.success).toEqual(false)
+ })
+
+ it("should handle an error executing a query", async () => {
+ let res = await setup.runStep(setup.actions.EXECUTE_QUERY.stepId, {
+ query: { queryId: "wrong_id" },
+ })
+ expect(res.response).toEqual("Error: missing")
+ expect(res.success).toEqual(false)
+ })
+})
diff --git a/packages/server/src/integration-test/postgres.spec.ts b/packages/server/src/integration-test/postgres.spec.ts
index 5ecc3ca3ef..288489471b 100644
--- a/packages/server/src/integration-test/postgres.spec.ts
+++ b/packages/server/src/integration-test/postgres.spec.ts
@@ -27,7 +27,6 @@ fetch.mockSearch()
const config = setup.getConfig()!
-jest.unmock("pg")
jest.mock("../websockets")
describe("postgres integrations", () => {
diff --git a/packages/server/src/integrations/base/sqlTable.ts b/packages/server/src/integrations/base/sqlTable.ts
index 0feecefb89..80f3864438 100644
--- a/packages/server/src/integrations/base/sqlTable.ts
+++ b/packages/server/src/integrations/base/sqlTable.ts
@@ -224,12 +224,12 @@ class SqlTableQueryBuilder {
const tableName = schemaName
? `\`${schemaName}\`.\`${json.table.name}\``
: `\`${json.table.name}\``
- const externalType = json.table.schema[updatedColumn].externalType!
return {
- sql: `alter table ${tableName} change column \`${json.meta.renamed.old}\` \`${updatedColumn}\` ${externalType};`,
+ sql: `alter table ${tableName} rename column \`${json.meta.renamed.old}\` to \`${updatedColumn}\`;`,
bindings: [],
}
}
+
query = buildUpdateTable(
client,
json.table,
@@ -237,6 +237,27 @@ class SqlTableQueryBuilder {
json.meta.table,
json.meta.renamed!
)
+
+ // renameColumn for SQL Server returns a parameterised `sp_rename` query,
+ // which is not supported by SQL Server and gives a syntax error.
+ if (this.sqlClient === SqlClient.MS_SQL && json.meta.renamed) {
+ const oldColumn = json.meta.renamed.old
+ const updatedColumn = json.meta.renamed.updated
+ const tableName = schemaName
+ ? `${schemaName}.${json.table.name}`
+ : `${json.table.name}`
+ const sql = query.toSQL()
+ if (Array.isArray(sql)) {
+ for (const query of sql) {
+ if (query.sql.startsWith("exec sp_rename")) {
+ query.sql = `exec sp_rename '${tableName}.${oldColumn}', '${updatedColumn}', 'COLUMN'`
+ query.bindings = []
+ }
+ }
+ }
+
+ return sql
+ }
break
case Operation.DELETE_TABLE:
query = buildDeleteTable(client, json.table)
diff --git a/packages/server/src/integrations/tests/sql.spec.ts b/packages/server/src/integrations/tests/sql.spec.ts
index c0b92b3849..dc2a06446b 100644
--- a/packages/server/src/integrations/tests/sql.spec.ts
+++ b/packages/server/src/integrations/tests/sql.spec.ts
@@ -722,7 +722,7 @@ describe("SQL query builder", () => {
})
expect(query).toEqual({
bindings: [],
- sql: `alter table \`${TABLE_NAME}\` change column \`name\` \`first_name\` varchar(45);`,
+ sql: `alter table \`${TABLE_NAME}\` rename column \`name\` to \`first_name\`;`,
})
})
diff --git a/packages/server/src/integrations/tests/utils/index.ts b/packages/server/src/integrations/tests/utils/index.ts
index bbdb41b38a..5034b5a8db 100644
--- a/packages/server/src/integrations/tests/utils/index.ts
+++ b/packages/server/src/integrations/tests/utils/index.ts
@@ -1,5 +1,3 @@
-jest.unmock("pg")
-
import { Datasource, SourceName } from "@budibase/types"
import * as postgres from "./postgres"
import * as mongodb from "./mongodb"
diff --git a/packages/server/src/sdk/app/tables/external/index.ts b/packages/server/src/sdk/app/tables/external/index.ts
index 0ace19d00e..65cd4a07c1 100644
--- a/packages/server/src/sdk/app/tables/external/index.ts
+++ b/packages/server/src/sdk/app/tables/external/index.ts
@@ -48,6 +48,18 @@ export async function save(
oldTable = await getTable(tableId)
}
+ if (
+ !oldTable &&
+ (tableToSave.primary == null || tableToSave.primary.length === 0)
+ ) {
+ tableToSave.primary = ["id"]
+ tableToSave.schema.id = {
+ type: FieldType.NUMBER,
+ autocolumn: true,
+ name: "id",
+ }
+ }
+
if (hasTypeChanged(tableToSave, oldTable)) {
throw new Error("A column type has changed.")
}
@@ -183,6 +195,10 @@ export async function save(
// that the datasource definition changed
const updatedDatasource = await datasourceSdk.get(datasource._id!)
+ if (updatedDatasource.isSQL) {
+ tableToSave.sql = true
+ }
+
return { datasource: updatedDatasource, table: tableToSave }
}
diff --git a/packages/server/src/sdk/app/tables/getters.ts b/packages/server/src/sdk/app/tables/getters.ts
index 72a6ab61f1..414af2c837 100644
--- a/packages/server/src/sdk/app/tables/getters.ts
+++ b/packages/server/src/sdk/app/tables/getters.ts
@@ -142,7 +142,9 @@ export function enrichViewSchemas(table: Table): TableResponse {
return {
...table,
views: Object.values(table.views ?? [])
- .map(v => sdk.views.enrichSchema(v, table.schema))
+ .map(v =>
+ sdk.views.isV2(v) ? sdk.views.enrichSchema(v, table.schema) : v
+ )
.reduce((p, v) => {
p[v.name!] = v
return p
diff --git a/packages/server/src/sdk/app/views/external.ts b/packages/server/src/sdk/app/views/external.ts
index 47301873f5..0f96bcc061 100644
--- a/packages/server/src/sdk/app/views/external.ts
+++ b/packages/server/src/sdk/app/views/external.ts
@@ -1,4 +1,4 @@
-import { ViewV2 } from "@budibase/types"
+import { ViewV2, ViewV2Enriched } from "@budibase/types"
import { context, HTTPError } from "@budibase/backend-core"
import sdk from "../../../sdk"
@@ -6,26 +6,34 @@ import * as utils from "../../../db/utils"
import { enrichSchema, isV2 } from "."
import { breakExternalTableId } from "../../../integrations/utils"
-export async function get(
- viewId: string,
- opts?: { enriched: boolean }
-): Promise {
+export async function get(viewId: string): Promise {
const { tableId } = utils.extractViewInfoFromID(viewId)
const { datasourceId, tableName } = breakExternalTableId(tableId)
const ds = await sdk.datasources.get(datasourceId!)
const table = ds.entities![tableName!]
- const views = Object.values(table.views!)
- const found = views.find(v => isV2(v) && v.id === viewId)
+ const views = Object.values(table.views!).filter(isV2)
+ const found = views.find(v => v.id === viewId)
if (!found) {
throw new Error("No view found")
}
- if (opts?.enriched) {
- return enrichSchema(found, table.schema) as ViewV2
- } else {
- return found as ViewV2
+ return found
+}
+
+export async function getEnriched(viewId: string): Promise {
+ const { tableId } = utils.extractViewInfoFromID(viewId)
+
+ const { datasourceId, tableName } = breakExternalTableId(tableId)
+ const ds = await sdk.datasources.get(datasourceId!)
+
+ const table = ds.entities![tableName!]
+ const views = Object.values(table.views!).filter(isV2)
+ const found = views.find(v => v.id === viewId)
+ if (!found) {
+ throw new Error("No view found")
}
+ return enrichSchema(found, table.schema)
}
export async function create(
diff --git a/packages/server/src/sdk/app/views/index.ts b/packages/server/src/sdk/app/views/index.ts
index 67e7158f21..2edfd900c4 100644
--- a/packages/server/src/sdk/app/views/index.ts
+++ b/packages/server/src/sdk/app/views/index.ts
@@ -1,8 +1,13 @@
-import { RenameColumn, TableSchema, View, ViewV2 } from "@budibase/types"
+import {
+ RenameColumn,
+ TableSchema,
+ View,
+ ViewV2,
+ ViewV2Enriched,
+} from "@budibase/types"
import { db as dbCore } from "@budibase/backend-core"
import { cloneDeep } from "lodash"
-import sdk from "../../../sdk"
import * as utils from "../../../db/utils"
import { isExternalTableID } from "../../../integrations/utils"
@@ -16,12 +21,14 @@ function pickApi(tableId: any) {
return internal
}
-export async function get(
- viewId: string,
- opts?: { enriched: boolean }
-): Promise {
+export async function get(viewId: string): Promise {
const { tableId } = utils.extractViewInfoFromID(viewId)
- return pickApi(tableId).get(viewId, opts)
+ return pickApi(tableId).get(viewId)
+}
+
+export async function getEnriched(viewId: string): Promise {
+ const { tableId } = utils.extractViewInfoFromID(viewId)
+ return pickApi(tableId).getEnriched(viewId)
}
export async function create(
@@ -52,11 +59,10 @@ export function allowedFields(view: View | ViewV2) {
]
}
-export function enrichSchema(view: View | ViewV2, tableSchema: TableSchema) {
- if (!sdk.views.isV2(view)) {
- return view
- }
-
+export function enrichSchema(
+ view: ViewV2,
+ tableSchema: TableSchema
+): ViewV2Enriched {
let schema = cloneDeep(tableSchema)
const anyViewOrder = Object.values(view.schema || {}).some(
ui => ui.order != null
diff --git a/packages/server/src/sdk/app/views/internal.ts b/packages/server/src/sdk/app/views/internal.ts
index d1dedd8566..7b2f9f6c80 100644
--- a/packages/server/src/sdk/app/views/internal.ts
+++ b/packages/server/src/sdk/app/views/internal.ts
@@ -1,26 +1,30 @@
-import { ViewV2 } from "@budibase/types"
+import { ViewV2, ViewV2Enriched } from "@budibase/types"
import { context, HTTPError } from "@budibase/backend-core"
import sdk from "../../../sdk"
import * as utils from "../../../db/utils"
import { enrichSchema, isV2 } from "."
-export async function get(
- viewId: string,
- opts?: { enriched: boolean }
-): Promise {
+export async function get(viewId: string): Promise {
const { tableId } = utils.extractViewInfoFromID(viewId)
const table = await sdk.tables.getTable(tableId)
- const views = Object.values(table.views!)
- const found = views.find(v => isV2(v) && v.id === viewId)
+ const views = Object.values(table.views!).filter(isV2)
+ const found = views.find(v => v.id === viewId)
if (!found) {
throw new Error("No view found")
}
- if (opts?.enriched) {
- return enrichSchema(found, table.schema) as ViewV2
- } else {
- return found as ViewV2
+ return found
+}
+
+export async function getEnriched(viewId: string): Promise {
+ const { tableId } = utils.extractViewInfoFromID(viewId)
+ const table = await sdk.tables.getTable(tableId)
+ const views = Object.values(table.views!).filter(isV2)
+ const found = views.find(v => v.id === viewId)
+ if (!found) {
+ throw new Error("No view found")
}
+ return enrichSchema(found, table.schema)
}
export async function create(
diff --git a/packages/server/src/sdk/tests/attachments.spec.ts b/packages/server/src/sdk/tests/attachments.spec.ts
index 0fd43ac5a8..c1736e6f8e 100644
--- a/packages/server/src/sdk/tests/attachments.spec.ts
+++ b/packages/server/src/sdk/tests/attachments.spec.ts
@@ -1,4 +1,12 @@
-import newid from "../../db/newid"
+import TestConfig from "../../tests/utilities/TestConfiguration"
+import { db as dbCore } from "@budibase/backend-core"
+import sdk from "../index"
+import {
+ FieldType,
+ INTERNAL_TABLE_SOURCE_ID,
+ TableSourceType,
+} from "@budibase/types"
+import { FIND_LIMIT } from "../app/rows/attachments"
const attachment = {
size: 73479,
@@ -8,69 +16,48 @@ const attachment = {
key: "app_bbb/attachments/a.png",
}
-const row = {
- _id: "ro_ta_aaa",
- photo: [attachment],
- otherCol: "string",
-}
-
-const table = {
- _id: "ta_aaa",
- name: "photos",
- schema: {
- photo: {
- type: "attachment",
- name: "photo",
- },
- otherCol: {
- type: "string",
- name: "otherCol",
- },
- },
-}
-
-jest.mock("@budibase/backend-core", () => {
- const core = jest.requireActual("@budibase/backend-core")
- return {
- ...core,
- db: {
- ...core.db,
- directCouchFind: jest.fn(),
- },
- }
-})
-
-import { db as dbCore } from "@budibase/backend-core"
-import sdk from "../index"
-
describe("should be able to re-write attachment URLs", () => {
+ const config = new TestConfig()
+
+ beforeAll(async () => {
+ await config.init()
+ })
+
it("should update URLs on a number of rows over the limit", async () => {
- const db = dbCore.getDB("app_aaa")
- await db.put(table)
- const limit = 30
- let rows = []
- for (let i = 0; i < limit; i++) {
- const rowToWrite = {
- ...row,
- _id: `${row._id}_${newid()}`,
- }
- const { rev } = await db.put(rowToWrite)
- rows.push({
- ...rowToWrite,
- _rev: rev,
+ const table = await config.api.table.save({
+ name: "photos",
+ type: "table",
+ sourceId: INTERNAL_TABLE_SOURCE_ID,
+ sourceType: TableSourceType.INTERNAL,
+ schema: {
+ photo: {
+ type: FieldType.ATTACHMENT,
+ name: "photo",
+ },
+ otherCol: {
+ type: FieldType.STRING,
+ name: "otherCol",
+ },
+ },
+ })
+
+ for (let i = 0; i < FIND_LIMIT * 4; i++) {
+ await config.api.row.save(table._id!, {
+ photo: [attachment],
+ otherCol: "string",
})
}
- dbCore.directCouchFind
- // @ts-ignore
- .mockReturnValueOnce({ rows: rows.slice(0, 25), bookmark: "aaa" })
- .mockReturnValueOnce({ rows: rows.slice(25, limit), bookmark: "bbb" })
+ const db = dbCore.getDB(config.getAppId())
await sdk.backups.updateAttachmentColumns(db.name, db)
- const finalRows = await sdk.rows.getAllInternalRows(db.name)
- for (let rowToCheck of finalRows) {
- expect(rowToCheck.otherCol).toBe(row.otherCol)
- expect(rowToCheck.photo[0].url).toBe("")
- expect(rowToCheck.photo[0].key).toBe(`${db.name}/attachments/a.png`)
+
+ const rows = (await sdk.rows.getAllInternalRows(db.name)).filter(
+ row => row.tableId === table._id
+ )
+ for (const row of rows) {
+ expect(row.otherCol).toBe("string")
+ expect(row.photo[0].url).toBe("")
+ expect(row.photo[0].key).toBe(`${db.name}/attachments/a.png`)
}
})
})
diff --git a/packages/server/src/sdk/users/tests/utils.spec.ts b/packages/server/src/sdk/users/tests/utils.spec.ts
index 6f1c5afd3d..0aa112094d 100644
--- a/packages/server/src/sdk/users/tests/utils.spec.ts
+++ b/packages/server/src/sdk/users/tests/utils.spec.ts
@@ -35,11 +35,20 @@ describe("syncGlobalUsers", () => {
builder: { global: true },
})
await config.doInContext(config.appId, async () => {
- expect(await rawUserMetadata()).toHaveLength(1)
+ let metadata = await rawUserMetadata()
+ expect(metadata).not.toContainEqual(
+ expect.objectContaining({
+ _id: db.generateUserMetadataID(user1._id!),
+ })
+ )
+ expect(metadata).not.toContainEqual(
+ expect.objectContaining({
+ _id: db.generateUserMetadataID(user2._id!),
+ })
+ )
await syncGlobalUsers()
- const metadata = await rawUserMetadata()
- expect(metadata).toHaveLength(3)
+ metadata = await rawUserMetadata()
expect(metadata).toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user1._id!),
@@ -62,7 +71,6 @@ describe("syncGlobalUsers", () => {
await syncGlobalUsers()
const metadata = await rawUserMetadata()
- expect(metadata).toHaveLength(1)
expect(metadata).not.toContainEqual(
expect.objectContaining({
_id: db.generateUserMetadataID(user._id!),
diff --git a/packages/server/src/startup.ts b/packages/server/src/startup/index.ts
similarity index 80%
rename from packages/server/src/startup.ts
rename to packages/server/src/startup/index.ts
index 2cedda1099..48d500a0cf 100644
--- a/packages/server/src/startup.ts
+++ b/packages/server/src/startup/index.ts
@@ -1,6 +1,6 @@
-import env from "./environment"
-import * as redis from "./utilities/redis"
-import { generateApiKey, getChecklist } from "./utilities/workerRequests"
+import env from "../environment"
+import * as redis from "../utilities/redis"
+import { generateApiKey, getChecklist } from "../utilities/workerRequests"
import {
events,
installation,
@@ -9,22 +9,22 @@ import {
users,
cache,
} from "@budibase/backend-core"
-import fs from "fs"
-import { watch } from "./watch"
-import * as automations from "./automations"
-import * as fileSystem from "./utilities/fileSystem"
-import { default as eventEmitter, init as eventInit } from "./events"
-import * as migrations from "./migrations"
-import * as bullboard from "./automations/bullboard"
+import { watch } from "../watch"
+import * as automations from "../automations"
+import * as fileSystem from "../utilities/fileSystem"
+import { default as eventEmitter, init as eventInit } from "../events"
+import * as migrations from "../migrations"
+import * as bullboard from "../automations/bullboard"
import * as pro from "@budibase/pro"
-import * as api from "./api"
-import sdk from "./sdk"
-import { initialise as initialiseWebsockets } from "./websockets"
-import { automationsEnabled, printFeatures } from "./features"
+import * as api from "../api"
+import sdk from "../sdk"
+import { initialise as initialiseWebsockets } from "../websockets"
+import { automationsEnabled, printFeatures } from "../features"
+import * as jsRunner from "../jsRunner"
import Koa from "koa"
import { Server } from "http"
import { AddressInfo } from "net"
-import * as jsRunner from "./jsRunner"
+import fs from "fs"
let STARTUP_RAN = false
@@ -61,8 +61,11 @@ function shutdown(server?: Server) {
}
}
-export async function startup(app?: Koa, server?: Server) {
- if (STARTUP_RAN) {
+export async function startup(
+ opts: { app?: Koa; server?: Server; rerun?: boolean } = {}
+) {
+ const { app, server, rerun } = opts
+ if (STARTUP_RAN && !rerun) {
return
}
printFeatures()
@@ -139,9 +142,9 @@ export async function startup(app?: Koa, server?: Server) {
try {
const user = await users.UserDB.createAdminUser(
bbAdminEmail,
- bbAdminPassword,
tenantId,
{
+ password: bbAdminPassword,
hashPassword: true,
requirePassword: true,
skipPasswordValidation: true,
diff --git a/packages/server/src/startup/tests/startup.spec.ts b/packages/server/src/startup/tests/startup.spec.ts
new file mode 100644
index 0000000000..ed31bc45b7
--- /dev/null
+++ b/packages/server/src/startup/tests/startup.spec.ts
@@ -0,0 +1,34 @@
+import TestConfiguration from "../../tests/utilities/TestConfiguration"
+import { startup } from "../index"
+import { users, utils, tenancy } from "@budibase/backend-core"
+
+describe("check BB_ADMIN environment variables", () => {
+ const config = new TestConfiguration()
+ beforeAll(async () => {
+ await config.init()
+ })
+
+ it("should be able to create a user with the BB_ADMIN environment variables", async () => {
+ const EMAIL = "budibase@budibase.com",
+ PASSWORD = "budibase"
+ await tenancy.doInTenant(tenancy.DEFAULT_TENANT_ID, async () => {
+ await config.withEnv(
+ {
+ BB_ADMIN_USER_EMAIL: EMAIL,
+ BB_ADMIN_USER_PASSWORD: PASSWORD,
+ MULTI_TENANCY: "0",
+ SELF_HOSTED: "1",
+ },
+ async () => {
+ await startup({ rerun: true })
+ const user = await users.getGlobalUserByEmail(EMAIL, {
+ cleanup: false,
+ })
+ expect(user).toBeDefined()
+ expect(user?.password).toBeDefined()
+ expect(await utils.compare(PASSWORD, user?.password!)).toEqual(true)
+ }
+ )
+ })
+ })
+})
diff --git a/packages/server/src/tests/utilities/api/datasource.ts b/packages/server/src/tests/utilities/api/datasource.ts
index 06aa9b4e1e..0296f58f7d 100644
--- a/packages/server/src/tests/utilities/api/datasource.ts
+++ b/packages/server/src/tests/utilities/api/datasource.ts
@@ -4,6 +4,7 @@ import {
CreateDatasourceResponse,
UpdateDatasourceResponse,
UpdateDatasourceRequest,
+ QueryJson,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
@@ -45,4 +46,24 @@ export class DatasourceAPI extends TestAPI {
expectations,
})
}
+
+ delete = async (datasource: Datasource, expectations?: Expectations) => {
+ return await this._delete(
+ `/api/datasources/${datasource._id!}/${datasource._rev!}`,
+ { expectations }
+ )
+ }
+
+ get = async (id: string, expectations?: Expectations) => {
+ return await this._get(`/api/datasources/${id}`, {
+ expectations,
+ })
+ }
+
+ query = async (query: QueryJson, expectations?: Expectations) => {
+ return await this._post(`/api/datasources/query`, {
+ body: query,
+ expectations,
+ })
+ }
}
diff --git a/packages/server/src/tests/utilities/api/query.ts b/packages/server/src/tests/utilities/api/query.ts
index 089132dee8..2d5f7970cd 100644
--- a/packages/server/src/tests/utilities/api/query.ts
+++ b/packages/server/src/tests/utilities/api/query.ts
@@ -6,10 +6,11 @@ import {
PreviewQueryResponse,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
+import { constants } from "@budibase/backend-core"
export class QueryAPI extends TestAPI {
- save = async (body: Query): Promise => {
- return await this._post(`/api/queries`, { body })
+ save = async (body: Query, expectations?: Expectations): Promise => {
+ return await this._post(`/api/queries`, { body, expectations })
}
execute = async (
@@ -26,9 +27,36 @@ export class QueryAPI extends TestAPI {
)
}
- previewQuery = async (queryPreview: PreviewQueryRequest) => {
+ preview = async (
+ queryPreview: PreviewQueryRequest,
+ expectations?: Expectations
+ ) => {
return await this._post(`/api/queries/preview`, {
body: queryPreview,
+ expectations,
})
}
+
+ delete = async (query: Query, expectations?: Expectations) => {
+ return await this._delete(`/api/queries/${query._id!}/${query._rev!}`, {
+ expectations,
+ })
+ }
+
+ get = async (queryId: string, expectations?: Expectations) => {
+ return await this._get(`/api/queries/${queryId}`, { expectations })
+ }
+
+ getProd = async (queryId: string, expectations?: Expectations) => {
+ return await this._get(`/api/queries/${queryId}`, {
+ expectations,
+ headers: {
+ [constants.Header.APP_ID]: this.config.getProdAppId(),
+ },
+ })
+ }
+
+ fetch = async (expectations?: Expectations) => {
+ return await this._get(`/api/queries`, { expectations })
+ }
}
diff --git a/packages/server/src/tests/utilities/api/table.ts b/packages/server/src/tests/utilities/api/table.ts
index 49105a3883..d918ba8b9a 100644
--- a/packages/server/src/tests/utilities/api/table.ts
+++ b/packages/server/src/tests/utilities/api/table.ts
@@ -1,4 +1,6 @@
import {
+ BulkImportRequest,
+ BulkImportResponse,
MigrateRequest,
MigrateResponse,
SaveTableRequest,
@@ -39,4 +41,28 @@ export class TableAPI extends TestAPI {
expectations,
})
}
+
+ import = async (
+ tableId: string,
+ data: BulkImportRequest,
+ expectations?: Expectations
+ ): Promise => {
+ return await this._post(
+ `/api/tables/${tableId}/import`,
+ {
+ body: data,
+ expectations,
+ }
+ )
+ }
+
+ destroy = async (
+ tableId: string,
+ revId: string,
+ expectations?: Expectations
+ ): Promise => {
+ return await this._delete(`/api/tables/${tableId}/${revId}`, {
+ expectations,
+ })
+ }
}
diff --git a/packages/server/src/tests/utilities/api/viewV2.ts b/packages/server/src/tests/utilities/api/viewV2.ts
index 2bc2357551..9741240f27 100644
--- a/packages/server/src/tests/utilities/api/viewV2.ts
+++ b/packages/server/src/tests/utilities/api/viewV2.ts
@@ -4,9 +4,9 @@ import {
ViewV2,
SearchViewRowRequest,
PaginatedSearchRowResponse,
+ ViewResponseEnriched,
} from "@budibase/types"
import { Expectations, TestAPI } from "./base"
-import sdk from "../../../sdk"
export class ViewV2API extends TestAPI {
create = async (
@@ -45,9 +45,8 @@ export class ViewV2API extends TestAPI {
}
get = async (viewId: string) => {
- return await this.config.doInContext(this.config.appId, () =>
- sdk.views.get(viewId)
- )
+ return (await this._get(`/api/v2/views/${viewId}`))
+ .data
}
search = async (
diff --git a/packages/server/src/tests/utilities/structures.ts b/packages/server/src/tests/utilities/structures.ts
index 5b50bd1175..2a32489c30 100644
--- a/packages/server/src/tests/utilities/structures.ts
+++ b/packages/server/src/tests/utilities/structures.ts
@@ -26,32 +26,56 @@ import {
WebhookActionType,
} from "@budibase/types"
import { LoopInput, LoopStepType } from "../../definitions/automations"
+import { merge } from "lodash"
+import { generator } from "@budibase/backend-core/tests"
const { BUILTIN_ROLE_IDS } = roles
-export function basicTable(): Table {
- return {
- name: "TestTable",
- type: "table",
- sourceId: INTERNAL_TABLE_SOURCE_ID,
- sourceType: TableSourceType.INTERNAL,
- schema: {
- name: {
- type: FieldType.STRING,
- name: "name",
- constraints: {
- type: "string",
+export function tableForDatasource(
+ datasource?: Datasource,
+ ...extra: Partial[]
+): Table {
+ return merge(
+ {
+ name: generator.guid(),
+ type: "table",
+ sourceType: datasource
+ ? TableSourceType.EXTERNAL
+ : TableSourceType.INTERNAL,
+ sourceId: datasource ? datasource._id! : INTERNAL_TABLE_SOURCE_ID,
+ schema: {},
+ },
+ ...extra
+ )
+}
+
+export function basicTable(
+ datasource?: Datasource,
+ ...extra: Partial[]
+): Table {
+ return tableForDatasource(
+ datasource,
+ {
+ name: "TestTable",
+ schema: {
+ name: {
+ type: FieldType.STRING,
+ name: "name",
+ constraints: {
+ type: "string",
+ },
},
- },
- description: {
- type: FieldType.STRING,
- name: "description",
- constraints: {
- type: "string",
+ description: {
+ type: FieldType.STRING,
+ name: "description",
+ constraints: {
+ type: "string",
+ },
},
},
},
- }
+ ...extra
+ )
}
export function basicView(tableId: string) {
diff --git a/packages/server/src/threads/query.ts b/packages/server/src/threads/query.ts
index 97e7a05cf7..54322b1156 100644
--- a/packages/server/src/threads/query.ts
+++ b/packages/server/src/threads/query.ts
@@ -167,7 +167,7 @@ class QueryRunner {
this.hasRerun = true
}
- await threadUtils.invalidateDynamicVariables(this.cachedVariables)
+ await threadUtils.invalidateCachedVariable(this.cachedVariables)
return this.execute()
}
@@ -254,7 +254,7 @@ class QueryRunner {
let { parameters } = this
const queryId = variable.queryId,
name = variable.name
- let value = await threadUtils.checkCacheForDynamicVariable(queryId, name)
+ let value = await threadUtils.getCachedVariable(queryId, name)
if (!value) {
value = this.queryResponse[queryId]
? this.queryResponse[queryId]
diff --git a/packages/server/src/threads/utils.ts b/packages/server/src/threads/utils.ts
index cd547cacae..bf0d8f2231 100644
--- a/packages/server/src/threads/utils.ts
+++ b/packages/server/src/threads/utils.ts
@@ -5,7 +5,7 @@ import { redis, db as dbCore } from "@budibase/backend-core"
import * as jsRunner from "../jsRunner"
const VARIABLE_TTL_SECONDS = 3600
-let client: any
+let client: redis.Client | null = null
async function getClient() {
if (!client) {
@@ -36,23 +36,15 @@ export function threadSetup() {
db.init()
}
-export async function checkCacheForDynamicVariable(
- queryId: string,
- variable: string
-) {
- const cache = await getClient()
- return cache.get(makeVariableKey(queryId, variable))
+export async function getCachedVariable(queryId: string, variable: string) {
+ return (await getClient()).get(makeVariableKey(queryId, variable))
}
-export async function invalidateDynamicVariables(cachedVars: QueryVariable[]) {
+export async function invalidateCachedVariable(vars: QueryVariable[]) {
const cache = await getClient()
- let promises = []
- for (let variable of cachedVars) {
- promises.push(
- cache.delete(makeVariableKey(variable.queryId, variable.name))
- )
- }
- await Promise.all(promises)
+ await Promise.all(
+ vars.map(v => cache.delete(makeVariableKey(v.queryId, v.name)))
+ )
}
export async function storeDynamicVariable(
@@ -93,7 +85,7 @@ export default {
hasExtraData,
formatResponse,
storeDynamicVariable,
- invalidateDynamicVariables,
- checkCacheForDynamicVariable,
+ invalidateCachedVariable,
+ getCachedVariable,
threadSetup,
}
diff --git a/packages/server/src/utilities/schema.ts b/packages/server/src/utilities/schema.ts
index 85dfdd3506..34113759ed 100644
--- a/packages/server/src/utilities/schema.ts
+++ b/packages/server/src/utilities/schema.ts
@@ -54,7 +54,7 @@ export function validate(rows: Rows, schema: TableSchema): ValidationResults {
type: columnType,
subtype: columnSubtype,
autocolumn: isAutoColumn,
- } = schema[columnName]
+ } = schema[columnName] || {}
// If the column had an invalid value we don't want to override it
if (results.schemaValidation[columnName] === false) {
diff --git a/packages/types/src/api/web/app/table.ts b/packages/types/src/api/web/app/table.ts
index f4d6720516..ffe59ae395 100644
--- a/packages/types/src/api/web/app/table.ts
+++ b/packages/types/src/api/web/app/table.ts
@@ -3,16 +3,11 @@ import {
Row,
Table,
TableRequest,
- TableSchema,
View,
- ViewV2,
+ ViewV2Enriched,
} from "../../../documents"
-interface ViewV2Response extends ViewV2 {
- schema: TableSchema
-}
-
-export type TableViewsResponse = { [key: string]: View | ViewV2Response }
+export type TableViewsResponse = { [key: string]: View | ViewV2Enriched }
export interface TableResponse extends Table {
views?: TableViewsResponse
diff --git a/packages/types/src/api/web/app/view.ts b/packages/types/src/api/web/app/view.ts
index 30e7bf77d7..c00bc0e468 100644
--- a/packages/types/src/api/web/app/view.ts
+++ b/packages/types/src/api/web/app/view.ts
@@ -1,14 +1,13 @@
-import { ViewV2, UIFieldMetadata } from "../../../documents"
+import { ViewV2, ViewV2Enriched } from "../../../documents"
export interface ViewResponse {
data: ViewV2
}
-export interface CreateViewRequest
- extends Omit {
- schema?: Record
+export interface ViewResponseEnriched {
+ data: ViewV2Enriched
}
-export interface UpdateViewRequest extends Omit {
- schema?: Record
-}
+export interface CreateViewRequest extends Omit {}
+
+export interface UpdateViewRequest extends ViewV2 {}
diff --git a/packages/types/src/documents/app/view.ts b/packages/types/src/documents/app/view.ts
index 7b93d24f3d..8a36b96b4e 100644
--- a/packages/types/src/documents/app/view.ts
+++ b/packages/types/src/documents/app/view.ts
@@ -1,5 +1,5 @@
import { SearchFilter, SortOrder, SortType } from "../../api"
-import { UIFieldMetadata } from "./table"
+import { TableSchema, UIFieldMetadata } from "./table"
import { Document } from "../document"
import { DBView } from "../../sdk"
@@ -48,6 +48,10 @@ export interface ViewV2 {
schema?: Record
}
+export interface ViewV2Enriched extends ViewV2 {
+ schema?: TableSchema
+}
+
export type ViewSchema = ViewCountOrSumSchema | ViewStatisticsSchema
export interface ViewCountOrSumSchema {
diff --git a/packages/worker/src/api/controllers/global/users.ts b/packages/worker/src/api/controllers/global/users.ts
index 0c1342fa08..4c1af90d38 100644
--- a/packages/worker/src/api/controllers/global/users.ts
+++ b/packages/worker/src/api/controllers/global/users.ts
@@ -146,16 +146,12 @@ export const adminUser = async (
}
try {
- const finalUser = await userSdk.db.createAdminUser(
- email,
- tenantId,
+ const finalUser = await userSdk.db.createAdminUser(email, tenantId, {
password,
- {
- ssoId,
- hashPassword,
- requirePassword,
- }
- )
+ ssoId,
+ hashPassword,
+ requirePassword,
+ })
// events
let account: CloudAccount | undefined
diff --git a/yarn.lock b/yarn.lock
index 4deda92484..f4c0f56414 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -7261,37 +7261,7 @@ axios-retry@^3.1.9:
"@babel/runtime" "^7.15.4"
is-retry-allowed "^2.2.0"
-axios@0.24.0:
- version "0.24.0"
- resolved "https://registry.yarnpkg.com/axios/-/axios-0.24.0.tgz#804e6fa1e4b9c5288501dd9dff56a7a0940d20d6"
- integrity sha512-Q6cWsys88HoPgAaFAVUb0WpPk0O8iTeisR9IMqy9G8AbO4NlpVknrnQS03zzF9PGAWgO3cgletO3VjV/P7VztA==
- dependencies:
- follow-redirects "^1.14.4"
-
-axios@1.1.3:
- version "1.1.3"
- resolved "https://registry.yarnpkg.com/axios/-/axios-1.1.3.tgz#8274250dada2edf53814ed7db644b9c2866c1e35"
- integrity sha512-00tXVRwKx/FZr/IDVFt4C+f9FYairX517WoGCL6dpOntqLkZofjhu43F/Xl44UOpqa+9sLFDrG/XAnFsUYgkDA==
- dependencies:
- follow-redirects "^1.15.0"
- form-data "^4.0.0"
- proxy-from-env "^1.1.0"
-
-axios@^0.21.1, axios@^0.21.4:
- version "0.21.4"
- resolved "https://registry.yarnpkg.com/axios/-/axios-0.21.4.tgz#c67b90dc0568e5c1cf2b0b858c43ba28e2eda575"
- integrity sha512-ut5vewkiu8jjGBdqpM44XxjuCjq9LAKeHVmoVfHVzy8eHgxxq8SbAVQNovDA8mVi05kP0Ea/n/UzcSHcTJQfNg==
- dependencies:
- follow-redirects "^1.14.0"
-
-axios@^0.26.0:
- version "0.26.1"
- resolved "https://registry.yarnpkg.com/axios/-/axios-0.26.1.tgz#1ede41c51fcf51bbbd6fd43669caaa4f0495aaa9"
- integrity sha512-fPwcX4EvnSHuInCMItEhAGnaSEXRBjtzh9fOtsE6E1G6p7vl7edEeZe11QHf18+6+9gR5PbKV/sGKNaD8YaMeA==
- dependencies:
- follow-redirects "^1.14.8"
-
-axios@^1.0.0, axios@^1.1.3, axios@^1.5.0:
+axios@0.24.0, axios@1.1.3, axios@1.6.3, axios@^0.21.1, axios@^0.21.4, axios@^0.26.0, axios@^1.0.0, axios@^1.1.3, axios@^1.5.0:
version "1.6.3"
resolved "https://registry.yarnpkg.com/axios/-/axios-1.6.3.tgz#7f50f23b3aa246eff43c54834272346c396613f4"
integrity sha512-fWyNdeawGam70jXSVlKl+SUNVcL6j6W79CuSIPfi6HnDUmSCH6gyUys/HrqHeA/wU0Az41rRgean494d0Jb+ww==
@@ -11242,11 +11212,6 @@ fn.name@1.x.x:
resolved "https://registry.yarnpkg.com/fn.name/-/fn.name-1.1.0.tgz#26cad8017967aea8731bc42961d04a3d5988accc"
integrity sha512-GRnmB5gPyJpAhTQdSZTSp9uaPSvl09KoYcMQtsB9rQoOmzs9dH6ffeccH+Z+cv6P68Hu5bC6JjRh4Ah/mHSNRw==
-follow-redirects@^1.14.0, follow-redirects@^1.14.4, follow-redirects@^1.14.8:
- version "1.15.6"
- resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.6.tgz#7f815c0cda4249c74ff09e95ef97c23b5fd0399b"
- integrity sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==
-
follow-redirects@^1.15.0:
version "1.15.2"
resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
@@ -12365,12 +12330,7 @@ http-assert@^1.3.0:
deep-equal "~1.0.1"
http-errors "~1.8.0"
-http-cache-semantics@3.8.1:
- version "3.8.1"
- resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz#39b0e16add9b605bf0a9ef3d9daaf4843b4cacd2"
- integrity sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==
-
-http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1:
+http-cache-semantics@3.8.1, http-cache-semantics@4.1.1, http-cache-semantics@^4.0.0, http-cache-semantics@^4.1.0, http-cache-semantics@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/http-cache-semantics/-/http-cache-semantics-4.1.1.tgz#abe02fcb2985460bf0323be664436ec3476a6d5a"
integrity sha512-er295DKPVsV82j5kw1Gjt+ADA/XYHsajl82cGNQG2eyoPkvgUhX+nDIyelzhIWbbsXP39EHcI6l5tYs2FYqYXQ==
@@ -13315,6 +13275,11 @@ isobject@^3.0.1:
resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df"
integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg==
+isobject@^4.0.0:
+ version "4.0.0"
+ resolved "https://registry.yarnpkg.com/isobject/-/isobject-4.0.0.tgz#3f1c9155e73b192022a80819bacd0343711697b0"
+ integrity sha512-S/2fF5wH8SJA/kmwr6HYhK/RI/OkhD84k8ntalo0iJjZikgq1XFvR5M8NPT1x5F7fBwCG3qHfnzeP/Vh/ZxCUA==
+
isolated-vm@^4.7.2:
version "4.7.2"
resolved "https://registry.yarnpkg.com/isolated-vm/-/isolated-vm-4.7.2.tgz#5670d5cce1d92004f9b825bec5b0b11fc7501b65"
@@ -15909,7 +15874,7 @@ msgpackr-extract@^3.0.2:
"@msgpackr-extract/msgpackr-extract-linux-x64" "3.0.2"
"@msgpackr-extract/msgpackr-extract-win32-x64" "3.0.2"
-msgpackr@^1.5.2:
+msgpackr@1.10.1, msgpackr@^1.5.2:
version "1.10.1"
resolved "https://registry.yarnpkg.com/msgpackr/-/msgpackr-1.10.1.tgz#51953bb4ce4f3494f0c4af3f484f01cfbb306555"
integrity sha512-r5VRLv9qouXuLiIBrLpl2d5ZvPt8svdQTl5/vMvE4nzDMyEX4sgW5yWhuBBj5UmgwOTWj8CIdSXn5sAfsHAWIQ==
@@ -16077,7 +16042,7 @@ nice-try@^1.0.4:
resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366"
integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==
-nock@^13.5.4:
+nock@13.5.4, nock@^13.5.4:
version "13.5.4"
resolved "https://registry.yarnpkg.com/nock/-/nock-13.5.4.tgz#8918f0addc70a63736170fef7106a9721e0dc479"
integrity sha512-yAyTfdeNJGGBFxWdzSKCBYxs5FxLbCg5X5Q4ets974hcQzG1+qCxvIyOo4j2Ry6MUlhWVMX4OoYDefAIIwupjw==
@@ -16113,25 +16078,13 @@ node-addon-api@^6.1.0:
resolved "https://registry.yarnpkg.com/node-addon-api/-/node-addon-api-6.1.0.tgz#ac8470034e58e67d0c6f1204a18ae6995d9c0d76"
integrity sha512-+eawOlIgy680F0kBzPUNFhMZGtJ1YmqM6l4+Crf4IkImjYrO/mqPwRMh352g23uIaQKFItcQ64I7KMaJxHgAVA==
-node-fetch@2.6.0:
- version "2.6.0"
- resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.0.tgz#e633456386d4aa55863f676a7ab0daa8fdecb0fd"
- integrity sha512-8dG4H5ujfvFiqDmVu9fQ5bOHUC15JMjMY/Zumv26oOvvVJjM67KF8koCWIabKQ1GJIa9r2mMZscBq/TbdOcmNA==
-
-node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7:
+node-fetch@2.6.0, node-fetch@2.6.7, node-fetch@^2.6.0, node-fetch@^2.6.1, node-fetch@^2.6.7, node-fetch@^2.6.9, node-fetch@^2.7.0:
version "2.6.7"
resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad"
integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==
dependencies:
whatwg-url "^5.0.0"
-node-fetch@^2.6.9, node-fetch@^2.7.0:
- version "2.7.0"
- resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.7.0.tgz#d0f0fa6e3e2dc1d27efcd8ad99d550bda94d187d"
- integrity sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==
- dependencies:
- whatwg-url "^5.0.0"
-
node-forge@^1.3.1:
version "1.3.1"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-1.3.1.tgz#be8da2af243b2417d5f646a770663a92b7e9ded3"
@@ -17276,15 +17229,7 @@ passport-strategy@1.x.x, passport-strategy@^1.0.0:
resolved "https://registry.yarnpkg.com/passport-strategy/-/passport-strategy-1.0.0.tgz#b5539aa8fc225a3d1ad179476ddf236b440f52e4"
integrity sha512-CB97UUvDKJde2V0KDWWB3lyf6PC3FaZP7YxZ2G8OAtn9p4HI9j9JLP9qjOGZFvyl8uwNT8qM+hGnz/n16NI7oA==
-passport@^0.4.0:
- version "0.4.1"
- resolved "https://registry.yarnpkg.com/passport/-/passport-0.4.1.tgz#941446a21cb92fc688d97a0861c38ce9f738f270"
- integrity sha512-IxXgZZs8d7uFSt3eqNjM9NQ3g3uQCW5avD8mRNoXV99Yig50vjuaez6dQK2qC0kVWPRTujxY0dWgGfT09adjYg==
- dependencies:
- passport-strategy "1.x.x"
- pause "0.0.1"
-
-passport@^0.6.0:
+passport@0.6.0, passport@^0.4.0, passport@^0.6.0:
version "0.6.0"
resolved "https://registry.yarnpkg.com/passport/-/passport-0.6.0.tgz#e869579fab465b5c0b291e841e6cc95c005fac9d"
integrity sha512-0fe+p3ZnrWRW74fe8+SvCyf4a3Pb2/h7gFkQ8yTJpAO50gDzlfjZUZTO1k5Eg9kUct22OxHLqDZoKUWRHOh9ug==
@@ -18003,9 +17948,9 @@ postgres-interval@^1.1.0:
xtend "^4.0.0"
posthog-js@^1.116.6:
- version "1.116.6"
- resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.116.6.tgz#9a5c9f49230a76642f4c44d93b96710f886c2880"
- integrity sha512-rvt8HxzJD4c2B/xsUa4jle8ApdqljeBI2Qqjp4XJMohQf18DXRyM6b96H5/UMs8jxYuZG14Er0h/kEIWeU6Fmw==
+ version "1.117.0"
+ resolved "https://registry.yarnpkg.com/posthog-js/-/posthog-js-1.117.0.tgz#59c3e520f6269f76ea82dce8760fbc33cdd7f48f"
+ integrity sha512-+I8q5G9YG6r6wOLKPT+C+AV7MRhyVFJMTJS7dfwLmmT+mkVxQ5bfC59hBkJUObOR+YRn5jn2JT/sgIslU94EZg==
dependencies:
fflate "^0.4.8"
preact "^10.19.3"
@@ -18585,7 +18530,7 @@ pseudomap@^1.0.2:
resolved "https://registry.yarnpkg.com/pseudomap/-/pseudomap-1.0.2.tgz#f052a28da70e618917ef0a8ac34c1ae5a68286b3"
integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ==
-psl@^1.1.28, psl@^1.1.33:
+psl@^1.1.33:
version "1.9.0"
resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7"
integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag==
@@ -19602,11 +19547,6 @@ sax@1.2.1:
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a"
integrity sha512-8I2a3LovHTOpm7NV5yOyO8IHqgVsfK4+UuySrXU8YXkSRX7k6hCV9b3HrkKCr3nMpgj+0bmocaJJWpvp1oc7ZA==
-sax@>=0.1.1:
- version "1.3.0"
- resolved "https://registry.yarnpkg.com/sax/-/sax-1.3.0.tgz#a5dbe77db3be05c9d1ee7785dbd3ea9de51593d0"
- integrity sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==
-
sax@>=0.6.0:
version "1.2.4"
resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9"
@@ -19688,40 +19628,13 @@ semver-diff@^3.1.1:
dependencies:
semver "^6.3.0"
-"semver@2 || 3 || 4 || 5", semver@^5.5.0, semver@^5.6.0, semver@^5.7.1:
- version "5.7.2"
- resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.2.tgz#48d55db737c3287cd4835e17fa13feace1c41ef8"
- integrity sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==
-
-semver@7.5.3, semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3:
+"semver@2 || 3 || 4 || 5", semver@7.5.3, semver@^5.5.0, semver@^5.6.0, semver@^5.7.1, semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1, semver@^7.0.0, semver@^7.1.1, semver@^7.1.2, semver@^7.3.2, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7, semver@^7.3.8, semver@^7.5.3, semver@^7.5.4, semver@~2.3.1, semver@~7.0.0:
version "7.5.3"
resolved "https://registry.yarnpkg.com/semver/-/semver-7.5.3.tgz#161ce8c2c6b4b3bdca6caadc9fa3317a4c4fe88e"
integrity sha512-QBlUtyVk/5EeHbi7X0fw6liDZc7BBmEaSYn01fMU1OUYbf6GPsbTtd8WmnqbI20SeycoHSeiybkE/q1Q+qlThQ==
dependencies:
lru-cache "^6.0.0"
-semver@^6.0.0, semver@^6.1.1, semver@^6.1.2, semver@^6.2.0, semver@^6.3.0, semver@^6.3.1:
- version "6.3.1"
- resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.1.tgz#556d2ef8689146e46dcea4bfdd095f3434dffcb4"
- integrity sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==
-
-semver@^7.5.4:
- version "7.6.0"
- resolved "https://registry.yarnpkg.com/semver/-/semver-7.6.0.tgz#1a46a4db4bffcccd97b743b5005c8325f23d4e2d"
- integrity sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==
- dependencies:
- lru-cache "^6.0.0"
-
-semver@~2.3.1:
- version "2.3.2"
- resolved "https://registry.yarnpkg.com/semver/-/semver-2.3.2.tgz#b9848f25d6cf36333073ec9ef8856d42f1233e52"
- integrity sha512-abLdIKCosKfpnmhS52NCTjO4RiLspDfsn37prjzGrp9im5DPJOgh82Os92vtwGh6XdQryKI/7SREZnV+aqiXrA==
-
-semver@~7.0.0:
- version "7.0.0"
- resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e"
- integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A==
-
seq-queue@^0.0.5:
version "0.0.5"
resolved "https://registry.yarnpkg.com/seq-queue/-/seq-queue-0.0.5.tgz#d56812e1c017a6e4e7c3e3a37a1da6d78dd3c93e"
@@ -21307,7 +21220,7 @@ touch@^3.1.0:
dependencies:
nopt "~1.0.10"
-"tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2:
+tough-cookie@4.1.3, "tough-cookie@^2.3.3 || ^3.0.1 || ^4.0.0", tough-cookie@^4.0.0, tough-cookie@^4.1.2, tough-cookie@~2.5.0:
version "4.1.3"
resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-4.1.3.tgz#97b9adb0728b42280aa3d814b6b999b2ff0318bf"
integrity sha512-aX/y5pVRkfRnfmuX+OdbSdXvPe6ieKX/G2s7e98f4poJHnqH3281gDPm/metm6E/WRamfx7WC4HUqkWHfQHprw==
@@ -21317,14 +21230,6 @@ touch@^3.1.0:
universalify "^0.2.0"
url-parse "^1.5.3"
-tough-cookie@~2.5.0:
- version "2.5.0"
- resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2"
- integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g==
- dependencies:
- psl "^1.1.28"
- punycode "^2.1.1"
-
tr46@^2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/tr46/-/tr46-2.1.0.tgz#fa87aa81ca5d5941da8cbf1f9b749dc969a4e240"
@@ -21801,6 +21706,14 @@ unpipe@1.0.0:
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
integrity sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==
+unset-value@2.0.1:
+ version "2.0.1"
+ resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-2.0.1.tgz#57bed0c22d26f28d69acde5df9a11b77c74d2df3"
+ integrity sha512-2hvrBfjUE00PkqN+q0XP6yRAOGrR06uSiUoIQGZkc7GxvQ9H7v8quUPNtZjMg4uux69i8HWpIjLPUKwCuRGyNg==
+ dependencies:
+ has-value "^2.0.2"
+ isobject "^4.0.0"
+
untildify@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/untildify/-/untildify-4.0.0.tgz#2bc947b953652487e4600949fb091e3ae8cd919b"
@@ -22571,33 +22484,10 @@ xml-parse-from-string@^1.0.0:
resolved "https://registry.yarnpkg.com/xml-parse-from-string/-/xml-parse-from-string-1.0.1.tgz#a9029e929d3dbcded169f3c6e28238d95a5d5a28"
integrity sha512-ErcKwJTF54uRzzNMXq2X5sMIy88zJvfN2DmdoQvy7PAFJ+tPRU6ydWuOKNMyfmOjdyBQTFREi60s0Y0SyI0G0g==
-xml2js@0.1.x:
- version "0.1.14"
- resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.1.14.tgz#5274e67f5a64c5f92974cd85139e0332adc6b90c"
- integrity sha512-pbdws4PPPNc1HPluSUKamY4GWMk592K7qwcj6BExbVOhhubub8+pMda/ql68b6L3luZs/OGjGSB5goV7SnmgnA==
- dependencies:
- sax ">=0.1.1"
-
-xml2js@0.4.19:
- version "0.4.19"
- resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7"
- integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q==
- dependencies:
- sax ">=0.6.0"
- xmlbuilder "~9.0.1"
-
-xml2js@0.5.0:
- version "0.5.0"
- resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.5.0.tgz#d9440631fbb2ed800203fad106f2724f62c493b7"
- integrity sha512-drPFnkQJik/O+uPKpqSgr22mpuFHqKdbS835iAQrUC73L2F5WkboIRd63ai/2Yg6I1jzifPFKH2NTK+cfglkIA==
- dependencies:
- sax ">=0.6.0"
- xmlbuilder "~11.0.0"
-
-xml2js@^0.4.19, xml2js@^0.4.5:
- version "0.4.23"
- resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66"
- integrity sha512-ySPiMjM0+pLDftHgXY4By0uswI3SPKLDw/i3UXbnO8M/p28zqexCUoPmQFrYD+/1BzhGJSs2i1ERWKJAtiLrug==
+xml2js@0.1.x, xml2js@0.4.19, xml2js@0.5.0, xml2js@0.6.2, xml2js@^0.4.19, xml2js@^0.4.5:
+ version "0.6.2"
+ resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.6.2.tgz#dd0b630083aa09c161e25a4d0901e2b2a929b499"
+ integrity sha512-T4rieHaC1EXcES0Kxxj4JWgaUQHDk+qwHcYOCFHfiwKz7tOVPLq7Hjq9dM1WCMhylqMEfP7hMcOIChvotiZegA==
dependencies:
sax ">=0.6.0"
xmlbuilder "~11.0.0"
@@ -22607,11 +22497,6 @@ xmlbuilder@~11.0.0:
resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3"
integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA==
-xmlbuilder@~9.0.1:
- version "9.0.7"
- resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d"
- integrity sha512-7YXTQc3P2l9+0rjaUbLwMKRhtmwg1M1eDf6nag7urC7pIPYLD9W/jmzQ4ptRSUbodw5S0jfoGTflLemQibSpeQ==
-
xmlchars@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb"