Merge remote-tracking branch 'origin/master' into feature/automation-row-ux-update
Commit: d50a8e0746
@@ -179,6 +179,13 @@ jobs:
       - run: yarn --frozen-lockfile
 
       - name: Test server
+        env:
+          DD_CIVISIBILITY_AGENTLESS_ENABLED: true
+          DD_API_KEY: "${{ secrets.DATADOG_API_KEY }}"
+          DD_SITE: "datadoghq.eu"
+          NODE_OPTIONS: "-r dd-trace/ci/init"
+          DD_ENV: "ci"
+          DD_SERVICE: "budibase/packages/server"
         run: |
           if ${{ env.USE_NX_AFFECTED }}; then
             yarn test --scope=@budibase/server --since=${{ env.NX_BASE_BRANCH }}
@@ -8,6 +8,8 @@ bb-airgapped.tar.gz
 packages/server/build/oldClientVersions/**/*
 packages/builder/src/components/deploy/clientVersions.json
 
+packages/server/src/integrations/tests/utils/*.lock
+
 # Logs
 logs
 *.log
@@ -641,7 +641,7 @@ couchdb:
     # @ignore
     repository: budibase/couchdb
     # @ignore
-    tag: v3.2.1
+    tag: v3.3.3
     # @ignore
     pullPolicy: Always
 
@@ -1,5 +1,5 @@
 {
-  "version": "2.28.3",
+  "version": "2.28.6",
   "npmClient": "yarn",
   "packages": [
     "packages/*",
@@ -1 +1 @@
-Subproject commit a03225549e3ce61f43d0da878da162e08941b939
+Subproject commit 247f56d455abbd64da17d865275ed978f577549f
@@ -8,6 +8,7 @@ import {
   DatabaseOpts,
   DatabasePutOpts,
   DatabaseQueryOpts,
+  DBError,
   Document,
   isDocument,
   RowResponse,
@@ -41,7 +42,7 @@ function buildNano(couchInfo: { url: string; cookie: string }) {
 
 type DBCall<T> = () => Promise<T>
 
-class CouchDBError extends Error {
+class CouchDBError extends Error implements DBError {
   status: number
   statusCode: number
   reason: string
@@ -93,15 +93,21 @@ function isApps() {
   return environment.SERVICE_TYPE === ServiceType.APPS
 }
 
+function isQA() {
+  return environment.BUDIBASE_ENVIRONMENT === "QA"
+}
+
 const environment = {
   isTest,
   isJest,
   isDev,
   isWorker,
   isApps,
+  isQA,
   isProd: () => {
     return !isDev()
   },
+  BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
   JS_BCRYPT: process.env.JS_BCRYPT,
   JWT_SECRET: process.env.JWT_SECRET,
   JWT_SECRET_FALLBACK: process.env.JWT_SECRET_FALLBACK,
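Note: a sketch of how the new flag would be consumed, assuming the service is started with BUDIBASE_ENVIRONMENT=QA in its environment (the import path and the body of the if-block are illustrative, not from this diff):

  import environment from "../environment"

  if (environment.isQA()) {
    // QA-only behaviour, e.g. extra logging or relaxed limits
  }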
@@ -120,6 +126,7 @@ const environment = {
   REDIS_CLUSTERED: process.env.REDIS_CLUSTERED,
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
+  AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
   AWS_REGION: process.env.AWS_REGION,
   MINIO_URL: process.env.MINIO_URL,
   MINIO_ENABLED: process.env.MINIO_ENABLED || 1,
@@ -101,6 +101,11 @@ export function ObjectStore(
     }
   }
 
+  // for AWS Credentials using temporary session token
+  if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
+    config.sessionToken = env.AWS_SESSION_TOKEN
+  }
+
   // custom S3 is in use i.e. minio
   if (env.MINIO_URL) {
     if (opts.presigning && env.MINIO_ENABLED) {
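Note: the session token is only applied when MinIO is disabled, i.e. when talking to real S3 with temporary STS credentials. A minimal sketch of the resulting client setup, assuming the aws-sdk v2 constructor options this module already uses (anything not in the hunk above is an assumption):

  import AWS from "aws-sdk"

  const config = { region: env.AWS_REGION }
  if (!env.MINIO_ENABLED && env.AWS_SESSION_TOKEN) {
    // aws-sdk v2 service constructors accept sessionToken directly,
    // alongside the usual accessKeyId / secretAccessKey pair
    config.sessionToken = env.AWS_SESSION_TOKEN
  }
  const client = new AWS.S3(config)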
@@ -63,12 +63,12 @@ class InMemoryQueue implements Partial<Queue> {
    * Same callback API as Bull, each callback passed to this will consume messages as they are
    * available. Please note this is a queue service, not a notification service, so each
    * consumer will receive different messages.
-   * @param func The callback function which will return a "Job", the same
    * as the Bull API, within this job the property "data" contains the JSON message. Please
    * note this is incredibly limited compared to Bull as in reality the Job would contain
    * a lot more information about the queue and current status of Bull cluster.
    */
-  async process(func: any) {
+  async process(concurrencyOrFunc: number | any, func?: any) {
+    func = typeof concurrencyOrFunc === "number" ? func : concurrencyOrFunc
     this._emitter.on("message", async () => {
       if (this._messages.length <= 0) {
         return
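Note: the widened signature mirrors Bull's process(concurrency, handler) overload, so callers written against Bull type-check against this stub too; the normalisation line picks whichever argument is the handler and the concurrency value itself is ignored. A sketch of the two call shapes now accepted (handler bodies illustrative):

  // handler only, as before
  queue.process(async job => {
    console.log(job.data)
  })

  // Bull-style: concurrency first, handler second (concurrency ignored here)
  queue.process(5, async job => {
    console.log(job.data)
  })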
@@ -21,6 +21,7 @@ let cleanupInterval: NodeJS.Timeout
 async function cleanup() {
   for (let queue of QUEUES) {
     await queue.clean(CLEANUP_PERIOD_MS, "completed")
+    await queue.clean(CLEANUP_PERIOD_MS, "failed")
   }
 }
 
@@ -1,6 +1,16 @@
 import { getDB } from "../db/db"
 import { getGlobalDBName } from "../context"
+import { TenantInfo } from "@budibase/types"
 
 export function getTenantDB(tenantId: string) {
   return getDB(getGlobalDBName(tenantId))
 }
+
+export async function saveTenantInfo(tenantInfo: TenantInfo) {
+  const db = getTenantDB(tenantInfo.tenantId)
+  // save the tenant info to db
+  return await db.put({
+    _id: "tenant_info",
+    ...tenantInfo,
+  })
+}
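Note: a sketch of the new helper in use (the import path and the extra field are illustrative; only tenantId is required by the code above):

  import { saveTenantInfo } from "../tenancy"

  await saveTenantInfo({
    tenantId: "default",
    hosting: "cloud", // illustrative extra TenantInfo field
  })

Since the document _id is always "tenant_info", a second save for the same tenant would need the existing _rev to avoid a CouchDB 409 conflict.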
@@ -2,6 +2,7 @@
   import TableSelector from "./TableSelector.svelte"
   import FieldSelector from "./FieldSelector.svelte"
   import SchemaSetup from "./SchemaSetup.svelte"
+  import RowSelector from "./RowSelector.svelte"
   import {
     Button,
     Select,
@@ -14,6 +15,8 @@
     DatePicker,
     DrawerContent,
     Helpers,
+    Toggle,
+    Divider,
   } from "@budibase/bbui"
   import CreateWebhookModal from "components/automation/Shared/CreateWebhookModal.svelte"
   import { automationStore, selectedAutomation, tables } from "stores/builder"
@@ -40,7 +43,8 @@
     EditorModes,
   } from "components/common/CodeEditor"
   import FilterBuilder from "components/design/settings/controls/FilterEditor/FilterBuilder.svelte"
-  import { LuceneUtils, Utils, memo } from "@budibase/frontend-core"
+  import { QueryUtils, Utils, memo } from "@budibase/frontend-core"
+
   import {
     getSchemaForDatasourcePlus,
     getEnvironmentBindings,
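Note: LuceneUtils is renamed to QueryUtils across the frontend in this merge, with the lucene-specific member names dropped. The mapping, as evidenced by the hunks below and the frontend-core DataFetch changes:

  // LuceneUtils.buildLuceneQuery(filters)    -> QueryUtils.buildQuery(filters)
  // LuceneUtils.runLuceneQuery(rows, query)  -> QueryUtils.runQuery(rows, query)
  // LuceneUtils.luceneSort(rows, ...)        -> QueryUtils.sort(rows, ...)
  // LuceneUtils.luceneLimit(rows, limit)     -> QueryUtils.limit(rows, limit)
  // LuceneUtils.getValidOperatorsForType(t)  -> QueryUtils.getValidOperatorsForType(t)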
@@ -129,6 +133,7 @@
   $: customStepLayouts($memoBlock, schemaProperties)
 
   const customStepLayouts = block => {
+    console.log("BUILDING", inputData["row"])
     if (
       rowSteps.includes(block.stepId) ||
       (rowTriggers.includes(block.stepId) && isTestModal)
@@ -256,7 +261,6 @@
       }).schema
       delete request._tableId
     }
-
     try {
       if (isTestModal) {
         let newTestData = { schema }
@@ -489,7 +493,7 @@
   }
 
   function saveFilters(key) {
-    const filters = LuceneUtils.buildLuceneQuery(tempFilters)
+    const filters = QueryUtils.buildQuery(tempFilters)
 
     onChange({
       [key]: filters,
@@ -639,6 +643,24 @@
         <div class="label-wrapper">
           <Label>{label}</Label>
         </div>
+        {JSON.stringify(inputData)}
+        <div class="toggle-container">
+          <Toggle
+            value={inputData?.meta?.useAttachmentBinding}
+            text={"Use bindings"}
+            size={"XS"}
+            on:change={e => {
+              // DEAN - review this
+              onChange({
+                row: { [key]: "" }, //null
+                meta: {
+                  [key]: e.detail,
+                },
+              })
+            }}
+          />
+        </div>
+
         <div class="attachment-field-width">
           <KeyValueBuilder
             on:change={e =>
@@ -18,6 +18,7 @@
   import { capitalise } from "helpers"
   import { memo } from "@budibase/frontend-core"
   import PropField from "./PropField.svelte"
+  import { cloneDeep, isPlainObject, mergeWith } from "lodash"
 
   const dispatch = createEventDispatcher()
 
@@ -42,21 +43,29 @@
   let customPopover
   let popoverAnchor
   let editableRow = {}
-  let columns = new Set()
 
-  // Avoid unnecessary updates
+  //??
+  let editableMeta = {}
+  let editableFields = {}
+  // let columns = new Set()
+
+  // Avoid unnecessary updates - DEAN double check after refactor
   $: memoStore.set({
     row,
     meta,
   })
 
-  // Legacy support
   $: fields = $memoStore?.meta?.fields
 
-  $: if ($memoStore?.meta?.columns) {
-    columns = new Set(meta?.columns)
+  $: if ($memoStore?.meta?.fields) {
+    editableFields = cloneDeep($memoStore?.meta?.fields)
   }
 
+  // Needs to go now... entirely
+  // $: if ($memoStore?.meta?.columns) {
+  //   columns = new Set(meta?.columns)
+  // }
+
   $: parsedBindings = bindings.map(binding => {
     let clone = Object.assign({}, binding)
     clone.icon = "ShareAndroid"
@@ -73,59 +82,62 @@
     schemaFields = Object.entries(table?.schema ?? {})
       .filter(entry => {
         const [key, field] = entry
-        return field.type !== "formula" && !field.autocolumn
+        return field.type !== "formula" && !field.autocolumn // DEAN - revise autocolumn exclusion for testmodal
       })
       .sort(
         ([, schemaA], [, schemaB]) =>
           (schemaA.type === "attachment") - (schemaB.type === "attachment")
       )
 
-    // Parse out any unused data.
-    if ($memoStore?.meta?.columns) {
-      for (const column of meta?.columns) {
-        if (!(column in table?.schema)) {
-          columns.delete(column)
-        }
-      }
-      columns = new Set(columns)
-    }
-
-    if (columns.size) {
-      for (const key of columns) {
-        const entry = schemaFields.find(entry => {
-          const [fieldKey] = entry
-          return fieldKey == key
-        })
-
-        if (entry) {
-          const [_, fieldSchema] = entry
-          editableRow = {
-            ...editableRow,
-            [key]: coerce(
-              !(key in $memoStore?.row) ? "" : $memoStore?.row[key],
-              fieldSchema.type
-            ),
-          }
-        }
-      }
-    } else {
-      schemaFields.forEach(entry => {
-        const [key] = entry
-        if ($memoStore?.row?.[key] && !editableRow?.[key]) {
-          editableRow = {
-            ...editableRow,
-            [key]: $memoStore?.row[key],
-          }
-          columns.add(key)
-        }
-      })
-      columns = new Set(columns)
-    }
+    // Parse out any data not in the schema.
+    for (const column in editableFields) {
+      if (!(column in table?.schema)) {
+        delete editableFields[column]
+      }
+    }
+    editableFields = editableFields
+
+    // Go through the table schema and build out the editable content
+    // schemaFields.forEach(entry => {
+    for (const entry of schemaFields) {
+      const [key, fieldSchema] = entry
+      if ($memoStore?.row?.[key] && !editableRow?.[key]) {
+        editableRow = {
+          ...editableRow,
+          [key]: $memoStore?.row[key],
+        }
+      }
+
+      // Legacy
+      if (editableFields[key]?.clearRelationships) {
+        const emptyField = coerce(
+          !$memoStore?.row.hasOwnProperty(key) ? "" : $memoStore?.row[key],
+          fieldSchema.type
+        )
+
+        // remove this and place the field in the editable row.
+        delete editableFields[key]?.clearRelationships
+
+        // Default the field
+        editableRow = {
+          ...editableRow,
+          [key]: emptyField,
+        }
+
+        console.log("DEAN EMPTY - clearRelationships", emptyField)
+      }
+    }
+
+    // Possible to go through the automation fields schema?
+    console.log("ACTUAL ROW", row)
+    console.log("EDITABLE FIELDS", editableFields)
+    console.log("EDITABLE ROW", editableRow)
   }
 
   // Legacy - add explicitly cleared relationships to the request.
-  $: if (schemaFields?.length && fields) {
+  // DEAN - review this
+  $: if (schemaFields?.length && fields && false) {
     // Meta fields processing.
     Object.keys(fields).forEach(key => {
       if (fields[key]?.clearRelationships) {
@@ -181,87 +193,121 @@
     return value
   }
 
-  const onChange = u => {
-    const update = {
-      _tableId: tableId,
-      row: { ...$memoStore.row },
-      meta: { ...$memoStore.meta },
-      ...u,
-    }
-    dispatch("change", update)
-  }
-
-  const fieldUpdate = (e, field) => {
-    const update = {
-      row: {
-        ...$memoStore?.row,
-        [field]: e.detail,
-      },
-    }
-    onChange(update)
+  const onChange = update => {
+    const customizer = (objValue, srcValue, key) => {
+      if (isPlainObject(objValue) && isPlainObject(srcValue)) {
+        const result = mergeWith({}, objValue, srcValue, customizer)
+        let outcome = Object.keys(result).reduce((acc, key) => {
+          if (result[key] !== null) {
+            acc[key] = result[key]
+          } else {
+            console.log(key + " is null", objValue)
+          }
+          return acc
+        }, {})
+        return outcome
+      }
+      return srcValue
+    }
+
+    const result = mergeWith(
+      {},
+      {
+        row: editableRow,
+        meta: {
+          fields: editableFields,
+        },
+      },
+      update,
+      customizer
+    )
+    console.log("Row Selector - MERGED", result)
+    dispatch("change", result)
   }
 </script>
 
-{#if columns.size}
-  {#each schemaFields as [field, schema]}
-    {#if !schema.autocolumn && columns.has(field)}
-      <PropField
-        label={field}
-        fullWidth={attachmentTypes.includes(schema.type)}
-      >
-        <div class="prop-control-wrap">
-          {#if isTestModal}
-            <RowSelectorTypes
-              {isTestModal}
-              {field}
-              {schema}
-              bindings={parsedBindings}
-              value={$memoStore?.row}
-              onChange={fieldUpdate}
-            />
-          {:else}
-            <DrawerBindableSlot
-              title={$memoStore?.row?.title || field}
-              panel={AutomationBindingPanel}
-              type={schema.type}
-              {schema}
-              value={editableRow[field]}
-              on:change={e => fieldUpdate(e, field)}
-              {bindings}
-              allowJS={true}
-              updateOnChange={false}
-              drawerLeft="260px"
-            >
-              <RowSelectorTypes
-                {isTestModal}
-                {field}
-                {schema}
-                bindings={parsedBindings}
-                value={editableRow}
-                onChange={fieldUpdate}
-              />
-            </DrawerBindableSlot>
-          {/if}
-          <Icon
-            hoverable
-            name="Close"
-            on:click={() => {
-              columns.delete(field)
-              const update = { ...editableRow }
-              delete update[field]
-              onChange({ row: update, meta: { columns: Array.from(columns) } })
-            }}
-          />
-        </div>
-      </PropField>
-    {/if}
-  {/each}
-{/if}
+{#each schemaFields || [] as [field, schema]}
+  {#if !schema.autocolumn && editableFields.hasOwnProperty(field)}
+    <PropField label={field} fullWidth={attachmentTypes.includes(schema.type)}>
+      <div class="prop-control-wrap">
+        {#if isTestModal}
+          <RowSelectorTypes
+            {isTestModal}
+            {field}
+            {schema}
+            bindings={parsedBindings}
+            value={editableRow}
+            meta={{
+              fields: editableFields,
+            }}
+            {onChange}
+          />
+        {:else}
+          <DrawerBindableSlot
+            title={$memoStore?.row?.title || field}
+            panel={AutomationBindingPanel}
+            type={schema.type}
+            {schema}
+            value={editableRow[field]}
+            on:change={e => {
+              onChange({
+                row: {
+                  [field]: e.detail.row[field],
+                },
+              })
+            }}
+            {bindings}
+            allowJS={true}
+            updateOnChange={false}
+            drawerLeft="260px"
+          >
+            <RowSelectorTypes
+              {isTestModal}
+              {field}
+              {schema}
+              bindings={parsedBindings}
+              value={editableRow}
+              meta={{
+                fields: editableFields,
+              }}
+              onChange={change => {
+                console.log("RowSelectorTypes > RowSelector > ", change)
+                onChange(change)
+              }}
+            />
+          </DrawerBindableSlot>
+        {/if}
+        <Icon
+          hoverable
+          name="Close"
+          on:click={() => {
+            // Clear row data
+            const update = { ...editableRow }
+            update[field] = null
+            // delete update[field]
+
+            // Clear any related metadata
+            // delete editableFields[field]
+            // editableFields[field] = null
+            console.log("REMOVE STATE", {
+              row: update,
+              meta: { fields: { ...editableFields, [field]: null } },
+            })
+            onChange({
+              row: update,
+              meta: { fields: { ...editableFields, [field]: null } },
+            })
+          }}
+        />
+      </div>
+    </PropField>
+  {/if}
+{/each}
 
 {#if table && schemaFields}
   <div
     class="add-fields-btn"
-    class:empty={!columns?.size}
+    class:empty={Object.is(editableFields, {})}
     bind:this={popoverAnchor}
   >
     <ActionButton
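Note: a worked example of the customizer-driven merge above (values illustrative). Plain objects are merged recursively, and any key whose merged value is null is dropped from the result, which is how field removals propagate through the update:

  const current = { row: { name: "a", age: 1 }, meta: { fields: { age: {} } } }
  const update  = { row: { age: null }, meta: { fields: { age: null } } }
  const result = mergeWith({}, current, update, customizer)
  // result: { row: { name: "a" }, meta: { fields: {} } } - null-valued keys removed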
@@ -292,14 +338,14 @@
         {#if !schema.autocolumn}
           <li
             class="table_field spectrum-Menu-item"
-            class:is-selected={columns.has(field)}
+            class:is-selected={editableFields.hasOwnProperty(field)}
             on:click={e => {
-              if (columns.has(field)) {
-                columns.delete(field)
+              if (editableFields.hasOwnProperty(field)) {
+                editableFields[field] = null
               } else {
-                columns.add(field)
+                editableFields[field] = {}
               }
-              onChange({ meta: { columns: Array.from(columns) } })
+              onChange({ meta: { fields: editableFields } })
             }}
           >
             <Icon
@@ -1,5 +1,11 @@
 <script>
-  import { Select, DatePicker, Multiselect, TextArea } from "@budibase/bbui"
+  import {
+    Select,
+    DatePicker,
+    Multiselect,
+    TextArea,
+    Toggle,
+  } from "@budibase/bbui"
   import { FieldType } from "@budibase/types"
   import LinkedRowSelector from "components/common/LinkedRowSelector.svelte"
   import DrawerBindableInput from "../../common/bindings/DrawerBindableInput.svelte"
@@ -12,9 +18,12 @@
   export let field
   export let schema
   export let value
+  export let meta
   export let bindings
   export let isTestModal
 
+  $: console.log(field + "VALUE???", value[field])
+
   $: parsedBindings = bindings.map(binding => {
     let clone = Object.assign({}, binding)
     clone.icon = "ShareAndroid"
@@ -33,38 +42,54 @@
 
   function handleAttachmentParams(keyValueObj) {
     let params = {}
-    if (
-      (schema.type === FieldType.ATTACHMENT_SINGLE ||
-        schema.type === FieldType.SIGNATURE_SINGLE) &&
-      Object.keys(keyValueObj).length === 0
-    ) {
-      return []
+    // DEAN - review this
+    if (!keyValueObj) {
+      return null
     }
 
     if (!Array.isArray(keyValueObj) && keyValueObj) {
       keyValueObj = [keyValueObj]
     }
 
     if (keyValueObj.length) {
       for (let param of keyValueObj) {
-        params[param.url] = param.filename
+        params[param.url || ""] = param.filename || ""
       }
     }
+    console.log("handleAttachmentParams ", params)
     return params
   }
 </script>
 
 {#if schemaHasOptions(schema) && schema.type !== "array"}
   <Select
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
     value={value[field]}
    options={schema.constraints.inclusion}
   />
 {:else if schema.type === "datetime"}
-  <DatePicker value={value[field]} on:change={e => onChange(e, field)} />
+  <DatePicker
+    value={value[field]}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
+  />
 {:else if schema.type === "boolean"}
   <Select
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
     value={value[field]}
     options={[
       { label: "True", value: "true" },
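Note: every control in this component is moving from onChange(e, field) to a single structured payload, so the parent RowSelector can merge row data and per-field metadata in one place via the customizer above. The shape, with the field name illustrative:

  onChange({
    row: { status: e.detail },                // new value keyed by field
    meta: { fields: { status: { /* flags, e.g. useAttachmentBinding */ } } },
  })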
@@ -75,10 +100,23 @@
   <Multiselect
     value={value[field]}
     options={schema.constraints.inclusion}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
   />
 {:else if schema.type === "longform"}
-  <TextArea value={value[field]} on:change={e => onChange(e, field)} />
+  <TextArea
+    value={value[field]}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
+  />
 {:else if schema.type === "json"}
   <span>
     <Editor
@@ -86,7 +124,11 @@
       mode="json"
       on:change={e => {
         if (e.detail?.value !== value[field]) {
-          onChange(e, field, schema.type)
+          onChange({
+            row: {
+              [field]: e.detail,
+            },
+          })
         }
       }}
       value={value[field]}
@@ -96,7 +138,12 @@
   <LinkedRowSelector
     linkedRows={value[field]}
     {schema}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
     useLabel={false}
   />
 {:else if schema.type === "bb_reference" || schema.type === "bb_reference_single"}
@@ -104,51 +151,113 @@
     linkedRows={value[field]}
     {schema}
     linkedTableId={"ta_users"}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
     useLabel={false}
   />
 {:else if attachmentTypes.includes(schema.type)}
-  <div class="attachment-field-spacing">
-    <KeyValueBuilder
-      on:change={e =>
-        onChange(
-          {
-            detail:
-              schema.type === FieldType.ATTACHMENT_SINGLE ||
-              schema.type === FieldType.SIGNATURE_SINGLE
-                ? e.detail.length > 0
-                  ? {
-                      url: e.detail[0].name,
-                      filename: e.detail[0].value,
-                    }
-                  : {}
-                : e.detail.map(({ name, value }) => ({
-                    url: name,
-                    filename: value,
-                  })),
-          },
-          field
-        )}
-      object={handleAttachmentParams(value[field] || {})}
-      allowJS
-      {bindings}
-      keyBindings
-      customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
-        ? "Add signature"
-        : "Add attachment"}
-      keyPlaceholder={"URL"}
-      valuePlaceholder={"Filename"}
-      actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
-        schema.type === FieldType.SIGNATURE_SINGLE) &&
-        Object.keys(value[field] || {}).length >= 1}
-    />
+  <div class="attachment-field-container">
+    <div class="toggle-container">
+      <Toggle
+        value={meta?.fields?.[field]?.useAttachmentBinding}
+        text={"Use bindings"}
+        size={"XS"}
+        on:change={e => {
+          const fromFalse =
+            !meta?.fields?.[field]?.useAttachmentBinding && e.detail === true
+          onChange({
+            ...(fromFalse
+              ? {
+                  row: {
+                    [field]: "", //clear the value if switching
+                  },
+                }
+              : {}),
+            meta: {
+              fields: {
+                [field]: {
+                  useAttachmentBinding: e.detail,
+                },
+              },
+            },
+          })
+        }}
+      />
+    </div>
+
+    {#if !meta?.fields?.[field]?.useAttachmentBinding}
+      <div class="attachment-field-spacing">
+        <KeyValueBuilder
+          on:change={e =>
+            onChange({
+              row: {
+                [field]:
+                  schema.type === FieldType.ATTACHMENT_SINGLE ||
+                  schema.type === FieldType.SIGNATURE_SINGLE
+                    ? e.detail.length > 0
+                      ? {
+                          url: e.detail[0].name,
+                          filename: e.detail[0].value,
+                        }
+                      : {}
+                    : e.detail.map(({ name, value }) => ({
+                        url: name,
+                        filename: value,
+                      })),
+              },
+            })}
+          object={handleAttachmentParams(value[field], false)}
+          allowJS
+          {bindings}
+          keyBindings
+          customButtonText={schema.type === FieldType.SIGNATURE_SINGLE
+            ? "Add signature"
+            : "Add attachment"}
+          keyPlaceholder={"URL"}
+          valuePlaceholder={"Filename"}
+          actionButtonDisabled={(schema.type === FieldType.ATTACHMENT_SINGLE ||
+            schema.type === FieldType.SIGNATURE_SINGLE) &&
+            Object.keys(value[field] || {}).length >= 1}
+        />
+      </div>
+    {:else}
+      <div class="json-input-spacing">
+        {JSON.stringify(value[field])}
+        <svelte:component
+          this={isTestModal ? ModalBindableInput : DrawerBindableInput}
+          panel={AutomationBindingPanel}
+          value={value[field]}
+          on:change={e =>
+            onChange({
+              row: {
+                [field]: e.detail,
+              },
+            })}
+          type="string"
+          bindings={parsedBindings}
+          allowJS={true}
+          updateOnChange={false}
+          title={schema.name}
+        />
+      </div>
+    {/if}
   </div>
 {:else if ["string", "number", "bigint", "barcodeqr", "array"].includes(schema.type)}
+  {JSON.stringify(value[field])}
   <svelte:component
     this={isTestModal ? ModalBindableInput : DrawerBindableInput}
     panel={AutomationBindingPanel}
     value={value[field]}
-    on:change={e => onChange(e, field)}
+    on:change={e =>
+      onChange({
+        row: {
+          [field]: e.detail,
+        },
+      })}
     type="string"
     bindings={parsedBindings}
     allowJS={true}
@@ -159,7 +268,8 @@
 {/if}
 
 <style>
-  .attachment-field-spacing {
+  .attachment-field-spacing,
+  .json-input-spacing {
     margin-top: var(--spacing-s);
     border: 1px solid var(--spectrum-global-color-gray-400);
     border-radius: 4px;
@@ -334,7 +334,7 @@
     // Add in defaults and initial definition
     const definition = fieldDefinitions[type?.toUpperCase()]
     if (definition?.constraints) {
-      editableColumn.constraints = definition.constraints
+      editableColumn.constraints = cloneDeep(definition.constraints)
     }
 
     editableColumn.type = definition.type
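Note: the cloneDeep guards against aliasing; a minimal illustration of the bug it avoids (property name illustrative):

  // Before: every column of a given type shared one constraints object, so
  // editing one column's constraints silently mutated the shared definition:
  editableColumn.constraints = definition.constraints
  editableColumn.constraints.presence = true // also changes fieldDefinitions!

  // After: each column edits its own private copy:
  editableColumn.constraints = cloneDeep(definition.constraints)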
@@ -8,7 +8,7 @@
 } from "@budibase/bbui"
 import download from "downloadjs"
 import { API } from "api"
-import { LuceneUtils } from "@budibase/frontend-core"
+import { QueryUtils } from "@budibase/frontend-core"
 import { utils } from "@budibase/shared-core"
 import { ROW_EXPORT_FORMATS } from "constants/backend"
 
@@ -49,7 +49,7 @@
     exportFormat = Array.isArray(options) ? options[0]?.key : []
   }
 
-  $: luceneFilter = LuceneUtils.buildLuceneQuery(appliedFilters)
+  $: query = QueryUtils.buildQuery(appliedFilters)
   $: exportOpDisplay = buildExportOpDisplay(
     sorting,
     filterDisplay,
@@ -139,7 +139,7 @@
         tableId: view,
         format: exportFormat,
         search: {
-          query: luceneFilter,
+          query,
           sort: sorting?.sortColumn,
           sortOrder: sorting?.sortOrder,
           paginate: false,
@@ -38,4 +38,5 @@
   {processFiles}
   handleFileTooLarge={$admin.cloud ? handleFileTooLarge : null}
   {fileSizeLimit}
+  on:change
 />
@@ -29,7 +29,7 @@
   on:click={() => onSelect(data)}
 >
   <span class="spectrum-Menu-itemLabel">
-    {data.label}
+    {data.datasource?.name ? `${data.datasource.name} - ` : ""}{data.label}
   </span>
   <svg
     class="spectrum-Icon spectrum-UIIcon-Checkmark100 spectrum-Menu-checkmark spectrum-Menu-itemIcon"
@@ -55,6 +55,9 @@
     label: m.name,
     tableId: m._id,
     type: "table",
+    datasource: $datasources.list.find(
+      ds => ds._id === m.sourceId || m.datasourceId
+    ),
   }))
   $: viewsV1 = $viewsStore.list.map(view => ({
     ...view,
@@ -12,7 +12,7 @@
   import { dndzone } from "svelte-dnd-action"
   import { generate } from "shortid"
   import DrawerBindableInput from "components/common/bindings/DrawerBindableInput.svelte"
-  import { LuceneUtils, Constants } from "@budibase/frontend-core"
+  import { QueryUtils, Constants } from "@budibase/frontend-core"
   import { selectedComponent, componentStore } from "stores/builder"
   import { getComponentForSetting } from "components/design/settings/componentSettings"
   import PropertyControl from "components/design/settings/controls/PropertyControl.svelte"
@@ -119,7 +119,7 @@
   }
 
   const getOperatorOptions = condition => {
-    return LuceneUtils.getValidOperatorsForType({ type: condition.valueType })
+    return QueryUtils.getValidOperatorsForType({ type: condition.valueType })
   }
 
   const onOperatorChange = (condition, newOperator) => {
@@ -138,7 +138,7 @@
     condition.referenceValue = null
 
     // Ensure a valid operator is set
-    const validOperators = LuceneUtils.getValidOperatorsForType({
+    const validOperators = QueryUtils.getValidOperatorsForType({
       type: newType,
     }).map(x => x.value)
     if (!validOperators.includes(condition.operator)) {
@@ -5,8 +5,6 @@
   const { styleable, builderStore } = getContext("sdk")
   const component = getContext("component")
 
-  let handlingOnClick = false
-
   export let disabled = false
   export let text = ""
   export let onClick
@@ -19,17 +17,9 @@
   // For internal use only for now - not defined in the manifest
   export let active = false
 
-  const handleOnClick = async () => {
-    handlingOnClick = true
-
-    if (onClick) {
-      await onClick()
-    }
-
-    handlingOnClick = false
-  }
-
   let node
+  let touched = false
+  let handlingOnClick = false
 
   $: $component.editing && node?.focus()
   $: componentText = getComponentText(text, $builderStore, $component)
@@ -42,7 +32,18 @@
   }
 
   const updateText = e => {
-    builderStore.actions.updateProp("text", e.target.textContent)
+    if (touched) {
+      builderStore.actions.updateProp("text", e.target.textContent)
+    }
+    touched = false
+  }
+
+  const handleOnClick = async () => {
+    handlingOnClick = true
+    if (onClick) {
+      await onClick()
+    }
+    handlingOnClick = false
   }
 </script>
 
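Note: the touched flag pattern introduced here is repeated in the Heading, Text, Link and Field components below. blur fires even when the user only focused the element without typing; without the guard, the rendered text (which may be the evaluated result of a binding) would be written back over the raw "text" prop. Compressed sketch of the pattern:

  let touched = false
  const updateText = e => {
    if (touched) {
      // only persist if the user actually typed something
      builderStore.actions.updateProp("text", e.target.textContent)
    }
    touched = false
  }
  // <element contenteditable on:input={() => (touched = true)}
  //          on:blur={$component.editing ? updateText : null} />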
@@ -57,6 +58,7 @@
   on:blur={$component.editing ? updateText : null}
   bind:this={node}
   class:active
+  on:input={() => (touched = true)}
 >
   {#if icon}
     <i class="{icon} {size}" />
@@ -1,7 +1,7 @@
 <script>
   import { getContext } from "svelte"
   import { Pagination, ProgressCircle } from "@budibase/bbui"
-  import { fetchData, LuceneUtils } from "@budibase/frontend-core"
+  import { fetchData, QueryUtils } from "@budibase/frontend-core"
 
   export let dataSource
   export let filter
@@ -19,7 +19,7 @@
 
   // We need to manage our lucene query manually as we want to allow components
   // to extend it
-  $: defaultQuery = LuceneUtils.buildLuceneQuery(filter)
+  $: defaultQuery = QueryUtils.buildQuery(filter)
   $: query = extendQuery(defaultQuery, queryExtensions)
   $: fetch = createFetch(dataSource)
   $: fetch.update({
@@ -90,9 +90,11 @@
     columns.forEach((column, idx) => {
       overrides[column.field] = {
         displayName: column.label,
-        width: column.width,
         order: idx,
       }
+      if (column.width) {
+        overrides[column.field].width = column.width
+      }
     })
     return overrides
   }
@@ -14,6 +14,7 @@
   export let size
 
   let node
+  let touched = false
 
   $: $component.editing && node?.focus()
   $: placeholder = $builderStore.inBuilder && !text && !$component.editing
@@ -47,7 +48,10 @@
 
   // Convert contenteditable HTML to text and save
   const updateText = e => {
-    builderStore.actions.updateProp("text", e.target.textContent)
+    if (touched) {
+      builderStore.actions.updateProp("text", e.target.textContent)
+    }
+    touched = false
   }
 </script>
 
@@ -62,6 +66,7 @@
   class:underline
   class="spectrum-Heading {sizeClass} {alignClass}"
   on:blur={$component.editing ? updateText : null}
+  on:input={() => (touched = true)}
 >
   {componentText}
 </h1>
@@ -16,6 +16,7 @@
   export let size
 
   let node
+  let touched = false
 
   $: $component.editing && node?.focus()
   $: externalLink = url && typeof url === "string" && !url.startsWith("/")
@@ -62,7 +63,10 @@
   }
 
   const updateText = e => {
-    builderStore.actions.updateProp("text", e.target.textContent)
+    if (touched) {
+      builderStore.actions.updateProp("text", e.target.textContent)
+    }
+    touched = false
   }
 </script>
 
@@ -76,6 +80,7 @@
   class:underline
   class="align--{align || 'left'} size--{size || 'M'}"
   on:blur={$component.editing ? updateText : null}
+  on:input={() => (touched = true)}
 >
   {componentText}
 </div>
@@ -13,6 +13,7 @@
   export let size
 
   let node
+  let touched = false
 
   $: $component.editing && node?.focus()
   $: placeholder = $builderStore.inBuilder && !text && !$component.editing
@@ -46,7 +47,10 @@
 
   // Convert contenteditable HTML to text and save
   const updateText = e => {
-    builderStore.actions.updateProp("text", e.target.textContent)
+    if (touched) {
+      builderStore.actions.updateProp("text", e.target.textContent)
+    }
+    touched = false
   }
 </script>
 
@@ -61,6 +65,7 @@
   class:underline
   class="spectrum-Body {sizeClass} {alignClass}"
   on:blur={$component.editing ? updateText : null}
+  on:input={() => (touched = true)}
 >
   {componentText}
 </p>
@@ -3,7 +3,7 @@
   import { getContext, onDestroy } from "svelte"
   import { ModalContent, Modal } from "@budibase/bbui"
   import FilterModal from "./FilterModal.svelte"
-  import { LuceneUtils } from "@budibase/frontend-core"
+  import { QueryUtils } from "@budibase/frontend-core"
   import Button from "../Button.svelte"
 
   export let dataProvider
@@ -36,7 +36,7 @@
   // Add query extension to data provider
   $: {
     if (filters?.length) {
-      const queryExtension = LuceneUtils.buildLuceneQuery(filters)
+      const queryExtension = QueryUtils.buildQuery(filters)
       addExtension?.($component.id, queryExtension)
     } else {
       removeExtension?.($component.id)
@@ -26,6 +26,10 @@
   // Register field with form
   const formApi = formContext?.formApi
   const labelPos = fieldGroupContext?.labelPosition || "above"
 
+  let touched = false
+  let labelNode
+
   $: formStep = formStepContext ? $formStepContext || 1 : 1
   $: formField = formApi?.registerField(
     field,
@@ -36,14 +40,12 @@
     validation,
     formStep
   )
 
   $: schemaType =
     fieldSchema?.type !== "formula" && fieldSchema?.type !== "bigint"
       ? fieldSchema?.type
       : "string"
 
   // Focus label when editing
-  let labelNode
   $: $component.editing && labelNode?.focus()
 
   // Update form properties in parent component on every store change
@@ -57,7 +59,10 @@
   $: labelClass = labelPos === "above" ? "" : `spectrum-FieldLabel--${labelPos}`
 
   const updateLabel = e => {
-    builderStore.actions.updateProp("label", e.target.textContent)
+    if (touched) {
+      builderStore.actions.updateProp("label", e.target.textContent)
+    }
+    touched = false
   }
 
   onDestroy(() => {
@@ -79,6 +84,7 @@
     bind:this={labelNode}
     contenteditable={$component.editing}
     on:blur={$component.editing ? updateLabel : null}
+    on:input={() => (touched = true)}
     class:hidden={!label}
     class:readonly
     for={fieldState?.fieldId}
@@ -31,7 +31,7 @@ import { enrichButtonActions } from "./utils/buttonActions.js"
 import { processStringSync, makePropSafe } from "@budibase/string-templates"
 import {
   fetchData,
-  LuceneUtils,
+  QueryUtils,
   Constants,
   RowUtils,
   memo,
@@ -65,7 +65,7 @@ export default {
   getAction,
   fetchDatasourceSchema,
   fetchData,
-  LuceneUtils,
+  QueryUtils,
   ContextScopes: Constants.ContextScopes,
   getAPIKey,
   enrichButtonActions,
@@ -1,4 +1,4 @@
-import { LuceneUtils } from "@budibase/frontend-core"
+import { QueryUtils } from "@budibase/frontend-core"
 
 export const getActiveConditions = conditions => {
   if (!conditions?.length) {
@@ -33,8 +33,8 @@ export const getActiveConditions = conditions => {
       value: condition.referenceValue,
     }
 
-    const query = LuceneUtils.buildLuceneQuery([luceneCondition])
-    const result = LuceneUtils.runLuceneQuery([luceneCondition], query)
+    const query = QueryUtils.buildQuery([luceneCondition])
+    const result = QueryUtils.runQuery([luceneCondition], query)
     return result.length > 0
   })
 }
@@ -13,7 +13,7 @@
 } from "@budibase/bbui"
 import { FieldType, SearchFilterOperator } from "@budibase/types"
 import { generate } from "shortid"
-import { LuceneUtils, Constants } from "@budibase/frontend-core"
+import { QueryUtils, Constants } from "@budibase/frontend-core"
 import { getContext } from "svelte"
 import FilterUsers from "./FilterUsers.svelte"
 import { getFields } from "../utils/searchFields"
@@ -112,7 +112,7 @@
       return []
     }
 
-    return LuceneUtils.getValidOperatorsForType(
+    return QueryUtils.getValidOperatorsForType(
       filter,
       filter.field || filter.name,
       datasource
@@ -81,6 +81,7 @@
   }
   input {
     flex: 1 1 auto;
+    width: 0;
     border: none;
     padding: var(--cell-padding);
     overflow: hidden;
@@ -116,7 +116,9 @@
       {#each displayColumns as column}
         <div class="column">
           <Icon size="S" name={getColumnIcon(column)} />
-          {column.label}
+          <div class="column-label" title={column.label}>
+            {column.label}
+          </div>
         </div>
         <ToggleActionButtonGroup
           on:click={e => toggleColumn(column, e.detail)}
@@ -139,7 +141,8 @@
     display: grid;
     align-items: center;
     grid-template-columns: 1fr auto;
-    gap: 8px;
+    grid-row-gap: 8px;
+    grid-column-gap: 24px;
   }
   .columns :global(.spectrum-Switch) {
     margin-right: 0;
@@ -148,4 +151,11 @@
     display: flex;
     gap: 8px;
   }
+  .column-label {
+    min-width: 80px;
+    max-width: 200px;
+    text-overflow: ellipsis;
+    white-space: nowrap;
+    overflow: hidden;
+  }
 </style>
@@ -29,7 +29,6 @@
   .permissionPicker {
     display: flex;
     gap: var(--spacing-xs);
-    padding-left: calc(var(--spacing-xl) * 2);
   }
 
   .permissionPicker :global(.spectrum-Icon) {
@@ -23,14 +23,24 @@
       0
   )
 
+  const updateBounds = () => {
+    bounds.set(body.getBoundingClientRect())
+  }
+
   onMount(() => {
     // Observe and record the height of the body
-    const observer = new ResizeObserver(() => {
-      bounds.set(body.getBoundingClientRect())
-    })
-    observer.observe(body)
+    const resizeObserver = new ResizeObserver(updateBounds)
+    resizeObserver.observe(body)
+
+    // Capture any wheel events on the page to ensure our scroll offset is
+    // correct. We don't care about touch events as we only need this for
+    // hovering over rows with a mouse.
+    window.addEventListener("wheel", updateBounds, true)
+
+    // Clean up listeners
     return () => {
-      observer.disconnect()
+      resizeObserver.disconnect()
+      window.removeEventListener("wheel", updateBounds, true)
     }
   })
 </script>
@@ -94,6 +94,7 @@ export const createActions = context => {
     nonPlus,
     schemaMutations,
     schema,
+    notifications,
   } = context
 
   // Gets the appropriate API for the configured datasource type
@@ -125,16 +126,25 @@ export const createActions = context => {
   // Saves the datasource definition
   const saveDefinition = async newDefinition => {
     // Update local state
+    const originalDefinition = get(definition)
     definition.set(newDefinition)
 
     // Update server
     if (get(config).canSaveSchema) {
-      await getAPI()?.actions.saveDefinition(newDefinition)
+      try {
+        await getAPI()?.actions.saveDefinition(newDefinition)
+
         // Broadcast change so external state can be updated, as this change
         // will not be received by the builder websocket because we caused it
         // ourselves
         dispatch("updatedatasource", newDefinition)
+      } catch (error) {
+        const msg = error?.message || error || "Unknown error"
+        get(notifications).error(`Error saving schema: ${msg}`)
+
+        // Reset the definition if saving failed
+        definition.set(originalDefinition)
+      }
     }
   }
 
@@ -1,10 +1,9 @@
 import { writable, derived, get } from "svelte/store"
 import { cloneDeep } from "lodash/fp"
-import { LuceneUtils } from "../utils"
+import { QueryUtils } from "../utils"
 import { convertJSONSchemaToTableSchema } from "../utils/json"

-const { buildLuceneQuery, luceneLimit, runLuceneQuery, luceneSort } =
-  LuceneUtils
+const { buildQuery, limit: queryLimit, runQuery, sort } = QueryUtils

 /**
  * Parent class which handles the implementation of fetching data from an
@@ -177,10 +176,10 @@ export default class DataFetch {
       }
     }

-    // Build the lucene query
+    // Build the query
     let query = this.options.query
     if (!query) {
-      query = buildLuceneQuery(filter)
+      query = buildQuery(filter)
     }

     // Update store
@@ -229,17 +228,17 @@ export default class DataFetch {

     // If we don't support searching, do a client search
     if (!this.features.supportsSearch && clientSideSearching) {
-      rows = runLuceneQuery(rows, query)
+      rows = runQuery(rows, query)
     }

     // If we don't support sorting, do a client-side sort
     if (!this.features.supportsSort && clientSideSorting) {
-      rows = luceneSort(rows, sortColumn, sortOrder, sortType)
+      rows = sort(rows, sortColumn, sortOrder, sortType)
     }

     // If we don't support pagination, do a client-side limit
     if (!this.features.supportsPagination && clientSideLimiting) {
-      rows = luceneLimit(rows, limit)
+      rows = queryLimit(rows, limit)
     }

     return {
@@ -1,7 +1,7 @@
 import { get } from "svelte/store"
 import DataFetch from "./DataFetch.js"
 import { TableNames } from "../constants"
-import { LuceneUtils } from "../utils"
+import { QueryUtils } from "../utils"

 export default class UserFetch extends DataFetch {
   constructor(opts) {
@@ -33,7 +33,7 @@ export default class UserFetch extends DataFetch {
     let finalQuery
     // convert old format to new one - we now allow use of the lucene format
     const { appId, paginated, ...rest } = query
-    if (!LuceneUtils.hasFilters(query) && rest.email != null) {
+    if (!QueryUtils.hasFilters(query) && rest.email != null) {
       finalQuery = { string: { email: rest.email } }
     } else {
       finalQuery = rest
@@ -1,4 +1,4 @@
-export { dataFilters as LuceneUtils } from "@budibase/shared-core"
+export { dataFilters as QueryUtils } from "@budibase/shared-core"
 export * as JSONUtils from "./json"
 export * as CookieUtils from "./cookies"
 export * as RoleUtils from "./roles"
@@ -48,6 +48,7 @@ async function init() {
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
     PASSWORD_MIN_LENGTH: "1",
+    SQS_SEARCH_ENABLE: "1",
   }

   config = { ...config, ...existingConfig }
@@ -860,8 +860,10 @@
             "json",
             "internal",
             "barcodeqr",
+            "signature_single",
             "bigint",
-            "bb_reference"
+            "bb_reference",
+            "bb_reference_single"
           ],
           "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
         },
@@ -1067,8 +1069,10 @@
             "json",
             "internal",
             "barcodeqr",
+            "signature_single",
             "bigint",
-            "bb_reference"
+            "bb_reference",
+            "bb_reference_single"
           ],
           "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
         },
@@ -1285,8 +1289,10 @@
             "json",
             "internal",
             "barcodeqr",
+            "signature_single",
             "bigint",
-            "bb_reference"
+            "bb_reference",
+            "bb_reference_single"
           ],
           "description": "Defines the type of the column, most explain themselves, a link column is a relationship."
         },
@@ -782,8 +782,10 @@ components:
             - json
             - internal
             - barcodeqr
+            - signature_single
             - bigint
             - bb_reference
+            - bb_reference_single
           description: Defines the type of the column, most explain themselves, a link
             column is a relationship.
           constraints:
@@ -948,8 +950,10 @@ components:
             - json
             - internal
             - barcodeqr
+            - signature_single
             - bigint
             - bb_reference
+            - bb_reference_single
           description: Defines the type of the column, most explain themselves, a link
             column is a relationship.
           constraints:
@@ -1121,8 +1125,10 @@ components:
             - json
             - internal
             - barcodeqr
+            - signature_single
             - bigint
             - bb_reference
+            - bb_reference_single
           description: Defines the type of the column, most explain themselves, a link
             column is a relationship.
           constraints:
@@ -358,11 +358,14 @@ async function performAppCreate(ctx: UserCtx<CreateAppRequest, App>) {
     await createApp(appId)
   }

-  // Initialise the app migration version as the latest one
-  await appMigrations.updateAppMigrationMetadata({
-    appId,
-    version: appMigrations.getLatestMigrationId(),
-  })
+  const latestMigrationId = appMigrations.getLatestEnabledMigrationId()
+  if (latestMigrationId) {
+    // Initialise the app migration version as the latest one
+    await appMigrations.updateAppMigrationMetadata({
+      appId,
+      version: latestMigrationId,
+    })
+  }

   await cache.app.invalidateAppMetadata(appId, newApplication)
   return newApplication
@@ -3,7 +3,7 @@ import { migrate as migrationImpl, MIGRATIONS } from "../../migrations"
 import { Ctx } from "@budibase/types"
 import {
   getAppMigrationVersion,
-  getLatestMigrationId,
+  getLatestEnabledMigrationId,
 } from "../../appMigrations"

 export async function migrate(ctx: Ctx) {
@@ -27,7 +27,9 @@ export async function getMigrationStatus(ctx: Ctx) {

   const latestAppliedMigration = await getAppMigrationVersion(appId)

-  const migrated = latestAppliedMigration === getLatestMigrationId()
+  const latestMigrationId = getLatestEnabledMigrationId()
+  const migrated =
+    !latestMigrationId || latestAppliedMigration >= latestMigrationId

   ctx.body = { migrated }
   ctx.status = 200
@@ -25,6 +25,7 @@ import {
   outputProcessing,
 } from "../../../utilities/rowProcessor"
 import { cloneDeep } from "lodash"
+import { generateIdForRow } from "./utils"

 export async function handleRequest<T extends Operation>(
   operation: T,
@@ -55,11 +56,19 @@ export async function patch(ctx: UserCtx<PatchRowRequest, PatchRowResponse>) {
     throw { validation: validateResult.errors }
   }

+  const beforeRow = await sdk.rows.external.getRow(tableId, _id, {
+    relationships: true,
+  })
+
   const response = await handleRequest(Operation.UPDATE, tableId, {
     id: breakRowIdField(_id),
     row: dataToUpdate,
   })
-  const row = await sdk.rows.external.getRow(tableId, _id, {
+
+  // The id might have been changed, so the refetching would fail. Recalculating the id just in case
+  const updatedId =
+    generateIdForRow({ ...beforeRow, ...dataToUpdate }, table) || _id
+  const row = await sdk.rows.external.getRow(tableId, updatedId, {
     relationships: true,
   })
   const enrichedRow = await outputProcessing(table, row, {
@@ -84,9 +84,11 @@ export const save = async (ctx: UserCtx<Row, Row>) => {
   if (body && body._id) {
     return patch(ctx as UserCtx<PatchRowRequest, PatchRowResponse>)
   }
-  const { row, table, squashed } = await quotas.addRow(() =>
-    sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
-  )
+  const { row, table, squashed } = tableId.includes("datasource_plus")
+    ? await sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
+    : await quotas.addRow(() =>
+        sdk.rows.save(tableId, ctx.request.body, ctx.user?._id)
+      )
   ctx.status = 200
   ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:save`, appId, row, table)
   ctx.message = `${table.name} saved successfully`
@@ -152,7 +154,9 @@ async function deleteRows(ctx: UserCtx<DeleteRowRequest>) {
   deleteRequest.rows = await processDeleteRowsRequest(ctx)

   const { rows } = await pickApi(tableId).bulkDestroy(ctx)
-  await quotas.removeRows(rows.length)
+  if (!tableId.includes("datasource_plus")) {
+    await quotas.removeRows(rows.length)
+  }

   for (let row of rows) {
     ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, row)
@@ -167,7 +171,9 @@ async function deleteRow(ctx: UserCtx<DeleteRowRequest>) {
   const tableId = utils.getTableId(ctx)

   const resp = await pickApi(tableId).destroy(ctx)
-  await quotas.removeRow()
+  if (!tableId.includes("datasource_plus")) {
+    await quotas.removeRow()
+  }

   ctx.eventEmitter && ctx.eventEmitter.emitRow(`row:delete`, appId, resp.row)
   gridSocket?.emitRowDeletion(ctx, resp.row)
@@ -31,7 +31,7 @@ export async function searchView(
   // Enrich saved query with ephemeral query params.
   // We prevent searching on any fields that are saved as part of the query, as
   // that could let users find rows they should not be allowed to access.
-  let query = dataFilters.buildLuceneQuery(view.query || [])
+  let query = dataFilters.buildQuery(view.query || [])
   if (body.query) {
     // Extract existing fields
     const existingFields =
@@ -31,7 +31,7 @@ import {
 } from "@budibase/types"
 import {
   getAppMigrationVersion,
-  getLatestMigrationId,
+  getLatestEnabledMigrationId,
 } from "../../../appMigrations"

 import send from "koa-send"
@@ -133,7 +133,7 @@ const requiresMigration = async (ctx: Ctx) => {
     ctx.throw("AppId could not be found")
   }

-  const latestMigration = getLatestMigrationId()
+  const latestMigration = getLatestEnabledMigrationId()
   if (!latestMigration) {
     return false
   }
@@ -1,91 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`/datasources fetch returns all the datasources from the server 1`] = `
-[
-  {
-    "config": {},
-    "entities": [
-      {
-        "_id": "ta_users",
-        "_rev": "1-73b7912e6cbdd3d696febc60f3715844",
-        "createdAt": "2020-01-01T00:00:00.000Z",
-        "name": "Users",
-        "primaryDisplay": "email",
-        "schema": {
-          "email": {
-            "constraints": {
-              "email": true,
-              "length": {
-                "maximum": "",
-              },
-              "presence": true,
-              "type": "string",
-            },
-            "name": "email",
-            "type": "string",
-          },
-          "firstName": {
-            "constraints": {
-              "presence": false,
-              "type": "string",
-            },
-            "name": "firstName",
-            "type": "string",
-          },
-          "lastName": {
-            "constraints": {
-              "presence": false,
-              "type": "string",
-            },
-            "name": "lastName",
-            "type": "string",
-          },
-          "roleId": {
-            "constraints": {
-              "inclusion": [
-                "ADMIN",
-                "POWER",
-                "BASIC",
-                "PUBLIC",
-              ],
-              "presence": false,
-              "type": "string",
-            },
-            "name": "roleId",
-            "type": "options",
-          },
-          "status": {
-            "constraints": {
-              "inclusion": [
-                "active",
-                "inactive",
-              ],
-              "presence": false,
-              "type": "string",
-            },
-            "name": "status",
-            "type": "options",
-          },
-        },
-        "sourceId": "bb_internal",
-        "sourceType": "internal",
-        "type": "table",
-        "updatedAt": "2020-01-01T00:00:00.000Z",
-        "views": {},
-      },
-    ],
-    "name": "Budibase DB",
-    "source": "BUDIBASE",
-    "type": "budibase",
-  },
-  {
-    "config": {},
-    "createdAt": "2020-01-01T00:00:00.000Z",
-    "isSQL": true,
-    "name": "Test",
-    "source": "POSTGRES",
-    "type": "datasource",
-    "updatedAt": "2020-01-01T00:00:00.000Z",
-  },
-]
-`;
@@ -4,14 +4,12 @@ import { getCachedVariable } from "../../../threads/utils"
 import { context, events } from "@budibase/backend-core"
 import sdk from "../../../sdk"

-import tk from "timekeeper"
-import { mocks } from "@budibase/backend-core/tests"
+import { generator } from "@budibase/backend-core/tests"
 import {
   Datasource,
   FieldSchema,
   BBReferenceFieldSubType,
   FieldType,
-  QueryPreview,
   RelationshipType,
   SourceName,
   Table,
|
||||||
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
|
import { DatabaseName, getDatasource } from "../../../integrations/tests/utils"
|
||||||
import { tableForDatasource } from "../../../tests/utilities/structures"
|
import { tableForDatasource } from "../../../tests/utilities/structures"
|
||||||
|
|
||||||
tk.freeze(mocks.date.MOCK_DATE)
|
|
||||||
|
|
||||||
let { basicDatasource } = setup.structures
|
|
||||||
|
|
||||||
describe("/datasources", () => {
|
describe("/datasources", () => {
|
||||||
let request = setup.getRequest()
|
const config = setup.getConfig()
|
||||||
let config = setup.getConfig()
|
let datasource: Datasource
|
||||||
let datasource: any
|
|
||||||
|
|
||||||
|
beforeAll(async () => {
|
||||||
|
await config.init()
|
||||||
|
})
|
||||||
afterAll(setup.afterAll)
|
afterAll(setup.afterAll)
|
||||||
|
|
||||||
async function setupTest() {
|
beforeEach(async () => {
|
||||||
await config.init()
|
datasource = await config.api.datasource.create({
|
||||||
datasource = await config.createDatasource()
|
type: "datasource",
|
||||||
|
name: "Test",
|
||||||
|
source: SourceName.POSTGRES,
|
||||||
|
config: {},
|
||||||
|
})
|
||||||
jest.clearAllMocks()
|
jest.clearAllMocks()
|
||||||
}
|
})
|
||||||
|
|
||||||
beforeAll(setupTest)
|
|
||||||
|
|
||||||
describe("create", () => {
|
describe("create", () => {
|
||||||
it("should create a new datasource", async () => {
|
it("should create a new datasource", async () => {
|
||||||
const res = await request
|
const ds = await config.api.datasource.create({
|
||||||
.post(`/api/datasources`)
|
type: "datasource",
|
||||||
.send(basicDatasource())
|
name: "Test",
|
||||||
.set(config.defaultHeaders())
|
source: SourceName.POSTGRES,
|
||||||
.expect("Content-Type", /json/)
|
config: {},
|
||||||
.expect(200)
|
})
|
||||||
|
expect(ds.name).toEqual("Test")
|
||||||
expect(res.body.datasource.name).toEqual("Test")
|
|
||||||
expect(res.body.errors).toEqual({})
|
|
||||||
expect(events.datasource.created).toHaveBeenCalledTimes(1)
|
expect(events.datasource.created).toHaveBeenCalledTimes(1)
|
||||||
})
|
})
|
||||||
|
|
||||||
|
@@ -72,88 +68,71 @@ describe("/datasources", () => {
     })
   })

-  describe("update", () => {
-    it("should update an existing datasource", async () => {
-      datasource.name = "Updated Test"
-      const res = await request
-        .put(`/api/datasources/${datasource._id}`)
-        .send(datasource)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-
-      expect(res.body.datasource.name).toEqual("Updated Test")
-      expect(res.body.errors).toBeUndefined()
-      expect(events.datasource.updated).toHaveBeenCalledTimes(1)
-    })
-
-    describe("dynamic variables", () => {
-      async function preview(
-        datasource: any,
-        fields: { path: string; queryString: string }
-      ) {
-        const queryPreview: QueryPreview = {
-          fields,
-          datasourceId: datasource._id,
-          parameters: [],
-          transformer: null,
-          queryVerb: "read",
-          name: datasource.name,
-          schema: {},
-          readable: true,
-        }
-        return config.api.query.preview(queryPreview)
-      }
-
-      it("should invalidate changed or removed variables", async () => {
-        const { datasource, query } = await config.dynamicVariableDatasource()
-        // preview once to cache variables
-        await preview(datasource, {
+  describe("dynamic variables", () => {
+    it("should invalidate changed or removed variables", async () => {
+      let datasource = await config.api.datasource.create({
+        type: "datasource",
+        name: "Rest",
+        source: SourceName.REST,
+        config: {},
+      })
+
+      const query = await config.api.query.save({
+        datasourceId: datasource._id!,
+        fields: {
+          path: "www.google.com",
+        },
+        parameters: [],
+        transformer: null,
+        queryVerb: "read",
+        name: datasource.name!,
+        schema: {},
+        readable: true,
+      })
+
+      datasource = await config.api.datasource.update({
+        ...datasource,
+        config: {
+          dynamicVariables: [
+            {
+              queryId: query._id,
+              name: "variable3",
+              value: "{{ data.0.[value] }}",
+            },
+          ],
+        },
+      })
+
+      // preview once to cache variables
+      await config.api.query.preview({
+        fields: {
           path: "www.example.com",
           queryString: "test={{ variable3 }}",
-        })
-        // check variables in cache
-        let contents = await getCachedVariable(query._id!, "variable3")
-        expect(contents.rows.length).toEqual(1)
-
-        // update the datasource to remove the variables
-        datasource.config!.dynamicVariables = []
-        const res = await request
-          .put(`/api/datasources/${datasource._id}`)
-          .send(datasource)
-          .set(config.defaultHeaders())
-          .expect("Content-Type", /json/)
-          .expect(200)
-        expect(res.body.errors).toBeUndefined()
-
-        // check variables no longer in cache
-        contents = await getCachedVariable(query._id!, "variable3")
-        expect(contents).toBe(null)
-      })
+        },
+        datasourceId: datasource._id!,
+        parameters: [],
+        transformer: null,
+        queryVerb: "read",
+        name: datasource.name!,
+        schema: {},
+        readable: true,
+      })
+
+      // check variables in cache
+      let contents = await getCachedVariable(query._id!, "variable3")
+      expect(contents.rows.length).toEqual(1)
+
+      // update the datasource to remove the variables
+      datasource.config!.dynamicVariables = []
+      await config.api.datasource.update(datasource)
+
+      // check variables no longer in cache
+      contents = await getCachedVariable(query._id!, "variable3")
+      expect(contents).toBe(null)
     })
   })

-  describe("fetch", () => {
-    beforeAll(setupTest)
-
-    it("returns all the datasources from the server", async () => {
-      const res = await request
-        .get(`/api/datasources`)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-
-      const datasources = res.body
-
-      // remove non-deterministic fields
-      for (let source of datasources) {
-        delete source._id
-        delete source._rev
-      }
-
-      expect(datasources).toMatchSnapshot()
-    })
-
+  describe("permissions", () => {
     it("should apply authorization to endpoint", async () => {
       await checkBuilderEndpoint({
         config,
@@ -161,41 +140,8 @@ describe("/datasources", () => {
         url: `/api/datasources`,
       })
     })
-  })

-  describe("find", () => {
-    it("should be able to find a datasource", async () => {
-      const res = await request
-        .get(`/api/datasources/${datasource._id}`)
-        .set(config.defaultHeaders())
-        .expect(200)
-      expect(res.body._rev).toBeDefined()
-      expect(res.body._id).toEqual(datasource._id)
-    })
-  })
-
-  describe("destroy", () => {
-    beforeAll(setupTest)
-
-    it("deletes queries for the datasource after deletion and returns a success message", async () => {
-      await config.createQuery()
-
-      await request
-        .delete(`/api/datasources/${datasource._id}/${datasource._rev}`)
-        .set(config.defaultHeaders())
-        .expect(200)
-
-      const res = await request
-        .get(`/api/datasources`)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-
-      expect(res.body.length).toEqual(1)
-      expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
-    })
-
-    it("should apply authorization to endpoint", async () => {
+    it("should apply authorization to delete endpoint", async () => {
       await checkBuilderEndpoint({
         config,
         method: "DELETE",
@@ -204,175 +150,296 @@ describe("/datasources", () => {
       })
     })
   })

-  describe("check secret replacement", () => {
-    async function makeDatasource() {
-      datasource = basicDatasource()
-      datasource.datasource.config.password = "testing"
-      const res = await request
-        .post(`/api/datasources`)
-        .send(datasource)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      return res.body.datasource
-    }
-
-    it("should save a datasource with password", async () => {
-      const datasource = await makeDatasource()
-      expect(datasource.config.password).toBe("--secret-value--")
-    })
-
-    it("should not the password on update with the --secret-value--", async () => {
-      const datasource = await makeDatasource()
-      await request
-        .put(`/api/datasources/${datasource._id}`)
-        .send(datasource)
-        .set(config.defaultHeaders())
-        .expect("Content-Type", /json/)
-        .expect(200)
-      await context.doInAppContext(config.getAppId(), async () => {
-        const dbDatasource: any = await sdk.datasources.get(datasource._id)
-        expect(dbDatasource.config.password).toBe("testing")
-      })
-    })
-  })
-
   describe.each([
     [DatabaseName.POSTGRES, getDatasource(DatabaseName.POSTGRES)],
     [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
     [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
     [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-  ])("fetch schema (%s)", (_, dsProvider) => {
-    beforeAll(async () => {
-      datasource = await config.api.datasource.create(await dsProvider)
+  ])("%s", (_, dsProvider) => {
+    let rawDatasource: Datasource
+    beforeEach(async () => {
+      rawDatasource = await dsProvider
+      datasource = await config.api.datasource.create(rawDatasource)
     })

-    it("fetching schema will not drop tables or columns", async () => {
-      const datasourceId = datasource!._id!
-
-      const simpleTable = await config.api.table.save(
-        tableForDatasource(datasource, {
-          name: "simple",
-          schema: {
-            name: {
-              name: "name",
-              type: FieldType.STRING,
-            },
-          },
-        })
-      )
-
-      const fullSchema: {
-        [type in SupportedSqlTypes]: FieldSchema & { type: type }
-      } = {
-        [FieldType.STRING]: {
-          name: "string",
-          type: FieldType.STRING,
-          constraints: {
-            presence: true,
-          },
-        },
-        [FieldType.LONGFORM]: {
-          name: "longform",
-          type: FieldType.LONGFORM,
-        },
-        [FieldType.OPTIONS]: {
-          name: "options",
-          type: FieldType.OPTIONS,
-          constraints: {
-            presence: { allowEmpty: false },
-          },
-        },
-        [FieldType.NUMBER]: {
-          name: "number",
-          type: FieldType.NUMBER,
-        },
-        [FieldType.BOOLEAN]: {
-          name: "boolean",
-          type: FieldType.BOOLEAN,
-        },
-        [FieldType.ARRAY]: {
-          name: "array",
-          type: FieldType.ARRAY,
-        },
-        [FieldType.DATETIME]: {
-          name: "datetime",
-          type: FieldType.DATETIME,
-          dateOnly: true,
-          timeOnly: false,
-        },
-        [FieldType.LINK]: {
-          name: "link",
-          type: FieldType.LINK,
-          tableId: simpleTable._id!,
-          relationshipType: RelationshipType.ONE_TO_MANY,
-          fieldName: "link",
-        },
-        [FieldType.FORMULA]: {
-          name: "formula",
-          type: FieldType.FORMULA,
-          formula: "any formula",
-        },
-        [FieldType.BARCODEQR]: {
-          name: "barcodeqr",
-          type: FieldType.BARCODEQR,
-        },
-        [FieldType.BIGINT]: {
-          name: "bigint",
-          type: FieldType.BIGINT,
-        },
-        [FieldType.BB_REFERENCE]: {
-          name: "bb_reference",
-          type: FieldType.BB_REFERENCE,
-          subtype: BBReferenceFieldSubType.USER,
-        },
-        [FieldType.BB_REFERENCE_SINGLE]: {
-          name: "bb_reference_single",
-          type: FieldType.BB_REFERENCE_SINGLE,
-          subtype: BBReferenceFieldSubType.USER,
-        },
-      }
-
-      await config.api.table.save(
-        tableForDatasource(datasource, {
-          name: "full",
-          schema: fullSchema,
-        })
-      )
-
-      const persisted = await config.api.datasource.get(datasourceId)
-      await config.api.datasource.fetchSchema(datasourceId)
-
-      const updated = await config.api.datasource.get(datasourceId)
-      const expected: Datasource = {
-        ...persisted,
-        entities:
-          persisted?.entities &&
-          Object.entries(persisted.entities).reduce<Record<string, Table>>(
-            (acc, [tableName, table]) => {
-              acc[tableName] = {
-                ...table,
-                primaryDisplay: expect.not.stringMatching(
-                  new RegExp(`^${table.primaryDisplay || ""}$`)
-                ),
-                schema: Object.entries(table.schema).reduce<TableSchema>(
-                  (acc, [fieldName, field]) => {
-                    acc[fieldName] = expect.objectContaining({
-                      ...field,
-                    })
-                    return acc
-                  },
-                  {}
-                ),
-              }
-              return acc
-            },
-            {}
-          ),
-
-        _rev: expect.any(String),
-      }
-      expect(updated).toEqual(expected)
+    describe("get", () => {
+      it("should be able to get a datasource", async () => {
+        const ds = await config.api.datasource.get(datasource._id!)
+        expect(ds).toEqual({
+          config: expect.any(Object),
+          plus: datasource.plus,
+          source: datasource.source,
+          isSQL: true,
+          type: "datasource_plus",
+          _id: datasource._id,
+          _rev: expect.any(String),
+          createdAt: expect.any(String),
+          updatedAt: expect.any(String),
+        })
+      })
+
+      it("should not return database password", async () => {
+        const ds = await config.api.datasource.get(datasource._id!)
+        expect(ds.config!.password).toBe("--secret-value--")
+      })
+    })
+
+    describe("list", () => {
+      it("returns all the datasources", async () => {
+        const datasources = await config.api.datasource.fetch()
+        expect(datasources).toContainEqual(expect.objectContaining(datasource))
+      })
+    })
+
+    describe("put", () => {
+      it("should update an existing datasource", async () => {
+        const newName = generator.guid()
+        datasource.name = newName
+        const updatedDs = await config.api.datasource.update(datasource)
+        expect(updatedDs.name).toEqual(newName)
+        expect(events.datasource.updated).toHaveBeenCalledTimes(1)
+      })
+
+      it("should not overwrite database password with --secret-value--", async () => {
+        const password = await context.doInAppContext(
+          config.getAppId(),
+          async () => {
+            const ds = await sdk.datasources.get(datasource._id!)
+            return ds.config!.password
+          }
+        )
+
+        expect(password).not.toBe("--secret-value--")
+
+        const ds = await config.api.datasource.get(datasource._id!)
+        expect(ds.config!.password).toBe("--secret-value--")
+
+        await config.api.datasource.update(
+          await config.api.datasource.get(datasource._id!)
+        )
+
+        const newPassword = await context.doInAppContext(
+          config.getAppId(),
+          async () => {
+            const ds = await sdk.datasources.get(datasource._id!)
+            return ds.config!.password
+          }
+        )
+
+        expect(newPassword).not.toBe("--secret-value--")
+        expect(newPassword).toBe(password)
+      })
+    })
+
+    describe("destroy", () => {
+      it("deletes queries for the datasource after deletion and returns a success message", async () => {
+        await config.api.query.save({
+          datasourceId: datasource._id!,
+          name: "Test Query",
+          parameters: [],
+          fields: {},
+          schema: {},
+          queryVerb: "read",
+          transformer: null,
+          readable: true,
+        })
+
+        await config.api.datasource.delete(datasource)
+        const datasources = await config.api.datasource.fetch()
+        expect(datasources).not.toContainEqual(
+          expect.objectContaining(datasource)
+        )
+        expect(events.datasource.deleted).toHaveBeenCalledTimes(1)
+      })
+    })
+
+    describe("schema", () => {
+      it("fetching schema will not drop tables or columns", async () => {
+        const datasourceId = datasource!._id!
+
+        const simpleTable = await config.api.table.save(
+          tableForDatasource(datasource, {
+            name: "simple",
+            schema: {
+              name: {
+                name: "name",
+                type: FieldType.STRING,
+              },
+            },
+          })
+        )
+
+        const stringName = "string"
+        const fullSchema: {
+          [type in SupportedSqlTypes]: FieldSchema & { type: type }
+        } = {
+          [FieldType.STRING]: {
+            name: stringName,
+            type: FieldType.STRING,
+            constraints: {
+              presence: true,
+            },
+          },
+          [FieldType.LONGFORM]: {
+            name: "longform",
+            type: FieldType.LONGFORM,
+          },
+          [FieldType.OPTIONS]: {
+            name: "options",
+            type: FieldType.OPTIONS,
+            constraints: {
+              presence: { allowEmpty: false },
+            },
+          },
+          [FieldType.NUMBER]: {
+            name: "number",
+            type: FieldType.NUMBER,
+          },
+          [FieldType.BOOLEAN]: {
+            name: "boolean",
+            type: FieldType.BOOLEAN,
+          },
+          [FieldType.ARRAY]: {
+            name: "array",
+            type: FieldType.ARRAY,
+          },
+          [FieldType.DATETIME]: {
+            name: "datetime",
+            type: FieldType.DATETIME,
+            dateOnly: true,
+            timeOnly: false,
+          },
+          [FieldType.LINK]: {
+            name: "link",
+            type: FieldType.LINK,
+            tableId: simpleTable._id!,
+            relationshipType: RelationshipType.ONE_TO_MANY,
+            fieldName: "link",
+          },
+          [FieldType.FORMULA]: {
+            name: "formula",
+            type: FieldType.FORMULA,
+            formula: "any formula",
+          },
+          [FieldType.BARCODEQR]: {
+            name: "barcodeqr",
+            type: FieldType.BARCODEQR,
+          },
+          [FieldType.BIGINT]: {
+            name: "bigint",
+            type: FieldType.BIGINT,
+          },
+          [FieldType.BB_REFERENCE]: {
+            name: "bb_reference",
+            type: FieldType.BB_REFERENCE,
+            subtype: BBReferenceFieldSubType.USER,
+          },
+          [FieldType.BB_REFERENCE_SINGLE]: {
+            name: "bb_reference_single",
+            type: FieldType.BB_REFERENCE_SINGLE,
+            subtype: BBReferenceFieldSubType.USER,
+          },
+        }
+
+        await config.api.table.save(
+          tableForDatasource(datasource, {
+            name: "full",
+            schema: fullSchema,
+          })
+        )
+
+        const persisted = await config.api.datasource.get(datasourceId)
+        await config.api.datasource.fetchSchema({ datasourceId })
+
+        const updated = await config.api.datasource.get(datasourceId)
+        const expected: Datasource = {
+          ...persisted,
+          entities:
+            persisted?.entities &&
+            Object.entries(persisted.entities).reduce<Record<string, Table>>(
+              (acc, [tableName, table]) => {
+                acc[tableName] = {
+                  ...table,
+                  primaryDisplay: expect.not.stringMatching(
+                    new RegExp(`^${table.primaryDisplay || ""}$`)
+                  ),
+                  schema: Object.entries(table.schema).reduce<TableSchema>(
+                    (acc, [fieldName, field]) => {
+                      // the constraint will be unset - as the DB doesn't recognise it as not null
+                      if (fieldName === stringName) {
+                        field.constraints = {}
+                      }
+                      acc[fieldName] = expect.objectContaining({
+                        ...field,
+                      })
+                      return acc
+                    },
+                    {}
+                  ),
+                }
+                return acc
+              },
+              {}
+            ),
+
+          _rev: expect.any(String),
+          updatedAt: expect.any(String),
+        }
+        expect(updated).toEqual(expected)
+      })
+    })
+
+    describe("verify", () => {
+      it("should be able to verify the connection", async () => {
+        await config.api.datasource.verify(
+          {
+            datasource: rawDatasource,
+          },
+          {
+            body: {
+              connected: true,
+            },
+          }
+        )
+      })
+
+      it("should state an invalid datasource cannot connect", async () => {
+        await config.api.datasource.verify(
+          {
+            datasource: {
+              ...rawDatasource,
+              config: {
+                ...rawDatasource.config,
+                password: "wrongpassword",
+              },
+            },
+          },
+          {
+            body: {
+              connected: false,
+              error: /.*/, // error message differs between databases
+            },
+          }
+        )
+      })
+    })
+
+    describe("info", () => {
+      it("should fetch information about postgres datasource", async () => {
+        const table = await config.api.table.save(
+          tableForDatasource(datasource, {
+            schema: {
+              name: {
+                name: "name",
+                type: FieldType.STRING,
+              },
+            },
+          })
+        )
+
+        const info = await config.api.datasource.info(datasource)
+        expect(info.tableNames).toContain(table.name)
+      })
     })
   })
 })
@@ -38,7 +38,7 @@ describe.each([
   [DatabaseName.MYSQL, getDatasource(DatabaseName.MYSQL)],
   [DatabaseName.SQL_SERVER, getDatasource(DatabaseName.SQL_SERVER)],
   [DatabaseName.MARIADB, getDatasource(DatabaseName.MARIADB)],
-])("/rows (%s)", (__, dsProvider) => {
+])("/rows (%s)", (providerType, dsProvider) => {
   const isInternal = dsProvider === undefined
   const config = setup.getConfig()

@@ -134,6 +134,10 @@ describe.each([
     // error. This is to account for the fact that parallel writes can result
     // in some quota updates getting lost. We don't have any need to solve this
     // right now, so we just allow for some error.
+    if (expected === 0) {
+      expect(usage).toEqual(0)
+      return
+    }
     expect(usage).toBeGreaterThan(expected * 0.9)
     expect(usage).toBeLessThan(expected * 1.1)
   }
@@ -158,7 +162,7 @@ describe.each([
       })
       expect(row.name).toEqual("Test Contact")
       expect(row._rev).toBeDefined()
-      await assertRowUsage(rowUsage + 1)
+      await assertRowUsage(isInternal ? rowUsage + 1 : rowUsage)
     })

     it("fails to create a row for a table that does not exist", async () => {
@@ -230,7 +234,7 @@ describe.each([
         expect(row["Row ID"]).toBeGreaterThan(previousId)
         previousId = row["Row ID"]
       }
-      await assertRowUsage(rowUsage + 10)
+      await assertRowUsage(isInternal ? rowUsage + 10 : rowUsage)
    })

    isInternal &&
@@ -693,6 +697,49 @@ describe.each([
       })
       expect(resp.relationship.length).toBe(1)
     })
+
+    !isInternal &&
+      // TODO: SQL is having issues creating composite keys
+      providerType !== DatabaseName.SQL_SERVER &&
+      it("should support updating fields that are part of a composite key", async () => {
+        const tableRequest = saveTableRequest({
+          primary: ["number", "string"],
+          schema: {
+            string: {
+              type: FieldType.STRING,
+              name: "string",
+            },
+            number: {
+              type: FieldType.NUMBER,
+              name: "number",
+            },
+          },
+        })
+
+        delete tableRequest.schema.id
+
+        const table = await config.api.table.save(tableRequest)
+
+        const stringValue = generator.word()
+        const naturalValue = generator.integer({ min: 0, max: 1000 })
+
+        const existing = await config.api.row.save(table._id!, {
+          string: stringValue,
+          number: naturalValue,
+        })
+
+        expect(existing._id).toEqual(`%5B${naturalValue}%2C'${stringValue}'%5D`)
+
+        const row = await config.api.row.patch(table._id!, {
+          _id: existing._id!,
+          _rev: existing._rev!,
+          tableId: table._id!,
+          string: stringValue,
+          number: 1500,
+        })
+
+        expect(row._id).toEqual(`%5B${"1500"}%2C'${stringValue}'%5D`)
+      })
   })

   describe("destroy", () => {
@@ -708,18 +755,21 @@ describe.each([
         rows: [createdRow],
       })
       expect(res[0]._id).toEqual(createdRow._id)
-      await assertRowUsage(rowUsage - 1)
+      await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })

     it("should be able to bulk delete rows, including a row that doesn't exist", async () => {
       const createdRow = await config.api.row.save(table._id!, {})
+      const createdRow2 = await config.api.row.save(table._id!, {})
+
       const res = await config.api.row.bulkDelete(table._id!, {
-        rows: [createdRow, { _id: "9999999" }],
+        rows: [createdRow, createdRow2, { _id: "9999999" }],
       })

-      expect(res[0]._id).toEqual(createdRow._id)
-      expect(res.length).toEqual(1)
+      expect(res.map(r => r._id)).toEqual(
+        expect.arrayContaining([createdRow._id, createdRow2._id])
+      )
+      expect(res.length).toEqual(2)
     })
   })

@@ -771,7 +821,7 @@ describe.each([

       expect(res.length).toEqual(2)
       await config.api.row.get(table._id!, row1._id!, { status: 404 })
-      await assertRowUsage(rowUsage - 2)
+      await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)
     })

     it("should be able to delete a variety of row set types", async () => {
@@ -788,7 +838,7 @@ describe.each([

       expect(res.length).toEqual(3)
       await config.api.row.get(table._id!, row1._id!, { status: 404 })
-      await assertRowUsage(rowUsage - 3)
+      await assertRowUsage(isInternal ? rowUsage - 3 : rowUsage)
     })

     it("should accept a valid row object and delete the row", async () => {
@@ -799,7 +849,7 @@ describe.each([

       expect(res.id).toEqual(row1._id)
       await config.api.row.get(table._id!, row1._id!, { status: 404 })
-      await assertRowUsage(rowUsage - 1)
+      await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
     })

     it("Should ignore malformed/invalid delete requests", async () => {
@@ -1637,3 +1687,5 @@ describe.each([
   })
 })
 })
+
+// todo: remove me
@@ -1111,7 +1111,7 @@ describe.each([
       const createdRow = await config.api.row.save(table._id!, {})
       const rowUsage = await getRowUsage()
       await config.api.row.bulkDelete(view.id, { rows: [createdRow] })
-      await assertRowUsage(rowUsage - 1)
+      await assertRowUsage(isInternal ? rowUsage - 1 : rowUsage)
       await config.api.row.get(table._id!, createdRow._id!, {
         status: 404,
       })
@@ -1127,7 +1127,7 @@ describe.each([

       await config.api.row.bulkDelete(view.id, { rows: [rows[0], rows[2]] })

-      await assertRowUsage(rowUsage - 2)
+      await assertRowUsage(isInternal ? rowUsage - 2 : rowUsage)

       await config.api.row.get(table._id!, rows[0]._id!, {
         status: 404,
@@ -1,4 +1,4 @@
-import { Duration, cache, context, db, env } from "@budibase/backend-core"
+import { Duration, cache, db, env } from "@budibase/backend-core"
 import { Database, DocumentType, Document } from "@budibase/types"

 export interface AppMigrationDoc extends Document {
@@ -25,15 +25,15 @@ export async function getAppMigrationVersion(appId: string): Promise<string> {

   let metadata: AppMigrationDoc | undefined = await cache.get(cacheKey)

-  // We don't want to cache in dev, in order to be able to tweak it
-  if (metadata && !env.isDev()) {
+  // returned cached version if we found one
+  if (metadata?.version) {
     return metadata.version
   }

   let version
   try {
     metadata = await getFromDB(appId)
-    version = metadata.version
+    version = metadata.version || ""
   } catch (err: any) {
     if (err.status !== 404) {
       throw err
@@ -42,7 +42,10 @@ export async function getAppMigrationVersion(appId: string): Promise<string> {
     version = ""
   }

-  await cache.store(cacheKey, version, EXPIRY_SECONDS)
+  // only cache if we have a valid version
+  if (version) {
+    await cache.store(cacheKey, version, EXPIRY_SECONDS)
+  }

   return version
 }
@@ -54,8 +57,7 @@ export async function updateAppMigrationMetadata({
   appId: string
   version: string
 }): Promise<void> {
-  const db = context.getAppDB()
+  const appDb = db.getDB(appId)

   let appMigrationDoc: AppMigrationDoc

   try {
@@ -70,7 +72,7 @@ export async function updateAppMigrationMetadata({
       version: "",
       history: {},
     }
-    await db.put(appMigrationDoc)
+    await appDb.put(appMigrationDoc)
     appMigrationDoc = await getFromDB(appId)
   }

@@ -82,7 +84,7 @@ export async function updateAppMigrationMetadata({
       [version]: { runAt: new Date().toISOString() },
     },
   }
-  await db.put(updatedMigrationDoc)
+  await appDb.put(updatedMigrationDoc)

   const cacheKey = getCacheKey(appId)

@@ -1,4 +1,4 @@
-import queue from "./queue"
+import { getAppMigrationQueue } from "./queue"
 import { Next } from "koa"
 import { getAppMigrationVersion } from "./appMigrationMetadata"
 import { MIGRATIONS } from "./migrations"
@@ -10,32 +10,55 @@ export * from "./appMigrationMetadata"
 export type AppMigration = {
   id: string
   func: () => Promise<void>
+  // disabled so that by default all migrations listed are enabled
+  disabled?: boolean
 }

-export const getLatestMigrationId = () =>
-  MIGRATIONS.map(m => m.id)
-    .sort()
-    .reverse()[0]
+export function getLatestEnabledMigrationId(migrations?: AppMigration[]) {
+  let latestMigrationId: string | undefined
+  if (!migrations) {
+    migrations = MIGRATIONS
+  }
+  for (let migration of migrations) {
+    // if a migration is disabled, all migrations after it are disabled
+    if (migration.disabled) {
+      break
+    }
+    latestMigrationId = migration.id
+  }
+  return latestMigrationId
+}

-const getTimestamp = (versionId: string) => versionId?.split("_")[0] || ""
+function getTimestamp(versionId: string) {
+  return versionId?.split("_")[0] || ""
+}

 export async function checkMissingMigrations(
   ctx: UserCtx,
   next: Next,
   appId: string
 ) {
-  const currentVersion = await getAppMigrationVersion(appId)
-  const latestMigration = getLatestMigrationId()
+  const latestMigration = getLatestEnabledMigrationId()

-  if (getTimestamp(currentVersion) < getTimestamp(latestMigration)) {
+  // no migrations set - edge case, don't try to do anything
+  if (!latestMigration) {
+    return next()
+  }
+
+  const currentVersion = await getAppMigrationVersion(appId)
+  const queue = getAppMigrationQueue()
+
+  if (
+    queue &&
+    latestMigration &&
+    getTimestamp(currentVersion) < getTimestamp(latestMigration)
+  ) {
     await queue.add(
       {
         appId,
       },
       {
         jobId: `${appId}_${latestMigration}`,
-        removeOnComplete: true,
-        removeOnFail: true,
       }
     )
@@ -1,7 +1,15 @@
 // This file should never be manually modified, use `yarn add-app-migration` in order to add a new one

+import env from "../environment"
 import { AppMigration } from "."

+import m20240604153647_initial_sqs from "./migrations/20240604153647_initial_sqs"
+
+// Migrations will be executed sorted by ID
 export const MIGRATIONS: AppMigration[] = [
-  // Migrations will be executed sorted by id
+  {
+    id: "20240604153647_initial_sqs",
+    func: m20240604153647_initial_sqs,
+    disabled: !env.SQS_SEARCH_ENABLE,
+  },
 ]

@@ -0,0 +1,52 @@
+import { context } from "@budibase/backend-core"
+import { allLinkDocs } from "../../db/utils"
+import LinkDocumentImpl from "../../db/linkedRows/LinkDocument"
+import sdk from "../../sdk"
+import env from "../../environment"
+
+const migration = async () => {
+  const linkDocs = await allLinkDocs()
+
+  const docsToUpdate = []
+  for (const linkDoc of linkDocs) {
+    if (linkDoc.tableId) {
+      // It already had the required data
+      continue
+    }
+
+    // it already has the junction table ID - no need to migrate
+    if (!linkDoc.tableId) {
+      const newLink = new LinkDocumentImpl(
+        linkDoc.doc1.tableId,
+        linkDoc.doc1.fieldName,
+        linkDoc.doc1.rowId,
+        linkDoc.doc2.tableId,
+        linkDoc.doc2.fieldName,
+        linkDoc.doc2.rowId
+      )
+      newLink._id = linkDoc._id!
+      newLink._rev = linkDoc._rev
+      docsToUpdate.push(newLink)
+    }
+  }
+
+  const db = context.getAppDB()
+  if (docsToUpdate.length) {
+    await db.bulkDocs(docsToUpdate)
+  }
+
+  // at the end make sure design doc is ready
+  await sdk.tables.sqs.syncDefinition()
+  // only do initial search if environment is using SQS already
+  // initial search makes sure that all the indexes have been created
+  // and are ready to use, avoiding any initial waits for large tables
+  if (env.SQS_SEARCH_ENABLE) {
+    const tables = await sdk.tables.getAllInternalTables()
+    // do these one by one - running in parallel could cause problems
+    for (let table of tables) {
+      await db.sql(`select * from ${table._id} limit 1`)
+    }
+  }
+}
+
+export default migration

@@ -0,0 +1,120 @@
+import * as setup from "../../../api/routes/tests/utilities"
+import { basicTable } from "../../../tests/utilities/structures"
+import { db as dbCore, SQLITE_DESIGN_DOC_ID } from "@budibase/backend-core"
+import {
+  LinkDocument,
+  DocumentType,
+  SQLiteDefinition,
+  SQLiteType,
+} from "@budibase/types"
+import {
+  generateJunctionTableID,
+  generateLinkID,
+  generateRowID,
+} from "../../../db/utils"
+import { processMigrations } from "../../migrationsProcessor"
+import migration from "../20240604153647_initial_sqs"
+import { AppMigration } from "src/appMigrations"
+
+const MIGRATIONS: AppMigration[] = [
+  {
+    id: "20240604153647_initial_sqs",
+    func: migration,
+    disabled: false,
+  },
+]
+
+const config = setup.getConfig()
+let tableId: string
+
+function oldLinkDocInfo() {
+  const tableId1 = `${DocumentType.TABLE}_a`,
+    tableId2 = `${DocumentType.TABLE}_b`
+  return {
+    tableId1,
+    tableId2,
+    rowId1: generateRowID(tableId1, "b"),
+    rowId2: generateRowID(tableId2, "a"),
+    col1: "columnB",
+    col2: "columnA",
+  }
+}
+
+function oldLinkDocID() {
+  const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
+  return generateLinkID(tableId1, tableId2, rowId1, rowId2, col1, col2)
+}
+
+function oldLinkDocument(): Omit<LinkDocument, "tableId"> {
+  const { tableId1, tableId2, rowId1, rowId2, col1, col2 } = oldLinkDocInfo()
+  return {
+    type: "link",
+    _id: oldLinkDocID(),
+    doc1: {
+      tableId: tableId1,
+      fieldName: col1,
+      rowId: rowId1,
+    },
+    doc2: {
+      tableId: tableId2,
+      fieldName: col2,
+      rowId: rowId2,
+    },
+  }
+}
+
+async function sqsDisabled(cb: () => Promise<void>) {
+  await config.withEnv({ SQS_SEARCH_ENABLE: "" }, cb)
+}
+
+async function sqsEnabled(cb: () => Promise<void>) {
+  await config.withEnv({ SQS_SEARCH_ENABLE: "1" }, cb)
+}
+
+beforeAll(async () => {
+  await sqsDisabled(async () => {
+    await config.init()
+    const table = await config.api.table.save(basicTable())
+    tableId = table._id!
+    const db = dbCore.getDB(config.appId!)
+    // old link document
+    await db.put(oldLinkDocument())
+  })
+})
+
+describe("SQS migration", () => {
+  it("test migration runs as expected against an older DB", async () => {
+    const db = dbCore.getDB(config.appId!)
+    // confirm nothing exists initially
+    await sqsDisabled(async () => {
+      let error: any | undefined
+      try {
+        await db.get(SQLITE_DESIGN_DOC_ID)
+      } catch (err: any) {
+        error = err
+      }
+      expect(error).toBeDefined()
+      expect(error.status).toBe(404)
+    })
+    await sqsEnabled(async () => {
+      await processMigrations(config.appId!, MIGRATIONS)
+      const designDoc = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+      expect(designDoc.sql.tables).toBeDefined()
+      const mainTableDef = designDoc.sql.tables[tableId]
+      expect(mainTableDef).toBeDefined()
+      expect(mainTableDef.fields.name).toEqual(SQLiteType.TEXT)
+      expect(mainTableDef.fields.description).toEqual(SQLiteType.TEXT)
+
+      const { tableId1, tableId2, rowId1, rowId2 } = oldLinkDocInfo()
+      const linkDoc = await db.get<LinkDocument>(oldLinkDocID())
+      expect(linkDoc.tableId).toEqual(
+        generateJunctionTableID(tableId1, tableId2)
+      )
+      // should have swapped the documents
+      expect(linkDoc.doc1.tableId).toEqual(tableId2)
+      expect(linkDoc.doc1.rowId).toEqual(rowId2)
+      expect(linkDoc.doc2.tableId).toEqual(tableId1)
+      expect(linkDoc.doc2.rowId).toEqual(rowId1)
+    })
+  })
+})

@@ -1,4 +1,4 @@
-import { context, locks } from "@budibase/backend-core"
+import { context, locks, logging } from "@budibase/backend-core"
 import { LockName, LockType } from "@budibase/types"

 import {
@@ -12,47 +12,58 @@ export async function processMigrations(
   migrations: AppMigration[]
 ) {
   console.log(`Processing app migration for "${appId}"`)
-
-  await locks.doWithLock(
-    {
-      name: LockName.APP_MIGRATION,
-      type: LockType.AUTO_EXTEND,
-      resource: appId,
-    },
-    async () => {
-      await context.doInAppMigrationContext(appId, async () => {
-        let currentVersion = await getAppMigrationVersion(appId)
-
-        const pendingMigrations = migrations
-          .filter(m => m.id > currentVersion)
-          .sort((a, b) => a.id.localeCompare(b.id))
-
-        const migrationIds = migrations.map(m => m.id).sort()
-
-        let index = 0
-        for (const { id, func } of pendingMigrations) {
-          const expectedMigration =
-            migrationIds[migrationIds.indexOf(currentVersion) + 1]
-
-          if (expectedMigration !== id) {
-            throw `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
-          }
-
-          const counter = `(${++index}/${pendingMigrations.length})`
-          console.info(`Running migration ${id}... ${counter}`, {
-            migrationId: id,
-            appId,
-          })
-          await func()
-          await updateAppMigrationMetadata({
-            appId,
-            version: id,
-          })
-          currentVersion = id
-        }
-      })
-    }
-  )
-
-  console.log(`App migration for "${appId}" processed`)
+  try {
+    // first step - setup full context - tenancy, app and guards
+    await context.doInAppMigrationContext(appId, async () => {
+      console.log(`Acquiring app migration lock for "${appId}"`)
+      await locks.doWithLock(
+        {
+          name: LockName.APP_MIGRATION,
+          type: LockType.AUTO_EXTEND,
+          resource: appId,
+        },
+        async () => {
+          console.log(`Lock acquired starting app migration for "${appId}"`)
+          let currentVersion = await getAppMigrationVersion(appId)
+
+          const pendingMigrations = migrations
+            .filter(m => m.id > currentVersion)
+            .sort((a, b) => a.id.localeCompare(b.id))
+
+          const migrationIds = migrations.map(m => m.id).sort()
+          console.log(
+            `App migrations to run for "${appId}" - ${migrationIds.join(",")}`
+          )
+
+          let index = 0
+          for (const { id, func } of pendingMigrations) {
+            const expectedMigration =
+              migrationIds[migrationIds.indexOf(currentVersion) + 1]
+
+            if (expectedMigration !== id) {
+              throw new Error(
+                `Migration ${id} could not run, update for "${id}" is running but ${expectedMigration} is expected`
+              )
+            }
+
+            const counter = `(${++index}/${pendingMigrations.length})`
+            console.info(`Running migration ${id}... ${counter}`, {
+              migrationId: id,
+              appId,
+            })
+            await func()
+            await updateAppMigrationMetadata({
+              appId,
+              version: id,
+            })
+            currentVersion = id
+          }
+        }
+      )
+    })
+    console.log(`App migration for "${appId}" processed`)
+  } catch (err) {
+    logging.logAlert("Failed to run app migration", err)
+    throw err
+  }
 }

@@ -1,15 +1,45 @@
-import { queue } from "@budibase/backend-core"
+import { queue, logging } from "@budibase/backend-core"
 import { Job } from "bull"
 import { MIGRATIONS } from "./migrations"
 import { processMigrations } from "./migrationsProcessor"

-const appMigrationQueue = queue.createQueue(queue.JobQueue.APP_MIGRATION)
-appMigrationQueue.process(processMessage)
+const MAX_ATTEMPTS = 3
+// max number of migrations to run at same time, per node
+const MIGRATION_CONCURRENCY = 5
+
+export type AppMigrationJob = {
+  appId: string
+}
+
+let appMigrationQueue: queue.Queue<AppMigrationJob> | undefined
+
+export function init() {
+  appMigrationQueue = queue.createQueue<AppMigrationJob>(
+    queue.JobQueue.APP_MIGRATION,
+    {
+      jobOptions: {
+        attempts: MAX_ATTEMPTS,
+        removeOnComplete: true,
+        removeOnFail: true,
+      },
+      maxStalledCount: MAX_ATTEMPTS,
+      removeStalledCb: async (job: Job) => {
+        logging.logAlert(
+          `App migration failed, queue job ID: ${job.id} - reason: ${job.failedReason}`
+        )
+      },
+    }
+  )
+
+  return appMigrationQueue.process(MIGRATION_CONCURRENCY, processMessage)
+}

-async function processMessage(job: Job) {
+async function processMessage(job: Job<AppMigrationJob>) {
   const { appId } = job.data

   await processMigrations(appId, MIGRATIONS)
 }

-export default appMigrationQueue
+export function getAppMigrationQueue() {
+  return appMigrationQueue
+}

@@ -1,6 +1,7 @@
 import { Header } from "@budibase/backend-core"
 import * as setup from "../../api/routes/tests/utilities"
 import * as migrations from "../migrations"
+import { AppMigration, getLatestEnabledMigrationId } from "../index"
 import { getAppMigrationVersion } from "../appMigrationMetadata"

 jest.mock<typeof migrations>("../migrations", () => ({
@@ -52,4 +53,29 @@ describe("migrations", () => {
       },
     })
   })
+
+  it("should disable all migrations after one that is disabled", () => {
+    const MIGRATION_ID1 = "20231211105810_new-test",
+      MIGRATION_ID2 = "20231211105812_new-test",
+      MIGRATION_ID3 = "20231211105814_new-test"
+    // create some migrations to test with
+    const migrations: AppMigration[] = [
+      {
+        id: MIGRATION_ID1,
+        func: async () => {},
+      },
+      {
+        id: MIGRATION_ID2,
+        func: async () => {},
+      },
+      {
+        id: MIGRATION_ID3,
+        func: async () => {},
+      },
+    ]
+
+    expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID3)
+    migrations[1].disabled = true
+    expect(getLatestEnabledMigrationId(migrations)).toBe(MIGRATION_ID1)
+  })
 })

@@ -99,6 +99,15 @@ export function getError(err: any) {
   return typeof err !== "string" ? err.toString() : err
 }

+export function guardAttachment(attachmentObject: any) {
+  if (!("url" in attachmentObject) || !("filename" in attachmentObject)) {
+    const providedKeys = Object.keys(attachmentObject).join(", ")
+    throw new Error(
+      `Attachments must have both "url" and "filename" keys. You have provided: ${providedKeys}`
+    )
+  }
+}
+
 export async function sendAutomationAttachmentsToStorage(
   tableId: string,
   row: Row
@@ -116,9 +125,15 @@ export async function sendAutomationAttachmentsToStorage(
       schema?.type === FieldType.ATTACHMENT_SINGLE ||
       schema?.type === FieldType.SIGNATURE_SINGLE
     ) {
+      if (Array.isArray(value)) {
+        value.forEach(item => guardAttachment(item))
+      } else {
+        guardAttachment(value)
+      }
       attachmentRows[prop] = value
     }
   }

   for (const [prop, attachments] of Object.entries(attachmentRows)) {
     if (!attachments) {
       continue
@@ -135,7 +150,6 @@ export async function sendAutomationAttachmentsToStorage(
-
   return row
 }

 async function generateAttachmentRow(attachment: AutomationAttachment) {
   const prodAppId = context.getProdAppId()

@@ -3,6 +3,7 @@ import { KoaAdapter } from "@bull-board/koa"
 import { queue } from "@budibase/backend-core"
 import * as automation from "../threads/automation"
 import { backups } from "@budibase/pro"
+import { getAppMigrationQueue } from "../appMigrations/queue"
 import { createBullBoard } from "@bull-board/api"
 import BullQueue from "bull"
@@ -16,10 +17,14 @@ const PATH_PREFIX = "/bulladmin"
 export async function init() {
   // Set up queues for bull board admin
   const backupQueue = backups.getBackupQueue()
+  const appMigrationQueue = getAppMigrationQueue()
   const queues = [automationQueue]
   if (backupQueue) {
     queues.push(backupQueue)
   }
+  if (appMigrationQueue) {
+    queues.push(appMigrationQueue)
+  }
   const adapters = []
   const serverAdapter: any = new KoaAdapter()
   for (let queue of queues) {

@@ -90,7 +90,6 @@ export async function run({ inputs, appId, emitter }: AutomationStepInput) {
       tableId: inputs.row.tableId,
     },
   })
-
   try {
     inputs.row = await cleanUpRow(inputs.row.tableId, inputs.row)
    inputs.row = await sendAutomationAttachmentsToStorage(

@@ -118,6 +118,14 @@ export async function run({ inputs }: AutomationStepInput) {
   }
   to = to || undefined

+  if (attachments) {
+    if (Array.isArray(attachments)) {
+      attachments.forEach(item => automationUtils.guardAttachment(item))
+    } else {
+      automationUtils.guardAttachment(attachments)
+    }
+  }
+
   try {
     let response = await sendSmtpEmail({
       to,

@@ -128,4 +128,31 @@ describe("test the create row action", () => {
     expect(objectData).toBeDefined()
     expect(objectData.ContentLength).toBeGreaterThan(0)
   })
+
+  it("should check that attachment without the correct keys throws an error", async () => {
+    let attachmentTable = await config.createTable(
+      basicTableWithAttachmentField()
+    )
+
+    let attachmentRow: any = {
+      tableId: attachmentTable._id,
+    }
+
+    let filename = "test2.txt"
+    let presignedUrl = await uploadTestFile(filename)
+    let attachmentObject = {
+      wrongKey: presignedUrl,
+      anotherWrongKey: filename,
+    }
+
+    attachmentRow.single_file_attachment = attachmentObject
+    const res = await setup.runStep(setup.actions.CREATE_ROW.stepId, {
+      row: attachmentRow,
+    })
+
+    expect(res.success).toEqual(false)
+    expect(res.response).toEqual(
+      'Error: Attachments must have both "url" and "filename" keys. You have provided: wrongKey, anotherWrongKey'
+    )
+  })
 })

@@ -59,6 +59,9 @@ class LinkDocumentImpl implements LinkDocument {
     this.doc1 = docA.tableId > docB.tableId ? docA : docB
     this.doc2 = docA.tableId > docB.tableId ? docB : docA
   }
+  _rev?: string | undefined
+  createdAt?: string | number | undefined
+  updatedAt?: string | undefined
 }

 export default LinkDocumentImpl

@@ -1,5 +1,5 @@
 import newid from "./newid"
-import { db as dbCore } from "@budibase/backend-core"
+import { context, db as dbCore } from "@budibase/backend-core"
 import {
   DatabaseQueryOpts,
   Datasource,
@@ -10,6 +10,7 @@ import {
   RelationshipFieldMetadata,
   SourceName,
   VirtualDocumentType,
+  LinkDocument,
 } from "@budibase/types"

 export { DocumentType, VirtualDocumentType } from "@budibase/types"
@@ -137,10 +138,24 @@ export function generateLinkID(
 /**
  * Gets parameters for retrieving link docs, this is a utility function for the getDocParams function.
  */
-export function getLinkParams(otherProps: any = {}) {
+function getLinkParams(otherProps: Partial<DatabaseQueryOpts> = {}) {
   return getDocParams(DocumentType.LINK, null, otherProps)
 }

+/**
+ * Gets all the link docs document from the current app db.
+ */
+export async function allLinkDocs() {
+  const db = context.getAppDB()
+
+  const response = await db.allDocs<LinkDocument>(
+    getLinkParams({
+      include_docs: true,
+    })
+  )
+  return response.rows.map(row => row.doc!)
+}
+
 /**
  * Generates a new layout ID.
  * @returns The new layout ID which the layout doc can be stored under.

@@ -48,6 +48,7 @@ const environment = {
   MINIO_URL: process.env.MINIO_URL,
   WORKER_URL: process.env.WORKER_URL,
   AWS_REGION: process.env.AWS_REGION,
+  AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
   REDIS_URL: process.env.REDIS_URL,
@@ -96,6 +97,7 @@ const environment = {
   DISABLE_THREADING: process.env.DISABLE_THREADING,
   DISABLE_AUTOMATION_LOGS: process.env.DISABLE_AUTOMATION_LOGS,
   DISABLE_RATE_LIMITING: process.env.DISABLE_RATE_LIMITING,
+  DISABLE_APP_MIGRATIONS: process.env.SKIP_APP_MIGRATIONS || false,
   MULTI_TENANCY: process.env.MULTI_TENANCY,
   ENABLE_ANALYTICS: process.env.ENABLE_ANALYTICS,
   SELF_HOSTED: process.env.SELF_HOSTED,

@@ -4,19 +4,14 @@
   MakeRequestResponse,
 } from "../api/routes/public/tests/utils"
 import * as setup from "../api/routes/tests/utilities"
-import {
-  Datasource,
-  FieldType,
-  Table,
-  TableRequest,
-  TableSourceType,
-} from "@budibase/types"
+import { Datasource, FieldType } from "@budibase/types"
 import {
   DatabaseName,
   getDatasource,
   rawQuery,
 } from "../integrations/tests/utils"
 import { generator } from "@budibase/backend-core/tests"
+import { tableForDatasource } from "../../src/tests/utilities/structures"
 // @ts-ignore
 fetch.mockSearch()
@@ -47,8 +42,7 @@ jest.mock("../websockets", () => ({
 describe("mysql integrations", () => {
   let makeRequest: MakeRequestResponse,
     rawDatasource: Datasource,
-    datasource: Datasource,
-    primaryMySqlTable: Table
+    datasource: Datasource

   beforeAll(async () => {
     await config.init()
@@ -60,38 +54,12 @@ describe("mysql integrations", () => {
     datasource = await config.api.datasource.create(rawDatasource)
   })

-  beforeEach(async () => {
-    primaryMySqlTable = await config.createTable({
-      name: uniqueTableName(),
-      type: "table",
-      primary: ["id"],
-      schema: {
-        id: {
-          name: "id",
-          type: FieldType.AUTO,
-          autocolumn: true,
-        },
-        name: {
-          name: "name",
-          type: FieldType.STRING,
-        },
-        description: {
-          name: "description",
-          type: FieldType.STRING,
-        },
-        value: {
-          name: "value",
-          type: FieldType.NUMBER,
-        },
-      },
-      sourceId: datasource._id,
-      sourceType: TableSourceType.EXTERNAL,
-    })
-  })
-
   afterAll(config.end)

   it("validate table schema", async () => {
+    // Creating a table so that `entities` is populated.
+    await config.api.table.save(tableForDatasource(datasource))
+
     const res = await makeRequest("get", `/api/datasources/${datasource._id}`)

     expect(res.status).toBe(200)
@@ -115,54 +83,6 @@ describe("mysql integrations", () => {
     })
   })

-  describe("POST /api/datasources/verify", () => {
-    it("should be able to verify the connection", async () => {
-      await config.api.datasource.verify(
-        {
-          datasource: rawDatasource,
-        },
-        {
-          body: {
-            connected: true,
-          },
-        }
-      )
-    })
-
-    it("should state an invalid datasource cannot connect", async () => {
-      await config.api.datasource.verify(
-        {
-          datasource: {
-            ...rawDatasource,
-            config: {
-              ...rawDatasource.config,
-              password: "wrongpassword",
-            },
-          },
-        },
-        {
-          body: {
-            connected: false,
-            error:
-              "Access denied for the specified user. User does not have the necessary privileges or the provided credentials are incorrect. Please verify the credentials, and ensure that the user has appropriate permissions.",
-          },
-        }
-      )
-    })
-  })
-
-  describe("POST /api/datasources/info", () => {
-    it("should fetch information about mysql datasource", async () => {
-      const primaryName = primaryMySqlTable.name
-      const response = await makeRequest("post", "/api/datasources/info", {
-        datasource: datasource,
-      })
-      expect(response.status).toBe(200)
-      expect(response.body.tableNames).toBeDefined()
-      expect(response.body.tableNames.indexOf(primaryName)).not.toBe(-1)
-    })
-  })
-
   describe("Integration compatibility with mysql search_path", () => {
     let datasource: Datasource, rawDatasource: Datasource
     const database = generator.guid()
@@ -231,57 +151,6 @@ describe("mysql integrations", () => {
     })
   })

-  describe("POST /api/tables/", () => {
-    it("will rename a column", async () => {
-      await makeRequest("post", "/api/tables/", primaryMySqlTable)
-
-      let renameColumnOnTable: TableRequest = {
-        ...primaryMySqlTable,
-        schema: {
-          id: {
-            name: "id",
-            type: FieldType.AUTO,
-            autocolumn: true,
-            externalType: "unsigned integer",
-          },
-          name: {
-            name: "name",
-            type: FieldType.STRING,
-            externalType: "text",
-          },
-          description: {
-            name: "description",
-            type: FieldType.STRING,
-            externalType: "text",
-          },
-          age: {
-            name: "age",
-            type: FieldType.NUMBER,
-            externalType: "float(8,2)",
-          },
-        },
-      }
-
-      const response = await makeRequest(
-        "post",
-        "/api/tables/",
-        renameColumnOnTable
-      )
-
-      const ds = (
-        await makeRequest("post", `/api/datasources/${datasource._id}/schema`)
-      ).body.datasource
-
-      expect(response.status).toEqual(200)
-      expect(Object.keys(ds.entities![primaryMySqlTable.name].schema)).toEqual([
-        "id",
-        "name",
-        "description",
-        "age",
-      ])
-    })
-  })
-
   describe("POST /api/datasources/:datasourceId/schema", () => {
     let tableName: string

File diff suppressed because it is too large

@@ -566,7 +566,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
       query.filters.equal[`_${GOOGLE_SHEETS_PRIMARY_KEY}`] = id
     }
   }
-  let filtered = dataFilters.runLuceneQuery(rows, query.filters)
+  let filtered = dataFilters.runQuery(rows, query.filters)
   if (hasFilters && query.paginate) {
     filtered = filtered.slice(offset, offset + limit)
   }
@@ -585,7 +585,7 @@ class GoogleSheetsIntegration implements DatasourcePlus {
       })
     }
     const [sortField, sortInfo] = Object.entries(query.sort)[0]
-    response = dataFilters.luceneSort(
+    response = dataFilters.sort(
       response,
       sortField,
       sortInfo.direction,

@@ -4,8 +4,9 @@ import * as mongodb from "./mongodb"
 import * as mysql from "./mysql"
 import * as mssql from "./mssql"
 import * as mariadb from "./mariadb"
-import { GenericContainer } from "testcontainers"
+import { GenericContainer, StartedTestContainer } from "testcontainers"
 import { testContainerUtils } from "@budibase/backend-core/tests"
+import cloneDeep from "lodash/cloneDeep"

 export type DatasourceProvider = () => Promise<Datasource>
@@ -65,9 +66,39 @@ export async function rawQuery(ds: Datasource, sql: string): Promise<any> {
 }

 export async function startContainer(container: GenericContainer) {
-  container = container.withReuse().withLabels({ "com.budibase": "true" })
+  const imageName = (container as any).imageName.string as string
+  const key = imageName.replaceAll("/", "-").replaceAll(":", "-")

-  const startedContainer = await container.start()
+  container = container
+    .withReuse()
+    .withLabels({ "com.budibase": "true" })
+    .withName(key)
+
+  let startedContainer: StartedTestContainer | undefined = undefined
+  let lastError = undefined
+  for (let i = 0; i < 10; i++) {
+    try {
+      // container.start() is not an idempotent operation, calling `start`
+      // modifies the internal state of a GenericContainer instance such that
+      // the hash it uses to determine reuse changes. We need to clone the
+      // container before calling start to ensure that we're using the same
+      // reuse hash every time.
+      const containerCopy = cloneDeep(container)
+      startedContainer = await containerCopy.start()
+      lastError = undefined
+      break
+    } catch (e: any) {
+      lastError = e
+      await new Promise(resolve => setTimeout(resolve, 1000))
+    }
+  }
+
+  if (!startedContainer) {
+    if (lastError) {
+      throw lastError
+    }
+    throw new Error(`failed to start container: ${imageName}`)
+  }
+
   const info = testContainerUtils.getContainerById(startedContainer.getId())
   if (!info) {

@@ -29,6 +29,9 @@ export async function getDatasource(): Promise<Datasource> {
   }

   const port = (await ports).find(x => x.container === 1433)?.host
+  if (!port) {
+    throw new Error("SQL Server port not found")
+  }

   const datasource: Datasource = {
     type: "datasource_plus",

@@ -38,6 +38,9 @@ export async function getDatasource(): Promise<Datasource> {
   }

   const port = (await ports).find(x => x.container === 3306)?.host
+  if (!port) {
+    throw new Error("MySQL port not found")
+  }

   const datasource: Datasource = {
     type: "datasource_plus",

@@ -21,6 +21,9 @@ export async function getDatasource(): Promise<Datasource> {
   }

   const port = (await ports).find(x => x.container === 5432)?.host
+  if (!port) {
+    throw new Error("Postgres port not found")
+  }

   const datasource: Datasource = {
     type: "datasource_plus",

@@ -280,12 +280,35 @@ function copyExistingPropsOver(
       utils.unreachable(existingColumnType)
   }

+  // copy the BB schema in case of special props
   if (shouldKeepSchema) {
+    const fetchedColumnDefinition: FieldSchema | undefined =
+      table.schema[key]
     table.schema[key] = {
       ...existingTableSchema[key],
       externalType:
         existingTableSchema[key].externalType ||
         table.schema[key]?.externalType,
-    }
+      autocolumn: fetchedColumnDefinition?.autocolumn,
+    } as FieldSchema
+    // check constraints which can be fetched from the DB (they could be updated)
+    if (fetchedColumnDefinition?.constraints) {
+      // inclusions are the enum values (select/options)
+      const fetchedConstraints = fetchedColumnDefinition.constraints
+      const oldConstraints = table.schema[key].constraints
+      table.schema[key].constraints = {
+        ...table.schema[key].constraints,
+        inclusion: fetchedConstraints.inclusion?.length
+          ? fetchedConstraints.inclusion
+          : oldConstraints?.inclusion,
+      }
+      // true or undefined - consistent with old API
+      if (fetchedConstraints.presence) {
+        table.schema[key].constraints!.presence =
+          fetchedConstraints.presence
+      } else if (oldConstraints?.presence === true) {
+        delete table.schema[key].constraints?.presence
+      }
+    }
   }
 }

@@ -1,9 +1,16 @@
 import { UserCtx } from "@budibase/types"
 import { checkMissingMigrations } from "../appMigrations"
+import env from "../environment"

 export default async (ctx: UserCtx, next: any) => {
   const { appId } = ctx

+  // migrations can be disabled via environment variable if you
+  // need to completely disable migrations, e.g. for testing
+  if (env.DISABLE_APP_MIGRATIONS) {
+    return next()
+  }
+
   if (!appId) {
     return next()
   }

@@ -14,6 +14,7 @@ import {
   CONSTANT_INTERNAL_ROW_COLS,
   generateJunctionTableID,
 } from "../../../../db/utils"
+import { isEqual } from "lodash"

 const FieldTypeMap: Record<FieldType, SQLiteType> = {
   [FieldType.BOOLEAN]: SQLiteType.NUMERIC,
@@ -107,8 +108,22 @@ async function buildBaseDefinition(): Promise<PreSaveSQLiteDefinition> {

 export async function syncDefinition(): Promise<void> {
   const db = context.getAppDB()
+  let existing: SQLiteDefinition | undefined
+  try {
+    existing = await db.get<SQLiteDefinition>(SQLITE_DESIGN_DOC_ID)
+  } catch (err: any) {
+    if (err.status !== 404) {
+      throw err
+    }
+  }
   const definition = await buildBaseDefinition()
-  await db.put(definition)
+  if (existing) {
+    definition._rev = existing._rev
+  }
+  // only write if something has changed
+  if (!existing || !isEqual(existing.sql, definition.sql)) {
+    await db.put(definition)
+  }
 }

 export async function addTable(table: Table) {

@@ -15,6 +15,7 @@ import * as fileSystem from "../utilities/fileSystem"
 import { default as eventEmitter, init as eventInit } from "../events"
 import * as migrations from "../migrations"
 import * as bullboard from "../automations/bullboard"
+import * as appMigrations from "../appMigrations/queue"
 import * as pro from "@budibase/pro"
 import * as api from "../api"
 import sdk from "../sdk"
@@ -69,6 +70,9 @@ export async function startup(
     return
   }
   printFeatures()
+  if (env.BUDIBASE_ENVIRONMENT) {
+    console.log(`service running environment: "${env.BUDIBASE_ENVIRONMENT}"`)
+  }
   STARTUP_RAN = true
   if (app && server && !env.CLUSTER_MODE) {
     console.log(`Budibase running on ${JSON.stringify(server.address())}`)
@@ -114,8 +118,10 @@ export async function startup(
   // configure events to use the pro audit log write
   // can't integrate directly into backend-core due to cyclic issues
   queuePromises.push(events.processors.init(pro.sdk.auditLogs.write))
+  // app migrations and automations on other service
   if (automationsEnabled()) {
     queuePromises.push(automations.init())
+    queuePromises.push(appMigrations.init())
   }
   queuePromises.push(initPro())
   if (app) {

@@ -6,6 +6,7 @@ import {
   UpdateDatasourceRequest,
   QueryJson,
   BuildSchemaFromSourceResponse,
+  FetchDatasourceInfoResponse,
 } from "@budibase/types"
 import { Expectations, TestAPI } from "./base"

@@ -61,6 +62,10 @@ export class DatasourceAPI extends TestAPI {
     })
   }

+  fetch = async (expectations?: Expectations) => {
+    return await this._get<Datasource[]>(`/api/datasources`, { expectations })
+  }
+
   query = async (
     query: Omit<QueryJson, "meta"> & Partial<Pick<QueryJson, "meta">>,
     expectations?: Expectations
@@ -71,10 +76,29 @@ export class DatasourceAPI extends TestAPI {
     })
   }

-  fetchSchema = async (id: string, expectations?: Expectations) => {
+  fetchSchema = async (
+    {
+      datasourceId,
+      tablesFilter,
+    }: { datasourceId: string; tablesFilter?: string[] },
+    expectations?: Expectations
+  ) => {
     return await this._post<BuildSchemaFromSourceResponse>(
-      `/api/datasources/${id}/schema`,
+      `/api/datasources/${datasourceId}/schema`,
       {
+        expectations: expectations,
+        body: {
+          tablesFilter: tablesFilter,
+        },
+      }
+    )
+  }
+
+  info = async (datasource: Datasource, expectations?: Expectations) => {
+    return await this._post<FetchDatasourceInfoResponse>(
+      `/api/datasources/info`,
+      {
+        body: { datasource },
         expectations,
       }
     )

@@ -138,10 +138,10 @@ export const removeKeyNumbering = (key: string): string => {
 }

 /**
- * Builds a lucene JSON query from the filter structure generated in the builder
+ * Builds a JSON query from the filter structure generated in the builder
  * @param filter the builder filter structure
  */
-export const buildLuceneQuery = (filter: SearchFilter[]) => {
+export const buildQuery = (filter: SearchFilter[]) => {
   let query: SearchFilters = {
     string: {},
     fuzzy: {},
@@ -260,11 +260,11 @@ export const buildLuceneQuery = (filter: SearchFilter[]) => {
 }

 /**
- * Performs a client-side lucene search on an array of data
+ * Performs a client-side search on an array of data
  * @param docs the data
- * @param query the JSON lucene query
+ * @param query the JSON query
  */
-export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
+export const runQuery = (docs: any[], query?: SearchFilters) => {
   if (!docs || !Array.isArray(docs)) {
     return []
   }
@@ -451,7 +451,7 @@ export const runLuceneQuery = (docs: any[], query?: SearchFilters) => {
  * @param sortOrder the sort order ("ascending" or "descending")
  * @param sortType the type of sort ("string" or "number")
  */
-export const luceneSort = (
+export const sort = (
   docs: any[],
   sort: string,
   sortOrder: SortDirection,
@@ -481,7 +481,7 @@ export const luceneSort = (
  * @param docs the data
  * @param limit the number of docs to limit to
  */
-export const luceneLimit = (docs: any[], limit: string) => {
+export const limit = (docs: any[], limit: string) => {
   const numLimit = parseFloat(limit)
   if (isNaN(numLimit)) {
     return docs

@ -4,9 +4,9 @@ import {
|
||||||
FieldType,
|
FieldType,
|
||||||
SearchFilter,
|
SearchFilter,
|
||||||
} from "@budibase/types"
|
} from "@budibase/types"
|
||||||
import { buildLuceneQuery, runLuceneQuery } from "../filters"
|
import { buildQuery, runQuery } from "../filters"
|
||||||
|
|
||||||
describe("runLuceneQuery", () => {
|
describe("runQuery", () => {
|
||||||
const docs = [
|
const docs = [
|
||||||
{
|
{
|
||||||
order_id: 1,
|
order_id: 1,
|
||||||
|
@ -70,14 +70,14 @@ describe("runLuceneQuery", () => {
|
||||||
}
|
}
|
||||||
|
|
||||||
it("should return input docs if no search query is provided", () => {
|
it("should return input docs if no search query is provided", () => {
|
||||||
expect(runLuceneQuery(docs)).toBe(docs)
|
expect(runQuery(docs)).toBe(docs)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return matching rows for equal filter", () => {
|
it("should return matching rows for equal filter", () => {
|
||||||
const query = buildQuery({
|
const query = buildQuery({
|
||||||
equal: { order_status: 4 },
|
equal: { order_status: 4 },
|
||||||
})
|
})
|
||||||
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
|
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return matching row for notEqual filter", () => {
|
it("should return matching row for notEqual filter", () => {
|
||||||
|
@ -85,12 +85,12 @@ describe("runLuceneQuery", () => {
|
||||||
notEqual: { order_status: 4 },
|
notEqual: { order_status: 4 },
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return starts with matching rows for fuzzy and string filters", () => {
|
it("should return starts with matching rows for fuzzy and string filters", () => {
|
||||||
expect(
|
expect(
|
||||||
runLuceneQuery(
|
runQuery(
|
||||||
docs,
|
docs,
|
||||||
buildQuery({
|
buildQuery({
|
||||||
fuzzy: { description: "sm" },
|
fuzzy: { description: "sm" },
|
||||||
|
@ -98,7 +98,7 @@ describe("runLuceneQuery", () => {
|
||||||
).map(row => row.description)
|
).map(row => row.description)
|
||||||
).toEqual(["Small box"])
|
).toEqual(["Small box"])
|
||||||
expect(
|
expect(
|
||||||
runLuceneQuery(
|
runQuery(
|
||||||
docs,
|
docs,
|
||||||
buildQuery({
|
buildQuery({
|
||||||
string: { description: "SM" },
|
string: { description: "SM" },
|
||||||
|
@ -117,7 +117,7 @@ describe("runLuceneQuery", () => {
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return rows with numeric strings within a range filter", () => {
|
it("should return rows with numeric strings within a range filter", () => {
|
||||||
|
@ -129,7 +129,7 @@ describe("runLuceneQuery", () => {
|
||||||
},
|
},
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([3])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return rows with ISO date strings within a range filter", () => {
|
it("should return rows with ISO date strings within a range filter", () => {
|
||||||
|
@ -142,7 +142,7 @@ describe("runLuceneQuery", () => {
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
|
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return return all docs if an invalid doc value is passed into a range filter", async () => {
|
it("should return return all docs if an invalid doc value is passed into a range filter", async () => {
|
||||||
|
@ -170,7 +170,7 @@ describe("runLuceneQuery", () => {
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(runLuceneQuery(docs, query)).toEqual(docs)
|
expect(runQuery(docs, query)).toEqual(docs)
|
||||||
})
|
})
|
||||||
|
|
||||||
it("should return rows with matches on empty filter", () => {
|
it("should return rows with matches on empty filter", () => {
|
||||||
|
@ -180,7 +180,7 @@ describe("runLuceneQuery", () => {
|
||||||
},
|
},
|
||||||
})
|
})
|
||||||
|
|
||||||
expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1])
|
expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1])
|
||||||
})
|
})
|
   it("should return rows with matches on notEmpty filter", () => {

@@ -190,7 +190,7 @@ describe("runLuceneQuery", () => {
       },
     })

-    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
+    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2, 3])
   })

   it.each([[523, 259], "523,259"])(
@@ -202,7 +202,7 @@ describe("runLuceneQuery", () => {
         },
       })

-      expect(runLuceneQuery(docs, query).map(row => row.customer_id)).toEqual([
+      expect(runQuery(docs, query).map(row => row.customer_id)).toEqual([
         259, 523,
       ])
     }
@@ -218,7 +218,7 @@ describe("runLuceneQuery", () => {
       contains: { description: ["box"] },
     })

-    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual(
+    expect(runQuery(docs, query).map(row => row.order_id)).toEqual(
       expectedResult
     )
   })
@@ -230,7 +230,7 @@ describe("runLuceneQuery", () => {
       oneOf: { label: ["FRAGILE"] },
     })

-    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
+    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([1, 2])
   })

   it("should handle when a value is null or undefined", () => {
@@ -240,14 +240,14 @@ describe("runLuceneQuery", () => {
       oneOf: { label: ["FRAGILE"] },
     })

-    expect(runLuceneQuery(docs, query).map(row => row.order_id)).toEqual([2])
+    expect(runQuery(docs, query).map(row => row.order_id)).toEqual([2])
   })
 })

-describe("buildLuceneQuery", () => {
+describe("buildQuery", () => {
   it("should return a basic search query template if the input is not an array", () => {
     const filter: any = "NOT_AN_ARRAY"
-    expect(buildLuceneQuery(filter)).toEqual({
+    expect(buildQuery(filter)).toEqual({
       string: {},
       fuzzy: {},
       range: {},
@@ -277,7 +277,7 @@ describe("buildLuceneQuery", () => {
         value: "1000,1212,3400",
       },
     ]
-    expect(buildLuceneQuery(filter)).toEqual({
+    expect(buildQuery(filter)).toEqual({
       string: {},
       fuzzy: {},
       range: {},
@@ -311,7 +311,7 @@ describe("buildLuceneQuery", () => {
         value: "{{ list_of_customer_ids }}",
       },
     ]
-    expect(buildLuceneQuery(filter)).toEqual({
+    expect(buildQuery(filter)).toEqual({
       string: {},
       fuzzy: {},
       range: {},
@@ -351,7 +351,7 @@ describe("buildLuceneQuery", () => {
         value: "true",
       },
     ]
-    expect(buildLuceneQuery(filter)).toEqual({
+    expect(buildQuery(filter)).toEqual({
       string: {},
       fuzzy: {},
       range: {},
@@ -392,7 +392,7 @@ describe("buildLuceneQuery", () => {
         value: "Large box,Heavy box,Small box",
       },
     ]
-    expect(buildLuceneQuery(filter)).toEqual({
+    expect(buildQuery(filter)).toEqual({
       string: {},
       fuzzy: {},
       range: {},
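The rename from `runLuceneQuery`/`buildLuceneQuery` to `runQuery`/`buildQuery` drops the Lucene-specific naming while keeping the behaviour the tests above exercise. A minimal sketch of the renamed helpers in use, mirroring the `oneOf` test; the import path and the exact filter-object shape are assumptions here, not part of this diff:

```ts
// Sketch only: the import path and filter shape are assumptions.
import { buildQuery, runQuery } from "@budibase/shared-core"

const docs = [
  { order_id: 1, label: "FRAGILE" },
  { order_id: 2, label: "FRAGILE" },
  { order_id: 3, label: "HEAVY" },
]

// buildQuery groups an array of filter definitions into the query template
// seen in the tests ({ string: {}, fuzzy: {}, range: {}, ... }).
const query = buildQuery([
  { operator: "oneOf", field: "label", type: "array", value: "FRAGILE" },
] as any)

// runQuery then evaluates that template in-memory against the docs.
console.log(runQuery(docs, query).map(row => row.order_id)) // [1, 2]
```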
@@ -1 +1,2 @@
 export * from "./environment"
+export * from "./status"
@@ -0,0 +1,11 @@
+export type SystemStatusResponse = {
+  passing?: boolean
+  checks?: {
+    login: boolean
+    search: boolean
+  }
+  health?: {
+    passing: boolean
+  }
+  version?: string
+}
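For reference, a fully populated value of the new type might look like the sketch below; the concrete values are illustrative only:

```ts
import { SystemStatusResponse } from "@budibase/types"

// Illustrative values; every top-level field is optional, so a minimal
// self-hosted response may carry only `health`.
const status: SystemStatusResponse = {
  passing: true,
  checks: {
    login: true,
    search: true,
  },
  health: {
    passing: true,
  },
  version: "2.28.6",
}
```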
@@ -30,4 +30,7 @@ export interface SQLiteDefinition {
   }
 }

-export type PreSaveSQLiteDefinition = Omit<SQLiteDefinition, "_rev">
+export interface PreSaveSQLiteDefinition
+  extends Omit<SQLiteDefinition, "_rev"> {
+  _rev?: string
+}
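The switch from a plain `Omit` alias to an interface with an optional `_rev` matters for object literals: with `Omit`, a pre-save definition that still carries a CouchDB revision fails TypeScript's excess-property check. A standalone sketch of the difference, using illustrative field names and values:

```ts
// Stand-in for SQLiteDefinition; the fields here are illustrative.
interface DefinitionLike {
  _id: string
  _rev: string
  sql: string
}

// Old shape: `_rev` is stripped entirely, so a literal that re-saves an
// existing definition (which already has a revision) would not type-check.
type OldPreSave = Omit<DefinitionLike, "_rev">

// New shape: `_rev` is optional, covering both first saves and re-saves.
interface NewPreSave extends Omit<DefinitionLike, "_rev"> {
  _rev?: string
}

const firstSave: NewPreSave = { _id: "_design/sqlite", sql: "..." }
const reSave: NewPreSave = { _id: "_design/sqlite", sql: "...", _rev: "2-abc" }
```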
@ -7,3 +7,4 @@ export * from "./schedule"
|
||||||
export * from "./templates"
|
export * from "./templates"
|
||||||
export * from "./environmentVariables"
|
export * from "./environmentVariables"
|
||||||
export * from "./auditLogs"
|
export * from "./auditLogs"
|
||||||
|
export * from "./tenantInfo"
|
||||||
|
|
|
@@ -0,0 +1,13 @@
+import { Document } from "../document"
+
+export interface TenantInfo extends Document {
+  owner: {
+    email: string
+    password?: string
+    ssoId?: string
+    givenName?: string
+    familyName?: string
+    budibaseUserId?: string
+  }
+  tenantId: string
+}
@@ -165,3 +165,13 @@ export interface Database {
   deleteIndex(...args: any[]): Promise<any>
   getIndexes(...args: any[]): Promise<any>
 }
+
+export interface DBError extends Error {
+  status: number
+  statusCode: number
+  reason: string
+  name: string
+  errid: string
+  error: string
+  description: string
+}
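With `DBError` exported from `@budibase/types`, callers can narrow caught errors without depending on the CouchDB implementation class. A minimal sketch of such a guard; the helper itself is illustrative and not part of the diff:

```ts
import { DBError } from "@budibase/types"

// Illustrative helper: narrow an unknown caught value to DBError by
// checking for the fields the interface declares.
function isDBError(err: unknown): err is DBError {
  return (
    err instanceof Error &&
    "status" in err &&
    "statusCode" in err &&
    "reason" in err
  )
}

async function getDocSafely(
  db: { get: (id: string) => Promise<unknown> },
  id: string
) {
  try {
    return await db.get(id)
  } catch (err) {
    if (isDBError(err) && err.status === 404) {
      return undefined // treat missing documents as absent
    }
    throw err
  }
}
```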
@@ -31,6 +31,7 @@ async function init() {
     HTTP_LOGGING: "0",
     VERSION: "0.0.0+local",
     PASSWORD_MIN_LENGTH: "1",
+    SQS_SEARCH_ENABLE: "1",
   }

   config = { ...config, ...existingConfig }
@@ -0,0 +1,10 @@
+import { tenancy } from "@budibase/backend-core"
+import { TenantInfo, Ctx } from "@budibase/types"
+
+export const save = async (ctx: Ctx<TenantInfo>) => {
+  const response = await tenancy.saveTenantInfo(ctx.request.body)
+  ctx.body = {
+    _id: response.id,
+    _rev: response.rev,
+  }
+}
@@ -1,16 +1,24 @@
-import { accounts } from "@budibase/backend-core"
+import { accounts, env as coreEnv } from "@budibase/backend-core"
+import { Ctx, SystemStatusResponse } from "@budibase/types"
 import env from "../../../environment"
-import { BBContext } from "@budibase/types"

-export const fetch = async (ctx: BBContext) => {
+export const fetch = async (ctx: Ctx<void, SystemStatusResponse>) => {
+  let status: SystemStatusResponse | undefined
   if (!env.SELF_HOSTED && !env.DISABLE_ACCOUNT_PORTAL) {
-    const status = await accounts.getStatus()
-    ctx.body = status
-  } else {
-    ctx.body = {
+    status = await accounts.getStatus()
+  }
+
+  if (!status) {
+    status = {
       health: {
         passing: true,
       },
     }
   }
+
+  if (coreEnv.VERSION) {
+    status.version = coreEnv.VERSION
+  }
+
+  ctx.body = status
 }
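Per the rewritten controller, the self-hosted fallback always reports a passing health block and, when core's `VERSION` is set, echoes it back. A sketch of the resulting body under the `VERSION` value used by the local init script shown earlier in this diff:

```ts
import { SystemStatusResponse } from "@budibase/types"

// What a self-hosted /api/system/status response now looks like when
// accounts.getStatus() is skipped and coreEnv.VERSION is set.
const expectedBody: SystemStatusResponse = {
  health: {
    passing: true,
  },
  version: "0.0.0+local",
}
```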
@@ -76,6 +76,10 @@ const PUBLIC_ENDPOINTS = [
     route: "/api/global/users/invite",
     method: "GET",
   },
+  {
+    route: "/api/global/tenant",
+    method: "POST",
+  },
 ]

 const NO_TENANCY_ENDPOINTS = [
@@ -121,6 +125,10 @@ const NO_TENANCY_ENDPOINTS = [
     route: "/api/global/users/invite/:code",
     method: "GET",
   },
+  {
+    route: "/api/global/tenant",
+    method: "POST",
+  },
 ]

 // most public endpoints are gets, but some are posts
@@ -0,0 +1,33 @@
+import Router from "@koa/router"
+import Joi from "joi"
+import { auth } from "@budibase/backend-core"
+import * as controller from "../../controllers/global/tenant"
+import cloudRestricted from "../../../middleware/cloudRestricted"
+
+const router: Router = new Router()
+const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
+
+function buildTenantInfoValidation() {
+  return auth.joiValidator.body(
+    Joi.object({
+      owner: Joi.object({
+        email: Joi.string().required(),
+        password: OPTIONAL_STRING,
+        ssoId: OPTIONAL_STRING,
+        givenName: OPTIONAL_STRING,
+        familyName: OPTIONAL_STRING,
+        budibaseUserId: OPTIONAL_STRING,
+      }).required(),
+      tenantId: Joi.string().required(),
+    }).required()
+  )
+}
+
+router.post(
+  "/api/global/tenant",
+  cloudRestricted,
+  buildTenantInfoValidation(),
+  controller.save
+)
+
+export default router
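Putting the route, validation, and controller together: a valid request needs only `owner.email` and `tenantId`, plus the internal API key that `cloudRestricted` enforces. A hedged sketch of calling the new endpoint; the host, port, and header name follow common Budibase conventions but are assumptions here:

```ts
// Sketch only: host/port and the API key header name are assumptions.
// The worker listens on env.PORT || 4002 per the startup change below.
const res = await fetch("http://localhost:4002/api/global/tenant", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    "x-budibase-api-key": "<internal api key>",
  },
  body: JSON.stringify({
    owner: { email: "owner@example.com" },
    tenantId: "default",
  }),
})
console.log(await res.json()) // => { _id: "...", _rev: "..." }
```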
@@ -0,0 +1,47 @@
+import { TenantInfo } from "@budibase/types"
+import { TestConfiguration } from "../../../../tests"
+import { tenancy as _tenancy } from "@budibase/backend-core"
+
+const tenancy = jest.mocked(_tenancy)
+
+describe("/api/global/tenant", () => {
+  const config = new TestConfiguration()
+
+  beforeAll(async () => {
+    await config.beforeAll()
+  })
+
+  afterAll(async () => {
+    await config.afterAll()
+  })
+
+  beforeEach(() => {
+    jest.clearAllMocks()
+  })
+
+  describe("POST /api/global/tenant", () => {
+    it("should save the tenantInfo", async () => {
+      tenancy.saveTenantInfo = jest.fn().mockImplementation(async () => ({
+        id: "DOC_ID",
+        ok: true,
+        rev: "DOC_REV",
+      }))
+      const tenantInfo: TenantInfo = {
+        owner: {
+          email: "test@example.com",
+          password: "PASSWORD",
+          ssoId: "SSO_ID",
+          givenName: "Jane",
+          familyName: "Doe",
+          budibaseUserId: "USER_ID",
+        },
+        tenantId: "tenant123",
+      }
+      const response = await config.api.tenants.saveTenantInfo(tenantInfo)
+
+      expect(_tenancy.saveTenantInfo).toHaveBeenCalledTimes(1)
+      expect(_tenancy.saveTenantInfo).toHaveBeenCalledWith(tenantInfo)
+      expect(response.text).toEqual('{"_id":"DOC_ID","_rev":"DOC_REV"}')
+    })
+  })
+})
@ -4,7 +4,6 @@ import { auth } from "@budibase/backend-core"
|
||||||
import Joi from "joi"
|
import Joi from "joi"
|
||||||
import cloudRestricted from "../../../middleware/cloudRestricted"
|
import cloudRestricted from "../../../middleware/cloudRestricted"
|
||||||
import { users } from "../validation"
|
import { users } from "../validation"
|
||||||
import * as selfController from "../../controllers/global/self"
|
|
||||||
|
|
||||||
const router: Router = new Router()
|
const router: Router = new Router()
|
||||||
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
|
const OPTIONAL_STRING = Joi.string().optional().allow(null).allow("")
|
||||||
|
@ -140,12 +139,5 @@ router
|
||||||
.get("/api/global/users/tenant/:id", controller.tenantUserLookup)
|
.get("/api/global/users/tenant/:id", controller.tenantUserLookup)
|
||||||
// global endpoint but needs to come at end (blocks other endpoints otherwise)
|
// global endpoint but needs to come at end (blocks other endpoints otherwise)
|
||||||
.get("/api/global/users/:id", auth.builderOrAdmin, controller.find)
|
.get("/api/global/users/:id", auth.builderOrAdmin, controller.find)
|
||||||
// DEPRECATED - use new versions with self API
|
|
||||||
.get("/api/global/users/self", selfController.getSelf)
|
|
||||||
.post(
|
|
||||||
"/api/global/users/self",
|
|
||||||
users.buildUserSaveValidation(),
|
|
||||||
selfController.updateSelf
|
|
||||||
)
|
|
||||||
|
|
||||||
export default router
|
export default router
|
||||||
|
|
|
@@ -1,6 +1,7 @@
 import Router from "@koa/router"
 import { api as pro } from "@budibase/pro"
 import userRoutes from "./global/users"
+import tenantRoutes from "./global/tenant"
 import configRoutes from "./global/configs"
 import workspaceRoutes from "./global/workspaces"
 import templateRoutes from "./global/templates"
@@ -40,6 +41,7 @@ export const routes: Router[] = [
   accountRoutes,
   restoreRoutes,
   eventRoutes,
+  tenantRoutes,
   pro.scim,
 ]
@ -27,6 +27,7 @@ describe("/api/system/status", () => {
|
||||||
health: {
|
health: {
|
||||||
passing: true,
|
passing: true,
|
||||||
},
|
},
|
||||||
|
version: expect.any(String),
|
||||||
})
|
})
|
||||||
expect(accounts.getStatus).toHaveBeenCalledTimes(0)
|
expect(accounts.getStatus).toHaveBeenCalledTimes(0)
|
||||||
config.cloudHosted()
|
config.cloudHosted()
|
||||||
|
|
|
@@ -24,6 +24,7 @@ const environment = {
   // auth
   MINIO_ACCESS_KEY: process.env.MINIO_ACCESS_KEY,
   MINIO_SECRET_KEY: process.env.MINIO_SECRET_KEY,
+  AWS_SESSION_TOKEN: process.env.AWS_SESSION_TOKEN,
   SALT_ROUNDS: process.env.SALT_ROUNDS,
   REDIS_PASSWORD: process.env.REDIS_PASSWORD,
   COOKIE_DOMAIN: process.env.COOKIE_DOMAIN,
@@ -46,6 +47,7 @@ const environment = {
   SMTP_FALLBACK_ENABLED: process.env.SMTP_FALLBACK_ENABLED,
   DISABLE_DEVELOPER_LICENSE: process.env.DISABLE_DEVELOPER_LICENSE,
   SQS_SEARCH_ENABLE: process.env.SQS_SEARCH_ENABLE,
+  BUDIBASE_ENVIRONMENT: process.env.BUDIBASE_ENVIRONMENT,
   // smtp
   SMTP_USER: process.env.SMTP_USER,
   SMTP_PASSWORD: process.env.SMTP_PASSWORD,
@@ -88,7 +88,11 @@ const shutdown = () => {
 }

 export default server.listen(parseInt(env.PORT || "4002"), async () => {
-  console.log(`Worker running on ${JSON.stringify(server.address())}`)
+  let startupLog = `Worker running on ${JSON.stringify(server.address())}`
+  if (env.BUDIBASE_ENVIRONMENT) {
+    startupLog = `${startupLog} - environment: "${env.BUDIBASE_ENVIRONMENT}"`
+  }
+  console.log(startupLog)
   await initPro()
   await redis.clients.init()
   cache.docWritethrough.init()
@@ -1,3 +1,4 @@
+import { TenantInfo } from "@budibase/types"
 import TestConfiguration from "../TestConfiguration"
 import { TestAPI, TestAPIOpts } from "./base"

@@ -14,4 +15,12 @@ export class TenantAPI extends TestAPI {
       .set(opts?.headers)
       .expect(opts?.status ? opts.status : 204)
   }
+
+  saveTenantInfo = (tenantInfo: TenantInfo) => {
+    return this.request
+      .post("/api/global/tenant")
+      .set(this.config.internalAPIHeaders())
+      .send(tenantInfo)
+      .expect(200)
+  }
 }